Mirror of https://github.com/apache/superset.git (synced 2026-04-30 13:34:20 +00:00)

Compare commits: webpack-op...fix-radar (530 commits)
Commits in range (abbreviated SHA1):

6b7394e789, 5a8eab3b25, 15969fdf94, 9b15e04bc4, fd947a097d, e1383d3821, c131205ff1, b6df88a134, 629b137bb0, db959a6463,
4041150660, bcb43327b1, 63c8bbf3eb, 24b1666273, 86b795cd36, bc0bf94680, f5d64176f6, 4f0020d0df, c83eda9551, a36e636a58,
f5d3627468, 8eeed49547, 00933a27af, 2bc33beec4, e1c1de1b94, 26743dfcee, 8b0bda3bad, a8a6254ea2, be4bc3dec5, 6e02d19b0d,
662f0fa8f4, 56bf17f879, b92909d621, 8f35a3ec8c, a4a092794a, 174750c9dd, f2c0686346, c2afae51cb, 6e1d1ad18b, ab22bb1878,
e0ed652ed8, 103fedaf92, 50fe7483ae, 37f626f5e2, b1693f625a, f0dc1e7527, 6c7f089ebb, 68a81c3989, 5222f940cc, 45ea11c1b6,
b624919d2f, b5cb5f4525, 4a70065e5f, 7d77dc4fd2, 6f69c84d10, 6b96b37c38, b7435f84f0, 7bc349c3c3, fd4e45aafc, b339d7ad20,
cedd186c21, c6c9114b40, f4a05a5ffd, a82f916a71, ff0529c932, c0f83a7467, 9bb3a5782d, 5ec710efc6, 5866f3ec83, 01801e3c36,
d319543377, 5392bafe28, 89ce7ba0b0, 376a1f49d3, 6042ea8f28, 78efb62781, e9d5079986, c6e0abbe13, 4f166a03f5, 29b62f7c0a,
09ee3e2a1d, 121e424a7f, 66c1a6a875, b26c373f4d, 4dd318ca68, ce6d5f5551, 9e3052968b, 3f1ef2a283, bc3e19d0a2, 850801f510,
710af87faf, 6612343f33, c399295a4e, e34644d983, cc0097c87a, d71e655a4b, 99e69c32ee, a2c164a77d, 78d2a584b7, f0c8c12c1a,
34cd741e9b, 1684ddc7e6, e35145c816, 4adf44a43c, cd5a94305c, b4602aaf28, 3e69ba1384, 41bf215367, 06deaebe19, 6a13ab8920,
f1a222d356, a87bedf31a, 890b6079b9, 9c62456487, 414cdbf83a, df06bdf33b, 449f51aed5, c9e2c7037e, a49a15f990, eb39ddbfe3,
974d36d35e, b64e3254fc, 9907db9e1a, b4dd64aa24, 6e049225f9, 831369a44b, 7c9c30db1d, 0c6d868483, 777760b096, e8ad096173,
2f6f5c6778, 832e028b39, d92af9c95c, 12435159db, 8695239372, 29b4c40e43, 53471072f4, bf902b2240, 4b4912ba99, fa890ecb23,
67af8bd730, f5eca4fe0b, 057423ed92, 7dbe608d27, d8d4b75a11, 664047f3fb, 1e20b048d3, 6c1806df74, d97d991b5f, 90e18e37d0,
c5a2bc5484, 2ecc7e4f56, 9f79c5ab4d, e7721a8c4d, c8f5089f7a, a0ea905a7a, b8fd1a30ee, ff9ae54ae9, a16de15015, 079e40144e,
a3f3a35c20, 4fdeab8dad, 9ea58381f4, 85d51f5c9a, 3b1d763421, 91ab123860, 8e021b0c82, 7aa89db8d0, d3ba2755e8, 0b0e0e9ce8,
979f890cd5, 89b6d7fb68, 644882faff, edfcbed24f, f45ab70080, 33aa9030bf, 4c3aae7583, c5dd52bcc9, eae7cf81b0, 20e5df501e,
68e8d9858c, 99238dccbb, 626736bdd3, c2de749d0e, 9ad9ea67cf, 82595df6f9, 281d1a8ec4, d2e0e2b79c, 05409d51da, e98194cdd3,
317532752c, c90e45a373, 8decc9e45f, 8053833e1f, 07221d8859, c1abe1ec44, b3dfd4930a, fc844d3dfd, d8686c2d12, 90388885db,
33370eaa5c, 2b53b1800e, 807dcddc28, a45ce1e8d1, 3d5128735b, 6173a6c329, 813e79fa9f, c0e92b1639, ef08ccbaa2, 93d759c689,
0d24ce0ef9, a4902a3685, 16b08e333d, 15cf06699a, fe33661821, 2b98f326e8, d7e0ee6ceb, ce367d6427, 6c3886aad0, 5af4e61aff,
5766c36372, 61b72f0c0b, d79f7b28c2, 84b52b2323, eacb234872, 6317a91541, 128c45e2d3, 4d6b4f8343, 789049d386, cf7ce31054,
2c851b7580, f4105e9ed2, 74733ae310, 1d823a0be5, 00429558c2, dae6acf028, 822d72c57d, c02a0a00f4, a08c18febe, 479a5d2f72,
793fbac405, 167dacc2e4, 00883c395c, 83071d0e5f, b0dac046e6, 8dcae810d4, b43e2ac8f4, bc02f05613, 90651dfe3e, c583eec4c7,
0f07d78e01, 22fe985cfc, ace8a3adb7, 4c4b5e8c64, 2c37ddb2f6, b06a9edfd6, 5140250421, 88cf2d5c39, 422a07b382, f820f9a976,
c27aee2b14, 7ce1a3445c, 42a3c523ae, bb46dd93be, b207f0616d, 9dcf788f47, 6900bc1855, b09bfd7889, 2d8892958e, f9a43921c5,
e74efd3072, d5a5bd46d2, e422e3c620, b269d920a9, de2bce6f47, e061116032, 878bcbd8c7, 093135ff30, 734f8ed4c3, dcc9628f31,
321d105c42, 460aec7bc9, ffe9244458, fa09d8187a, 9da30956c0, 9c7835a244, ad057324b7, 2c583d1584, 15fbb195e9, 5867b87680,
52563d3eea, 21348c418a, af3589fe91, 937d40cdde, 319a860f23, d3b854a833, 650fa5ccfb, db70c7912c, 3160607aaf, eec54affc3,
31d6f5a639, 60424c4ccd, 60bbd72028, a78968c68e, 1c3ec21e0f, 8d1fb9c82d, f01493277f, 0f6bd5ea83, 0030f46d2d, 06f8f8e608,
a144464506, 2770bc0865, bcc61bd933, 38c46fcafd, f3e7c64de6, f9f8c5d07a, c5f4a7f302, 389aae270b, e97eb71a52, 5a8488af36,
205cff3a94, 649a0dec6c, e8990f4a36, acf91e1f60, 6ed9dae2f7, ea5879bf2b, c7c3b1b0e9, c64018d421, 53d944d013, 9aa8b09505,
8984f88a3e, 386aa93e24, 0cd0fcdecb, cde2d49c95, 9e5876dc17, 1064ad5d58, 7db0589340, c590e90c87, 101d3fa78d, 468bb5f47a,
1c1494d3e0, 5fc11fb706, f73d61a597, 3f46bcf142, aa67525b70, 568f6d958b, b12f515185, 732de4ac7f, e4bdb28ba2, a87a13c3ab,
19e8a7049b, a21f184058, f4efce3475, 23d9f46d30, 6478bb7eab, 962fd4cca3, 640d4f09bd, 7e2b7941f3, aa74ba3da2, 1b375b715c,
09c1987de4, 827042f12f, 925938b4d1, 65c4d39c31, 4b0e907c3d, 687f762457, 6eb87e04c0, 5fe6ef268e, 51e090d67a, fc5dde15fe,
14f798afec, 6d117ffbb5, fcd166149c, 7482b20f7b, 1d6423e71f, 7cf7267085, f5fff5eaad, e4e07eef5a, b74da7963b, 7383e4348b,
983aa827a8, 78cd635b7a, dfb9af36df, a02a2f5a96, dca3efb3dd, 43a97f86f5, b5ac415afc, 4417e6eaef, f8fe780f52, eec374426f,
550123882c, 79f21b09d2, 63843c5682, 8960db4132, 66c22f896b, 840773e626, 2874096e27, b86572b084, e4f6e55e89, c1cbc334ad,
249fdf444a, 547b8b9314, ab6045691e, fc8710f50a, aacfe4d667, f235787703, 4ca5846c7f, 754ccd0448, d4bd20ffb4, 4bad1a258f,
ffb6913706, 6531101517, bbf7586fe8, ef57318259, 8050e351ed, c2d7cf388d, 274aa143d3, 855011360a, 44ff462718, 9661afff16,
1a43654207, 822441e0bd, 66f1e1f714, b5e62753b7, aae8fda11d, bebcd3dcdd, 5e0de04eab, 472ca9d8f6, 41ed37ab02, bbdc195a3b,
740fbf72d7, 8a2aada58d, 7bd53a84d5, 5f18e849c1, 49876c3f13, 399b709aaf, a477d84729, 5acd03876b, 3a6fdf8bdf, 9cd3a8d5b0,
840a920aba, e4b3ecd372, f29eafd044, 4f1a837bd3, d5a4815836, 210537a4d2, 0eca79cb6c, 550d893c38, 71dca5c076, 7f72b062d1,
d68f8d11fe, b54de611d3, f3c5d1c608, 9e17304523, 0460415bcb, b4c5f65a5f, 62486a7dd1, 8e32aca282, c7ae5c587a, 2cea1bcc82,
5ff2dfb8e2, 4b54b2b953, 7e4570bc9c, 78d7df02cb, ee36cf058c, 668194d574, c13cb8a1f2, 72e87d6980, a986a61b5f, 5484db34f9,
109e6c69ff, e311bc1ca5, 553d41fedd, 2996d9ab9c, bf1da0f91f, ca12a1d466, 7c90323649, 27e42b5091, 65c72c12c4, bcf649a116,
e371de4ea3, b27df30222, 892349d7c9, dec8a65730, 3084cebd6b, 879e795147, 509c0a6aab, 91d1648230, 640dac1eff, 9321ab9deb,
a193d790b2, b382ef1058, 488e7b4692, be7df6c16f, ddd964e49a, a275a86d12, 1c99f399c2, c38c7a29fd, 0fde78cb17, 5df3317ad0,
e88c4e1100, d85bb8b6a0, fd9c2b399e, e8daa63d7e, 640d6f848a, 8c5e525c57, e87feba45d, 9d3438a8b3, fd6ef4f870, d94bd413e5,
0c2f697f6d, b831bed7cd, d9554622c0, 89ad48764a, 5ab3c5ea99, fdc7273d56, a59a0fe475, 16e29c466e, 35d109389f, e5b561c87f,
3999c0fc41, e2a1ce220e, 369346c03d, d09ccb2504, c514b6b0b9, 040f27e6da, a5fdf6d14a, 16c9418ee1, d8aba2f7e6, db11a2a308
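The same range can be inspected from a local clone of the mirror; a minimal sketch, assuming the two branch names compared above exist on the remote:

```bash
# Clone the mirror and list the commits between the two compared branches.
git clone https://github.com/apache/superset.git
cd superset
git log --oneline webpack-op...fix-radar

# Summarize what changed across the whole range.
git diff --stat webpack-op...fix-radar
```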
.asf.yaml (12 lines changed)

```diff
@@ -17,7 +17,14 @@
 # https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
 ---
+notifications:
+  commits: commits@superset.apache.org
+  issues: notifications@superset.apache.org
+  pullrequests: notifications@superset.apache.org
+  discussions: notifications@superset.apache.org
+
 github:
+  del_branch_on_merge: true
   description: "Apache Superset is a Data Visualization and Data Exploration Platform"
   homepage: https://superset.apache.org/
   labels:
@@ -47,6 +54,8 @@ github:
     projects: true
     # Enable wiki for documentation
    wiki: true
+    # Enable discussions
+    discussions: true

   enabled_merge_buttons:
     squash: true
@@ -75,16 +84,13 @@ github:
       - dependency-review
       - frontend-build
       - pre-commit (current)
-      - pre-commit (next)
       - pre-commit (previous)
       - test-mysql
       - test-postgres (current)
-      - test-postgres (next)
       - test-postgres-hive
       - test-postgres-presto
       - test-sqlite
       - unit-tests (current)
-      - unit-tests (next)

   required_pull_request_reviews:
     dismiss_stale_reviews: false
```

```diff
@@ -34,7 +34,6 @@
 **/*.sqllite
 **/*.swp
 **/.terser-plugin-cache/
 **/.storybook/
 **/node_modules/

 tests/
```
.github/CODEOWNERS (vendored, 6 lines changed)

```diff
@@ -12,11 +12,11 @@

 # Notify Helm Chart maintainers about changes in it

-/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina
+/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina @mistercrunch @rusackas @Antonio-RiveroMartnez

 # Notify E2E test maintainers of changes

-/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida
+/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida @mistercrunch

 # Notify PMC members of changes to GitHub Actions

@@ -24,7 +24,7 @@

 # Notify PMC members of changes to required GitHub Actions

-/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
+/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @Antonio-RiveroMartnez

 # Maps are a finicky contribution process we care about
```
.github/ISSUE_TEMPLATE/bug-report.yml (vendored, 2 lines changed)

```diff
@@ -41,7 +41,7 @@ body:
       label: Superset version
       options:
         - master / latest-dev
-        - "4.1.1"
+        - "4.1.2"
         - "4.0.2"
     validations:
       required: true
```
.github/actions/change-detector/label-draft-pr.yml (vendored, new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+name: Label Draft PRs
+on:
+  pull_request:
+    types:
+      - opened
+      - converted_to_draft
+jobs:
+  label-draft:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check if the PR is a draft
+        id: check-draft
+        uses: actions/github-script@v6
+        with:
+          script: |
+            const isDraft = context.payload.pull_request.draft;
+            core.setOutput('isDraft', isDraft);
+      - name: Add `review:draft` Label
+        if: steps.check-draft.outputs.isDraft == 'true'
+        uses: actions-ecosystem/action-add-labels@v1
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          labels: "review:draft"
```
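The same label this workflow applies can be set by hand with the GitHub CLI; a minimal sketch, where the PR number is a placeholder:

```bash
# Hypothetical PR number; applies the label the workflow would add automatically.
gh pr edit 12345 --add-label "review:draft"

# Confirm the label landed.
gh pr view 12345 --json labels
```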
.github/actions/setup-backend/action.yml (vendored, 8 lines changed)

```diff
@@ -26,11 +26,12 @@ runs:
     shell: bash
     run: |
       if [ "${{ inputs.python-version }}" = "current" ]; then
         echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV
-      elif [ "${{ inputs.python-version }}" = "next" ]; then
-        echo "PYTHON_VERSION=3.11" >> $GITHUB_ENV
+      elif [ "${{ inputs.python-version }}" = "next" ]; then
+        # currently disabled in GHA matrixes because of library compatibility issues
+        echo "PYTHON_VERSION=3.12" >> $GITHUB_ENV
       elif [ "${{ inputs.python-version }}" = "previous" ]; then
-        echo "PYTHON_VERSION=3.9" >> $GITHUB_ENV
+        echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV
       else
         echo "PYTHON_VERSION=${{ inputs.python-version }}" >> $GITHUB_ENV
       fi
@@ -43,6 +44,7 @@ runs:
     run: |
       if [ "${{ inputs.install-superset }}" = "true" ]; then
         sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev

+        pip install --upgrade pip setuptools wheel uv

         if [ "${{ inputs.requirements-type }}" = "dev" ]; then
```
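The alias resolution above is plain bash over the action input; a minimal standalone sketch, where the concrete version numbers are illustrative assumptions rather than the action's authoritative values:

```bash
#!/usr/bin/env bash
# Sketch of the current/next/previous alias mapping, runnable outside GitHub Actions.
resolve_python_version() {
  case "$1" in
    current)  echo "3.10" ;;  # assumed value for illustration
    next)     echo "3.12" ;;  # assumed; disabled in CI matrixes per the comment above
    previous) echo "3.9"  ;;  # assumed value for illustration
    *)        echo "$1"   ;;  # explicit versions pass straight through
  esac
}

resolve_python_version current   # -> 3.10
resolve_python_version 3.11.7    # -> 3.11.7
```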
.github/dependabot.yml (vendored, 16 lines changed)

```diff
@@ -1,4 +1,5 @@
 version: 2
+enable-beta-ecosystems: true
 updates:

   - package-ecosystem: "github-actions"
@@ -21,9 +22,14 @@ updates:
     versioning-strategy: increase


-  # - package-ecosystem: "pip"
-  # NOTE: as dependabot isn't compatible with our usage of `uv pip compile` we're using
-  # `supersetbot` instead
+  # NOTE: `uv` support is in beta, more details here:
+  # https://github.com/dependabot/dependabot-core/pull/10040#issuecomment-2696978430
+  - package-ecosystem: "uv"
+    directory: "requirements/"
+    open-pull-requests-limit: 10
+    labels:
+      - uv
+      - dependabot

   - package-ecosystem: "npm"
     directory: ".github/actions"
@@ -322,6 +328,10 @@ updates:

   - package-ecosystem: "npm"
     directory: "/superset-frontend/packages/superset-ui-core/"
+    ignore:
+      # not until React >= 18.0.0
+      - dependency-name: "react-markdown"
+      - dependency-name: "remark-gfm"
     schedule:
       interval: "monthly"
     labels:
```
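The removed comment refers to `uv pip compile`, which pins a lockfile from a loose input spec; a minimal sketch, where the file names are illustrative and not necessarily Superset's actual layout:

```bash
# Compile a pinned requirements file from an input spec (illustrative paths).
uv pip compile requirements/base.in -o requirements/base.txt

# Re-resolve only one package to its latest allowed version.
uv pip compile requirements/base.in -o requirements/base.txt --upgrade-package flask
```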
.github/workflows/dependency-review.yml (vendored, 1 line changed)

```diff
@@ -30,6 +30,7 @@ jobs:
         uses: actions/checkout@v4
       - name: "Dependency Review"
         uses: actions/dependency-review-action@v4
+        continue-on-error: true
         with:
           fail-on-severity: critical
           # compatible/incompatible licenses addressed here: https://www.apache.org/legal/resolved.html
```
.github/workflows/docker.yml (vendored, 48 lines changed)

```diff
@@ -14,6 +14,7 @@ concurrency:
   cancel-in-progress: true

 jobs:

   setup_matrix:
     runs-on: ubuntu-24.04
     outputs:
@@ -36,6 +37,7 @@ jobs:
     env:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+      IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}

     steps:

@@ -71,35 +73,65 @@
           # Single platform builds in pull_request context to speed things up
           if [ "${{ github.event_name }}" = "push" ]; then
             PLATFORM_ARG="--platform linux/arm64 --platform linux/amd64"
+            # can only --load images in single-platform builds
+            PUSH_OR_LOAD="--push"
+          elif [ "${{ github.event_name }}" = "pull_request" ]; then
+            PLATFORM_ARG="--platform linux/amd64"
+            PUSH_OR_LOAD="--load"
           fi

           supersetbot docker \
-            --push \
+            $PUSH_OR_LOAD \
             --preset ${{ matrix.build_preset }} \
             --context "$EVENT" \
             --context-ref "$RELEASE" $FORCE_LATEST \
-            --extra-flags "--build-arg INCLUDE_CHROMIUM=false" \
+            --extra-flags "--build-arg INCLUDE_CHROMIUM=false --tag $IMAGE_TAG" \
             $PLATFORM_ARG

+      # in the context of push (using multi-platform build), we need to pull the image locally
       - name: Docker pull
-        if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
-        run: docker pull apache/superset:GHA-${GITHUB_RUN_ID}
+        if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
+        run: docker pull $IMAGE_TAG

       - name: Print docker stats
         if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
         run: |
-          IMAGE_ID=$(docker images --filter "label=sha=${{ github.sha }}" --format "{{.ID}}" | head -n 1)
-          echo "SHA: ${{ github.sha }}"
-          echo "IMAGE: $IMAGE_ID"
-          docker images $IMAGE_ID
-          docker history $IMAGE_ID
+          echo "IMAGE: $IMAGE_TAG"
+          docker images $IMAGE_TAG
+          docker history $IMAGE_TAG

       - name: docker-compose sanity check
         if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
         shell: bash
         run: |
+          export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
+          # This should reuse the CACHED image built in the previous steps
+          docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
           docker compose up superset-init --exit-code-from superset-init

+  docker-compose-image-tag:
+    runs-on: ubuntu-24.04
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - name: Check for file changes
+        id: check
+        uses: ./.github/actions/change-detector/
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Setup Docker Environment
+        if: steps.check.outputs.docker
+        uses: ./.github/actions/setup-docker
+        with:
+          dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          build: "false"
+          install-docker-compose: "true"
+      - name: docker-compose sanity check
+        if: steps.check.outputs.docker
+        shell: bash
+        run: |
+          docker compose -f docker-compose-image-tag.yml up superset-init --exit-code-from superset-init
```
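The `--load` vs `--push` split follows a docker buildx constraint: a multi-platform image cannot be loaded into the local daemon, only pushed to a registry. A standalone sketch, with a placeholder image name:

```bash
# Single-platform build: the image can be loaded into the local Docker daemon.
docker buildx build --platform linux/amd64 --load -t example/superset-test:local .

# Multi-platform build: --load is not supported, so the result must be pushed.
docker buildx build --platform linux/amd64,linux/arm64 --push -t example/superset-test:multi .
```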
.github/workflows/embedded-sdk-release.yml (vendored, 2 lines changed)

```diff
@@ -31,7 +31,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: "20"
+          node-version-file: './superset-embedded-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
       - run: npm ci
       - run: npm run ci:release
```
.github/workflows/embedded-sdk-test.yml (vendored, 2 lines changed)

```diff
@@ -21,7 +21,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: "20"
+          node-version-file: './superset-embedded-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
       - run: npm ci
       - run: npm test
```
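`node-version-file` points setup-node at the same `.nvmrc` that local development reads, so CI and laptops resolve one Node version from one file. A minimal local sketch, assuming nvm is installed:

```bash
# Inside the package directory, pick up the version pinned in .nvmrc.
cd superset-embedded-sdk
nvm install   # with no argument, reads the version from .nvmrc
nvm use       # switches the current shell to that version
node --version
```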
.github/workflows/ephemeral-env.yml (vendored, 442 lines changed)

```diff
@@ -1,145 +1,157 @@
 name: Ephemeral env workflow

-# Example manual trigger: gh workflow run ephemeral-env.yml --ref fix_ephemerals --field comment_body="/testenv up" --field issue_number=666
+# Example manual trigger:
+# gh workflow run ephemeral-env.yml --ref fix_ephemerals --field label_name="testenv-up" --field issue_number=666

 on:
-  issue_comment:
-    types: [created]
+  pull_request_target:
+    types:
+      - labeled
   workflow_dispatch:
     inputs:
-      comment_body:
-        description: 'Comment body to simulate /testenv command'
+      label_name:
+        description: 'Label name to simulate label-based /testenv trigger'
         required: true
-        default: '/testenv up'
+        default: 'testenv-up'
       issue_number:
         description: 'Issue or PR number'
         required: true

 jobs:
-  ephemeral-env-comment:
+  ephemeral-env-label:
     concurrency:
-      group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
+      group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-label
       cancel-in-progress: true
-    name: Evaluate ephemeral env comment trigger (/testenv)
+    name: Evaluate ephemeral env label trigger
     runs-on: ubuntu-24.04
     permissions:
       pull-requests: write
     outputs:
-      slash-command: ${{ steps.eval-body.outputs.result }}
+      slash-command: ${{ steps.eval-label.outputs.result }}
       feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
+      sha: ${{ steps.get-sha.outputs.sha }}
     env:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}

     steps:
-      - name: Debug
-        run: |
-          echo "Comment on PR #${{ github.event.issue.number }} by ${{ github.event.issue.user.login }}, ${{ github.event.comment.author_association }}"
+      - name: Check for the "testenv-up" label
+        id: eval-label
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            LABEL_NAME="${{ github.event.inputs.label_name }}"
+          else
+            LABEL_NAME="${{ github.event.label.name }}"
+          fi

-      - name: Eval comment body for /testenv slash command
-        uses: actions/github-script@v7
-        env:
-          COMMENT_BODY: ${{ github.event.inputs.comment_body || github.event.comment.body }}
-        id: eval-body
-        with:
-          result-encoding: string
-          script: |
-            const pattern = /^\/testenv (up|down)/;
-            const result = pattern.exec(process.env.COMMENT_BODY || '');
-            return result === null ? 'noop' : result[1];
+          echo "Evaluating label: $LABEL_NAME"

-      - name: Looking for feature flags
-        uses: actions/github-script@v7
-        env:
-          COMMENT_BODY: ${{ github.event.inputs.comment_body || github.event.comment.body }}
-        id: eval-feature-flags
-        with:
-          script: |
-            const pattern = /FEATURE_(\w+)=(\w+)/g;
-            let results = [];
-            [...process.env.COMMENT_BODY.matchAll(pattern)].forEach(match => {
-              const config = {
-                name: `SUPERSET_FEATURE_${match[1]}`,
-                value: match[2],
-              };
-              results.push(config);
-            });
-            return results;
+          if [[ "$LABEL_NAME" == "testenv-up" ]]; then
+            echo "result=up" >> $GITHUB_OUTPUT
+          else
+            echo "result=noop" >> $GITHUB_OUTPUT
+          fi

+      - name: Get event SHA
+        id: get-sha
+        if: steps.eval-label.outputs.result == 'up'
+        uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            let prSha;
+
+            // If event is workflow_dispatch, use the issue_number from inputs
+            if (context.eventName === "workflow_dispatch") {
+              const prNumber = "${{ github.event.inputs.issue_number }}";
+              if (!prNumber) {
+                console.log("No PR number found.");
+                return;
+              }
+
+              // Fetch PR details using the provided issue_number
+              const { data: pr } = await github.rest.pulls.get({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                pull_number: prNumber
+              });
+
+              prSha = pr.head.sha;
+            } else {
+              // If it's not workflow_dispatch, use the PR head sha from the event
+              prSha = context.payload.pull_request.head.sha;
+            }
+
+            console.log(`PR SHA: ${prSha}`);
+            core.setOutput("sha", prSha);
+
+      - name: Looking for feature flags in PR description
+        uses: actions/github-script@v7
+        id: eval-feature-flags
+        if: steps.eval-label.outputs.result == 'up'
+        with:
+          script: |
+            const description = context.payload.pull_request
+              ? context.payload.pull_request.body || ''
+              : context.payload.inputs.pr_description || '';
+
+            const pattern = /FEATURE_(\w+)=(\w+)/g;
+            let results = [];
+            [...description.matchAll(pattern)].forEach(match => {
+              const config = {
+                name: `SUPERSET_FEATURE_${match[1]}`,
+                value: match[2],
+              };
+              results.push(config);
+            });
+
+            return results;
+
+      - name: Reply with confirmation comment
+        uses: actions/github-script@v7
+        if: steps.eval-label.outputs.result == 'up'
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            const action = '${{ steps.eval-label.outputs.result }}';
+            const user = context.actor;
+            const runId = context.runId;
+            const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
+
+            const issueNumber = context.payload.pull_request
+              ? context.payload.pull_request.number
+              : context.payload.inputs.issue_number;
+
+            if (!issueNumber) {
+              throw new Error("Issue number is not available.");
+            }
+
+            const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).` +
+              ` Action: **${action}**.` +
+              ` More information on [how to use or configure ephemeral environments]` +
+              `(https://superset.apache.org/docs/contributing/howtos/#github-ephemeral-environments)`;

-      - name: Limit to committers
-        if: >
-          steps.eval-body.outputs.result != 'noop' &&
-          github.event_name == 'issue_comment' &&
-          github.event.comment.author_association != 'MEMBER' &&
-          github.event.comment.author_association != 'OWNER'
-        uses: actions/github-script@v7
-        with:
-          github-token: ${{ github.token }}
-          script: |
-            const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.';
-            github.rest.issues.createComment({
-              issue_number: ${{ github.event.issue.number }},
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              body: errMsg
-            });
-            core.setFailed(errMsg);

-      - name: Reply with confirmation comment
-        uses: actions/github-script@v7
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            const issueNumber = ${{ github.event.inputs.issue_number || github.event.issue.number }};
-            const user = '${{ github.event.comment.user.login || github.actor }}';
-            const action = '${{ steps.eval-body.outputs.result }}';
-            const runId = context.runId;
-            const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
-            const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;
-            if (action !== 'noop') {
-              await github.rest.issues.createComment({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: issueNumber,
-                body,
-              });
-            }
-            else {
-              core.setFailed('No ephemeral environment action detected.');
-            }

   ephemeral-docker-build:
     concurrency:
-      group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-build
+      group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-build
       cancel-in-progress: true
-    needs: ephemeral-env-comment
-    if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
+    needs: ephemeral-env-label
+    if: needs.ephemeral-env-label.outputs.slash-command == 'up'
     name: ephemeral-docker-build
     runs-on: ubuntu-24.04
     steps:
-      - name: Get Info from comment
-        uses: actions/github-script@v7
-        id: get-pr-info
-        with:
-          script: |
-            const request = {
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              pull_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
-            };
-            core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`);
-            const pr = await github.rest.pulls.get(request);
-            return pr.data;

-      - name: Debug
-        id: get-sha
-        run: |
-          echo "sha=${{ fromJSON(steps.get-pr-info.outputs.result).head.sha }}" >> $GITHUB_OUTPUT

-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} : ${{steps.get-sha.outputs.sha}} )"
+      - name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
         uses: actions/checkout@v4
         with:
-          ref: ${{ steps.get-sha.outputs.sha }}
+          ref: ${{ needs.ephemeral-env-label.outputs.sha }}
           persist-credentials: false

       - name: Setup Docker Environment
@@ -181,14 +193,15 @@ jobs:
         env:
           ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
           ECR_REPOSITORY: superset-ci
-          IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
+          IMAGE_TAG: apache/superset:${{ needs.ephemeral-env-label.outputs.sha }}-ci
+          PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
         run: |
-          docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
+          docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-$PR_NUMBER-ci
           docker push -a $ECR_REGISTRY/$ECR_REPOSITORY

   ephemeral-env-up:
-    needs: [ephemeral-env-comment, ephemeral-docker-build]
-    if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
+    needs: [ephemeral-env-label, ephemeral-docker-build]
+    if: needs.ephemeral-env-label.outputs.slash-command == 'up'
     name: Spin up an ephemeral environment
     runs-on: ubuntu-24.04
     permissions:
@@ -196,120 +209,125 @@ jobs:
       pull-requests: write

     steps:
       - uses: actions/checkout@v4
         with:
           persist-credentials: false

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: us-west-2

       - name: Login to Amazon ECR
         id: login-ecr
         uses: aws-actions/amazon-ecr-login@v2

       - name: Check target image exists in ECR
         id: check-image
         continue-on-error: true
+        env:
+          PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
         run: |
           aws ecr describe-images \
             --registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
             --repository-name superset-ci \
-            --image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
+            --image-ids imageTag=pr-$PR_NUMBER-ci

       - name: Fail on missing container image
         if: steps.check-image.outcome == 'failure'
         uses: actions/github-script@v7
         with:
           github-token: ${{ github.token }}
           script: |
             const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
             github.rest.issues.createComment({
-              issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
+              issue_number: ${{ github.event.inputs.issue_number || github.event.pull_request.number }},
               owner: context.repo.owner,
               repo: context.repo.repo,
               body: errMsg
             });
             core.setFailed(errMsg);

       - name: Fill in the new image ID in the Amazon ECS task definition
         id: task-def
         uses: aws-actions/amazon-ecs-render-task-definition@v1
         with:
           task-definition: .github/workflows/ecs-task-definition.json
           container-name: superset-ci
-          image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
+          image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-ci

       - name: Update env vars in the Amazon ECS task definition
         run: |
-          cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-comment.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}
+          cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-label.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}

       - name: Describe ECS service
         id: describe-services
         run: |
-          echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
+          echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT

       - name: Create ECS service
         id: create-service
         if: steps.describe-services.outputs.active != 'true'
         env:
           ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
           ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
+          PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
         run: |
           aws ecs create-service \
             --cluster superset-ci \
-            --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
+            --service-name pr-$PR_NUMBER-service \
             --task-definition superset-ci \
             --launch-type FARGATE \
             --desired-count 1 \
             --platform-version LATEST \
             --network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
-            --tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
+            --tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}

       - name: Deploy Amazon ECS task definition
         id: deploy-task
         uses: aws-actions/amazon-ecs-deploy-task-definition@v2
         with:
           task-definition: ${{ steps.task-def.outputs.task-definition }}
-          service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
+          service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
           cluster: superset-ci
           wait-for-service-stability: true
           wait-for-minutes: 10

       - name: List tasks
         id: list-tasks
         run: |
-          echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
+          echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT

       - name: Get network interface
         id: get-eni
         run: |
-          echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name==\"networkInterfaceId\")) | .[0] | .value')" >> $GITHUB_OUTPUT
+          echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value')" >> $GITHUB_OUTPUT

       - name: Get public IP
         id: get-ip
         run: |
           echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT

       - name: Comment (success)
         if: ${{ success() }}
         uses: actions/github-script@v7
         with:
           github-token: ${{github.token}}
           script: |
-            github.rest.issues.createComment({
-              issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
+            const issue_number = context.payload.inputs?.issue_number || context.issue.number;
+            github.rest.issues.createComment({
+              issue_number: issue_number,
               owner: context.repo.owner,
               repo: context.repo.repo,
-              body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
-            })
+              body: `@${{ github.actor }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are 'admin'/'admin'. Please allow several minutes for bootstrapping and startup.`
+            });

       - name: Comment (failure)
         if: ${{ failure() }}
         uses: actions/github-script@v7
         with:
           github-token: ${{github.token}}
           script: |
+            const issue_number = context.payload.inputs?.issue_number || context.issue.number;
             github.rest.issues.createComment({
-              issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
+              issue_number: issue_number,
               owner: context.repo.owner,
               repo: context.repo.repo,
               body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
             })
```
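With the comment trigger gone, an environment is requested by putting the `testenv-up` label on the PR, or through the `workflow_dispatch` shown in the header comment. Using the GitHub CLI with the PR number from the workflow's own example:

```bash
# Apply the label that now triggers the ephemeral environment build.
gh pr edit 666 --add-label "testenv-up"

# Or simulate the trigger directly, as in the workflow's header comment.
gh workflow run ephemeral-env.yml --ref fix_ephemerals \
  --field label_name="testenv-up" --field issue_number=666
```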
.github/workflows/pre-commit.yml (vendored, 33 lines changed)

```diff
@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-24.04
     strategy:
       matrix:
-        python-version: ["current", "next", "previous"]
+        python-version: ["current", "previous"]
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4
@@ -38,12 +38,39 @@ jobs:
           echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}"
           echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
           brew install norwoodj/tap/helm-docs
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
+      - name: Install Frontend Dependencies
+        run: |
+          cd superset-frontend
+          npm ci
+
+      - name: Install Docs Dependencies
+        run: |
+          cd docs
+          yarn install --immutable
+
       - name: pre-commit
         run: |
           set +e # Don't exit immediately on failure
+          export SKIP=eslint-frontend,type-checking-frontend
           pre-commit run --all-files
-          if [ $? -ne 0 ] || ! git diff --quiet --exit-code; then
-            echo "❌ Pre-commit check failed."
+          PRE_COMMIT_EXIT_CODE=$?
+          git diff --quiet --exit-code
+          GIT_DIFF_EXIT_CODE=$?
+          if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
+            if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
+              echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
+            else
+              echo "❌ Git working directory is dirty."
+              echo "📌 This likely means that pre-commit made changes that were not committed."
+              echo "🔍 Modified files:"
+              git diff --name-only
+            fi

             echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
             echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
             exit 1
```
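The failure message points contributors at running the hooks locally; the usual invocation, with the same `SKIP` list this CI step sets, looks like:

```bash
# One-time setup: install the git hook so checks run on every commit.
pre-commit install

# Reproduce what CI runs, skipping the frontend hooks exactly as the workflow does.
SKIP=eslint-frontend,type-checking-frontend pre-commit run --all-files
```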
.github/workflows/release.yml (vendored, 10 lines changed)

```diff
@@ -24,13 +24,7 @@ jobs:
     needs: config
     if: needs.config.outputs.has-secrets
     name: Bump version and publish package(s)

     runs-on: ubuntu-24.04

-    strategy:
-      matrix:
-        node-version: [20]

     steps:
       - uses: actions/checkout@v4
         with:
@@ -46,11 +40,11 @@ jobs:
           git fetch --prune --unshallow
           git tag -d `git tag | grep -E '^trigger-'`

-      - name: Use Node.js ${{ matrix.node-version }}
+      - name: Install Node.js
         if: env.HAS_TAGS
         uses: actions/setup-node@v4
         with:
-          node-version: ${{ matrix.node-version }}
+          node-version-file: './superset-frontend/.nvmrc'

       - name: Cache npm
         if: env.HAS_TAGS
```
```diff
@@ -26,7 +26,6 @@ jobs:
     fail-fast: false
     matrix:
       browser: ["chrome"]
-      node: [20]
   env:
     SUPERSET_ENV: development
     SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -40,7 +39,7 @@ jobs:
     APPLITOOLS_BATCH_NAME: Superset Cypress
   services:
     postgres:
-      image: postgres:15-alpine
+      image: postgres:16-alpine
       env:
         POSTGRES_USER: superset
         POSTGRES_PASSWORD: superset
@@ -66,7 +65,7 @@ jobs:
     - name: Setup Node.js
       uses: actions/setup-node@v4
       with:
-        node-version: ${{ matrix.node }}
+        node-version-file: './superset-frontend/.nvmrc'
     - name: Install npm dependencies
       uses: ./.github/actions/cached-dependencies
       with:
```
```diff
@@ -28,9 +28,6 @@ jobs:
   needs: config
   if: needs.config.outputs.has-secrets
   runs-on: ubuntu-24.04
-  strategy:
-    matrix:
-      node: [20]
   steps:
     - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
       uses: actions/checkout@v4
@@ -41,7 +38,7 @@ jobs:
     - name: Set up Node.js
       uses: actions/setup-node@v4
       with:
-        node-version: ${{ matrix.node }}
+        node-version-file: './superset-frontend/.nvmrc'
     - name: Install eyes-storybook dependencies
       uses: ./.github/actions/cached-dependencies
       with:
```
.github/workflows/superset-cli.yml (vendored, 2 lines changed)

```diff
@@ -23,7 +23,7 @@ jobs:
     SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
   services:
     postgres:
-      image: postgres:15-alpine
+      image: postgres:16-alpine
       env:
         POSTGRES_USER: superset
         POSTGRES_PASSWORD: superset
```
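The bumped service container can be reproduced locally for the same connection string; port and credentials come straight from the workflow env above:

```bash
# Run the bumped Postgres version with the credentials the CI jobs expect.
docker run --rm -d --name superset-ci-pg \
  -e POSTGRES_USER=superset \
  -e POSTGRES_PASSWORD=superset \
  -p 15432:5432 \
  postgres:16-alpine

# Matches SUPERSET__SQLALCHEMY_DATABASE_URI above:
# postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
```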
.github/workflows/superset-docs-deploy.yml (vendored, 4 lines changed)

```diff
@@ -35,10 +35,10 @@ jobs:
       with:
         persist-credentials: false
         submodules: recursive
-    - name: Set up Node.js 20
+    - name: Set up Node.js
       uses: actions/setup-node@v4
       with:
-        node-version: '20'
+        node-version-file: './docs/.nvmrc'
     - name: Setup Python
       uses: ./.github/actions/setup-backend/
     - uses: actions/setup-java@v4
```
.github/workflows/superset-docs-verify.yml (vendored, 7 lines changed)

```diff
@@ -24,11 +24,10 @@ jobs:
     - uses: JustinBeckwith/linkinator-action@v1.11.0
       continue-on-error: true # This will make the job advisory (non-blocking, no red X)
       with:
-        paths: "**/*.md, **/*.mdx"
+        paths: "**/*.md, **/*.mdx, !superset-frontend/CHANGELOG.md"
         linksToSkip: >-
           ^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
           http://localhost:8088/,
-          docker/.env-non-dev,
           http://127.0.0.1:3000/,
           http://localhost:9001/,
           https://charts.bitnami.com/bitnami,
@@ -61,10 +60,10 @@ jobs:
       with:
         persist-credentials: false
         submodules: recursive
-    - name: Set up Node.js 20
+    - name: Set up Node.js
       uses: actions/setup-node@v4
       with:
-        node-version: '20'
+        node-version-file: './docs/.nvmrc'
     - name: yarn install
       run: |
         yarn install --check-cache
```
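linkinator can also be run by hand to reproduce the link check; a rough sketch, where the glob mirrors the workflow's `paths` and the skip pattern is illustrative:

```bash
# Check markdown files for dead links, skipping a localhost target (illustrative).
npx linkinator "**/*.md" --skip "http://localhost:8088/"
```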
.github/workflows/superset-e2e.yml (vendored, 8 lines changed)

```diff
@@ -53,7 +53,7 @@ jobs:
     USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true'|| (github.ref == 'refs/heads/master' && 'true') || 'false' }}
   services:
     postgres:
-      image: postgres:15-alpine
+      image: postgres:16-alpine
       env:
         POSTGRES_USER: superset
         POSTGRES_PASSWORD: superset
@@ -109,7 +109,7 @@ jobs:
       if: steps.check.outputs.python || steps.check.outputs.frontend
       uses: actions/setup-node@v4
       with:
-        node-version: "20"
+        node-version-file: './superset-frontend/.nvmrc'
     - name: Install npm dependencies
       if: steps.check.outputs.python || steps.check.outputs.frontend
       uses: ./.github/actions/cached-dependencies
@@ -138,7 +138,7 @@ jobs:
       run: cypress-run-all ${{ env.USE_DASHBOARD }}
     - name: Upload Artifacts
       uses: actions/upload-artifact@v4
-      if: github.event_name == 'workflow_dispatch' && (steps.check.outputs.python || steps.check.outputs.frontend)
+      if: failure()
       with:
         path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
-        name: cypress-artifact-${{ github.run_id }}-${{ github.job }}
+        name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}
```
192
.github/workflows/superset-frontend.yml
vendored
192
.github/workflows/superset-frontend.yml
vendored
@@ -1,4 +1,4 @@
|
||||
name: Frontend
|
||||
name: "Frontend Build CI (unit tests, linting & sanity checks)"
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -13,68 +13,168 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
TAG: apache/superset:GHA-${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
frontend-build:
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
should-run: ${{ steps.check.outputs.frontend }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Check npm lock file version
|
||||
run: ./scripts/ci_check_npm_lock_version.sh ./superset-frontend/package-lock.json
|
||||
- name: Check for file changes
|
||||
|
||||
- name: Check for File Changes
|
||||
id: check
|
||||
uses: ./.github/actions/change-detector/
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Setup Node.js
|
||||
|
||||
- name: Build Docker Image
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v4
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
docker buildx build \
|
||||
-t $TAG \
|
||||
--cache-from=type=registry,ref=apache/superset-cache:3.10-slim-bookworm \
|
||||
--target superset-node-ci \
|
||||
.
|
||||
|
||||
- name: Save Docker Image as Artifact
|
||||
if: steps.check.outputs.frontend
|
||||
run: |
|
||||
docker save $TAG | gzip > docker-image.tar.gz
|
||||
|
||||
- name: Upload Docker Image Artifact
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Install dependencies
|
||||
if: steps.check.outputs.frontend
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
name: docker-image
|
||||
path: docker-image.tar.gz
|
||||
|
||||
sharded-jest-tests:
|
||||
needs: frontend-build
|
||||
if: needs.frontend-build.outputs.should-run == 'true'
|
||||
strategy:
|
||||
matrix:
|
||||
shard: [1, 2, 3, 4, 5, 6, 7, 8]
|
||||
fail-fast: false
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
run: npm-install
|
||||
- name: eslint
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend
|
||||
name: docker-image
|
||||
|
||||
- name: Load Docker Image
|
||||
run: docker load < docker-image.tar.gz
|
||||
|
||||
- name: npm run test with coverage
|
||||
run: |
|
||||
npm run eslint -- . --quiet
|
||||
- name: tsc
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend
|
||||
run: |
|
||||
npm run type
|
||||
- name: Build plugins packages
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend
|
||||
run: npm run plugins:build
|
||||
- name: Build plugins Storybook
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend
|
||||
run: npm run plugins:build-storybook
|
||||
- name: superset-ui/core coverage
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend
|
||||
run: |
|
||||
npm run core:cover
|
||||
- name: unit tests
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend
|
||||
run: |
|
||||
npm run test -- --coverage --silent
|
||||
# todo: remove this step when fix generator as a project in root jest.config.js
|
||||
- name: generator-superset unit tests
|
||||
if: steps.check.outputs.frontend
|
||||
working-directory: ./superset-frontend/packages/generator-superset
|
||||
run: npm run test
|
||||
- name: Upload code coverage
|
||||
mkdir -p ${{ github.workspace }}/superset-frontend/coverage
|
||||
docker run \
|
||||
-v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \
|
||||
--rm $TAG \
|
||||
bash -c \
|
||||
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
|
||||
|
||||
- name: Upload Coverage Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: coverage-artifacts-${{ matrix.shard }}
|
||||
path: superset-frontend/coverage
|
||||
|
||||
  report-coverage:
    needs: [sharded-jest-tests]
    if: needs.frontend-build.outputs.should-run == 'true'
    runs-on: ubuntu-24.04
    steps:
      - name: Download Coverage Artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: coverage-artifacts-*
          path: coverage/

      - name: Show Files
        run: find coverage/

      - name: Merge Code Coverage
        run: npx nyc merge coverage/ merged-output/coverage-summary.json

      - name: Upload Code Coverage
        uses: codecov/codecov-action@v5
        with:
          flags: javascript
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          files: merged-output/coverage-summary.json
          slug: apache/superset
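`nyc merge` combines the per-shard JSON reports it finds under `coverage/` into a single coverage map. A hedged local sketch of the same merge (the `nyc report` line is an optional extra, assuming nyc can read the merged file back from that directory):

```bash
# Merge the eight shard reports, as the Merge Code Coverage step does.
mkdir -p merged-output
npx nyc merge coverage/ merged-output/coverage-summary.json
# Optional: render a human-readable summary from the merged map.
npx nyc report --temp-dir merged-output --reporter=text-summary
```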
  core-cover:
    needs: frontend-build
    if: needs.frontend-build.outputs.should-run == 'true'
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@v4
        with:
          name: docker-image

      - name: Load Docker Image
        run: docker load < docker-image.tar.gz

      - name: superset-ui/core coverage
        run: |
          docker run --rm $TAG bash -c \
            "npm run core:cover"

  lint-frontend:
    needs: frontend-build
    if: needs.frontend-build.outputs.should-run == 'true'
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@v4
        with:
          name: docker-image

      - name: Load Docker Image
        run: docker load < docker-image.tar.gz

      - name: eslint
        run: |
          docker run --rm $TAG bash -c \
            "npm i && npm run eslint -- . --quiet"

      - name: tsc
        run: |
          docker run --rm $TAG bash -c \
            "npm run type"

  validate-frontend:
    needs: frontend-build
    if: needs.frontend-build.outputs.should-run == 'true'
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@v4
        with:
          name: docker-image

      - name: Load Docker Image
        run: docker load < docker-image.tar.gz

      - name: Build Plugins Packages
        run: |
          docker run --rm $TAG bash -c \
            "npm run plugins:build"

      - name: Build Plugins Storybook
        run: |
          docker run --rm $TAG bash -c \
            "npm run plugins:build-storybook"
2  .github/workflows/superset-helm-lint.yml  vendored

@@ -25,7 +25,7 @@ jobs:
      - name: Set up Helm
        uses: azure/setup-helm@v4
        with:
          version: v3.5.4
          version: v3.16.4

      - name: Setup Python
        uses: ./.github/actions/setup-backend/

@@ -77,7 +77,7 @@ jobs:
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ["current", "next", "previous"]
        python-version: ["current", "previous"]
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config

@@ -85,7 +85,7 @@ jobs:
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
    services:
      postgres:
        image: postgres:15-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset

@@ -25,7 +25,7 @@ jobs:
      SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
    services:
      postgres:
        image: postgres:15-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset

@@ -94,7 +94,7 @@ jobs:
      UPLOAD_FOLDER: /tmp/.superset/uploads/
    services:
      postgres:
        image: postgres:15-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset

@@ -19,7 +19,7 @@ jobs:
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ["current", "next"]
        python-version: ["previous", "current"]
    env:
      PYTHONPATH: ${{ github.workspace }}
    steps:
2  .github/workflows/superset-translations.yml  vendored

@@ -33,7 +33,7 @@ jobs:
        if: steps.check.outputs.frontend
        uses: actions/setup-node@v4
        with:
          node-version: '18'
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install dependencies
        if: steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
4  .github/workflows/tech-debt.yml  vendored

@@ -32,10 +32,10 @@ jobs:
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          node-version-file: './superset-frontend/.nvmrc'

      - name: Install Dependencies
        run: npm install
        run: npm ci
        working-directory: ./superset-frontend

      - name: Run Script
9  .gitignore  vendored

@@ -21,6 +21,7 @@
*.swp
__pycache__

.aider*
.local
.cache
.bento*

@@ -50,7 +51,6 @@ env
venv*
env_py3
envpy3
env36
local_config.py
/superset_config.py
/superset_text.yml

@@ -66,7 +66,10 @@ superset-websocket/config.json
*.js.map
node_modules
npm-debug.log*
superset/static/assets
superset/static/assets/*
!superset/static/assets/.gitkeep
superset/static/uploads/*
!superset/static/uploads/.gitkeep
superset/static/version_info.json
superset-frontend/**/esm/*
superset-frontend/**/lib/*

@@ -104,6 +107,7 @@ ghostdriver.log
testCSV.csv
.terser-plugin-cache/
apache-superset-*.tar.gz*
apache_superset-*.tar.gz*
release.json

# Translation-related files

@@ -122,3 +126,4 @@ docker/*local*
# Jest test report
test-report.html
superset/static/stats/statistics.html
.aider*
@@ -20,7 +20,7 @@ repos:
    hooks:
      - id: auto-walrus
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v1.13.0
    rev: v1.15.0
    hooks:
      - id: mypy
        args: [--check-untyped-defs]

@@ -57,9 +57,30 @@ repos:
    hooks:
      - id: prettier
        additional_dependencies:
          - prettier@3.3.3
          - prettier@3.5.3
        args: ["--ignore-path=./superset-frontend/.prettierignore"]
        files: "superset-frontend"
  - repo: local
    hooks:
      - id: eslint-frontend
        name: eslint (frontend)
        entry: ./scripts/eslint.sh
        language: system
        pass_filenames: true
        files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
      - id: eslint-docs
        name: eslint (docs)
        entry: bash -c 'cd docs && FILES=$(echo "$@" | sed "s|docs/||g") && yarn eslint --fix --ext .js,.jsx,.ts,.tsx --quiet $FILES'
        language: system
        pass_filenames: true
        files: ^docs/.*\.(js|jsx|ts|tsx)$
      - id: type-checking-frontend
        name: Type-Checking (Frontend)
        entry: bash -c './scripts/check-type.js package=superset-frontend excludeDeclarationDir=cypress-base'
        language: system
        files: ^superset-frontend\/.*\.(js|jsx|ts|tsx)$
        exclude: ^superset-frontend/cypress-base\/
        require_serial: true
  # blacklist unsafe functions like make_url (see #19526)
  - repo: https://github.com/skorokithakis/blacklist-pre-commit-hook
    rev: e2f070289d8eddcaec0b580d3bde29437e7c8221

@@ -71,9 +92,11 @@ repos:
    hooks:
      - id: helm-docs
        files: helm
        verbose: false
        args: ["--log-level", "error"]
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.8.0
    rev: v0.9.7
    hooks:
      - id: ruff
        args: [ --fix ]
        args: [--fix]
      - id: ruff-format
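The new `repo: local` hooks above can also be exercised on demand rather than waiting for a commit. A minimal sketch, assuming `pre-commit` is installed and you are at the repo root:

```bash
# Run the new frontend hooks across the whole tree.
pre-commit run eslint-frontend --all-files
pre-commit run type-checking-frontend --all-files
```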
50  CHANGELOG/4.1.1.md  Normal file

@@ -0,0 +1,50 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

## Change Log

### 4.1.1 (Fri Nov 15 22:13:57 2024 +0530)

**Database Migrations**

**Features**

**Fixes**

- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)

**Others**

- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)
83  CHANGELOG/4.1.2.md  Normal file

@@ -0,0 +1,83 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

## Change Log

### 4.1.2 (Fri Mar 7 13:28:05 2025 -0800)

**Database Migrations**

- [#32538](https://github.com/apache/superset/pull/32538) fix(migrations): Handle comparator None in old time comparison migration (@Antonio-RiveroMartnez)
- [#32155](https://github.com/apache/superset/pull/32155) fix(migrations): Handle no params in time comparison migration (@Antonio-RiveroMartnez)
- [#31185](https://github.com/apache/superset/pull/31185) fix: check for column before adding in migrations (@betodealmeida)

**Features**

- [#29974](https://github.com/apache/superset/pull/29974) feat(sqllab): Adds refresh button to table metadata in SQL Lab (@Usiel)

**Fixes**

- [#32515](https://github.com/apache/superset/pull/32515) fix(sqllab): Allow clear on schema and catalog (@justinpark)
- [#32500](https://github.com/apache/superset/pull/32500) fix: dashboard, chart and dataset import validation (@dpgaspar)
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
- [#31407](https://github.com/apache/superset/pull/31407) fix: Big Number side cut fixed (@fardin-developer)
- [#31480](https://github.com/apache/superset/pull/31480) fix(sunburst): Use metric label from verbose map (@gerbermichi)
- [#31427](https://github.com/apache/superset/pull/31427) fix(tags): clean up bulk create api and schema (@villebro)
- [#31334](https://github.com/apache/superset/pull/31334) fix(docs): add custom editUrl path for intro page (@dwgrossberg)
- [#31323](https://github.com/apache/superset/pull/31323) fix: Use clickhouse sqlglot dialect for YDB (@vgvoleg)
- [#31198](https://github.com/apache/superset/pull/31198) fix: add more clickhouse disallowed functions on config (@dpgaspar)
- [#31194](https://github.com/apache/superset/pull/31194) fix(embedded): Hide anchor links in embedded mode (@Vitor-Avila)
- [#31960](https://github.com/apache/superset/pull/31960) fix(sqllab): Missing allowHTML props in ResultTableExtension (@justinpark)
- [#31332](https://github.com/apache/superset/pull/31332) fix: prevent multiple pvm errors on migration (@eschutho)
- [#31437](https://github.com/apache/superset/pull/31437) fix(database import): Gracefully handle error to get catalog schemas (@Vitor-Avila)
- [#31173](https://github.com/apache/superset/pull/31173) fix: cache-warmup fails (@nsivarajan)
- [#30442](https://github.com/apache/superset/pull/30442) fix(fe/src/dashboard): optional chaining for possibly nullable parent attribute in LayoutItem type (@hainenber)
- [#31639](https://github.com/apache/superset/pull/31639) fix(sqllab): unable to update saved queries (@DamianPendrak)
- [#29898](https://github.com/apache/superset/pull/29898) fix: parse pandas pivot null values (@eschutho)
- [#31414](https://github.com/apache/superset/pull/31414) fix(Pivot Table): Fix column width to respect currency config (@Vitor-Avila)
- [#31335](https://github.com/apache/superset/pull/31335) fix(histogram): axis margin padding consistent with other graphs (@tatiana-cherne)
- [#31301](https://github.com/apache/superset/pull/31301) fix(AllEntitiesTable): show Tags (@alexandrusoare)
- [#31329](https://github.com/apache/superset/pull/31329) fix: pass string to `process_template` (@betodealmeida)
- [#31341](https://github.com/apache/superset/pull/31341) fix(pinot): remove query aliases from SELECT and ORDER BY clauses in Pinot (@yuribogomolov)
- [#31308](https://github.com/apache/superset/pull/31308) fix: annotations on horizontal bar chart (@DamianPendrak)
- [#31294](https://github.com/apache/superset/pull/31294) fix(sqllab): Remove update_saved_query_exec_info to reduce lag (@justinpark)
- [#30897](https://github.com/apache/superset/pull/30897) fix: Exception handling for SQL Lab views (@michael-s-molina)
- [#31199](https://github.com/apache/superset/pull/31199) fix(Databricks): Escape catalog and schema names in pre-queries (@Vitor-Avila)
- [#31265](https://github.com/apache/superset/pull/31265) fix(trino): db session error in handle cursor (@justinpark)
- [#31024](https://github.com/apache/superset/pull/31024) fix(dataset): use sqlglot for DML check (@betodealmeida)
- [#29885](https://github.com/apache/superset/pull/29885) fix: add mutator to get_columns_description (@eschutho)
- [#30821](https://github.com/apache/superset/pull/30821) fix: x axis title disappears when editing bar chart (@DamianPendrak)
- [#31181](https://github.com/apache/superset/pull/31181) fix: Time-series Line Chart Display unnecessary total (@michael-s-molina)
- [#31163](https://github.com/apache/superset/pull/31163) fix(Dashboard): Backward compatible shared_label_colors field (@geido)
- [#31156](https://github.com/apache/superset/pull/31156) fix: check orderby (@betodealmeida)
- [#31154](https://github.com/apache/superset/pull/31154) fix: Remove unwanted commit on Trino's handle_cursor (@michael-s-molina)
- [#31151](https://github.com/apache/superset/pull/31151) fix: Revert "feat(trino): Add functionality to upload data (#29164)" (@michael-s-molina)
- [#31031](https://github.com/apache/superset/pull/31031) fix(Dashboard): Ensure shared label colors are updated (@geido)
- [#30967](https://github.com/apache/superset/pull/30967) fix(release validation): scripts now support RSA and EDDSA keys. (@rusackas)
- [#30881](https://github.com/apache/superset/pull/30881) fix(Dashboard): Native & Cross-Filters Scoping Performance (@geido)
- [#30887](https://github.com/apache/superset/pull/30887) fix(imports): import query_context for imports with charts (@lindenh)
- [#31008](https://github.com/apache/superset/pull/31008) fix(explore): verified props is not updated (@justinpark)
- [#30646](https://github.com/apache/superset/pull/30646) fix(Dashboard): Retain colors when color scheme not set (@geido)
- [#30962](https://github.com/apache/superset/pull/30962) fix(Dashboard): Exclude edit param in async screenshot (@geido)

**Others**

- [#32043](https://github.com/apache/superset/pull/32043) chore: Skip the creation of secondary perms during catalog migrations (@Vitor-Avila)
- [#30865](https://github.com/apache/superset/pull/30865) docs: Updating 4.1 Release Notes (@yousoph)
96  Dockerfile

@@ -18,16 +18,19 @@
######################################################################
# Node stage to deal with static asset construction
######################################################################
ARG PY_VER=3.10-slim-bookworm
ARG PY_VER=3.11.11-slim-bookworm

# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}

# Include translations in the final build
ARG BUILD_TRANSLATIONS="false"

######################################################################
# superset-node used for building frontend assets
# superset-node-ci used as a base for building frontend assets and CI
######################################################################
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
FROM --platform=${BUILDPLATFORM} node:20-bookworm-slim AS superset-node-ci
ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}

@@ -53,6 +56,10 @@ RUN mkdir -p /app/superset/static/assets \
             /app/superset/translations

# Mount package files and install dependencies if not in dev mode
# NOTE: we mount packages and plugins as they are referenced in package.json as workspaces
# ideally we'd COPY only their package.json. Here npm ci will be cached as long
# as the full content of these folders doesn't change, yielding a decent cache reuse rate.
# Note that it's not possible to selectively COPY or mount using globs.
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
    --mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
    --mount=type=cache,target=/root/.cache \
@@ -66,9 +73,13 @@ RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.j
# Runs the webpack build process
COPY superset-frontend /app/superset-frontend

######################################################################
# superset-node used for compile frontend assets
######################################################################
FROM superset-node-ci AS superset-node

# Build the frontend if not in dev mode
RUN --mount=type=cache,target=/app/superset-frontend/.temp_cache \
    --mount=type=cache,target=/root/.npm \
RUN --mount=type=cache,target=/root/.npm \
    if [ "$DEV_MODE" = "false" ]; then \
        echo "Running 'npm run ${BUILD_CMD}'"; \
        npm run ${BUILD_CMD}; \

@@ -91,21 +102,14 @@ RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
# Base python layer
######################################################################
FROM python:${PY_VER} AS python-base
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}

ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    SUPERSET_ENV=production \
    FLASK_APP="superset.app:create_app()" \
    PYTHONPATH="/app/pythonpath" \
    SUPERSET_HOME="/app/superset_home" \
    SUPERSET_PORT=8088
ARG SUPERSET_HOME="/app/superset_home"
ENV SUPERSET_HOME=${SUPERSET_HOME}


RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset
RUN mkdir -p $SUPERSET_HOME
RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
    && chmod -R 1777 $SUPERSET_HOME \
    && chown -R superset:superset $SUPERSET_HOME

# Some bash scripts needed throughout the layers
COPY --chmod=755 docker/*.sh /app/docker/

@@ -116,28 +120,18 @@ RUN pip install --no-cache-dir --upgrade uv
RUN uv venv /app/.venv
ENV PATH="/app/.venv/bin:${PATH}"

# Install Playwright and optionally setup headless browsers
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=/root/.cache/uv \
    if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
        uv pip install playwright && \
        playwright install-deps && \
        if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
        if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
    else \
        echo "Skipping browser installation"; \
    fi

######################################################################
# Python translation compiler layer
######################################################################
FROM python-base AS python-translation-compiler

ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}

# Install Python dependencies using docker/pip-install.sh
COPY requirements/translations.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
    /app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
    . /app/.venv/bin/activate && /app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt

COPY superset/translations/ /app/translations_mo/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \

@@ -150,13 +144,20 @@ RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
# Python APP common layer
######################################################################
FROM python-base AS python-common

ENV SUPERSET_HOME="/app/superset_home" \
    HOME="/app/superset_home" \
    SUPERSET_ENV="production" \
    FLASK_APP="superset.app:create_app()" \
    PYTHONPATH="/app/pythonpath" \
    SUPERSET_PORT="8088"

# Copy the entrypoints, make them executable in userspace
COPY --chmod=755 docker/entrypoints /app/docker/entrypoints

WORKDIR /app
# Set up necessary directories and user
RUN mkdir -p \
    ${SUPERSET_HOME} \
    ${PYTHONPATH} \
    superset/static \
    requirements \

@@ -165,6 +166,19 @@ RUN mkdir -p \
    requirements \
    && touch superset/static/version_info.json

# Install Playwright and optionally setup headless browsers
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
        uv pip install playwright && \
        playwright install-deps && \
        if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
        if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
    else \
        echo "Skipping browser installation"; \
    fi

# Copy required files for Python build
COPY pyproject.toml setup.py MANIFEST.in README.md ./
COPY superset-frontend/package.json superset-frontend/

@@ -205,12 +219,11 @@ FROM python-common AS lean

# Install Python dependencies using docker/pip-install.sh
COPY requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    /app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    uv pip install .

RUN python -m compileall /app/superset

USER superset

@@ -229,12 +242,13 @@ RUN /app/docker/apt-install.sh \
# Copy development requirements and install them
COPY requirements/*.txt requirements/
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=cache,target=/root/.cache/uv \
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    /app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
    uv pip install .

RUN uv pip install .[postgres]
RUN python -m compileall /app/superset

USER superset

@@ -243,5 +257,7 @@ USER superset
# CI image...
######################################################################
FROM lean AS ci

USER root
RUN uv pip install .[postgres]
USER superset
CMD ["/app/docker/entrypoints/docker-ci.sh"]
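The bind-mount pattern in the node stage above means only the two package manifests (plus the mounted workspace folders) feed the `npm ci` layer's cache key, so editing application source should not re-trigger the install. A rough local check, with hypothetical tag names and an illustrative file path:

```bash
# First build populates the npm ci layer.
docker buildx build --target superset-node-ci -t superset-node-ci:a .
# A source-only change (the file is not part of the mounted manifests)...
echo "// cache probe" >> superset-frontend/src/cache-probe.ts
# ...should leave the npm ci step marked CACHED in the BuildKit output.
docker buildx build --target superset-node-ci -t superset-node-ci:b .
```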
22  README.md

@@ -20,11 +20,11 @@ under the License.
# Superset

[](https://opensource.org/license/apache-2-0)
[](https://github.com/apache/superset/tree/latest)
[](https://github.com/apache/superset/actions)
[](https://badge.fury.io/py/apache-superset)
[](https://github.com/apache/superset/releases/latest)
[](https://github.com/apache/superset/actions)
[](https://badge.fury.io/py/apache_superset)
[](https://codecov.io/github/apache/superset)
[](https://pypi.python.org/pypi/apache-superset)
[](https://pypi.python.org/pypi/apache_superset)
[](http://bit.ly/join-superset-slack)
[](https://superset.apache.org)

@@ -72,8 +72,10 @@ Superset provides:
## Screenshots & Gifs

**Video Overview**

<!-- File hosted here https://github.com/apache/superset-site/raw/lfs/superset-video-4k.mp4 -->
[superset-video-4k.webm](https://github.com/apache/superset/assets/812905/da036bc2-150c-4ee7-80f9-75e63210ff76)

[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6)

<br/>

@@ -137,6 +139,7 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
</p>

**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).

@@ -145,7 +148,7 @@ Want to add support for your datastore or data engine? Read more [here](https://

## Installation and Configuration

[Extended documentation for Superset](https://superset.apache.org/docs/installation/docker-compose)
Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) guide or learn about [the options for production deployments](https://superset.apache.org/docs/installation/architecture/).

## Get Involved

@@ -154,7 +157,7 @@
and please read our [Slack Community Guidelines](https://github.com/apache/superset/blob/master/CODE_OF_CONDUCT.md#slack-community-guidelines)
- [Join our dev@superset.apache.org Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org). To join, simply send an email to [dev-subscribe@superset.apache.org](mailto:dev-subscribe@superset.apache.org)
- If you want to help troubleshoot GitHub Issues involving the numerous database drivers that Superset supports, please consider adding your name and the databases you have access to on the [Superset Database Familiarity Rolodex](https://docs.google.com/spreadsheets/d/1U1qxiLvOX0kBTUGME1AHHi6Ywel6ECF8xk_Qy-V9R8c/edit#gid=0)
- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community)
- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community)

## Contributor Guide

@@ -182,14 +185,16 @@ Understanding the Superset Points of View
- [Building New Database Connectors](https://preset.io/blog/building-database-connector/)
- [Create Your First Dashboard](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/)
- [Comprehensive Tutorial for Contributing Code to Apache Superset
  ](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
  ](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
- [Resources to master Superset by Preset](https://preset.io/resources/)

- Deploying Superset

  - [Official Docker image](https://hub.docker.com/r/apache/superset)
  - [Helm Chart](https://github.com/apache/superset/tree/master/helm/superset)

- Recordings of Past [Superset Community Events](https://preset.io/events)

  - [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/)
  - [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/)
  - [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/)

@@ -197,6 +202,7 @@ Understanding the Superset Points of View
  - [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/)

- Visualizations

  - [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/)
  - [Managing and Deploying Custom Viz Plugins](https://medium.com/nmc-techblog/apache-superset-manage-custom-viz-plugins-in-production-9fde1a708e55)
  - [Why Apache Superset is Betting on Apache ECharts](https://preset.io/blog/2021-4-1-why-echarts/)
@@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset

# Configure environment
ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8
    LC_ALL=C.UTF-8

RUN apt-get update -y

@@ -30,14 +30,14 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd

# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
    curl -sL https://deb.nodesource.com/setup_18.x | bash -; \
    apt-get install -y nodejs; \
    node --version;
    curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
    apt-get install -y nodejs; \
    node --version;
RUN if ! which npm; then apt-get install -y npm; fi

RUN mkdir -p /home/superset

@@ -50,21 +50,21 @@ ARG SUPERSET_RELEASE_RC_TARBALL
# Can fetch source from svn or copy tarball from local mounted directory
COPY $SUPERSET_RELEASE_RC_TARBALL ./
RUN tar -xvf *.tar.gz
WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend
WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend

RUN npm ci \
    && npm run build \
    && rm -rf node_modules
    && npm run build \
    && rm -rf node_modules

WORKDIR /home/superset/apache-superset-$VERSION
WORKDIR /home/superset/apache_superset-$VERSION
RUN pip install --upgrade setuptools pip \
    && pip install -r requirements/base.txt \
    && pip install --no-cache-dir .
    && pip install -r requirements/base.txt \
    && pip install --no-cache-dir .

RUN flask fab babel-compile --target superset/translations

ENV PATH=/home/superset/superset/bin:$PATH \
    PYTHONPATH=/home/superset/superset/:$PYTHONPATH \
    SUPERSET_TESTENV=true
    PYTHONPATH=/home/superset/superset/ \
    SUPERSET_TESTENV=true
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

@@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset

# Configure environment
ENV LANG=C.UTF-8 \
    LC_ALL=C.UTF-8
    LC_ALL=C.UTF-8

RUN apt-get update -y

@@ -29,13 +29,16 @@ RUN apt-get install -y apt-transport-https apt-utils

# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium
RUN apt-get install -y subversion build-essential libssl-dev \
    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd

# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \
    && apt-get install -y nodejs
RUN set -eux; \
    curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
    apt-get install -y nodejs; \
    node --version;
RUN if ! which npm; then apt-get install -y npm; fi

RUN mkdir -p /home/superset
RUN chown superset /home/superset

@@ -46,22 +49,20 @@ ARG VERSION
# Can fetch source from svn or copy tarball from local mounted directory
RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./
RUN tar -xvf *.tar.gz
WORKDIR apache-superset-$VERSION
WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend

RUN cd superset-frontend \
    && npm ci \
    && npm run build \
    && rm -rf node_modules
RUN npm ci \
    && npm run build \
    && rm -rf node_modules


WORKDIR /home/superset/apache-superset-$VERSION
WORKDIR /home/superset/apache_superset-$VERSION
RUN pip install --upgrade setuptools pip \
    && pip install -r requirements/base.txt \
    && pip install --no-cache-dir .
    && pip install -r requirements/base.txt \
    && pip install --no-cache-dir .

RUN flask fab babel-compile --target superset/translations

ENV PATH=/home/superset/superset/bin:$PATH \
    PYTHONPATH=/home/superset/superset/:$PYTHONPATH
    PYTHONPATH=/home/superset/superset/
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

@@ -123,10 +123,10 @@ SUPERSET_RC=1
SUPERSET_GITHUB_BRANCH=1.5
SUPERSET_PGP_FULLNAME=villebro@apache.org
SUPERSET_VERSION_RC=1.5.1rc1
SUPERSET_RELEASE=apache-superset-1.5.1
SUPERSET_RELEASE_RC=apache-superset-1.5.1rc1
SUPERSET_RELEASE_TARBALL=apache-superset-1.5.1-source.tar.gz
SUPERSET_RELEASE_RC_TARBALL=apache-superset-1.5.1rc1-source.tar.gz
SUPERSET_RELEASE=apache_superset-1.5.1
SUPERSET_RELEASE_RC=apache_superset-1.5.1rc1
SUPERSET_RELEASE_TARBALL=apache_superset-1.5.1-source.tar.gz
SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz
SUPERSET_TMP_ASF_SITE_PATH=/tmp/incubator-superset-site-1.5.1
-------------------------------
```
@@ -380,7 +380,7 @@ Official instructions:
https://www.apache.org/info/verification.html

We now have a handy script for anyone validating a release to use. The core of it is in this very folder, `verify_release.py`. Just make sure you have all three release files in the same directory (`{some version}.tar.gz`, `{some version}.tar.gz.asc` and `{some version}.tar.gz.sha512`). Then you can pass this script the path to the `.gz` file like so:
`python verify_release.py ~/path/to/apache-superset-{version/candidate}-source.tar.gz`
`python verify_release.py ~/path/to/apache_superset-{version/candidate}-source.tar.gz`

If all goes well, you will see this result in your terminal:
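A hedged usage example — the download directory and version string are hypothetical:

```bash
# All three files must sit side by side before verification.
ls ~/Downloads/apache_superset-4.1.2rc1-source.tar.gz*
# apache_superset-4.1.2rc1-source.tar.gz
# apache_superset-4.1.2rc1-source.tar.gz.asc
# apache_superset-4.1.2rc1-source.tar.gz.sha512
python verify_release.py ~/Downloads/apache_superset-4.1.2rc1-source.tar.gz
```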
@@ -452,10 +452,13 @@ cd ../


# Compile translations for the backend
./scripts/translations/generate_po_files.sh
./scripts/translations/generate_mo_files.sh

# update build version number
sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py

# build the python distribution
python -m build
python setup.py sdist
```

Publish to PyPI

@@ -467,7 +470,7 @@ while requesting access to push packages.

```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
twine upload dist/apache_superset-${SUPERSET_VERSION}.tar.gz
```

Set your username to `__token__`

@@ -232,8 +232,7 @@ class GitChangeLog:
        for log in self._logs:
            yield {
                "pr_number": log.pr_number,
                "pr_link": f"https://github.com/{SUPERSET_REPO}/pull/"
                f"{log.pr_number}",
                "pr_link": f"https://github.com/{SUPERSET_REPO}/pull/{log.pr_number}",
                "message": log.message,
                "time": log.time,
                "author": log.author,

@@ -323,9 +322,9 @@ class BaseParameters:


def print_title(message: str) -> None:
    print(f"{50*'-'}")
    print(f"{50 * '-'}")
    print(message)
    print(f"{50*'-'}")
    print(f"{50 * '-'}")


@click.group()

@@ -349,14 +348,14 @@ def compare(base_parameters: BaseParameters) -> None:
    previous_logs = base_parameters.previous_logs
    current_logs = base_parameters.current_logs
    print_title(
        f"Pull requests from " f"{current_logs.git_ref} not in {previous_logs.git_ref}"
        f"Pull requests from {current_logs.git_ref} not in {previous_logs.git_ref}"
    )
    previous_diff_logs = previous_logs.diff(current_logs)
    for diff_log in previous_diff_logs:
        print(f"{diff_log}")

    print_title(
        f"Pull requests from " f"{previous_logs.git_ref} not in {current_logs.git_ref}"
        f"Pull requests from {previous_logs.git_ref} not in {current_logs.git_ref}"
    )
    current_diff_logs = current_logs.diff(previous_logs)
    for diff_log in current_diff_logs:
@@ -31,7 +31,7 @@ The official source release:
https://downloads.apache.org/{{ project_module }}/{{ version }}

The PyPI package:
https://pypi.org/project/apache-superset/{{ version }}
https://pypi.org/project/apache_superset/{{ version }}

The CHANGELOG for the release:
https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG/{{ version }}.md

@@ -32,7 +32,7 @@ else
  SUPERSET_VERSION="${1}"
  SUPERSET_RC="${2}"
  SUPERSET_PGP_FULLNAME="${3}"
  SUPERSET_RELEASE_RC_TARBALL="apache-superset-${SUPERSET_VERSION_RC}-source.tar.gz"
  SUPERSET_RELEASE_RC_TARBALL="apache_superset-${SUPERSET_VERSION_RC}-source.tar.gz"
fi

SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${SUPERSET_RC}"

@@ -22,7 +22,7 @@ if [ -z "${SUPERSET_VERSION_RC}" ] || [ -z "${SUPERSET_SVN_DEV_PATH}" ] || [ -z
    exit 1
fi

SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}"

@@ -50,8 +50,8 @@ else
  export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}"
  export SUPERSET_PGP_FULLNAME="${2}"
  export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}"
  export SUPERSET_RELEASE=apache-superset-"${SUPERSET_VERSION}"
  export SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
  export SUPERSET_RELEASE=apache_superset-"${SUPERSET_VERSION}"
  export SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
  export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz
  export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
  export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}"

@@ -27,7 +27,7 @@ if [ -z "${SUPERSET_SVN_DEV_PATH}" ]; then
fi

if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then
    SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
    SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
    SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
    SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL}
    SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz

@@ -38,7 +38,7 @@ get_pip_command() {
PYTHON=$(get_python_command)
PIP=$(get_pip_command)

# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache-superset-x.x.xrcx-source.tar.gz`
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache_superset-x.x.xrcx-source.tar.gz`
RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz"

# Install dependencies from requirements.txt if the file exists
@@ -44,12 +44,11 @@ These features are **finished** but currently being tested. They are usable, but
- ALLOW_FULL_CSV_EXPORT
- CACHE_IMPERSONATION
- CONFIRM_DASHBOARD_DIFF
- DRILL_TO_DETAIL
- DYNAMIC_PLUGINS
- DATE_FORMAT_IN_EMAIL_SUBJECT: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports#commons)
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
- ESTIMATE_QUERY_COST
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
- HORIZONTAL_FILTER_BAR
- IMPERSONATE_WITH_EMAIL_PREFIX
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
- RLS_IN_SQLLAB

@@ -63,9 +62,8 @@ These features flags are **safe for production**. They have been tested and will
[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"

### Flags on the path to feature launch and flag deprecation/removal

- DASHBOARD_VIRTUALIZATION
- DRILL_BY
- DISABLE_LEGACY_DATASOURCE_EDITOR

### Flags retained for runtime configuration

@@ -79,6 +77,7 @@ independently. This new framework will also allow for non-boolean configurations
- ALLOW_ADHOC_SUBQUERY
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
- DATAPANEL_CLOSED_BY_DEFAULT
- DRILL_BY
- DRUID_JOINS
- EMBEDDABLE_CHARTS
- EMBEDDED_SUPERSET

@@ -98,6 +97,6 @@ These features flags currently default to True and **will be removed in a future
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"

- AVOID_COLORS_COLLISION
- DASHBOARD_CROSS_FILTERS
- DRILL_TO_DETAIL
- ENABLE_JAVASCRIPT_CONTROLS
- KV_STORE
@@ -25,8 +25,8 @@ all you have to do is file a simple PR [like this one](https://github.com/apache
the categorization is inaccurate, please file a PR with your correction as well.
Join our growing community!

### Sharing Economy

- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]

@@ -36,6 +36,7 @@ Join our growing community!
- [Ontruck](https://www.ontruck.com/)

### Financial Services

- [Aktia Bank plc](https://www.aktia.com)
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]

@@ -43,42 +44,52 @@ Join our growing community!
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
- [Clark.de](https://clark.de/)
- [KarrotPay](https://www.daangnpay.com/)
- [Remita](https://remita.net) [@mujibishola]
- [Taveo](https://www.taveo.com) [@codek]
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
- [Wise](https://wise.com) [@koszti]
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
- [Cover Genius](https://covergenius.com/)

### Gaming

- [Popoko VM Games Studio](https://popoko.live)

### E-Commerce

- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
- [Fanatics](https://www.fanatics.com/) [@coderfender]
- [Fordeal](https://www.fordeal.com) [@Renkai]
- [Fynd](https://www.fynd.com/) [@darpanjain07]
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
- [GoTo/Gojek](https://www.gojek.io/) [@gwthm-in]
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [Now](https://www.now.vn/) [@davidkohcw]
- [Qunar](https://www.qunar.com/) [@flametest]
- [Rakuten Viki](https://www.viki.com)
- [Shopee](https://shopee.sg) [@xiaohanyu]
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
- [ShopUp](https://www.shopup.org/) [@gwthm-in]
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
- [Utair](https://www.utair.ru) [@utair-digital]
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
- [Zalando](https://www.zalando.com) [@dmigo]
- [Zalora](https://www.zalora.com) [@ksaagariconic]
- [Zepto](https://www.zeptonow.com/) [@gwthm-in]

### Enterprise Technology

- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Canonical](https://canonical.com)
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]

@@ -103,6 +114,7 @@ Join our growing community!
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Oxylabs](https://oxylabs.io/) [@rytis-ulys]
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)

@@ -110,8 +122,11 @@ Join our growing community!
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
- [Reward Gateway](https://www.rewardgateway.com)
- [RIADVICE](https://riadvice.tn) [@riadvice]
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
- [shipmnts](https://shipmnts.com)
- [Showmax](https://showmax.com) [@bobek]
- [SingleStore](https://www.singlestore.com/)
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]

@@ -119,11 +134,14 @@ Join our growing community!
- [Tobii](https://www.tobii.com/) [@dwa]
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
- [Unvired](https://unvired.com) [@srinisubramanian]
- [Virtuoso QA](https://www.virtuosoqa.com)
- [Whale](https://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [WinWin Network马上赢](https://brandct.cn/) [@wenbinye]
- [Zeta](https://www.zeta.tech/) [@shaikidris]

### Media & Entertainment

- [6play](https://www.6play.fr) [@CoryChaplin]
- [bilibili](https://www.bilibili.com) [@Moinheart]
- [BurdaForward](https://www.burda-forward.de/en/)

@@ -136,8 +154,10 @@ Join our growing community!
- [Zaihang](https://www.zaih.com/)

### Education

- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
- [Brilliant.org](https://brilliant.org/)
- [Open edX](https://openedx.org/)
- [Platzi.com](https://platzi.com/)
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]

@@ -146,6 +166,7 @@ Join our growing community!
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]

### Energy

- [Airboxlab](https://foobot.io) [@antoine-galataud]
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]

@@ -153,6 +174,7 @@ Join our growing community!
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]

### Healthcare

- [Amino](https://amino.com) [@shkr]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/) [@alandao2021]

@@ -165,24 +187,30 @@ Join our growing community!
- [2070Health](https://2070health.com/)

### HR / Staffing

- [Swile](https://www.swile.co/) [@PaoloTerzi]
- [Symmetrics](https://www.symmetrics.fyi)
- [bluquist](https://bluquist.com/)

### Government / Non-Profit
### Government

- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)

### Travel

- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
- [HomeToGo](https://hometogo.com/) [@pedromartinsteenstrup]
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]

### Others

- [10Web](https://10web.io/)
- [AI inside](https://inside.ai/en/)
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
- [Flowbird](https://flowbird.com) [@EmmanuelCbd]
- [GEOTAB](https://www.geotab.com) [@JZ6]
- [Grassroot](https://www.grassrootinstitute.org/)
- [Increff](https://www.increff.com/) [@ishansinghania]
@@ -43,8 +43,8 @@ under the License.
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
@@ -65,7 +65,6 @@ under the License.
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can function names on Database |:heavy_check_mark:|O|O|O|
| can csv upload on Database |:heavy_check_mark:|O|O|O|
| can excel upload on Database |:heavy_check_mark:|O|O|O|
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -76,7 +75,6 @@ under the License.
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas access for csv upload on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
@@ -118,8 +116,6 @@ under the License.
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Upload a CSV |:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Upload Excel |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on SQL Lab |:heavy_check_mark:|O|O|:heavy_check_mark:|
@@ -129,13 +125,6 @@ under the License.
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
| all query access on all_query_access |:heavy_check_mark:|O|O|O|
| can edit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can list on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can show on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can userinfo on UserOAuthModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can add on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can delete on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| userinfoedit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -192,7 +181,6 @@ under the License.
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Upload a Columnar file |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
UPDATING.md
@@ -24,16 +24,37 @@ assists people when migrating to a new version.

## Next

- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
- [32000](https://github.com/apache/superset/pull/32000) Removes the CSV_UPLOAD_MAX_SIZE config; use your web server to control file upload size.
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of a new one (details in the PR), and simplifies permissions.
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class; a sketch follows this list. See the config and docs for more info on setting up different executor profiles.
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0).
- [31794](https://github.com/apache/superset/pull/31794) Removed the previously deprecated `DASHBOARD_CROSS_FILTERS` feature flag.
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration items to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts, with the exception of the Event Flow and Sankey Loop charts, which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat and docker build time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. This only affects development workflows (not the release itself). It should cover the most important rules and be much faster, but some linting rules that were enforced before may not be enforced in exactly the same way as before.
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
- [31413](https://github.com/apache/superset/pull/31413) Enable the DATE_FORMAT_IN_EMAIL_SUBJECT feature flag to allow users to specify a date format for the email subject, which will then be replaced with the actual date.
- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
- [31503](https://github.com/apache/superset/pull/31503) Deprecated Python 3.9.x support; 3.11 is now the recommended version and 3.10 remains supported over the Superset 5.0 lifecycle.
- [29121](https://github.com/apache/superset/pull/29121) Removed the `css`, `position_json`, and `json_metadata` from the payload of the dashboard list endpoint (`GET api/v1/dashboard`) for performance reasons.
- [29163](https://github.com/apache/superset/pull/29163) Removed the `SHARE_QUERIES_VIA_KV_STORE` and `KV_STORE` feature flags and changed the way Superset shares SQL Lab queries to use permalinks. The legacy `/kv` API was removed but we still support legacy links in 5.0. In 6.0, only permalinks will be supported.
- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
- [30284](https://github.com/apache/superset/pull/30284) Deprecated GLOBAL_ASYNC_QUERIES_REDIS_CONFIG in favor of the new GLOBAL_ASYNC_QUERIES_CACHE_BACKEND configuration. To leverage Redis Sentinel, set CACHE_TYPE to RedisSentinelCache, or use RedisCache for standalone Redis.
- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python environment. Most docker-based deployments will be affected, whether you derive from one of the published images or have a custom bootstrap script that installs python libraries (drivers).
- [32432](https://github.com/apache/superset/pull/32432) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
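For the executor renames in 31844 above, a minimal `superset_config.py` sketch of the new-style settings. The alert/report and thumbnail lines mirror the updated docs further down this page; the `CACHE_WARMUP_EXECUTORS` line is an assumption based on the entry's wording:

```python
# superset_config.py -- new-style executor settings (sketch, see 31844).
# FixedExecutor pins async tasks to a single account, replacing the removed
# THUMBNAILS_SELENIUM_USER and the old *_EXECUTE_AS parameters.
from superset.tasks.types import FixedExecutor

ALERT_REPORTS_EXECUTORS = [FixedExecutor("admin")]
THUMBNAILS_EXECUTORS = [FixedExecutor("admin")]
CACHE_WARMUP_EXECUTORS = [FixedExecutor("admin")]  # assumed to take the same shape
```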
### Potential Downtime

## 4.1.2

- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.

## 4.1.0

- [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your
@@ -22,9 +22,6 @@
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
x-superset-depends-on: &superset-depends-on
  - db
  - redis
x-superset-volumes:
  &superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
  - ./docker:/app/docker
@@ -44,7 +41,7 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    image: postgres:15
    image: postgres:16
    container_name: superset_db
    restart: unless-stopped
    volumes:
@@ -64,8 +61,12 @@ services:
    restart: unless-stopped
    ports:
      - 8088:8088
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    volumes: *superset-volumes
    environment:
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-init:
    image: *superset-image
@@ -76,11 +77,18 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    depends_on: *superset-depends-on
    depends_on:
      db:
        condition: service_started
      redis:
        condition: service_started
    user: "root"
    volumes: *superset-volumes
    healthcheck:
      disable: true
    environment:
      SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-worker:
    image: *superset-image
@@ -92,7 +100,9 @@ services:
      - path: docker/.env-local # optional override
        required: false
    restart: unless-stopped
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    user: "root"
    volumes: *superset-volumes
    healthcheck:
@@ -101,6 +111,8 @@
        "CMD-SHELL",
        "celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
      ]
    environment:
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-worker-beat:
    image: *superset-image
@@ -112,11 +124,15 @@ services:
      - path: docker/.env-local # optional override
        required: false
    restart: unless-stopped
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    user: "root"
    volumes: *superset-volumes
    healthcheck:
      disable: true
    environment:
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

volumes:
  superset_home:
@@ -21,9 +21,6 @@
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-depends-on: &superset-depends-on
  - db
  - redis
x-superset-volumes:
  &superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
  - ./docker:/app/docker
@@ -49,7 +46,7 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    image: postgres:15
    image: postgres:16
    container_name: superset_db
    restart: unless-stopped
    volumes:
@@ -70,8 +67,12 @@ services:
    restart: unless-stopped
    ports:
      - 8088:8088
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    volumes: *superset-volumes
    environment:
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-init:
    container_name: superset_init
@@ -83,11 +84,18 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    depends_on: *superset-depends-on
    depends_on:
      db:
        condition: service_started
      redis:
        condition: service_started
    user: "root"
    volumes: *superset-volumes
    healthcheck:
      disable: true
    environment:
      SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-worker:
    build:
@@ -100,7 +108,9 @@ services:
      - path: docker/.env-local # optional override
        required: false
    restart: unless-stopped
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    user: "root"
    volumes: *superset-volumes
    healthcheck:
@@ -109,6 +119,8 @@
        "CMD-SHELL",
        "celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
      ]
    environment:
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-worker-beat:
    build:
@@ -121,11 +133,15 @@ services:
      - path: docker/.env-local # optional override
        required: false
    restart: unless-stopped
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    user: "root"
    volumes: *superset-volumes
    healthcheck:
      disable: true
    environment:
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

volumes:
  superset_home:
@@ -22,9 +22,6 @@
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
  - db
  - redis
x-superset-volumes: &superset-volumes
  # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
  - ./docker:/app/docker
@@ -70,7 +67,7 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    image: postgres:15
    image: postgres:16
    container_name: superset_db
    restart: unless-stopped
    ports:
@@ -92,13 +89,18 @@ services:
    restart: unless-stopped
    ports:
      - 8088:8088
      # When in cypress-mode ->
      - 8081:8081
    extra_hosts:
      - "host.docker.internal:host-gateway"
    user: *superset-user
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    volumes: *superset-volumes
    environment:
      CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-websocket:
    container_name: superset_websocket
@@ -143,11 +145,17 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    depends_on: *superset-depends-on
    depends_on:
      db:
        condition: service_started
      redis:
        condition: service_started
    user: *superset-user
    volumes: *superset-volumes
    environment:
      CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
      SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
    healthcheck:
      disable: true

@@ -167,6 +175,10 @@ services:
      BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
      NPM_RUN_PRUNE: false
      SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
      # configuring the dev-server to use the host.docker.internal to connect to the backend
      superset: "http://superset:8088"
    ports:
      - "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
    container_name: superset_node
    command: ["/app/docker/docker-frontend.sh"]
    env_file:
@@ -174,7 +186,6 @@ services:
        required: true
      - path: docker/.env-local # optional override
        required: false
    depends_on: *superset-depends-on
    volumes: *superset-volumes

  superset-worker:
@@ -189,8 +200,12 @@ services:
        required: false
    environment:
      CELERYD_CONCURRENCY: 2
      CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
    restart: unless-stopped
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    user: *superset-user
    volumes: *superset-volumes
    extra_hosts:
@@ -212,11 +227,15 @@ services:
      - path: docker/.env-local # optional override
        required: false
    restart: unless-stopped
    depends_on: *superset-depends-on
    depends_on:
      - superset-worker
    user: *superset-user
    volumes: *superset-volumes
    healthcheck:
      disable: true
    environment:
      CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"

  superset-tests-worker:
    build:
@@ -237,8 +256,11 @@ services:
      REDIS_RESULTS_DB: 3
      REDIS_HOST: localhost
      CELERYD_CONCURRENCY: 8
      SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
    network_mode: host
    depends_on: *superset-depends-on
    depends_on:
      superset-init:
        condition: service_completed_successfully
    user: *superset-user
    volumes: *superset-volumes
    healthcheck:
@@ -15,6 +15,8 @@
# limitations under the License.
#

# Allowing python to print() in docker
PYTHONUNBUFFERED=1

COMPOSE_PROJECT_NAME=superset
DEV_MODE=true
@@ -60,7 +62,7 @@ MAPBOX_API_KEY=''

# Make sure you set this to a unique secure random value on production
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET

ENABLE_PLAYWRIGHT=false
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true
SUPERSET_LOG_LEVEL=info
@@ -68,7 +68,7 @@ Don't forget to reload the page to take the new frontend into account though.

## Production

It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development and uses [`./docker/.env-non-dev`](./.env-non-dev) which sets the variable `SUPERSET_ENV` to `production`.
It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development.

## Resource Constraints
@@ -20,19 +20,30 @@ set -eo pipefail

# Make python interactive
if [ "$DEV_MODE" == "true" ]; then
  echo "Reinstalling the app in editable mode"
  uv pip install -e .
  if [ "$(whoami)" = "root" ] && command -v uv > /dev/null 2>&1; then
    echo "Reinstalling the app in editable mode"
    uv pip install -e .
  fi
fi
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
PORT=${PORT:-8088}
# If Cypress run – overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
  export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
  export SUPERSET_TESTENV=true
  export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
  export POSTGRES_DB=superset_cypress
  export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
  PORT=8081
fi
if [[ "$DATABASE_DIALECT" == postgres* ]] ; then
if [[ "$DATABASE_DIALECT" == postgres* ]] && [ "$(whoami)" = "root" ]; then
  # older images may not have the postgres dev requirements installed
  echo "Installing postgres requirements"
  uv pip install -e .[postgres]
  if command -v uv > /dev/null 2>&1; then
    # Use uv in newer images
    uv pip install -e .[postgres]
  else
    # Use pip in older images
    pip install -e .[postgres]
  fi
fi
#
# Make sure we have dev requirements installed
@@ -57,7 +68,7 @@ case "${1}" in
    ;;
  app)
    echo "Starting web app (using development server)..."
    flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0
    flask run -p $PORT --with-threads --reload --debugger --host=0.0.0.0
    ;;
  app-gunicorn)
    echo "Starting web app..."
docker/docker-entrypoint-initdb.d/cypress-init.sh (new executable file)
@@ -0,0 +1,28 @@
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# ------------------------------------------------------------------------
# Creates the examples database and respective user. This database location
# and access credentials are defined on the environment variables
# ------------------------------------------------------------------------
set -e

psql -v ON_ERROR_STOP=1 --username "${POSTGRES_USER}" <<-EOSQL
    CREATE DATABASE superset_cypress;
EOSQL
@@ -36,7 +36,9 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
  npm install

  echo "Start webpack dev server"
  npm run dev
  # start the webpack dev server, serving dynamically at http://localhost:9000
  # it proxies to the backend served at http://localhost:8088
  npm run dev-server

else
  echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
@@ -30,24 +30,18 @@ fi

echo_step() {
cat <<EOF

######################################################################


Init Step ${1}/${STEP_CNT} [${2}] -- ${3}


######################################################################

EOF
}
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
# If Cypress run – overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
  ADMIN_PASSWORD="general"
  export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
  export SUPERSET_TESTENV=true
  export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
  export POSTGRES_DB=superset_cypress
  export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
fi
# Initialize the database
echo_step "1" "Starting" "Applying DB migrations"
@@ -56,12 +50,16 @@ echo_step "1" "Complete" "Applying DB migrations"

# Create an admin user
echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )"
superset fab create-admin \
  --username admin \
  --firstname Superset \
  --lastname Admin \
  --email admin@superset.com \
  --password "$ADMIN_PASSWORD"
if [ "$CYPRESS_CONFIG" == "true" ]; then
  superset load_test_users
else
  superset fab create-admin \
    --username admin \
    --email admin@superset.com \
    --password "$ADMIN_PASSWORD" \
    --firstname Superset \
    --lastname Admin
fi
echo_step "2" "Complete" "Setting up admin user"
# Create default roles and permissions
echo_step "3" "Starting" "Setting up roles and perms"
@@ -73,10 +71,9 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
  echo_step "4" "Starting" "Loading examples"
  # If Cypress run which consumes superset_test_config – load required data for tests
  if [ "$CYPRESS_CONFIG" == "true" ]; then
    superset load_test_users
    superset load_examples --load-test-data
  else
    superset load_examples --force
    superset load_examples
  fi
  echo_step "4" "Complete" "Loading examples"
fi
@@ -23,4 +23,4 @@
export SERVER_THREADS_AMOUNT=8
# start up the web server

/usr/bin/run-server.sh
/app/docker/entrypoints/run-server.sh
@@ -112,6 +112,12 @@ http {
      proxy_set_header Host $host;
    }

    location /static {
      proxy_pass http://host.docker.internal:9000; # Proxy to superset-node
      proxy_http_version 1.1;
      proxy_set_header Host $host;
    }

    location / {
      proxy_pass http://superset_app;
      proxy_set_header Host $host;
@@ -22,6 +22,7 @@
#
import logging
import os
import sys

from celery.schedules import crontab
from flask_caching.backends.filesystemcache import FileSystemCache
@@ -104,6 +105,21 @@ WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True

log_level_text = os.getenv("SUPERSET_LOG_LEVEL", "INFO")
LOG_LEVEL = getattr(logging, log_level_text.upper(), logging.INFO)

if os.getenv("CYPRESS_CONFIG") == "true":
    # When running the service as a cypress backend, we need to import the config
    # located @ tests/integration_tests/superset_test_config.py
    base_dir = os.path.dirname(__file__)
    module_folder = os.path.abspath(
        os.path.join(base_dir, "../../tests/integration_tests/")
    )
    sys.path.insert(0, module_folder)
    from superset_test_config import *  # noqa

    sys.path.pop(0)

#
# Optionally import superset_config_docker.py (which will have been included on
# the PYTHONPATH) in order to allow for local settings to be overridden
@@ -113,7 +129,7 @@ try:
    from superset_config_docker import *  # noqa

    logger.info(
        f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]"
        f"Loaded your Docker configuration at [{superset_config_docker.__file__}]"
    )
except ImportError:
    logger.info("Using default Docker config...")
@@ -1,3 +1,4 @@
/* eslint-env node */
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
@@ -16,33 +17,31 @@
 * specific language governing permissions and limitations
 * under the License.
 */
import { t } from '@superset-ui/core';
import { ControlPanelConfig } from '@superset-ui/chart-controls';

const config: ControlPanelConfig = {
  controlPanelSections: [
    {
      label: t('Query'),
      expanded: true,
      controlSetRows: [
        ['groupby'],
        ['metric'],
        ['adhoc_filters'],
        ['row_limit'],
      ],
    },
    {
      label: t('Chart Options'),
      expanded: true,
      controlSetRows: [['color_scheme']],
    },
  controlOverrides: {
    groupby: {
      label: t('Source / Target'),
      description: t('Choose a source and a target'),

export default config;
module.exports = {
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:react/recommended',
    'plugin:prettier/recommended',
  ],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    ecmaFeatures: {
      jsx: true,
    },
    ecmaVersion: 2020,
    sourceType: 'module',
  },
  plugins: ['@typescript-eslint', 'react', 'prettier'],
  rules: {
    'react/react-in-jsx-scope': 'off',
    'react/prop-types': 'off',
    '@typescript-eslint/explicit-module-boundary-types': 'off',
  },
  settings: {
    react: {
      version: 'detect',
    },
  },
  ignorePatterns: ['build/**/*', '.docusaurus/**/*', 'node_modules/**/*'],
};
@@ -1 +1 @@
v20.16.0
v20.18.3

@@ -18,6 +18,6 @@ under the License.
-->

This is the public documentation site for Superset, built using
[Docusaurus 2](https://docusaurus.io/). See
[Docusaurus 3](https://docusaurus.io/). See
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
contributing to documentation.
@@ -1,3 +1,4 @@
/* eslint-env node */
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file

@@ -4,7 +4,6 @@ hide_title: true
sidebar_position: 10
---

import { Buffer } from 'buffer/index.js';
import SwaggerUI from 'swagger-ui-react';
import openapi from '/resources/openapi.json';
import 'swagger-ui-react/swagger-ui.css';
@@ -25,6 +25,9 @@ Alerts and reports are disabled by default. To turn them on, you need to do some
- At least one of those must be configured, depending on what you want to use:
  - emails: `SMTP_*` settings
  - Slack messages: `SLACK_API_TOKEN`
- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/docs/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature); a sketch follows this list.
  - Use date codes from [strftime.org](https://strftime.org/) to create the email subject.
  - If no date code is provided, the original string will be used as the email subject.
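A minimal sketch of what enabling this looks like, assuming a `superset_config.py` override; the subject string below is illustrative and uses the strftime codes referenced above:

```python
# superset_config.py -- illustrative; flag name per the docs above
FEATURE_FLAGS = {
    "DATE_FORMAT_IN_EMAIL_SUBJECT": True,
}

# With the flag enabled, a report subject like "Weekly metrics %Y-%m-%d"
# would go out as e.g. "Weekly metrics 2025-01-06" (the UTC date at send time).
```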

##### Disable dry-run mode

@@ -89,6 +92,7 @@ You can find documentation about each field in the default `config.py` in the GitHub
You need to replace default values with your custom Redis, Slack and/or SMTP config.

Superset uses Celery beat and Celery worker(s) to send alerts and reports.

- The beat is the scheduler that tells the worker when to perform its tasks. This schedule is defined when you create the alert or report.
- The worker will process the tasks that need to be performed when an alert or report is fired.

@@ -140,7 +144,7 @@ SLACK_API_TOKEN = "xoxb-"
SMTP_HOST = "smtp.sendgrid.net" # change to your host
SMTP_PORT = 2525 # your port, e.g. 587
SMTP_STARTTLS = True
SMTP_SSL_SERVER_AUTH = True # If your using an SMTP server with a valid certificate
SMTP_SSL_SERVER_AUTH = True # If you're using an SMTP server with a valid certificate
SMTP_SSL = False
SMTP_USER = "your_user" # use the empty string "" if using an unauthenticated SMTP server
SMTP_PASSWORD = "your_password" # use the empty string "" if using an unauthenticated SMTP server
@@ -177,15 +181,13 @@ By default, Alerts and Reports are executed as the owner of the alert/report obj
just change the config as follows (`admin` in this example):

```python
from superset.tasks.types import ExecutorType
from superset.tasks.types import FixedExecutor

THUMBNAIL_SELENIUM_USER = 'admin'
ALERT_REPORTS_EXECUTE_AS = [ExecutorType.SELENIUM]
ALERT_REPORTS_EXECUTORS = [FixedExecutor("admin")]
```

Please refer to `ExecutorType` in the codebase for other executor types.

**Important notes**

- Be mindful of the concurrency setting for celery (using `-c 4`). Selenium/webdriver instances can
@@ -197,7 +199,6 @@ Please refer to `ExecutorType` in the codebase for other executor types.
- Adjust `WEBDRIVER_BASEURL` in your configuration file if celery workers can’t access Superset via
  its default value of `http://0.0.0.0:8080/`.

It's also possible to specify a minimum interval between each report's execution through the config file:

```python
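# The body of this block was cut off by the diff context above. The interval
# settings are assumed to look roughly like this (seconds between executions):
from datetime import timedelta

ALERT_MINIMUM_INTERVAL = int(timedelta(minutes=10).total_seconds())
REPORT_MINIMUM_INTERVAL = int(timedelta(minutes=5).total_seconds())
```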
@@ -303,6 +304,7 @@ One symptom of an invalid connection to an email server is receiving an error of
Confirm via testing that your outbound email configuration is correct. Here is the simplest test, for an unauthenticated SMTP email service running on port 25. If you are sending over SSL, for instance, study how [Superset's codebase sends emails](https://github.com/apache/superset/blob/master/superset/utils/core.py#L818) and then test with those commands and arguments.

Start Python in your worker environment, replace all example values, and run:

```python
import smtplib
from email.mime.multipart import MIMEMultipart
@@ -324,6 +326,7 @@ mailserver.quit()
This should send an email.

Possible fixes:

- Some cloud hosts disable outgoing unauthenticated SMTP email to prevent spam. For instance, [Azure blocks port 25 by default on some machines](https://learn.microsoft.com/en-us/azure/virtual-network/troubleshoot-outbound-smtp-connectivity). Enable that port or use another sending method.
- Use another set of SMTP credentials that you verify works in this setup.

@@ -42,13 +42,13 @@ CELERY_CONFIG = CeleryConfig

To start a Celery worker to leverage the configuration, run the following command:

```
```bash
celery --app=superset.tasks.celery_app:app worker --pool=prefork -O fair -c 4
```

To start a job which schedules periodic background jobs, run the following command:

```
```bash
celery --app=superset.tasks.celery_app:app beat
```

@@ -93,12 +93,12 @@ issues arise. Please clear your existing results cache store when upgrading an e

Flower is a web based tool for monitoring the Celery cluster which you can install from pip:

```python
```bash
pip install flower
```

You can run flower using:

```
```bash
celery --app=superset.tasks.celery_app:app flower
```

@@ -17,6 +17,7 @@ Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).

The following cache configurations can be customized in this way (a sketch follows this list):

- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
- Explore chart form data (required): `EXPLORE_FORM_DATA_CACHE_CONFIG`
- Metadata cache (optional): `CACHE_CONFIG`
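A minimal sketch of one such dictionary, assuming a Redis instance reachable at `redis:6379`; the host, DB index, key prefix, and timeout are all illustrative:

```python
# superset_config.py -- Flask-Caching dictionary for the filter state cache
FILTER_STATE_CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": 86400,  # one day, in seconds
    "CACHE_KEY_PREFIX": "superset_filter_",
    "CACHE_REDIS_URL": "redis://redis:6379/2",
}
```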
@@ -81,7 +82,7 @@ See [Async Queries via Celery](/docs/configuration/async-queries-celery) for det

## Caching Thumbnails

This is an optional feature that can be turned on by activating it’s [feature flag](/docs/configuration/configuring-superset#feature-flags) on config:
This is an optional feature that can be turned on by activating its [feature flag](/docs/configuration/configuring-superset#feature-flags) on config:

```
FEATURE_FLAGS = {
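    # The rest of this block was cut off by the diff context; the flag is
    # assumed to be switched on like so:
    "THUMBNAILS": True,
}
```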
@@ -94,13 +95,11 @@ By default thumbnails are rendered per user, and will fall back to the Selenium
To always render thumbnails as a fixed user (`admin` in this example), use the following configuration:

```python
from superset.tasks.types import ExecutorType
from superset.tasks.types import FixedExecutor

THUMBNAIL_SELENIUM_USER = "admin"
THUMBNAIL_EXECUTE_AS = [ExecutorType.SELENIUM]
THUMBNAIL_EXECUTORS = [FixedExecutor("admin")]
```

For this feature you will need a cache system and celery workers. All thumbnails are stored in the cache
and are processed asynchronously by the workers.

@@ -130,8 +129,6 @@ def init_thumbnail_cache(app: Flask) -> S3Cache:

THUMBNAIL_CACHE_CONFIG = init_thumbnail_cache
# Async selenium thumbnail task will use the following user
THUMBNAIL_SELENIUM_USER = "Admin"
```

Using the above example, cache keys for dashboards will be `superset_thumb__dashboard__{ID}`. You can
@@ -117,7 +117,7 @@ Your deployment must use a complex, unique key.

### Rotating to a newer SECRET_KEY

If you wish to change your existing SECRET_KEY, add the existing SECRET_KEY to your `superset_config.py` file as
`PREVIOUS_SECRET_KEY = `and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
`PREVIOUS_SECRET_KEY =` and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
commands - if running Superset with Docker, execute from within the Superset application container:

```python
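# The command body was cut off by the diff context above. A hypothetical way
# to print the current key from a `superset shell` session in the container:
from flask import current_app

print(current_app.config["SECRET_KEY"])
```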
@@ -141,10 +141,10 @@ database engine on a separate host or container.

Superset supports the following database engines/versions:

| Database Engine                           | Supported Versions                 |
| ----------------------------------------- | ---------------------------------- |
| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X |
| [MySQL](https://www.mysql.com/)           | 5.7, 8.X                           |
| Database Engine                           | Supported Versions                       |
| ----------------------------------------- | ---------------------------------------- |
| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X, 16.X |
| [MySQL](https://www.mysql.com/)           | 5.7, 8.X                                 |

Use the following database drivers and connection strings:

@@ -283,7 +283,7 @@ class CustomSsoSecurityManager(SupersetSecurityManager):
    ...
```

This file must be located at the same directory than `superset_config.py` with the name
This file must be located in the same directory as `superset_config.py` with the name
`custom_sso_security_manager.py`. Finally, add the following 2 lines to `superset_config.py`:

```
@@ -300,6 +300,7 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
- If an OAuth2 authorization server supports OpenID Connect 1.0, you could configure its configuration
  document URL only without providing `api_base_url`, `access_token_url`, `authorize_url` and other
  required options like user info endpoint, jwks uri etc. For instance:

```python
OAUTH_PROVIDERS = [
  { 'name':'egaSSO',
@@ -313,12 +314,15 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
  }
]
```

### Keycloak-Specific Configuration using Flask-OIDC

If you are using Keycloak as OpenID Connect 1.0 Provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case using [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is a viable option.

Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was succesfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.

The following code defines a new security manager. Add it to a new file named `keycloak_security_manager.py`, placed in the same directory as your `superset_config.py` file.

```python
from flask_appbuilder.security.manager import AUTH_OID
from superset.security import SupersetSecurityManager
@@ -373,7 +377,9 @@ class AuthOIDCView(AuthOIDView):
        return redirect(
            oidc.client_secrets.get('issuer') + '/protocol/openid-connect/logout?redirect_uri=' + quote(redirect_url))
```

Then add to your `superset_config.py` file:

```python
from keycloak_security_manager import OIDCSecurityManager
from flask_appbuilder.security.manager import AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH
@@ -393,7 +399,9 @@ AUTH_USER_REGISTRATION = True
# The default user self registration role
AUTH_USER_REGISTRATION_ROLE = 'Public'
```

Store your client-specific OpenID information in a file called `client_secret.json`. Create this file in the same directory as `superset_config.py`:

```json
{
  "<myOpenIDProvider>": {
@@ -410,6 +418,7 @@ Store your client-specific OpenID information in a file called `client_secret.js
  }
}
```

## LDAP Authentication

FAB supports authenticating user credentials against an LDAP server.
@@ -432,6 +441,7 @@ AUTH_ROLES_MAPPING = {
    "superset_admins": ["Admin"],
}
```

### Mapping LDAP groups to Superset roles

The following `AUTH_ROLES_MAPPING` dictionary would map the LDAP DN "cn=superset_users,ou=groups,dc=example,dc=com" to the Superset roles "Gamma" as well as "Alpha", and the LDAP DN "cn=superset_admins,ou=groups,dc=example,dc=com" to the Superset role "Admin".
@@ -442,6 +452,7 @@ AUTH_ROLES_MAPPING = {
    "cn=superset_admins,ou=groups,dc=example,dc=com": ["Admin"],
}
```

Note: This requires `AUTH_LDAP_SEARCH` to be set. For more details, please see the [FAB Security documentation](https://flask-appbuilder.readthedocs.io/en/latest/security.html).

### Syncing roles at login

@@ -475,7 +486,7 @@ def FLASK_APP_MUTATOR(app: Flask) -> None:

To support a diverse set of users, Superset has some features that are not enabled by default. For
example, some users have stronger security restrictions, while some others may not. So Superset
allow users to enable or disable some features by config. For feature owners, you can add optional
allows users to enable or disable some features by config. For feature owners, you can add optional
functionalities in Superset, but they will only affect a subset of users.

You can enable or disable features with flags from `superset_config.py`:
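The example block itself was cut off by the diff at this point; a minimal sketch, with an illustrative flag name:

```python
# superset_config.py -- toggle individual features; the flag shown is illustrative
FEATURE_FLAGS = {
    "DASHBOARD_RBAC": True,
}
```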
@@ -31,18 +31,17 @@ install new database drivers into your Superset configuration.

### Supported Databases and Dependencies

Some of the recommended packages are shown below. Please refer to
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml) for the versions that
are compatible with Superset.

| <div style={{width: '150px'}}>Database</div> | PyPI package | Connection String |
| --- | --- | --- |
| [AWS Athena](/docs/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&... ` |
| [AWS Athena](/docs/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
| [AWS DynamoDB](/docs/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
| [AWS Redshift](/docs/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | ` redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [AWS Redshift](/docs/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [Apache Doris](/docs/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Apache Drill](/docs/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill:// For JDBC drill+jdbc://` |
| [Apache Drill](/docs/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
| [Apache Druid](/docs/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
| [Apache Hive](/docs/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Apache Impala](/docs/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
@@ -68,21 +67,24 @@ are compatible with Superset.
| [IBM Netezza Performance Server](/docs/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [MySQL](/docs/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [OceanBase](/docs/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://` |
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
| [Parseable](/docs/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
| [Rockset](/docs/configuration/databases#rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
| [SQL Server](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://` |
| [SQL Server](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
| [TDengine](/docs/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
| [Teradata](/docs/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
| [TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |

---

Note that many other databases are supported, the main criteria being the existence of a functional
@@ -183,7 +185,6 @@ purposes of isolating the problem.

Repeat this process for each type of database you want Superset to connect to.

### Database-specific Instructions

#### Ascend.io
@@ -209,14 +210,12 @@ You'll need the following setting values to form the connection string:
- **Catalog**: Catalog Name
- **Database**: Database Name

Here's what the connection string looks like:

```
doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```

#### AWS Athena

##### PyAthenaJDBC
@@ -246,6 +245,7 @@ awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name
```

The PyAthena library also allows assuming a specific IAM role, which you can define by adding the following parameters in Superset's Athena database connection UI under ADVANCED --> Other --> ENGINE PARAMETERS.

```json
{
  "connect_args": {
@@ -268,7 +268,6 @@ dynamodb://{aws_access_key_id}:{aws_secret_access_key}@dynamodb.{region_name}.am

To get more documentation, please visit: [PyDynamoDB WIKI](https://github.com/passren/PyDynamoDB/wiki/5.-Superset).

#### AWS Redshift

The [sqlalchemy-redshift](https://pypi.org/project/sqlalchemy-redshift/) library is the recommended
@@ -284,7 +283,6 @@ You'll need to set the following values to form the connection string:
- **Database Name**: Database Name
- **Port**: default 5439

##### psycopg2

Here's what the SQLALCHEMY URI looks like:
@@ -293,7 +291,6 @@ Here's what the SQLALCHEMY URI looks like:
redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>
```

##### redshift_connector

Here's what the SQLALCHEMY URI looks like:
@@ -302,8 +299,7 @@ Here's what the SQLALCHEMY URI looks like:
redshift+redshift_connector://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>
```

###### Using IAM-based credentials with Redshift cluster:
###### Using IAM-based credentials with Redshift cluster

[Amazon redshift cluster](https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html) also supports generating temporary IAM-based database user credentials.

@@ -314,10 +310,10 @@ You have to define the following arguments in Superset's redshift database conne

```
{"connect_args":{"iam":true,"database":"<database>","cluster_identifier":"<cluster_identifier>","db_user":"<db_user>"}}
```

and SQLALCHEMY URI should be set to `redshift+redshift_connector://`

###### Using IAM-based credentials with Redshift serverless:
###### Using IAM-based credentials with Redshift serverless

[Redshift serverless](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-whatis.html) supports connection using IAM roles.

@@ -329,8 +325,6 @@ You have to define the following arguments in Superset's redshift database conne
{"connect_args":{"iam":true,"is_serverless":true,"serverless_acct_id":"<aws account number>","serverless_work_group":"<redshift work group>","database":"<database>","user":"IAMR:<superset iam role name>"}}
```
#### ClickHouse
|
||||
|
||||
To use ClickHouse with Superset, you will need to install the `clickhouse-connect` Python library:
|
||||
@@ -363,8 +357,6 @@ uses the default user without a password (and doesn't encrypt the connection):
|
||||
clickhousedb://localhost/default
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### CockroachDB
|
||||
|
||||
The recommended connector library for CockroachDB is
|
||||
@@ -376,13 +368,12 @@ The expected connection string is formatted as follows:
|
||||
cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### Couchbase
|
||||
|
||||
Couchbase's Superset connection is designed to support two services: Couchbase Analytics and Couchbase Columnar.
|
||||
The recommended connector library for Couchbase is
|
||||
[couchbase-sqlalchemy](https://github.com/couchbase/couchbase-sqlalchemy).
|
||||
|
||||
```bash
|
||||
pip install couchbase-sqlalchemy
|
||||
```
|
||||
@@ -393,22 +384,25 @@ The expected connection string is formatted as follows:
|
||||
couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false}
|
||||
```
|
||||
|
||||
|
||||
#### CrateDB
|
||||
|
||||
The connector library for CrateDB is [sqlalchemy-cratedb].
|
||||
We recommend adding the following item to your `requirements.txt` file:
|
||||
|
||||
```text
|
||||
sqlalchemy-cratedb>=0.40.1,<1
|
||||
```
|
||||
|
||||
An SQLAlchemy connection string for [CrateDB Self-Managed] on localhost,
|
||||
for evaluation purposes, looks like this:
|
||||
|
||||
```
|
||||
crate://crate@127.0.0.1:4200
|
||||
```
|
||||
|
||||
An SQLAlchemy connection string for connecting to [CrateDB Cloud] looks like
|
||||
this:
|
||||
|
||||
```
|
||||
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
|
||||
```
|
||||
@@ -416,6 +410,7 @@ crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
|
||||
Follow the steps [here](/docs/configuration/databases#installing-database-drivers)
|
||||
to install the CrateDB connector package when setting up Superset locally using
|
||||
Docker Compose.
|
||||
|
||||
```bash
|
||||
echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
|
||||
```
|
||||
@@ -424,7 +419,6 @@ echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
|
||||
[CrateDB Self-Managed]: https://cratedb.com/product/self-managed
|
||||
[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/
|
||||
|
||||
|
||||
#### Databend
|
||||
|
||||
The recommended connector library for Databend is [databend-sqlalchemy](https://pypi.org/project/databend-sqlalchemy/).
|
||||
@@ -442,7 +436,6 @@ Here's a connection string example of Superset connecting to a Databend database
|
||||
databend://user:password@localhost:8000/default?secure=false
|
||||
```
|
||||
|
||||
|
||||
#### Databricks
|
||||
|
||||
Databricks now offers a native DB API 2.0 driver, `databricks-sql-connector`, that can be used with the `sqlalchemy-databricks` dialect. You can install both with:
|
||||
@@ -526,7 +519,6 @@ For a connection to a SQL endpoint you need to use the HTTP path from the endpoi
|
||||
{"connect_args": {"http_path": "/sql/1.0/endpoints/****", "driver_path": "/path/to/odbc/driver"}}
|
||||
```
|
||||
|
||||
|
||||
#### Denodo
|
||||
|
||||
The recommended connector library for Denodo is
|
||||
@@ -538,7 +530,6 @@ The expected connection string is formatted as follows (default port is 9996):
|
||||
denodo://{username}:{password}@{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
|
||||
#### Dremio
|
||||
|
||||
The recommended connector library for Dremio is
|
||||
@@ -559,7 +550,6 @@ dremio+flight://{username}:{password}@{host}:{port}/dremio
|
||||
This [blog post by Dremio](https://www.dremio.com/tutorials/dremio-apache-superset/) has some
|
||||
additional helpful instructions on connecting Superset to Dremio.
|
||||
|
||||
|
||||
#### Apache Drill
|
||||
|
||||
##### SQLAlchemy
|
||||
@@ -601,8 +591,6 @@ We recommend reading the
|
||||
the [GitHub README](https://github.com/JohnOmernik/sqlalchemy-drill#usage-with-odbc) to learn how to
|
||||
work with Drill through ODBC.
|
||||
|
||||
|
||||
|
||||
import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
|
||||
#### Apache Druid
|
||||
@@ -616,6 +604,7 @@ The connection string looks like:
|
||||
```
|
||||
druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql
|
||||
```
|
||||
|
||||
Here's a breakdown of the key components of this connection string:
|
||||
|
||||
- `User`: username portion of the credentials needed to connect to your database
|
||||
@@ -644,7 +633,7 @@ To disable SSL verification, add the following to the **Extras** field:
|
||||
```
|
||||
engine_params:
|
||||
{"connect_args":
|
||||
{"scheme": "https", "ssl_verify_cert": false}}
|
||||
{"scheme": "https", "ssl_verify_cert": false}}
|
||||
```
|
||||
|
||||
##### Aggregations
|
||||
@@ -668,7 +657,6 @@ much like you would create an aggregation manually, but specify `postagg` as a `
|
||||
then have to provide a valid json post-aggregation definition (as specified in the Druid docs) in
|
||||
the JSON field.
|
||||
|
||||
|
||||
#### Elasticsearch
|
||||
|
||||
The recommended connector library for Elasticsearch is
|
||||
@@ -717,7 +705,7 @@ Then register your table with the alias name logstash_all
|
||||
By default, Superset uses the UTC time zone for Elasticsearch queries. If you need to specify a time zone,
|
||||
please edit your Database and enter your time zone settings under Other > ENGINE PARAMETERS:
|
||||
|
||||
```json
|
||||
{
|
||||
"connect_args": {
|
||||
"time_zone": "Asia/Shanghai"
|
||||
@@ -739,8 +727,6 @@ To disable SSL verification, add the following to the **SQLALCHEMY URI** field:
|
||||
elasticsearch+https://{user}:{password}@{host}:9200/?verify_certs=False
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### Exasol
|
||||
|
||||
The recommended connector library for Exasol is
|
||||
@@ -752,7 +738,6 @@ The connection string for Exasol looks like this:
|
||||
exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC
|
||||
```
|
||||
|
||||
|
||||
#### Firebird
|
||||
|
||||
The recommended connector library for Firebird is [sqlalchemy-firebird](https://pypi.org/project/sqlalchemy-firebird/).
|
||||
@@ -770,7 +755,6 @@ Here's a connection string example of Superset connecting to a local Firebird da
|
||||
firebird+fdb://SYSDBA:masterkey@192.168.86.38:3050//Library/Frameworks/Firebird.framework/Versions/A/Resources/examples/empbuild/employee.fdb
|
||||
```
|
||||
|
||||
|
||||
#### Firebolt
|
||||
|
||||
The recommended connector library for Firebolt is [firebolt-sqlalchemy](https://pypi.org/project/firebolt-sqlalchemy/).
|
||||
@@ -801,7 +785,7 @@ The recommended connector library for BigQuery is
|
||||
Follow the steps [here](/docs/configuration/databases#installing-drivers-in-docker-images) about how to
|
||||
install new database drivers when setting up Superset locally via docker compose.
|
||||
|
||||
```bash
|
||||
echo "sqlalchemy-bigquery" >> ./docker/requirements-local.txt
|
||||
```
|
||||
|
||||
@@ -814,7 +798,7 @@ credentials file (as a JSON).
|
||||
appropriate BigQuery datasets, and download the JSON configuration file for the service account.
|
||||
2. In Superset, you can either upload that JSON or add the JSON blob in the following format (this should be the content of your credential JSON file):
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "...",
|
||||
@@ -842,7 +826,7 @@ credentials file (as a JSON).
|
||||
Go to the **Advanced** tab and add a JSON blob to the **Secure Extra** field in the database configuration form with
|
||||
the following format:
|
||||
|
||||
```json
|
||||
{
|
||||
"credentials_info": <contents of credentials JSON file>
|
||||
}
|
||||
@@ -850,7 +834,7 @@ credentials file (as a JSON).
|
||||
|
||||
The resulting file should have this structure:
|
||||
|
||||
```json
|
||||
{
|
||||
"credentials_info": {
|
||||
"type": "service_account",
|
||||
@@ -877,8 +861,6 @@ To be able to upload CSV or Excel files to BigQuery in Superset, you'll need to
|
||||
Currently, the Google BigQuery Python SDK is not compatible with `gevent`, due to `gevent`'s dynamic monkeypatching of the Python core library.
|
||||
So, when you deploy Superset with the `gunicorn` server, you have to use a worker type other than `gevent`.
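For example, if you run Superset under `gunicorn`, a `gunicorn.conf.py` along these lines would avoid the issue (a minimal sketch; the worker and thread counts are illustrative assumptions, not recommendations):

```python
# gunicorn.conf.py -- minimal sketch for a BigQuery-compatible deployment
bind = "0.0.0.0:8088"     # assumes the default Superset port
workers = 8               # number of worker processes; tune for your hardware
worker_class = "gthread"  # any worker type other than gevent works with the BigQuery SDK
threads = 4               # threads per worker (only used by the gthread worker)
timeout = 120             # seconds before an unresponsive worker is recycled
```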
|
||||
|
||||
|
||||
|
||||
#### Google Sheets
|
||||
|
||||
Google Sheets has a very limited
|
||||
@@ -889,7 +871,6 @@ There are a few steps involved in connecting Superset to Google Sheets. This
|
||||
[tutorial](https://preset.io/blog/2020-06-01-connect-superset-google-sheets/) has the most up-to-date
|
||||
instructions on setting up this connection.
|
||||
|
||||
|
||||
#### Hana
|
||||
|
||||
The recommended connector library is [sqlalchemy-hana](https://github.com/SAP/sqlalchemy-hana).
|
||||
@@ -900,7 +881,6 @@ The connection string is formatted as follows:
|
||||
hana://{username}:{password}@{host}:{port}
|
||||
```
|
||||
|
||||
|
||||
#### Apache Hive
|
||||
|
||||
The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Hive through SQLAlchemy.
|
||||
@@ -911,7 +891,6 @@ The expected connection string is formatted as follows:
|
||||
hive://hive@{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
|
||||
#### Hologres
|
||||
|
||||
Hologres is a real-time interactive analytics service developed by Alibaba Cloud. It is fully compatible with PostgreSQL 11 and integrates seamlessly with the big data ecosystem.
|
||||
@@ -930,7 +909,6 @@ The connection string looks like:
|
||||
postgresql+psycopg2://{username}:{password}@{host}:{port}/{database}
|
||||
```
|
||||
|
||||
|
||||
#### IBM DB2
|
||||
|
||||
The [IBM_DB_SA](https://github.com/ibmdb/python-ibmdbsa/tree/master/ibm_db_sa) library provides a
|
||||
@@ -948,7 +926,6 @@ There are two DB2 dialect versions implemented in SQLAlchemy. If you are connect
|
||||
ibm_db_sa://{username}:{passport}@{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
|
||||
#### Apache Impala
|
||||
|
||||
The recommended connector library to Apache Impala is [impyla](https://github.com/cloudera/impyla).
|
||||
@@ -959,7 +936,6 @@ The expected connection string is formatted as follows:
|
||||
impala://{hostname}:{port}/{database}
|
||||
```
|
||||
|
||||
|
||||
#### Kusto
|
||||
|
||||
The recommended connector library for Kusto is
|
||||
@@ -980,7 +956,6 @@ kustokql+https://{cluster_url}/{database}?azure_ad_client_id={azure_ad_client_id
|
||||
Make sure the user has privileges to access and use all required
|
||||
databases/tables/views.
|
||||
|
||||
|
||||
#### Apache Kylin
|
||||
|
||||
The recommended connector library for Apache Kylin is
|
||||
@@ -992,10 +967,6 @@ The expected connection string is formatted as follows:
|
||||
kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#### MySQL
|
||||
|
||||
The recommended connector library for MySQL is [mysqlclient](https://pypi.org/project/mysqlclient/).
|
||||
@@ -1020,7 +991,6 @@ One problem with `mysqlclient` is that it will fail to connect to newer MySQL da
|
||||
mysql+mysqlconnector://{username}:{password}@{host}/{database}
|
||||
```
|
||||
|
||||
|
||||
#### IBM Netezza Performance Server
|
||||
|
||||
The [nzalchemy](https://pypi.org/project/nzalchemy/) library provides a
|
||||
@@ -1037,21 +1007,19 @@ netezza+nzpy://{username}:{password}@{hostname}:{port}/{database}
|
||||
The [sqlalchemy-oceanbase](https://pypi.org/project/oceanbase_py/) library is the recommended
|
||||
way to connect to OceanBase through SQLAlchemy.
|
||||
|
||||
|
||||
The connection string for OceanBase looks like this:
|
||||
|
||||
```
|
||||
oceanbase://<User>:<Password>@<Host>:<Port>/<Database>
|
||||
```
|
||||
|
||||
|
||||
#### Ocient DB
|
||||
|
||||
The recommended connector library for Ocient is [sqlalchemy-ocient](https://pypi.org/project/sqlalchemy-ocient).
|
||||
|
||||
##### Install the Ocient Driver
|
||||
|
||||
```bash
|
||||
pip install sqlalchemy-ocient
|
||||
```
|
||||
|
||||
@@ -1074,8 +1042,25 @@ The connection string is formatted as follows:
|
||||
oracle://<username>:<password>@<hostname>:<port>
|
||||
```
|
||||
|
||||
#### Parseable
|
||||
|
||||
[Parseable](https://www.parseable.io) is a distributed log analytics database that provides a SQL-like query interface for log data. The recommended connector library is [sqlalchemy-parseable](https://github.com/parseablehq/sqlalchemy-parseable).
|
||||
|
||||
The connection string is formatted as follows:
|
||||
|
||||
```
|
||||
parseable://<username>:<password>@<hostname>:<port>/<stream_name>
|
||||
```
|
||||
|
||||
For example:
|
||||
|
||||
```
|
||||
parseable://admin:admin@demo.parseable.com:443/ingress-nginx
|
||||
```
|
||||
|
||||
Note: The `stream_name` in the URI represents the Parseable logstream you want to query. You can use both HTTP (port 80) and HTTPS (port 443) connections.
|
||||
|
||||
>>>>>>>
|
||||
#### Apache Pinot
|
||||
|
||||
The recommended connector library for Apache Pinot is [pinotdb](https://pypi.org/project/pinotdb/).
|
||||
@@ -1094,7 +1079,8 @@ pinot://<username>:<password>@<pinot-broker-host>:<pinot-broker-port>/query/sql?
|
||||
|
||||
If you want to use the Explore view, or joins, window functions, etc., enable the [multi-stage query engine](https://docs.pinot.apache.org/reference/multi-stage-engine).
|
||||
Add the argument below when creating the database connection, under Advanced -> Other -> ENGINE PARAMETERS:
|
||||
|
||||
```json
|
||||
{"connect_args":{"use_multistage_engine":"true"}}
|
||||
```
|
||||
|
||||
@@ -1134,7 +1120,6 @@ More information about PostgreSQL connection options can be found in the
|
||||
and the
|
||||
[PostgreSQL docs](https://www.postgresql.org/docs/9.1/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS).
|
||||
|
||||
|
||||
#### Presto
|
||||
|
||||
The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Presto through SQLAlchemy.
|
||||
@@ -1160,7 +1145,7 @@ presto://datascientist:securepassword@presto.example.com:8080/hive
|
||||
By default Superset assumes the most recent version of Presto is being used when querying the
|
||||
datasource. If you’re using an older version of Presto, you can configure it in the extra parameter:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "0.123"
|
||||
}
|
||||
@@ -1168,7 +1153,7 @@ datasource. If you’re using an older version of Presto, you can configure it i
|
||||
|
||||
To enable SSL, add the following JSON config to the extra connection information:
|
||||
|
||||
```json
|
||||
{
|
||||
"connect_args":
|
||||
{"protocol": "https",
|
||||
@@ -1177,8 +1162,6 @@ SSL Secure extra add json config to extra connection information.
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### RisingWave
|
||||
|
||||
The recommended connector library for RisingWave is
|
||||
@@ -1190,7 +1173,6 @@ The expected connection string is formatted as follows:
|
||||
risingwave://root@{hostname}:{port}/{database}?sslmode=disable
|
||||
```
|
||||
|
||||
|
||||
#### Rockset
|
||||
|
||||
The connection string for Rockset is:
|
||||
@@ -1210,7 +1192,6 @@ rockset://{api key}:@{api server}/{VI ID}
|
||||
|
||||
For more complete instructions, we recommend the [Rockset documentation](https://docs.rockset.com/apache-superset/).
|
||||
|
||||
|
||||
#### Snowflake
|
||||
|
||||
##### Install Snowflake Driver
|
||||
@@ -1218,7 +1199,7 @@ For more complete instructions, we recommend the [Rockset documentation](https:/
|
||||
Follow the steps [here](/docs/configuration/databases#installing-database-drivers) about how to
|
||||
install new database drivers when setting up Superset locally via docker compose.
|
||||
|
||||
```bash
|
||||
echo "snowflake-sqlalchemy" >> ./docker/requirements-local.txt
|
||||
```
|
||||
|
||||
@@ -1251,7 +1232,7 @@ To connect Snowflake with Key Pair Authentication, you need to add the following
|
||||
|
||||
***Please note that you need to merge the multi-line private key content into one line and insert `\n` between each line.***
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "keypair",
|
||||
"auth_params": {
|
||||
@@ -1263,7 +1244,7 @@ To connect Snowflake with Key Pair Authentication, you need to add the following
|
||||
|
||||
If your private key is stored on the server, you can replace `privatekey_body` with `privatekey_path` in the parameters.
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "keypair",
|
||||
"auth_params": {
|
||||
@@ -1284,7 +1265,6 @@ The connection string for Solr looks like this:
|
||||
solr://{username}:{password}@{host}:{port}/{server_path}/{collection}[/?use_ssl=true|false]
|
||||
```
|
||||
|
||||
|
||||
#### Apache Spark SQL
|
||||
|
||||
The recommended connector library for Apache Spark SQL is [pyhive](https://pypi.org/project/PyHive/).
|
||||
@@ -1302,12 +1282,13 @@ The recommended connector library for SQL Server is [pymssql](https://github.com
|
||||
The connection string for SQL Server looks like this:
|
||||
|
||||
```
|
||||
mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>
|
||||
```
|
||||
|
||||
It is also possible to connect using [pyodbc](https://pypi.org/project/pyodbc) with the parameter [odbc_connect](https://docs.sqlalchemy.org/en/14/dialects/mssql.html#pass-through-exact-pyodbc-string)
|
||||
|
||||
The connection string for SQL Server looks like this:
|
||||
|
||||
```
|
||||
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_database%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
|
||||
```
|
||||
@@ -1336,6 +1317,24 @@ starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
|
||||
StarRocks maintains its Superset documentation [here](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/).
|
||||
:::
|
||||
|
||||
#### TDengine
|
||||
|
||||
[TDengine](https://www.tdengine.com) is a high-performance, scalable time-series database for industrial IoT that provides a SQL-like query interface.
|
||||
|
||||
The recommended connector libraries for TDengine are [taospy](https://pypi.org/project/taospy/) and [taos-ws-py](https://pypi.org/project/taos-ws-py/).
|
||||
|
||||
The expected connection string is formatted as follows:
|
||||
|
||||
```
|
||||
taosws://<user>:<password>@<host>:<port>
|
||||
```
|
||||
|
||||
For example:
|
||||
|
||||
```
|
||||
taosws://root:taosdata@127.0.0.1:6041
|
||||
```
|
||||
|
||||
#### Teradata
|
||||
|
||||
The recommended connector library is
|
||||
@@ -1357,7 +1356,7 @@ here: https://downloads.teradata.com/download/connectivity/odbc-driver/linux
|
||||
|
||||
Here are the required environment variables:
|
||||
|
||||
```bash
|
||||
export ODBCINI=/.../teradata/client/ODBC_64/odbc.ini
|
||||
export ODBCINST=/.../teradata/client/ODBC_64/odbcinst.ini
|
||||
```
|
||||
@@ -1366,8 +1365,8 @@ We recommend using the first library because of the
|
||||
lack of requirement around ODBC drivers and
|
||||
because it's more regularly updated.
|
||||
|
||||
|
||||
#### TimescaleDB
|
||||
|
||||
[TimescaleDB](https://www.timescale.com) is an open-source relational database for time-series data and analytics, built for powerful data-intensive applications.
|
||||
TimescaleDB is a PostgreSQL extension, and you can use the standard PostgreSQL connector library, [psycopg2](https://www.psycopg.org/docs/), to connect to the database.
|
||||
|
||||
@@ -1399,31 +1398,38 @@ postgresql://{username}:{password}@{host}:{port}/{database name}?sslmode=require
|
||||
|
||||
[Learn more about TimescaleDB!](https://docs.timescale.com/)
|
||||
|
||||
|
||||
#### Trino
|
||||
|
||||
Superset supports Trino version 352 and higher.
|
||||
|
||||
##### Connection String
|
||||
|
||||
The connection string format is as follows:
|
||||
|
||||
```
|
||||
trino://{username}:{password}@{hostname}:{port}/{catalog}
|
||||
```
|
||||
|
||||
If you are running Trino with Docker on your local machine, please use the following connection URL:
|
||||
|
||||
```
|
||||
trino://trino@host.docker.internal:8080
|
||||
```
|
||||
|
||||
##### Authentications
|
||||
|
||||
###### 1. Basic Authentication
|
||||
|
||||
You can provide `username`/`password` in the connection string or in the `Secure Extra` field at `Advanced / Security`
|
||||
|
||||
- In Connection String
|
||||
|
||||
```
|
||||
trino://{username}:{password}@{hostname}:{port}/{catalog}
|
||||
```
|
||||
|
||||
- In `Secure Extra` field
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "basic",
|
||||
@@ -1437,7 +1443,9 @@ You can provide `username`/`password` in the connection string or in the `Secure
|
||||
NOTE: if both are provided, `Secure Extra` always takes priority.
|
||||
|
||||
###### 2. Kerberos Authentication
|
||||
|
||||
In the `Secure Extra` field, configure it as in the following example:
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "kerberos",
|
||||
@@ -1454,7 +1462,9 @@ All fields in `auth_params` are passed directly to the [`KerberosAuthentication`
|
||||
NOTE: Kerberos authentication requires installing the [`trino-python-client`](https://github.com/trinodb/trino-python-client) locally with either the `all` or `kerberos` optional features, i.e., installing `trino[all]` or `trino[kerberos]` respectively.
|
||||
|
||||
###### 3. Certificate Authentication
|
||||
|
||||
In the `Secure Extra` field, configure it as in the following example:
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "certificate",
|
||||
@@ -1468,7 +1478,9 @@ In `Secure Extra` field, config as following example:
|
||||
All fields in `auth_params` are passed directly to the [`CertificateAuthentication`](https://github.com/trinodb/trino-python-client/blob/0.315.0/trino/auth.py#L416) class.
|
||||
|
||||
###### 4. JWT Authentication
|
||||
|
||||
Configure `auth_method` and provide the token in the `Secure Extra` field:
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "jwt",
|
||||
@@ -1479,8 +1491,10 @@ Config `auth_method` and provide token in `Secure Extra` field
|
||||
```
|
||||
|
||||
###### 5. Custom Authentication
|
||||
|
||||
To use custom authentication, you first need to add it to the
|
||||
`ALLOWED_EXTRA_AUTHENTICATIONS` allow list in your Superset config file:
|
||||
|
||||
```python
|
||||
from your.module import AuthClass
|
||||
from another.extra import auth_method
|
||||
@@ -1494,6 +1508,7 @@ ALLOWED_EXTRA_AUTHENTICATIONS: Dict[str, Dict[str, Callable[..., Any]]] = {
|
||||
```
|
||||
|
||||
Then in `Secure Extra` field:
|
||||
|
||||
```json
|
||||
{
|
||||
"auth_method": "custom_auth",
|
||||
@@ -1509,8 +1524,8 @@ or factory function (which returns an `Authentication` instance) to `auth_method
|
||||
All fields in `auth_params` are passed directly to your class/function.
|
||||
|
||||
**Reference**:
|
||||
|
||||
- [Trino-Superset-Podcast](https://trino.io/episodes/12.html)
|
||||
|
||||
#### Vertica
|
||||
|
||||
@@ -1537,8 +1552,6 @@ Other parameters:
|
||||
|
||||
- Load Balancer - Backup Host
|
||||
|
||||
|
||||
|
||||
#### YDB
|
||||
|
||||
The recommended connector library for [YDB](https://ydb.tech/) is
|
||||
@@ -1553,6 +1566,7 @@ ydb://{host}:{port}/{database_name}
|
||||
```
|
||||
|
||||
##### Protocol
|
||||
|
||||
You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:
|
||||
|
||||
```
|
||||
@@ -1563,9 +1577,10 @@ You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:
|
||||
|
||||
Default is `grpc`.
|
||||
|
||||
|
||||
##### Authentication Methods
|
||||
|
||||
###### Static Credentials
|
||||
|
||||
To use `Static Credentials` you should provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`:
|
||||
|
||||
```
|
||||
@@ -1577,8 +1592,8 @@ To use `Static Credentials` you should provide `username`/`password` in the `Sec
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
###### Access Token Credentials
|
||||
|
||||
To use `Access Token Credentials` you should provide `token` in the `Secure Extra` field at `Advanced / Security`:
|
||||
|
||||
```
|
||||
@@ -1589,8 +1604,8 @@ To use `Access Token Credentials` you should provide `token` in the `Secure Extr
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
##### Service Account Credentials
|
||||
|
||||
To use Service Account Credentials, you should provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`:
|
||||
|
||||
```
|
||||
@@ -1608,8 +1623,6 @@ To use Service Account Credentials, you should provide `service_account_json` in
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### YugabyteDB
|
||||
|
||||
[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL.
|
||||
@@ -1624,8 +1637,6 @@ The connection string looks like:
|
||||
postgresql://{username}:{password}@{host}:{port}/{database}
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Connecting through the UI
|
||||
|
||||
Here is the documentation on how to leverage the new DB Connection UI. This provides admins with the ability to enhance the UX for users who want to connect to new databases.
|
||||
@@ -1698,9 +1709,6 @@ For databases like MySQL and Postgres that use the standard format of `engine+dr
|
||||
|
||||
For other databases, you need to implement these methods yourself. The BigQuery DB engine spec is a good example of how to do that.
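As a rough illustration, the two methods convert between a SQLAlchemy URI and the individual connection form fields. Below is a minimal sketch of a hypothetical engine spec; the class name, dialect, signatures, and parameter keys are assumptions for illustration, not the actual BigQuery implementation:

```python
from typing import Any, Dict

from sqlalchemy.engine.url import make_url

from superset.db_engine_specs.base import BaseEngineSpec


class MyDBEngineSpec(BaseEngineSpec):
    """Hypothetical engine spec for a fictional 'mydb' dialect."""

    engine = "mydb"
    engine_name = "MyDB"

    @classmethod
    def build_sqlalchemy_uri(cls, parameters: Dict[str, Any], *args: Any, **kwargs: Any) -> str:
        # Assemble a URI from the individual connection form fields
        return (
            f"mydb://{parameters['username']}:{parameters['password']}"
            f"@{parameters['host']}:{parameters['port']}/{parameters['database']}"
        )

    @classmethod
    def get_parameters_from_uri(cls, uri: str, *args: Any, **kwargs: Any) -> Dict[str, Any]:
        # Split a URI back into the individual connection form fields
        url = make_url(uri)
        return {
            "username": url.username,
            "password": url.password,
            "host": url.host,
            "port": url.port,
            "database": url.database,
        }
```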
|
||||
|
||||
|
||||
|
||||
|
||||
### Extra Database Settings
|
||||
|
||||
##### Deeper SQLAlchemy Integration
|
||||
@@ -1764,9 +1772,7 @@ You can use the `Extra` field in the **Edit Databases** form to configure SSL:
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
## Misc
|
||||
|
||||
### Querying across databases
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ version: 1
|
||||
The Superset CLI allows you to import and export datasources to and from YAML. Datasources include
|
||||
databases. The data is expected to be organized in the following hierarchy:
|
||||
|
||||
```text
|
||||
├──databases
|
||||
| ├──database_1
|
||||
| | ├──table_1
|
||||
@@ -30,13 +30,13 @@ databases. The data is expected to be organized in the following hierarchy:
|
||||
|
||||
You can print your current datasources to stdout by running:
|
||||
|
||||
```bash
|
||||
superset export_datasources
|
||||
```
|
||||
|
||||
To save your datasources to a ZIP file run:
|
||||
|
||||
```bash
|
||||
superset export_datasources -f <filename>
|
||||
```
|
||||
|
||||
@@ -55,7 +55,7 @@ Alternatively, you can export datasources using the UI:
|
||||
|
||||
In order to obtain an **exhaustive list of all fields** you can import using the YAML import run:
|
||||
|
||||
```bash
|
||||
superset export_datasource_schema
|
||||
```
|
||||
|
||||
@@ -65,13 +65,13 @@ As a reminder, you can use the `-b` flag to include back references.
|
||||
|
||||
In order to import datasources from a ZIP file, run:
|
||||
|
||||
```bash
|
||||
superset import_datasources -p <path / filename>
|
||||
```
|
||||
|
||||
The optional username flag **-u** sets the user used for the datasource import. The default is 'admin'. Example:
|
||||
|
||||
```bash
|
||||
superset import_datasources -p <path / filename> -u 'admin'
|
||||
```
|
||||
|
||||
@@ -81,7 +81,7 @@ superset import_datasources -p <path / filename> -u 'admin'
|
||||
|
||||
When using Superset version 4.x.x to import from an older version (2.x.x or 3.x.x), use the command `legacy_import_datasources`, which expects a JSON file or a directory of JSONs. The options are `-r` for recursive and `-u` for specifying a user. Example of legacy import without options:
|
||||
|
||||
```bash
|
||||
superset legacy_import_datasources -p <path or filename>
|
||||
```
|
||||
|
||||
@@ -89,21 +89,21 @@ superset legacy_import_datasources -p <path or filename>
|
||||
|
||||
When using an older version of Superset (2.x.x or 3.x.x), the command is `import_datasources`. ZIP and YAML files are supported and to switch between them the feature flag `VERSIONED_EXPORT` is used. When `VERSIONED_EXPORT` is `True`, `import_datasources` expects a ZIP file, otherwise YAML. Example:
|
||||
|
||||
```bash
|
||||
superset import_datasources -p <path or filename>
|
||||
```
|
||||
|
||||
When `VERSIONED_EXPORT` is `False`, if you supply a path, all files ending with **yaml** or **yml** will be parsed. You can apply
|
||||
additional flags (e.g. to search the supplied path recursively):
|
||||
|
||||
```bash
|
||||
superset import_datasources -p <path> -r
|
||||
```
|
||||
|
||||
The sync flag **-s** takes parameters in order to sync the supplied elements with your file. Be
|
||||
careful, as this can delete the contents of your meta database. Example:
|
||||
|
||||
```bash
|
||||
superset import_datasources -p <path / filename> -s columns,metrics
|
||||
```
|
||||
|
||||
@@ -115,7 +115,7 @@ If you don’t supply the sync flag (**-s**) importing will only add and update
|
||||
E.g. you can add a `verbose_name` to the column `ds` in the table `random_time_series` from the example
|
||||
datasets by saving the following YAML to a file and then running the **import_datasources** command.
|
||||
|
||||
```yaml
|
||||
databases:
|
||||
- database_name: main
|
||||
tables:
|
||||
|
||||
@@ -11,15 +11,14 @@ version: 1
|
||||
To configure CORS, or cross-origin resource sharing, the following dependency must be installed:
|
||||
|
||||
```bash
|
||||
pip install apache_superset[cors]
|
||||
```
|
||||
|
||||
The following keys in `superset_config.py` can be specified to configure CORS:
|
||||
|
||||
- `ENABLE_CORS`: Must be set to `True` in order to enable CORS
|
||||
- `CORS_OPTIONS`: options passed to Flask-CORS
|
||||
|
||||
([documentation](https://flask-cors.readthedocs.io/en/latest/api.html#extension))
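For example, a minimal `superset_config.py` snippet might look like this (the origin URL is a placeholder; see the Flask-CORS documentation for the full set of options):

```python
# superset_config.py -- minimal CORS sketch
ENABLE_CORS = True
CORS_OPTIONS = {
    "supports_credentials": True,
    "allow_headers": ["*"],
    "resources": ["*"],
    "origins": ["https://dashboards.example.com"],  # placeholder origin
}
```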
|
||||
|
||||
## HTTP headers
|
||||
|
||||
@@ -27,7 +26,6 @@ Note that Superset bundles [flask-talisman](https://pypi.org/project/talisman/)
|
||||
Self-described as a small Flask extension that handles setting HTTP headers that can help
|
||||
protect against a few common web application security issues.
|
||||
|
||||
|
||||
## HTML Embedding of Dashboards and Charts
|
||||
|
||||
There are two ways to embed a dashboard: Using the [SDK](https://www.npmjs.com/package/@superset-ui/embedded-sdk) or embedding a direct link. Note that in the latter case everybody who knows the link is able to access the dashboard.
|
||||
@@ -39,14 +37,16 @@ This works by first changing the content security policy (CSP) of [flask-talisma
|
||||
#### Changing flask-talisman CSP
|
||||
|
||||
Add to `superset_config.py` the entire `TALISMAN_CONFIG` section from `config.py` and include a `frame-ancestors` section:
|
||||
|
||||
```python
|
||||
TALISMAN_ENABLED = True
|
||||
TALISMAN_CONFIG = {
|
||||
"content_security_policy": {
|
||||
...
|
||||
"frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
|
||||
"frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
|
||||
...
|
||||
```
|
||||
|
||||
Restart Superset for this configuration change to take effect.
|
||||
|
||||
#### Making a Dashboard Public
|
||||
@@ -69,6 +69,7 @@ Now anybody can directly access the dashboard's URL. You can embed it in an ifra
|
||||
>
|
||||
</iframe>
|
||||
```
|
||||
|
||||
#### Embedding a Chart
|
||||
|
||||
A chart's embed code can be generated by going to a chart's edit view and then clicking at the top right on `...` > `Share` > `Embed code`
|
||||
@@ -85,11 +86,10 @@ SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
|
||||
|
||||
## CSRF settings
|
||||
|
||||
Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used to manage
|
||||
some CSRF configurations. If you need to exempt endpoints from CSRF (e.g. if you are
|
||||
running a custom auth postback endpoint), you can add the endpoints to `WTF_CSRF_EXEMPT_LIST`:
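For example, in `superset_config.py` (the endpoint below is illustrative; use the view functions of your own endpoints):

```python
# superset_config.py -- exempting endpoints from CSRF protection
WTF_CSRF_ENABLED = True
WTF_CSRF_EXEMPT_LIST = [
    "superset.views.core.log",  # example entry; replace with your own endpoint
]
```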
|
||||
|
||||
|
||||
## SSH Tunneling
|
||||
|
||||
1. Turn on the feature flag (see the config sketch below this list)
|
||||
@@ -105,9 +105,12 @@ running a custom auth postback endpoint), you can add the endpoints to `WTF_CSRF
|
||||
3. Verify data is flowing
|
||||
- Once SSH tunneling has been enabled, go to SQL Lab and write a query to verify data is properly flowing.
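A minimal sketch for step 1, in `superset_config.py`:

```python
# superset_config.py -- enable the SSH tunneling feature flag
FEATURE_FLAGS = {
    "SSH_TUNNELING": True,
}
```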
|
||||
|
||||
|
||||
## Domain Sharding
|
||||
|
||||
:::note
|
||||
Domain Sharding is deprecated as of Superset 5.0.0, and will be removed in Superset 6.0.0. Please enable HTTP/2 to keep more open connections per domain.
|
||||
:::
|
||||
|
||||
Chrome allows up to 6 open connections per domain at a time. When there are more than 6 slices in a
|
||||
dashboard, fetch requests are often queued up waiting for the next available socket.
|
||||
[PR 5039](https://github.com/apache/superset/pull/5039) adds domain sharding to Superset,
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
title: Setup SSH Tunneling
|
||||
hide_title: true
|
||||
sidebar_position: 8
|
||||
version: 1
|
||||
---
|
||||
@@ -77,6 +77,7 @@ In the UI you can assign a set of parameters as JSON
|
||||
"my_table": "foo"
|
||||
}
|
||||
```
|
||||
|
||||
The parameters become available in your SQL (example: `SELECT * FROM {{ my_table }}`) by using Jinja templating syntax.
|
||||
SQL Lab template parameters are stored with the dataset as `TEMPLATE PARAMETERS`.
|
||||
|
||||
@@ -103,7 +104,6 @@ GROUP BY action
|
||||
|
||||
Note ``_filters`` is not stored with the dataset. It's only used within the SQL Lab UI.
|
||||
|
||||
|
||||
Besides default Jinja templating, SQL lab also supports self-defined template processor by setting
|
||||
the `CUSTOM_TEMPLATE_PROCESSORS` in your superset configuration. The values in this dictionary
|
||||
overwrite the default Jinja template processors of the specified database engine. The example below
|
||||
@@ -186,7 +186,7 @@ cache hit in the future and Superset can retrieve cached data.
|
||||
You can disable the inclusion of the `username` value in the calculation of the
|
||||
cache key by adding the following parameter to your Jinja code:
|
||||
|
||||
```python
|
||||
{{ current_username(add_to_cache_keys=False) }}
|
||||
```
|
||||
|
||||
@@ -201,7 +201,7 @@ cache hit in the future and Superset can retrieve cached data.
|
||||
You can disable the inclusion of the account `id` value in the calculation of the
|
||||
cache key by adding the following parameter to your Jinja code:
|
||||
|
||||
```python
|
||||
{{ current_user_id(add_to_cache_keys=False) }}
|
||||
```
|
||||
|
||||
@@ -216,10 +216,40 @@ cache hit in the future and Superset can retrieve cached data.
|
||||
You can disable the inclusion of the email value in the calculation of the
|
||||
cache key by adding the following parameter to your Jinja code:
|
||||
|
||||
```python
|
||||
{{ current_user_email(add_to_cache_keys=False) }}
|
||||
```
|
||||
|
||||
**Current User Roles**
|
||||
|
||||
The `{{ current_user_roles() }}` macro returns an array of roles for the logged-in user.
|
||||
|
||||
If you have caching enabled in your Superset configuration, then by default the roles value will be used
|
||||
by Superset when calculating the cache key. A cache key is a unique identifier that determines if there's a
|
||||
cache hit in the future and Superset can retrieve cached data.
|
||||
|
||||
You can disable the inclusion of the roles value in the calculation of the
|
||||
cache key by adding the following parameter to your Jinja code:
|
||||
|
||||
```python
|
||||
{{ current_user_roles(add_to_cache_keys=False) }}
|
||||
```
|
||||
|
||||
You can json-stringify the array by adding `|tojson` to your Jinja code:
|
||||
```python
|
||||
{{ current_user_roles()|tojson }}
|
||||
```
|
||||
|
||||
You can use the `|where_in` filter to use your roles in a SQL statement. For example, if `current_user_roles()` returns `['admin', 'viewer']`, the following template:
|
||||
```python
|
||||
SELECT * FROM users WHERE role IN {{ current_user_roles()|where_in }}
|
||||
```
|
||||
|
||||
Will be rendered as:
|
||||
```sql
|
||||
SELECT * FROM users WHERE role IN ('admin', 'viewer')
|
||||
```
|
||||
|
||||
**Custom URL Parameters**
|
||||
|
||||
The `{{ url_param('custom_variable') }}` macro lets you define arbitrary URL
|
||||
@@ -273,7 +303,7 @@ You can retrieve the value for a specific filter as a list using `{{ filter_valu
|
||||
This is useful if:
|
||||
|
||||
- You want to use a filter component to filter a query where the name of filter component column doesn't match the one in the select statement
|
||||
- You want to have the ability to filter inside the main query for performance purposes
|
||||
|
||||
Here's a concrete example:
|
||||
|
||||
@@ -301,7 +331,7 @@ This is useful if:
|
||||
|
||||
Here's a concrete example:
|
||||
|
||||
```sql
|
||||
WITH RECURSIVE
|
||||
superiors(employee_id, manager_id, full_name, level, lineage) AS (
|
||||
SELECT
|
||||
@@ -357,6 +387,7 @@ considerably improve performance, as many databases and query engines are able t
|
||||
if the temporal filter is placed on the inner query, as opposed to the outer query.
|
||||
|
||||
The macro takes the following parameters:
|
||||
|
||||
- `column`: Name of the temporal column. Leave undefined to reference the time range from a Dashboard Native Time Range
|
||||
filter (when present).
|
||||
- `default`: The default value to fall back to if the time filter is not present, or has the value `No filter`
|
||||
@@ -370,6 +401,7 @@ The macro takes the following parameters:
|
||||
filter should only apply to the inner query.
|
||||
|
||||
The return type has the following properties:
|
||||
|
||||
- `from_expr`: the start of the time filter (if any)
|
||||
- `to_expr`: the end of the time filter (if any)
|
||||
- `time_range`: The applied time range
|
||||
@@ -410,6 +442,7 @@ LIMIT 1000;
|
||||
|
||||
When using the `default` parameter, the templated query can be simplified, as the endpoints will always be defined
|
||||
(to use a fixed time range, you can also use something like `default="2024-08-27 : 2024-09-03"`)
|
||||
|
||||
```
|
||||
{% set time_filter = get_time_filter("dttm", default="Last week", remove_filter=True) %}
|
||||
SELECT
|
||||
@@ -429,19 +462,19 @@ To use the macro, first you need to find the ID of the dataset. This can be done
|
||||
|
||||
Once you have the ID you can query it as if it were a table:
|
||||
|
||||
```sql
|
||||
SELECT * FROM {{ dataset(42) }} LIMIT 10
|
||||
```
|
||||
|
||||
If you want to select the metric definitions as well, in addition to the columns, you need to pass an additional keyword argument:
|
||||
|
||||
```sql
|
||||
SELECT * FROM {{ dataset(42, include_metrics=True) }} LIMIT 10
|
||||
```
|
||||
|
||||
Since metrics are aggregations, the resulting SQL expression will be grouped by all non-metric columns. You can specify a subset of columns to group by instead:
|
||||
|
||||
```sql
|
||||
SELECT * FROM {{ dataset(42, include_metrics=True, columns=["ds", "category"]) }} LIMIT 10
|
||||
```
|
||||
|
||||
@@ -458,3 +491,37 @@ This macro avoids copy/paste, allowing users to centralize the metric definition
|
||||
|
||||
The `dataset_id` parameter is optional, and if not provided Superset will use the current dataset from context (for example, when using this macro in the Chart Builder, by default the `macro_key` will be searched in the dataset powering the chart).
|
||||
The parameter can be used in SQL Lab, or when fetching a metric from another dataset.
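For illustration, assuming a dataset that defines a metric with key `count` (the metric key, table name, and dataset ID `42` below are hypothetical), a templated query might look like:

```python
SELECT country, {{ metric('count') }} AS count
FROM my_table
GROUP BY country
```

To pull the same metric definition from another dataset, pass its ID explicitly: `{{ metric('count', 42) }}`.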
|
||||
|
||||
## Available Filters
|
||||
|
||||
Superset supports [builtin filters from the Jinja2 templating package](https://jinja.palletsprojects.com/en/stable/templates/#builtin-filters). Custom filters have also been implemented:
|
||||
|
||||
**Where In**
|
||||
Parses a list into a SQL-compatible statement. This is useful with macros that return an array (for example the `filter_values` macro):
|
||||
|
||||
```
|
||||
Dashboard filter with "First", "Second" and "Third" options selected
|
||||
{{ filter_values('column') }} => ["First", "Second", "Third"]
|
||||
{{ filter_values('column')|where_in }} => ('First', 'Second', 'Third')
|
||||
```
|
||||
|
||||
By default, this filter returns `()` (as a string) in case the value is null. The `default_to_none` parameter can be set to `True` to return null in this case:
|
||||
|
||||
```
|
||||
Dashboard filter without any value applied
|
||||
{{ filter_values('column') }} => ()
|
||||
{{ filter_values('column')|where_in(default_to_none=True) }} => None
|
||||
```
|
||||
|
||||
**To Datetime**
|
||||
|
||||
Loads a string as a `datetime` object. This is useful when performing date operations. For example:
|
||||
```
|
||||
{% set from_expr = get_time_filter("dttm", strftime="%Y-%m-%d").from_expr %}
|
||||
{% set to_expr = get_time_filter("dttm", strftime="%Y-%m-%d").to_expr %}
|
||||
{% if (to_expr|to_datetime(format="%Y-%m-%d") - from_expr|to_datetime(format="%Y-%m-%d")).days > 100 %}
|
||||
do something
|
||||
{% else %}
|
||||
do something else
|
||||
{% endif %}
|
||||
```
|
||||
|
||||
@@ -24,7 +24,7 @@ The challenge however lies with the slew of [database engines](/docs/configurati
|
||||
|
||||
For example, the following is a comparison of MySQL and Presto:
|
||||
|
||||
```python
|
||||
import pandas as pd
|
||||
from sqlalchemy import create_engine
|
||||
|
||||
@@ -41,7 +41,7 @@ pd.read_sql_query(
|
||||
|
||||
which outputs `{"ts":{"0":1640995200000}}` (which infers the UTC timezone per the Epoch time definition) and `{"ts":{"0":"2022-01-01 00:00:00.000"}}` (without an explicit timezone) respectively and thus are treated differently in JavaScript:
|
||||
|
||||
```js
|
||||
new Date(1640995200000)
|
||||
> Sat Jan 01 2022 13:00:00 GMT+1300 (New Zealand Daylight Time)
|
||||
|
||||
|
||||
@@ -26,9 +26,9 @@ More references:
|
||||
Here's a list of repositories that contain Superset-related packages:
|
||||
|
||||
- [apache/superset](https://github.com/apache/superset)
|
||||
is the main repository containing the `apache_superset` Python package
|
||||
distributed on
|
||||
[pypi](https://pypi.org/project/apache_superset/). This repository
|
||||
also includes Superset's main TypeScript/JavaScript bundles and react apps under
|
||||
the [superset-frontend](https://github.com/apache/superset/tree/master/superset-frontend)
|
||||
folder.
|
||||
|
||||
@@ -52,7 +52,7 @@ Note that:
|
||||
[docker-compose.yml](https://github.com/apache/superset/blob/master/docker-compose.yml)
|
||||
- The local repository is mounted within the services, meaning updating
|
||||
the code on the host will be reflected in the docker images
|
||||
- Superset is served at localhost:9000/
|
||||
- You can login with admin/admin
|
||||
|
||||
:::note
|
||||
@@ -75,15 +75,32 @@ configured to be secure.
|
||||
### Supported environment variables
|
||||
|
||||
Affecting the Docker build process:
|
||||
|
||||
- **SUPERSET_BUILD_TARGET (default=dev):** which --target to build, either `lean` or `dev` are commonly used
|
||||
- **INCLUDE_FIREFOX (default=false):** whether to include the Firefox headless browser in the build
|
||||
- **INCLUDE_CHROMIUM (default=false):** whether to include the Chromium headless browser in the build
|
||||
- **BUILD_TRANSLATIONS (default=false):** whether to compile the translations from the available .po files
|
||||
- **SUPERSET_LOAD_EXAMPLES (default=yes):** whether to load the examples into the database upon startup,
|
||||
save some precious time on startup by `SUPERSET_LOAD_EXAMPLES=no docker compose up`
|
||||
- **SUPERSET_LOG_LEVEL (default=info)**: Can be set to debug, info, warning, error, critical
|
||||
for more verbose logging
|
||||
|
||||
For more env vars that affect your configuration, see this
|
||||
[superset_config.py](https://github.com/apache/superset/blob/master/docker/pythonpath_dev/superset_config.py)
|
||||
used in the `docker compose` context to assign env vars to the superset configuration.
|
||||
|
||||
### Accessing the postgres database
|
||||
|
||||
Sometimes it's useful to access the database in the docker container directly.
|
||||
You can enter a `psql` shell (the official Postgres client) by running the following command:
|
||||
|
||||
```bash
|
||||
docker compose exec db psql -U superset
|
||||
```
|
||||
|
||||
Also note that the database is exposed on port 5432, so you can connect to it using your favorite
|
||||
Postgres client, or even SQL Lab itself directly in Superset, by creating a new database connection
|
||||
to `localhost:5432`.
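With the default docker compose credentials, the SQLAlchemy URI would look something like `postgresql+psycopg2://superset:superset@localhost:5432/superset` (adjust the user, password, and database name if you have customized `docker/.env`).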
|
||||
|
||||
### Nuking the postgres database
|
||||
|
||||
@@ -117,7 +134,8 @@ instance, but many people like to run that tooling from their host.
|
||||
|
||||
Assuming you already have a way to set up your Python environments
|
||||
like `pyenv`, `virtualenv` or something else, all you should have to
|
||||
do is to install our dev, pinned python requirements bundle, after installing
|
||||
the prerequisites mentioned in [OS Dependencies](https://superset.apache.org/docs/installation/pypi/#os-dependencies)
|
||||
|
||||
```bash
|
||||
pip install -r requirements/development.txt
|
||||
@@ -252,22 +270,22 @@ If you have made changes to the FAB-managed templates, which are not built the s
|
||||
If you add a new requirement or update an existing requirement (per the `install_requires` section in `setup.py`) you must recompile (freeze) the Python dependencies to ensure that for CI, testing, etc. the build is deterministic. This can be achieved via,
|
||||
|
||||
```bash
|
||||
python3 -m venv venv
|
||||
source venv/bin/activate
|
||||
python3 -m pip install -r requirements/development.txt
|
||||
./scripts/uv-pip-compile.sh
|
||||
```
|
||||
|
||||
When upgrading the version number of a single package, you should run `./scripts/uv-pip-compile.sh` with the `-P` flag:
|
||||
|
||||
```bash
|
||||
./scripts/uv-pip-compile.sh -P some-package-to-upgrade
|
||||
```
|
||||
|
||||
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run `./scripts/uv-pip-compile.sh --upgrade`
|
||||
|
||||
```bash
|
||||
./scripts/uv-pip-compile.sh --upgrade
|
||||
```
|
||||
|
||||
This should be done periodically, but it is recommended to do thorough manual testing of the application to ensure no breaking changes have been introduced that aren't caught by the unit and integration tests.
|
||||
@@ -488,12 +506,10 @@ pre-commit install
|
||||
|
||||
A series of checks will now run when you make a git commit.
|
||||
|
||||
|
||||
## Linting
|
||||
|
||||
See [how tos](/docs/contributing/howtos#linting)
|
||||
|
||||
|
||||
## GitHub Actions and `act`
|
||||
|
||||
:::tip
|
||||
@@ -506,6 +522,7 @@ For more targetted iteration, see the `gh workflow run --ref {BRANCH}` subcomman
|
||||
|
||||
For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
|
||||
can find all of the workflows and other assets under the `.github/` folder. This includes:
|
||||
|
||||
- running the backend unit test suites (`tests/`)
|
||||
- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
|
||||
- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
|
||||
@@ -547,6 +564,7 @@ act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --con
|
||||
```
|
||||
|
||||
In the example above, notice that:
|
||||
|
||||
- we target a specific workflow, using `--job`
|
||||
- we pass a secret using `--secret`, as many jobs require read access (public) to the repo
|
||||
- we simulate a `pull_request` event by specifying it as the first arg,
|
||||
@@ -626,71 +644,6 @@ To run a single test file:
|
||||
npm run test -- path/to/file.js
|
||||
```
|
||||
|
||||
### Integration Testing
|
||||
|
||||
We use [Cypress](https://www.cypress.io/) for integration tests. To open Cypress and explore tests first setup and run test server:
|
||||
|
||||
```bash
|
||||
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
|
||||
export SUPERSET_TESTENV=true
|
||||
export CYPRESS_BASE_URL="http://localhost:8081"
|
||||
superset db upgrade
|
||||
superset load_test_users
|
||||
superset load-examples --load-test-data
|
||||
superset init
|
||||
superset run --port 8081
|
||||
```
|
||||
|
||||
Run Cypress tests:
|
||||
|
||||
```bash
|
||||
cd superset-frontend
|
||||
npm run build-instrumented
|
||||
|
||||
cd cypress-base
|
||||
npm install
|
||||
|
||||
# run tests via headless Chrome browser (requires Chrome 64+)
|
||||
npm run cypress-run-chrome
|
||||
|
||||
# run tests from a specific file
|
||||
npm run cypress-run-chrome -- --spec cypress/e2e/explore/link.test.ts
|
||||
|
||||
# run specific file with video capture
|
||||
npm run cypress-run-chrome -- --spec cypress/e2e/dashboard/index.test.js --config video=true
|
||||
|
||||
# to open the cypress ui
|
||||
npm run cypress-debug
|
||||
|
||||
# to point cypress to a url other than the default (http://localhost:8088) set the environment variable before running the script
|
||||
# e.g., CYPRESS_BASE_URL="http://localhost:9000"
|
||||
CYPRESS_BASE_URL=<your url> npm run cypress open
|
||||
```
|
||||
|
||||
See [`superset-frontend/cypress_build.sh`](https://github.com/apache/superset/blob/master/superset-frontend/cypress_build.sh).
|
||||
|
||||
As an alternative, you can use the docker compose environment for testing.
|
||||
|
||||
Make sure you have added the line below to your /etc/hosts file:
|
||||
`127.0.0.1 db`
|
||||
|
||||
If you have already launched the Docker environment, use the following command to ensure a fresh database instance:
|
||||
`docker compose down -v`
|
||||
|
||||
Launch environment:
|
||||
|
||||
`CYPRESS_CONFIG=true docker compose up --build`
|
||||
|
||||
It will serve the backend and frontend on port 8088.
|
||||
|
||||
Run Cypress tests:
|
||||
|
||||
```bash
|
||||
cd cypress-base
|
||||
npm install
|
||||
npm run cypress open
|
||||
```
|
||||
|
||||
### Debugging Server App
|
||||
|
||||
#### Local
|
||||
@@ -833,7 +786,7 @@ To debug Flask running in POD inside a kubernetes cluster, you'll need to make s
|
||||
add: ["SYS_PTRACE"]
|
||||
```
|
||||
|
||||
See [set capabilities for a container](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container) for more details.
|
||||
|
||||
Once the pod is running as root and has the SYS_PTRACE capability it will be able to debug the Flask app.
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ version: 1
|
||||
|
||||
The latest documentation and tutorial are available at https://superset.apache.org/.
|
||||
|
||||
The documentation site is built using [Docusaurus 3](https://docusaurus.io/), a modern
|
||||
static website generator, the source for which resides in `./docs`.
|
||||
|
||||
### Local Development
|
||||
@@ -223,36 +223,39 @@ To run a single test file:
|
||||
npm run test -- path/to/file.js
|
||||
```
|
||||
|
||||
### E2E Integration Testing
|
||||
|
||||
For E2E testing, we recommend that you use a `docker compose` backend
|
||||
|
||||
```bash
|
||||
CYPRESS_CONFIG=true docker compose up --build
|
||||
```
|
||||
`docker compose` will get to work and expose a Cypress-ready Superset app.
|
||||
This app uses a different database schema (`superset_cypress`) to keep it isolated from
|
||||
your other dev environment(s), a specific set of examples, and a set of configurations that
|
||||
align with the expectations of the end-to-end tests. Also note that it's served on a
|
||||
different port than the default port for the backend (`8088`).
|
||||
|
||||
Now in another terminal, let's get ready to execute some Cypress commands. First, tell Cypress
|
||||
to connect to the Cypress-ready Superset backend.
|
||||
|
||||
```
|
||||
export CYPRESS_BASE_URL=http://localhost:8081
|
||||
```
|
||||
|
||||
In another terminal, prepare the frontend and run Cypress tests:
|
||||
|
||||
```bash
|
||||
cd superset-frontend
|
||||
npm run build-instrumented
|
||||
|
||||
# superset-frontend/cypress-base is the base folder for everything Cypress-related
|
||||
# It's essentially its own npm app, with its own dependencies, configurations and utilities
|
||||
cd cypress-base
|
||||
npm install
|
||||
|
||||
# run tests via headless Chrome browser (requires Chrome 64+)
|
||||
# use interactive mode to run tests, while keeping memory usage contained
|
||||
# this will fire up an interactive Cypress UI
|
||||
# as you alter the code, the tests will re-run automatically, and you can visualize each
|
||||
# and every step for debugging purposes
|
||||
npx cypress open --config numTestsKeptInMemory=5
|
||||
|
||||
# to run the test suite on the command line using chrome (same as CI)
|
||||
npm run cypress-run-chrome
|
||||
|
||||
# run tests from a specific file
|
||||
@@ -264,9 +267,6 @@ npm run cypress-run-chrome -- --spec cypress/e2e/dashboard/index.test.js --confi
|
||||
# to open the cypress ui
|
||||
npm run cypress-debug
|
||||
|
||||
# to point cypress to a url other than the default (http://localhost:8088) set the environment variable before running the script
|
||||
# e.g., CYPRESS_BASE_URL="http://localhost:9000"
|
||||
CYPRESS_BASE_URL=<your url> npm run cypress open
|
||||
```
|
||||
|
||||
See [`superset-frontend/cypress_build.sh`](https://github.com/apache/superset/blob/master/superset-frontend/cypress_build.sh).
|
||||
@@ -411,7 +411,7 @@ See [set capabilities for a container](https://kubernetes.io/docs/tasks/configur
|
||||
|
||||
Once the pod is running as root and has the `SYS_PTRACE` capability it will be able to debug the Flask app.
|
||||
|
||||
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages: gdb, netstat and debugpy.
|
||||
|
||||
Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.
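With `kubectl`, such a tunnel can be created with something like `kubectl port-forward pod/<your-superset-pod> 5678:5678` (the pod name is a placeholder).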
|
||||
|
||||
@@ -608,3 +608,27 @@ If using the eslint extension with vscode, put the following in your workspace `
  "superset-frontend"
]
```

## GitHub Ephemeral Environments

On any given pull request on GitHub, it's possible to create a temporary environment/deployment
by simply adding the label `testenv-up` to the PR. Once you add the `testenv-up` label, a
GitHub Action will be triggered that will:

- build a docker image
- deploy it in EC2 (sponsored by the folks at [Preset](https://preset.io))
- write a comment on the PR with a link to the ephemeral environment

For more advanced use cases, it's possible to set a feature flag on the PR body, which will
take effect on the ephemeral environment. For example, if you want to set the `TAGGING_SYSTEM`
feature flag to `true`, you can add the following line to the PR body/description:

```
FEATURE_TAGGING_SYSTEM=true
```

Similarly, it's possible to disable feature flags with:

```
FEATURE_TAGGING_SYSTEM=false
```
@@ -3,7 +3,7 @@ sidebar_position: 6
version: 1
---

# Miscellaneous

## Reporting a Security Vulnerability

@@ -11,7 +11,7 @@ Please report security vulnerabilities to private@superset.apache.org.

In the event a community member discovers a security flaw in Superset, it is important to follow the [Apache Security Guidelines](https://www.apache.org/security/committers.html) and release a fix as quickly as possible before public disclosure. Reporting security vulnerabilities through the usual GitHub Issues channel is not ideal as it will publicize the flaw before a fix can be applied.

## SQL Lab Async

It's possible to configure a local database to operate in `async` mode,
to work on `async` related features.
@@ -46,7 +46,7 @@ Note that:
to your production environment, and use a similar broker as well as
results backend configuration

## Async Chart Queries

It's possible to configure database queries for charts to operate in `async` mode. This is especially useful for dashboards with many charts that may otherwise be affected by browser connection limits. To enable async queries for dashboards and Explore, the following dependencies are required:
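As a sketch, the feature is gated behind the `GLOBAL_ASYNC_QUERIES` feature flag; a Redis-backed cache and a long random JWT secret are also assumed here (the config path and secret value are placeholders):

```bash
# enable async chart queries in superset_config.py
cat >> ./superset_config.py <<'EOF'
FEATURE_FLAGS = {"GLOBAL_ASYNC_QUERIES": True}
# must be a sufficiently long random string (placeholder below)
GLOBAL_ASYNC_QUERIES_JWT_SECRET = "change-me-to-a-long-random-secret-string"
EOF
```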
@@ -7,7 +7,7 @@ import InteractiveSVG from '../../src/components/InteractiveERDSVG';

# Resources

## Entity-Relationship Diagram

Here is our interactive ERD:
@@ -66,7 +66,7 @@ For running long query from Sql Lab, by default Superset allows it run as long a
being killed by Celery. If you want to increase the time for a running query, you can specify the
timeout in the configuration. For example:

```python
SQLLAB_ASYNC_TIME_LIMIT_SEC = 60 * 60 * 6
```
@@ -78,7 +78,7 @@ come back within client-side timeout (60 seconds by default), Superset will disp
to avoid a gateway timeout message. If you have a longer gateway timeout limit, you can change the
timeout settings in **superset_config.py**:

```python
SUPERSET_WEBSERVER_TIMEOUT = 60
```
@@ -87,7 +87,7 @@ SUPERSET_WEBSERVER_TIMEOUT = 60

You need to register a free account at [Mapbox.com](https://www.mapbox.com), obtain an API key, and add it
to **.env** at the key MAPBOX_API_KEY:

```python
MAPBOX_API_KEY = "longstringofalphanumer1c"
```
@@ -99,7 +99,7 @@ refreshed - especially if some data is slow moving, or run heavy queries. To exc
from the timed refresh process, add the `timed_refresh_immune_slices` key to the dashboard JSON
Metadata field:

```json
{
  "filter_immune_slices": [],
  "expanded_slices": {},
@@ -115,7 +115,7 @@ Slice refresh will also be staggered over the specified period. You can turn off
setting the `stagger_refresh` to false and modify the stagger period by setting `stagger_time` to a
value in milliseconds in the JSON Metadata field:

```json
{
  "stagger_refresh": false,
  "stagger_time": 2500
@@ -125,7 +125,7 @@ value in milliseconds in the JSON Metadata field:
Here, the entire dashboard will refresh at once if periodic refresh is on. The stagger time of 2.5
seconds is ignored.

**Why does ‘flask fab’ or superset freeze/hang/stop responding when started (my home directory is
NFS mounted)?**

By default, Superset creates and uses an SQLite database at `~/.superset/superset.db`. SQLite is
@@ -137,7 +137,7 @@ You can override this path using the **SUPERSET_HOME** environment variable.
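For example (a sketch; the path is a placeholder for fast local, non-NFS storage):

```bash
# keep Superset's on-disk state off the NFS mount
export SUPERSET_HOME=/var/lib/superset
```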
Another workaround is to change where Superset stores the SQLite database by adding the following in
`superset_config.py`:

```python
SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db?check_same_thread=false'
```
@@ -157,12 +157,12 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s

To clarify, the database backend is an OLTP database used by Superset to store its internal
information like your list of users and dashboard definitions. While Superset supports a
[variety of databases as data _sources_](/docs/configuration/databases#installing-database-drivers),
only a few database engines are supported for use as the OLTP backend / metadata store.

Superset is tested using MySQL, PostgreSQL, and SQLite backends. It’s recommended you install
Superset on one of these database servers for production. Installation on other OLTP databases
may work but isn’t tested. It has been reported that [Microsoft SQL Server does _not_
work as a Superset backend](https://github.com/apache/superset/issues/18961). Column-store,
non-OLTP databases are not designed for this type of workload.
@@ -213,7 +213,7 @@ SQLAlchemy and DBAPI scope. This includes features like:

Beyond the SQLAlchemy connector, it’s also possible, though much more involved, to extend Superset
and write your own connector. The only example of this at the moment is the Druid connector, which
is getting superseded by Druid’s growing SQL support and the recent availability of a DBAPI and
SQLAlchemy driver. If the database you are considering integrating has any kind of SQL support,
it’s probably preferable to go the SQLAlchemy route. Note that for a native connector to be possible
the database needs to have support for running OLAP-type queries and should be able to do things that
are typical in basic SQL:
||||
The API available is documented using [Swagger](https://swagger.io/) and the documentation can be
|
||||
made available under **/swagger/v1** by enabling the following flag in `superset_config.py`:
|
||||
|
||||
```
|
||||
```python
|
||||
FAB_API_SWAGGER_UI = True
|
||||
```
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ This page is meant to give new administrators an understanding of Superset's com
## Components

A Superset installation is made up of these components:

1. The Superset application itself
2. A metadata database
3. A caching layer (optional, but necessary for some features)
@@ -22,6 +23,7 @@ A Superset installation is made up of these components:
### Optional components and associated features

The optional components above are necessary to enable these features:

- [Alerts and Reports](/docs/configuration/alerts-reports)
- [Caching](/docs/configuration/cache)
- [Async Queries](/docs/configuration/async-queries-celery/)
@@ -36,6 +38,7 @@ Here are further details on each component.
### The Superset Application

This is the core application. Superset operates like this:

- A user visits a chart or dashboard
- That triggers a SQL query to the data warehouse holding the underlying dataset
- The resulting data is served up in a data visualization
@@ -45,13 +48,14 @@ This is the core application. Superset operates like this:

This is where chart and dashboard definitions, user information, logs, etc. are stored. Superset is tested to work with PostgreSQL and MySQL databases as the metadata database (not to be confused with a data source like your data warehouse, which could be a much greater variety of options like Snowflake, Redshift, etc.).

Some installation methods like our Quickstart and PyPI come configured by default to use a SQLite on-disk database. And in a Docker Compose installation, the data would be stored in a PostgreSQL container volume. Neither of these cases are recommended for production instances of Superset.

For production, a properly-configured, managed, standalone database is recommended. No matter what database you use, you should plan to back it up regularly.

### Caching Layer

The caching layer serves two main functions:

- Store the results of queries to your data warehouse so that when a chart is loaded twice, it pulls from the cache the second time, speeding up the application and reducing load on your data warehouse (a sample configuration sketch follows this list).
- Act as a message broker for the worker, enabling the Alerts & Reports, async queries, and thumbnail caching features.
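As a sketch, a Redis-backed chart-data cache could be wired up in `superset_config.py` like this (host, port, prefix, and config path are placeholders):

```bash
# append a Redis-backed cache config to superset_config.py
cat >> ./superset_config.py <<'EOF'
CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": 300,
    "CACHE_KEY_PREFIX": "superset_",
    "CACHE_REDIS_URL": "redis://localhost:6379/0",
}
EOF
```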
@@ -59,14 +59,13 @@ Here are the build presets that are exposed through the `supersetbot docker` uti
  this specific SHA, which could be from a `master` merge, or release.
- `websocket-latest`: The WebSocket image for use in a Superset cluster.

For insights or modifications to the build matrix and tagging conventions,
check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.

## Key ARGs in Dockerfile

- `BUILD_TRANSLATIONS`: whether to build the translations into the image. For the
  frontend build, this tells webpack to strip out all locales other than `en` from
  the `moment-timezone` library. For the backend, this skips compiling the
  translations.
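For instance, a sketch of toggling the flag at build time (the image tag is arbitrary):

```bash
# build a local image with translations compiled in
docker build --build-arg BUILD_TRANSLATIONS=true -t superset-local:translations .
```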
@@ -17,7 +17,7 @@ Since `docker compose` is primarily designed to run a set of containers on **a s
and can't support requirements for **high availability**, we do not support nor recommend
using our `docker compose` constructs to support production-type use-cases. For single host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along
with our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
:::
@@ -43,7 +43,6 @@ Note that there are 3 major ways we support to run `docker compose`:
`export TAG=4.0.0-dev` or `export TAG=3.0.0-dev`, with `latest-dev` being the default.
That's because the `dev` builds happen to package the `psycopg2-binary` required to connect
to the Postgres database launched as part of the `docker compose` builds.

More on these two approaches after setting up the requirements for either.
@@ -121,6 +120,13 @@ Here various release tags, github SHA, and latest `master` can be referenced by
Refer to the docker-related documentation to learn more about existing tags you can point to
from Docker Hub.

:::note
For options #2 and #3, we recommend checking out the release tag in the git repository
(ie: `git checkout 4.0.0`) for more guaranteed results. This ensures that the `docker-compose.*.yml`
configurations and the mounted `docker/` scripts are in sync with the image you are
looking to fire up.
:::

## `docker compose` tips & configuration

:::caution
@@ -150,16 +150,20 @@ Superset requires a Python DB-API database driver and a SQLAlchemy
dialect to be installed for each datastore you want to connect to.

See [Install Database Drivers](/docs/configuration/databases) for more information.
It is recommended that you refer to the versions listed in
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
instead of hard-coding them in your bootstrap script, as seen below.

:::

The following example installs the drivers for BigQuery and Elasticsearch, allowing you to connect to these data sources within your Superset setup:

```yaml
bootstrapScript: |
  #!/bin/bash
  pip install psycopg2==2.9.6 \
    sqlalchemy-bigquery==1.6.1 \
    elasticsearch-dbapi==0.2.5 &&\
  uv pip install .[postgres] \
    .[bigquery] \
    .[elasticsearch] &&\
  if [ ! -f ~/bootstrap ]; then echo "Running Superset with uid {{ .Values.runAsUser }}" > ~/bootstrap; fi
```
@@ -12,7 +12,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
<img src={useBaseUrl("/img/pypi.png" )} width="150" />
<br /><br />

This page describes how to install Superset using the `apache_superset` package [published on PyPI](https://pypi.org/project/apache_superset/).

## OS Dependencies
@@ -124,10 +124,10 @@ command line.

### Installing and Initializing Superset

First, start by installing `apache_superset`:

```bash
pip install apache_superset
```

Then, define the mandatory configurations, SECRET_KEY and FLASK_APP:
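A minimal sketch, assuming a bash shell (generate your own long random secret; the `openssl` one-liner is just one way to do it):

```bash
# SECRET_KEY must be a long random string; Superset reads SUPERSET_SECRET_KEY from the environment
export SUPERSET_SECRET_KEY="$(openssl rand -base64 42)"
# FLASK_APP tells the flask/superset CLI which app to load
export FLASK_APP=superset
```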
@@ -32,7 +32,7 @@ docker compose up
To upgrade Superset in a native installation, run the following commands:

```bash
pip install apache_superset --upgrade
```

## Upgrading the Metadata Database
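As a sketch of the usual follow-up (back up the metadata database first, and check the release notes before running):

```bash
superset db upgrade   # apply pending Alembic migrations to the metadata database
superset init         # re-sync default roles and permissions
```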
@@ -22,7 +22,7 @@ page.
### 1. Get Superset

```bash
git clone https://github.com/apache/superset
```

### 2. Start the latest official release of Superset
@@ -31,6 +31,9 @@ $ git clone https://github.com/apache/superset
# Enter the repository you just cloned
cd superset

# Set the repo to the state associated with the latest official version
git checkout tags/4.1.1

# Fire up Superset using Docker Compose
docker compose -f docker-compose-image-tag.yml up
```
@@ -58,7 +61,7 @@ password: admin
Once you're done with Superset, you can stop and delete it just like any other container environment:

```bash
docker compose down
```

:::tip
@@ -115,7 +115,7 @@ the models and views they can access, and that Finance role that is a collection

A user can have multiple roles associated with them. For example, an executive on the Finance
team could be granted **Gamma**, **Finance**, and the **Executive** roles. The **Executive**
role could provide access to a set of data sources and dashboards made available only to executives.
In the **Dashboards** view, a user can only see the ones they have access to
based on the roles and permissions that were attributed.

### Row Level Security
@@ -224,17 +224,17 @@ this warning using the `CONTENT_SECURITY_POLICY_WARNING` key in `config.py`.

#### CSP Requirements

- Superset needs the `style-src unsafe-inline` CSP directive in order to operate.

  ```
  style-src 'self' 'unsafe-inline'
  ```

- Only scripts marked with a [nonce](https://content-security-policy.com/nonce/) can be loaded and executed.
  Nonce is a random string automatically generated by Talisman on each page load.
  You can get the current nonce value by calling the Jinja macro `csp_nonce()`.

  ```html
  <script nonce="{{ csp_nonce() }}">
    /* my script */
  </script>
@@ -253,17 +253,19 @@ You can get current nonce value by calling jinja macro `csp_nonce()`.
  connect-src 'self' https://api.mapbox.com https://events.mapbox.com
  ```

- Cartodiagram charts request map data (image and json) from external resources that can be edited by users,
  and therefore either require a list of allowed domains to request from or a wildcard (`'*'`) for `img-src` and `connect-src`.

- Other CSP directives default to `'self'` to limit content to the same origin as the Superset server.

In order to adjust the provided CSP configuration to your needs, follow the instructions and examples provided in
the [Content Security Policy Reference](https://content-security-policy.com/).

#### Other Talisman security considerations

Setting `TALISMAN_ENABLED = True` will invoke Talisman's protection with its default arguments,
of which `content_security_policy` is only one. Those can be found in the
[Talisman documentation](https://pypi.org/project/flask-talisman/) under *Options*.
These generally improve security, but administrators should be aware of their existence.

In particular, the option of `force_https = True` (`False` by default) may break Superset's Alerts & Reports
@@ -278,6 +280,49 @@ TALISMAN_CONFIG = {
    "content_security_policy": { ...
```

#### Configuring Talisman in Superset

Talisman settings in Superset can be modified using `superset_config.py`. If you need to adjust security policies, you can override the default configuration.

Example: overriding the Talisman configuration in `superset_config.py` for loading images from S3 or other external sources.

```python
TALISMAN_CONFIG = {
    "content_security_policy": {
        "base-uri": ["'self'"],
        "default-src": ["'self'"],
        "img-src": [
            "'self'",
            "blob:",
            "data:",
            "https://apachesuperset.gateway.scarf.sh",
            "https://static.scarf.sh/",
            # "https://cdn.brandfolder.io",  # Uncomment when SLACK_ENABLE_AVATARS is True  # noqa: E501
            "ows.terrestris.de",
            "aws.s3.com",  # Add your bucket or external data source
        ],
        "worker-src": ["'self'", "blob:"],
        "connect-src": [
            "'self'",
            "https://api.mapbox.com",
            "https://events.mapbox.com",
        ],
        "object-src": "'none'",
        "style-src": [
            "'self'",
            "'unsafe-inline'",
        ],
        "script-src": ["'self'", "'strict-dynamic'"],
    },
    "content_security_policy_nonce_in": ["script-src"],
    "force_https": False,
    "session_cookie_secure": False,
}
```

For more information on setting up Talisman, please refer to
[Changing Flask-Talisman CSP](https://superset.apache.org/docs/configuration/networking-settings/#changing-flask-talisman-csp).

### Reporting Security Vulnerabilities

The Apache Software Foundation takes a rigorous stance on eliminating security issues in its
@@ -12,8 +12,12 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
This section is focused on documentation for end-users who will be using Superset
for the data analysis and exploration workflow
(data analysts, business analysts, data
scientists, etc).

:::tip
In addition to this site, [Preset.io](http://preset.io/) maintains an updated set of end-user
documentation at [docs.preset.io](https://docs.preset.io/).
:::

This tutorial targets someone who wants to create charts and dashboards in Superset. We’ll show you
how to connect Superset to a new database and configure a table in that database for analysis.
@@ -48,7 +52,6 @@ Please note, if you are trying to connect to another locally running database (w

Once you've clicked that link you only need to specify two things (the database name and SQLAlchemy URI):

<img src={useBaseUrl("/img/tutorial/tutorial_03b_connection_string_details.png" )} width="600" />{" "} <br/><br/>

As noted in the text below the form, you should refer to the SQLAlchemy documentation on
@@ -104,7 +107,7 @@ Aggregate functions are allowed and encouraged for metrics.

You can also certify metrics if you'd like for your team in this view.

1. Virtual calculated columns: you can write SQL queries that
   customize the appearance and behavior
   of a specific column (e.g. `CAST(recovery_rate as float)`).
   Aggregate functions aren't allowed in calculated columns.
@@ -176,26 +179,40 @@ into a position you like onto the underlying grid.

Congrats! You’ve successfully linked, analyzed, and visualized data in Superset. There is a wealth
of other table configuration and visualization options, so please start exploring and creating
slices and dashboards of your own.

### Manage access to Dashboards

Access to dashboards is managed via owners (users that have edit permissions to the dashboard).
The dashboard needs to be published to be visible to other users.

Non-owner users' access can be managed in two different ways:

1. Dataset permissions - if you grant a role permissions on datasets, this automatically grants implicit access to all dashboards that use those permitted datasets.
2. Dashboard roles - if you enable the [**DASHBOARD_RBAC** feature flag](/docs/configuration/configuring-superset#feature-flags), you will be able to manage which roles can access the dashboard (a configuration sketch follows this list).
   - Granting a role access to a dashboard will bypass dataset-level checks. Having dashboard access implicitly grants read access to all the featured charts in the dashboard, and thereby also all the associated datasets.
   - If no roles are specified for a dashboard, regular **Dataset permissions** will apply.
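As a sketch, the flag lives in `FEATURE_FLAGS` in `superset_config.py` (the config path below is a deployment-specific placeholder):

```bash
# enable dashboard-level RBAC
cat >> ./superset_config.py <<'EOF'
FEATURE_FLAGS = {
    "DASHBOARD_RBAC": True,
}
EOF
```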
<img src={useBaseUrl("/img/tutorial/tutorial_dashboard_access.png" )} />
|
||||
|
||||
### Publishing a Dashboard
|
||||
|
||||
If you would like to make your dashboard available to other users, click on the `Draft` button next to the
|
||||
title of your dashboard.
|
||||
|
||||
<img src={useBaseUrl("/img/tutorial/publish_button_dashboard.png" )} />
|
||||
|
||||
:::warning
|
||||
Draft dashboards are only visible to the dashboard owners and admins. Published dashboards are visible to all users with access to the underlying datasets or if RBAC is enabled, to the roles that have been granted access to the dashboard.
|
||||
:::
|
||||
|
||||
### Mark a Dashboard as Favorite
|
||||
|
||||
You can mark a dashboard as a favorite by clicking on the star icon next to the title of your dashboard. This makes it easier to find it in the list of dashboards or on the home page.
|
||||
|
||||
### Customizing dashboard
|
||||
|
||||
The following URL parameters can be used to modify how the dashboard is rendered:
|
||||
|
||||
- `standalone`:
|
||||
- `0` (default): dashboard is displayed normally
|
||||
- `1`: Top Navigation is hidden
|
||||
|
||||
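For example (the dashboard id `123` is a placeholder):

```bash
# open a dashboard without the top navigation; use `open` instead on macOS
xdg-open "http://localhost:8088/superset/dashboard/123/?standalone=1"
```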
@@ -13,7 +13,7 @@ In this tutorial, we will introduce key concepts in Apache Superset through the
real dataset which contains the flights made by employees of a UK-based organization in 2011. The
following information about each flight is given:

- The traveler’s department. For the purposes of this tutorial the departments have been renamed
  Orange, Yellow and Purple.
- The cost of the ticket.
- The travel class (Economy, Premium Economy, Business and First Class).
@@ -17,14 +17,13 @@
 * under the License.
 */

import type { Config } from '@docusaurus/types';
import type { Options, ThemeConfig } from '@docusaurus/preset-classic';
import { themes } from 'prism-react-renderer';

const { github: lightCodeTheme, vsDark: darkCodeTheme } = themes;

const config: Config = {
  title: 'Superset',
  tagline:
    'Apache Superset is a modern data exploration and visualization platform',
@@ -33,8 +32,8 @@ const config = {
  onBrokenLinks: 'throw',
  onBrokenMarkdownLinks: 'throw',
  favicon: '/img/favicon.ico',
  organizationName: 'apache',
  projectName: 'superset',
  themes: ['@saucelabs/theme-github-codeblock'],
  plugins: [
    [
@@ -199,105 +198,103 @@ const config = {
  presets: [
    [
      '@docusaurus/preset-classic',
      {
        docs: {
          sidebarPath: require.resolve('./sidebars.js'),
          editUrl: ({ versionDocsDirPath, docPath }) => {
            if (docPath === 'intro.md') {
              return 'https://github.com/apache/superset/edit/master/README.md';
            }
            return `https://github.com/apache/superset/edit/master/docs/${versionDocsDirPath}/${docPath}`;
          },
        },
        blog: {
          showReadingTime: true,
          // Please change this to your repo.
          editUrl:
            'https://github.com/facebook/docusaurus/edit/main/website/blog/',
        },
        theme: {
          customCss: require.resolve('./src/styles/custom.css'),
        },
      } satisfies Options,
    ],
  ],

  themeConfig: {
    colorMode: {
      defaultMode: 'dark',
      disableSwitch: false,
      respectPrefersColorScheme: true,
    },
    algolia: {
      appId: 'WR5FASX5ED',
      apiKey: 'd0d22810f2e9b614ffac3a73b26891fe',
      indexName: 'superset-apache',
    },
    navbar: {
      logo: {
        alt: 'Superset Logo',
        src: '/img/superset-logo-horiz.svg',
        srcDark: '/img/superset-logo-horiz-dark.svg',
      },
      items: [
        {
          label: 'Documentation',
          to: '/docs/intro',
          items: [
            {
              label: 'Getting Started',
              to: '/docs/intro',
            },
            {
              label: 'FAQ',
              to: '/docs/faq',
            },
          ],
        },
        {
          label: 'Community',
          to: '/community',
          items: [
            {
              label: 'Resources',
              href: '/community',
            },
            {
              label: 'GitHub',
              href: 'https://github.com/apache/superset',
            },
            {
              label: 'Slack',
              href: 'http://bit.ly/join-superset-slack',
            },
            {
              label: 'Mailing List',
              href: 'https://lists.apache.org/list.html?dev@superset.apache.org',
            },
            {
              label: 'Stack Overflow',
              href: 'https://stackoverflow.com/questions/tagged/apache-superset',
            },
          ],
        },
        {
          href: '/docs/intro',
          position: 'right',
          className: 'default-button-theme get-started-button',
          label: 'Get Started',
        },
        {
          href: 'https://github.com/apache/superset',
          position: 'right',
          className: 'github-button',
        },
      ],
    },
    footer: {
      links: [],
      copyright: `
        <div class="footer__applitools">
          We use <a href="https://applitools.com/" target="_blank" rel="nofollow"><img src="/img/applitools.png" title="Applitools" /></a>
        </div>
@@ -320,24 +317,51 @@ const config = {
        <!-- telemetry/analytics pixel: -->
        <img referrerPolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=39ae6855-95fc-4566-86e5-360d542b0a68" />
      `,
    },
    prism: {
      theme: lightCodeTheme,
      darkTheme: darkCodeTheme,
    },
    docs: {
      sidebar: {
        hideable: true,
      },
    },
  } satisfies ThemeConfig,
  scripts: [
    '/script/matomo.js',
    {
      src: 'https://widget.kapa.ai/kapa-widget.bundle.js',
      async: true,
      'data-website-id': 'c6a8a8b8-3127-48f9-97a7-51e9e10d20d0',
      'data-project-name': 'Apache Superset',
      'data-project-color': '#FFFFFF',
      'data-project-logo':
        'https://images.seeklogo.com/logo-png/50/2/superset-icon-logo-png_seeklogo-500354.png',
      'data-modal-override-open-id': 'ask-ai-input',
      'data-modal-override-open-class': 'search-input',
      'data-modal-disclaimer':
        'This is a custom LLM for Apache Superset with access to all [documentation](superset.apache.org/docs/intro/), [GitHub Open Issues, PRs and READMEs](github.com/apache/superset). Companies deploy assistants like this ([built by kapa.ai](https://kapa.ai)) on docs via [website widget](https://docs.kapa.ai/integrations/website-widget) (Docker, Reddit), in [support forms](https://docs.kapa.ai/integrations/support-form-deflector) for ticket deflection (Monday.com, Mapbox), or as [Slack bots](https://docs.kapa.ai/integrations/slack-bot) with private sources.',
      'data-modal-example-questions':
        'How do I install Superset?,How can I contribute to Superset?',
      'data-button-text-color': 'rgb(81,166,197)',
      'data-modal-header-bg-color': '#ffffff',
      'data-modal-title-color': 'rgb(81,166,197)',
      'data-modal-title': 'Apache Superset AI',
      'data-modal-disclaimer-text-color': '#000000',
      'data-consent-required': 'true',
      'data-consent-screen-disclaimer':
        "By clicking \"I agree, let's chat\", you consent to the use of the AI assistant in accordance with kapa.ai's [Privacy Policy](https://www.kapa.ai/content/privacy-policy). This service uses reCAPTCHA, which requires your consent to Google's [Privacy Policy](https://policies.google.com/privacy) and [Terms of Service](https://policies.google.com/terms). By proceeding, you explicitly agree to both kapa.ai's and Google's privacy policies.",
    },
  ],
  customFields: {
    matomoUrl: 'https://analytics.apache.org',
    matomoSiteId: '22',
  },
};

export default config;
@@ -14,43 +14,41 @@
    "serve": "yarn run _init && docusaurus serve",
    "write-translations": "docusaurus write-translations",
    "write-heading-ids": "docusaurus write-heading-ids",
    "typecheck": "tsc",
    "eslint": "eslint . --ext .js,.jsx,.ts,.tsx"
  },
  "dependencies": {
    "@algolia/client-search": "^5.15.0",
    "@ant-design/icons": "^5.5.2",
    "@docsearch/react": "^3.6.3",
    "@docusaurus/core": "^3.5.2",
    "@docusaurus/plugin-client-redirects": "^3.5.2",
    "@docusaurus/preset-classic": "^3.5.2",
    "@emotion/core": "^10.1.1",
    "@emotion/styled": "^10.0.27",
    "@mdx-js/react": "^3.1.0",
    "@saucelabs/theme-github-codeblock": "^0.3.0",
    "@superset-ui/style": "^0.14.23",
    "@svgr/webpack": "^8.1.0",
    "antd": "^5.24.5",
    "docusaurus-plugin-less": "^2.0.2",
    "file-loader": "^6.2.0",
    "less": "^4.2.2",
    "less-loader": "^11.0.0",
    "prism-react-renderer": "^2.4.1",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "react-github-btn": "^1.4.0",
    "react-svg-pan-zoom": "^3.13.1",
    "swagger-ui-react": "^5.20.2"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "^3.7.0",
    "@docusaurus/tsconfig": "^3.7.0",
    "@types/react": "^18.3.12",
    "@typescript-eslint/eslint-plugin": "^5.0.0",
    "@typescript-eslint/parser": "^5.0.0",
    "eslint": "^8.0.0",
    "eslint-config-prettier": "^10.1.1",
    "eslint-plugin-prettier": "^4.0.0",
    "eslint-plugin-react": "^7.0.0",
    "prettier": "^2.0.0",
    "typescript": "~5.8.2",
    "webpack": "^5.98.0"
  },
  "browserslist": {
    "production": [
@@ -1,3 +1,4 @@
/* eslint-env node */
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
@@ -39,42 +40,52 @@ const sidebars = {
    {
      type: 'category',
      label: 'Installation',
      items: [
        {
          type: 'autogenerated',
          dirName: 'installation',
        },
      ],
    },
    {
      type: 'category',
      label: 'Configuration',
      items: [
        {
          type: 'autogenerated',
          dirName: 'configuration',
        },
      ],
    },
    {
      type: 'category',
      label: 'Using Superset',
      items: [
        {
          type: 'autogenerated',
          dirName: 'using-superset',
        },
      ],
    },
    {
      type: 'category',
      label: 'Contributing',
      items: [
        {
          type: 'autogenerated',
          dirName: 'contributing',
        },
      ],
    },
    {
      type: 'category',
      label: 'Security',
      items: [
        {
          type: 'autogenerated',
          dirName: 'security',
        },
      ],
    },
    {
      type: 'doc',
@@ -87,7 +98,6 @@ const sidebars = {
      id: 'api',
    },
  ],
};

module.exports = sidebars;
@@ -94,7 +94,7 @@ const StyledSectionHeaderH2 = styled(StyledSectionHeader)`
`;

interface SectionHeaderProps {
  level: 'h1' | 'h2';
  title: string;
  subtitle?: string | ReactNode;
  dark?: boolean;
@@ -115,7 +115,7 @@ const SectionHeader = ({
    <StyledRoot dark={!!dark}>
      <Heading className="title">{title}</Heading>
      <img className="line" src="/img/community/line.png" alt="line" />
      {subtitle && <div className="subtitle">{subtitle}</div>}
    </StyledRoot>
  );
};
@@ -86,7 +86,7 @@ const communityLinks = [
];

const StyledJoinCommunity = styled('section')`
  background-color: var(--ifm-background-color);
  border-bottom: 1px solid var(--ifm-border-color);
  .list {
    max-width: 540px;
@@ -118,7 +118,7 @@ const StyledJoinCommunity = styled('section')`
  .description {
    font-size: 14px;
    line-height: 20px;
    color: var(--ifm-font-base-color);
    margin-top: -8px;
    margin-bottom: 23px;
    ${mq[1]} {
@@ -143,22 +143,6 @@ const StyledCalendarIframe = styled('iframe')`
  }
`;

const StyledLink = styled('a')`
  display: inline-flex;
  align-items: center;
@@ -182,10 +166,9 @@ const StyledLink = styled('a')`
const FinePrint = styled('div')`
  font-size: 14px;
  color: var(--ifm-secondary-text);
`;

const Community = () => {
  const [showCalendar, setShowCalendar] = useState(false); // State to control calendar visibility

  const toggleCalendar = () => {
@@ -218,14 +201,17 @@ const Community = () => {
              className="title"
              href={url}
              target="_blank"
              rel="noreferrer"
              aria-label={ariaLabel}
            >
              <img className="icon" src={`/img/community/${image}`} />
            </a>
          }
          title={
            <a href={url} target="_blank" rel="noreferrer">
              <p className="title" style={{ marginBottom: 0 }}>
                {title}
              </p>
            </a>
          }
          description={<p className="description">{description}</p>}
@@ -246,16 +232,22 @@ const Community = () => {
            <StyledLink
              href="https://calendar.google.com/calendar/u/0/r?cid=superset.committers@gmail.com"
              target="_blank"
              rel="noreferrer"
            >
              <img src="/img/calendar-icon.svg" alt="calendar-icon" />
              Subscribe to the Superset Community Calendar
            </StyledLink>
            <br />
            <StyledLink onClick={toggleCalendar}>
              <img src="/img/calendar-icon.svg" alt="calendar-icon" />
              {showCalendar ? 'Hide Calendar' : 'Display Calendar*'}
            </StyledLink>
            {!showCalendar && (
              <FinePrint>
                <sup>*</sup>Clicking on this link will load and send data
                from and to Google.
              </FinePrint>
            )}
          </>
        }
      />
@@ -16,7 +16,6 @@
 * specific language governing permissions and limitations
 * under the License.
 */
import { useRef, useState, useEffect } from 'react';
import Layout from '@theme/Layout';
import Link from '@docusaurus/Link';
@@ -29,8 +28,6 @@ import SectionHeader from '../components/SectionHeader';
import BlurredSection from '../components/BlurredSection';
import '../styles/main.less';

const features = [
  {
    image: 'powerful-yet-easy.jpg',
@@ -207,7 +204,6 @@ const StyledFeaturesList = styled('ul')`
  .item {
    text-align: left;
    border: 1px solid var(--ifm-border-color);
    border-radius: 10px;
    overflow: hidden;
    display: flex;
@@ -230,7 +226,6 @@ const StyledFeaturesList = styled('ul')`
    }
    .title {
      font-size: 24px;
      margin: 10px 0 0;
      ${mq[1]} {
        font-size: 23px;
@@ -240,7 +235,6 @@ const StyledFeaturesList = styled('ul')`
    .description {
      font-size: 17px;
      line-height: 23px;
      margin: 5px 0 0;
      ${mq[1]} {
        font-size: 16px;
@@ -647,7 +641,10 @@ export default function Home(): JSX.Element {
          </div>
        </Carousel>
        <video autoPlay muted controls loop>
          <source
            src="https://superset.staged.apache.org/superset-video-4k.mp4"
            type="video/mp4"
          />
        </video>
      </StyledSliderSection>
      <StyledKeyFeatures>
@@ -137,4 +137,9 @@ export const Databases = [
    href: 'https://www.denodo.com/',
    imgName: 'denodo.png',
  },
  {
    title: 'TDengine',
    href: 'https://www.tdengine.com/',
    imgName: 'tdengine.png',
  },
];
@@ -58,7 +58,6 @@ ul.dropdown__menu svg {
  --ifm-code-font-size: 95%;
  --ifm-menu-link-padding-vertical: 12px;
  --doc-sidebar-width: 350px !important;
  --ifm-font-family-base: Roboto;
  --ifm-footer-background-color: #173036;
  --ifm-footer-color: #87939a;
@@ -69,3 +68,15 @@ ul.dropdown__menu svg {
  --ifm-code-padding-vertical: 3px;
  --ifm-code-padding-horizontal: 5px;
}

[data-theme='dark'] {
  --ifm-color-primary: #25c2a0;
  --ifm-color-primary-dark: #21af90;
  --ifm-color-primary-darker: #1fa588;
  --ifm-color-primary-darkest: #1a8870;
  --ifm-color-primary-light: #29d5b0;
  --ifm-color-primary-lighter: #32d8b4;
  --ifm-color-primary-lightest: #4fddbf;
  --ifm-font-base-color: #bbb5ac;
  --ifm-border-color: #797063;
}

@@ -114,7 +114,6 @@ a > span > svg {
.navbar {
  font-size: 14px;
  font-weight: 400;
  transition: all 0.5s;

  .get-started-button {
@@ -190,7 +189,7 @@ a > span > svg {
.navbar .DocSearch {
  --docsearch-text-color: #187384;
  --docsearch-muted-color: #187384;
  --docsearch-searchbox-background: var(--ifm-navbar-background-color);
  border: 1px solid #187384;
  border-radius: 10px;

@@ -257,13 +256,3 @@ a > span > svg {
    height: 28px;
  }
}
@@ -1,57 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import styled from '@emotion/styled';
import DocItem from '@theme-original/DocItem';

const EditPageLink = styled('a')`
  position: fixed;
  bottom: 40px;
  right: 10px;
  padding: 1rem;
  padding-left: 4rem;
  background-color: #444;
  border-radius: 10px;
  z-index: 9999;
  background-image: url('/img/github-dark.png');
  background-size: 2rem;
  background-position: 1rem center;
  background-repeat: no-repeat;
  transition: background-color 0.3s; /* Smooth transition for hover effect */
  box-shadow: 0 0 0 0 rgba(0,0,0,0); /* Smooth transition for hover effect */
  scale: .9;
  transition: all 0.3s;
  transform-origin: bottom right;

  &:hover {
    background-color: #333;
    box-shadow: 5px 5px 10px 0 rgba(0,0,0,0.3);
    scale: 1;
  }
`;

export default function DocItemWrapper(props) {
  return (
    <>
      <EditPageLink href={props.content.metadata.editUrl} target="_blank" rel="noopener noreferrer">
        Edit this page on GitHub
      </EditPageLink>
      <DocItem {...props} />
    </>
  );
}
Some files were not shown because too many files have changed in this diff.