Mirror of https://github.com/apache/superset.git (synced 2026-05-03 06:54:19 +00:00)

Compare commits: remove-mor... ... docs_opena... (848 commits)
848 commits in this comparison (c778c15259 … abf3790ea6)
.asf.yaml (23 changes)

@@ -17,7 +17,14 @@
 # https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
 ---
+notifications:
+  commits: commits@superset.apache.org
+  issues: notifications@superset.apache.org
+  pullrequests: notifications@superset.apache.org
+  discussions: notifications@superset.apache.org
+
 github:
+  del_branch_on_merge: true
   description: "Apache Superset is a Data Visualization and Data Exploration Platform"
   homepage: https://superset.apache.org/
   labels:
@@ -47,12 +54,17 @@ github:
   projects: true
   # Enable wiki for documentation
   wiki: true
+  # Enable discussions
+  discussions: true

   enabled_merge_buttons:
     squash: true
     merge: false
     rebase: false

+  ghp_branch: gh-pages
+  ghp_path: /
+
   protected_branches:
     master:
       required_status_checks:
@@ -69,18 +81,16 @@ github:
         - cypress-matrix (3, chrome)
         - cypress-matrix (4, chrome)
         - cypress-matrix (5, chrome)
+        - dependency-review
         - frontend-build
         - pre-commit (current)
-        - pre-commit (next)
         - pre-commit (previous)
         - test-mysql
         - test-postgres (current)
-        - test-postgres (next)
         - test-postgres-hive
         - test-postgres-presto
         - test-sqlite
         - unit-tests (current)
-        - unit-tests (next)

       required_pull_request_reviews:
         dismiss_stale_reviews: false
@@ -88,3 +98,10 @@ github:
         required_approving_review_count: 1

       required_signatures: false
+    gh-pages:
+      required_pull_request_reviews:
+        dismiss_stale_reviews: false
+        require_code_owner_reviews: true
+        required_approving_review_count: 1
+
+      required_signatures: false
@@ -34,7 +34,6 @@
 **/*.sqllite
 **/*.swp
 **/.terser-plugin-cache/
-**/.storybook/
 **/node_modules/

 tests/
@@ -42,6 +41,8 @@ docs/
 install/
 superset-frontend/cypress-base/
 superset-frontend/coverage/
+superset-frontend/.temp_cache/
 superset/static/assets/
 superset-websocket/dist/
 venv
+.venv
.gitattributes (1 change, vendored)

@@ -1,2 +1,3 @@
 docker/**/*.sh text eol=lf
 *.svg binary
+*.ipynb binary
.github/CODEOWNERS (10 changes, vendored)

@@ -12,21 +12,21 @@

 # Notify Helm Chart maintainers about changes in it

-/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina
+/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina @mistercrunch @rusackas @Antonio-RiveroMartnez

 # Notify E2E test maintainers of changes

-/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida
+/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida @mistercrunch

 # Notify PMC members of changes to GitHub Actions

-/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
+/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar

 # Notify PMC members of changes to required GitHub Actions

-/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
+/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @Antonio-RiveroMartnez

-# Maps are a finnicky contribution process we care about
+# Maps are a finicky contribution process we care about

 **/*.geojson @villebro @rusackas
 /superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas
.github/ISSUE_TEMPLATE/bug-report.yml (4 changes, vendored)

@@ -41,8 +41,8 @@ body:
       label: Superset version
       options:
         - master / latest-dev
-        - "4.1.0"
-        - "3.1.3"
+        - "4.1.2"
+        - "4.0.2"
     validations:
       required: true
   - type: dropdown
.github/actions/change-detector/label-draft-pr.yml (new file, 23 lines, vendored)

@@ -0,0 +1,23 @@
+name: Label Draft PRs
+on:
+  pull_request:
+    types:
+      - opened
+      - converted_to_draft
+jobs:
+  label-draft:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check if the PR is a draft
+        id: check-draft
+        uses: actions/github-script@v6
+        with:
+          script: |
+            const isDraft = context.payload.pull_request.draft;
+            core.setOutput('isDraft', isDraft);
+      - name: Add `review:draft` Label
+        if: steps.check-draft.outputs.isDraft == 'true'
+        uses: actions-ecosystem/action-add-labels@v1
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          labels: "review:draft"
.github/actions/setup-backend/action.yml (17 changes, vendored)

@@ -26,11 +26,12 @@ runs:
     shell: bash
     run: |
       if [ "${{ inputs.python-version }}" = "current" ]; then
-        echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV
-      elif [ "${{ inputs.python-version }}" = "next" ]; then
         echo "PYTHON_VERSION=3.11" >> $GITHUB_ENV
+      elif [ "${{ inputs.python-version }}" = "next" ]; then
+        # currently disabled in GHA matrixes because of library compatibility issues
+        echo "PYTHON_VERSION=3.12" >> $GITHUB_ENV
       elif [ "${{ inputs.python-version }}" = "previous" ]; then
-        echo "PYTHON_VERSION=3.9" >> $GITHUB_ENV
+        echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV
       else
         echo "PYTHON_VERSION=${{ inputs.python-version }}" >> $GITHUB_ENV
       fi
@@ -43,11 +44,15 @@ runs:
     run: |
       if [ "${{ inputs.install-superset }}" = "true" ]; then
         sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
-        pip install --upgrade pip setuptools wheel
+        pip install --upgrade pip setuptools wheel uv
+
         if [ "${{ inputs.requirements-type }}" = "dev" ]; then
-          pip install -r requirements/development.txt
+          uv pip install --system -r requirements/development.txt
         elif [ "${{ inputs.requirements-type }}" = "base" ]; then
-          pip install -r requirements/base.txt
+          uv pip install --system -r requirements/base.txt
         fi
+
+        uv pip install --system -e .
       fi
     shell: bash
.github/actions/setup-docker/action.yml (new file, 69 lines, vendored)

@@ -0,0 +1,69 @@
+name: "Setup Docker Environment"
+description: "Reusable steps for setting up QEMU, Docker Buildx, DockerHub login, Supersetbot, and optionally Docker Compose"
+inputs:
+  build:
+    description: "Used for building?"
+    required: false
+    default: "false"
+  dockerhub-user:
+    description: "DockerHub username"
+    required: false
+  dockerhub-token:
+    description: "DockerHub token"
+    required: false
+  install-docker-compose:
+    description: "Flag to install Docker Compose"
+    required: false
+    default: "true"
+  login-to-dockerhub:
+    description: "Whether you want to log into dockerhub"
+    required: false
+    default: "true"
+outputs: {}
+runs:
+  using: "composite"
+  steps:
+
+    - name: Set up QEMU
+      if: ${{ inputs.build == 'true' }}
+      uses: docker/setup-qemu-action@v3
+
+    - name: Set up Docker Buildx
+      if: ${{ inputs.build == 'true' }}
+      uses: docker/setup-buildx-action@v3
+
+    - name: Try to login to DockerHub
+      if: ${{ inputs.login-to-dockerhub == 'true' }}
+      continue-on-error: true
+      uses: docker/login-action@v3
+      with:
+        username: ${{ inputs.dockerhub-user }}
+        password: ${{ inputs.dockerhub-token }}
+
+    - name: Install Docker Compose
+      if: ${{ inputs.install-docker-compose == 'true' }}
+      shell: bash
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y ca-certificates curl
+        sudo install -m 0755 -d /etc/apt/keyrings
+
+        # Download and save the Docker GPG key in the correct format
+        curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
+
+        # Ensure the key file is readable
+        sudo chmod a+r /etc/apt/keyrings/docker.gpg
+
+        # Add the Docker repository using the correct key
+        echo \
+          "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
+          $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
+          sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+        # Update package lists and install Docker Compose plugin
+        sudo apt update
+        sudo apt install -y docker-compose-plugin
+
+    - name: Docker Version Info
+      shell: bash
+      run: docker info
.github/dependabot.yml (17 changes, vendored)

@@ -1,4 +1,5 @@
 version: 2
+enable-beta-ecosystems: true
 updates:

   - package-ecosystem: "github-actions"
@@ -21,10 +22,14 @@ updates:
     versioning-strategy: increase


-  # - package-ecosystem: "pip"
-  # NOTE: as dependabot isn't compatible with our python
-  # dependency setup (pip-compile-multi), we'll be using
-  # `supersetbot` instead
+  # NOTE: `uv` support is in beta, more details here:
+  # https://github.com/dependabot/dependabot-core/pull/10040#issuecomment-2696978430
+  - package-ecosystem: "uv"
+    directory: "requirements/"
+    open-pull-requests-limit: 10
+    labels:
+      - uv
+      - dependabot

   - package-ecosystem: "npm"
     directory: ".github/actions"
@@ -323,6 +328,10 @@ updates:

   - package-ecosystem: "npm"
     directory: "/superset-frontend/packages/superset-ui-core/"
+    ignore:
+      # not until React >= 18.0.0
+      - dependency-name: "react-markdown"
+      - dependency-name: "remark-gfm"
     schedule:
       interval: "monthly"
     labels:
.github/labeler.yml (5 changes, vendored)

@@ -127,6 +127,11 @@
   - any-glob-to-any-file:
     - 'superset/translations/es/**'

+"i18n:persian":
+  - changed-files:
+    - any-glob-to-any-file:
+      - 'superset/translations/fa/**'
+
 ############################################
 # Sub-projects and monorepo packages
 ############################################
.github/workflows/bashlib.sh (8 changes, vendored)

@@ -145,6 +145,7 @@ cypress-install() {

 cypress-run-all() {
   local USE_DASHBOARD=$1
+  local APP_ROOT=$2
   cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"

   # Start Flask and run it in background
@@ -152,7 +153,12 @@ cypress-run-all() {
   # so errors can print to stderr.
   local flasklog="${HOME}/flask.log"
   local port=8081
-  export CYPRESS_BASE_URL="http://localhost:${port}"
+  CYPRESS_BASE_URL="http://localhost:${port}"
+  if [ -n "$APP_ROOT" ]; then
+    export SUPERSET_APP_ROOT=$APP_ROOT
+    CYPRESS_BASE_URL=${CYPRESS_BASE_URL}${APP_ROOT}
+  fi
+  export CYPRESS_BASE_URL

   nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
   local flaskProcessId=$!
.github/workflows/bump-python-package.yml (17 changes, vendored)

@@ -14,10 +14,16 @@ on:
        required: true
        description: Max number of PRs to open (0 for no limit)
        default: 5
+      extra-flags:
+        required: false
+        default: --only-base
+        description: Additional flags to pass to the bump-python command
+  #schedule:
+  #  - cron: '0 0 * * *' # Runs daily at midnight UTC

 jobs:
   bump-python-package:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       actions: write
       contents: write
@@ -39,8 +45,8 @@ jobs:
         with:
           python-version: "3.10"

-      - name: Install pip-compile-multi
-        run: pip install pip-compile-multi
+      - name: Install uv
+        run: pip install uv

      - name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
        env:
@@ -59,10 +65,13 @@ jobs:
            GROUP_OPT="-g ${{ github.event.inputs.group }}"
          fi

+          EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
+
          supersetbot bump-python \
            --verbose \
            --use-current-repo \
            --include-subpackages \
            --limit ${{ github.event.inputs.limit }} \
            $PACKAGE_OPT \
-            $GROUP_OPT
+            $GROUP_OPT \
+            $EXTRA_FLAGS
.github/workflows/cancel_duplicates.yml (2 changes, vendored)

@@ -9,7 +9,7 @@ on:
 jobs:
   cancel-duplicate-runs:
     name: Cancel duplicate workflow runs
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       actions: write
       contents: read
.github/workflows/check-python-deps.yml (new file, 43 lines, vendored)

@@ -0,0 +1,43 @@
+name: Check python dependencies
+
+on:
+  push:
+    branches:
+      - "master"
+      - "[0-9].[0-9]*"
+  pull_request:
+    types: [synchronize, opened, reopened, ready_for_review]
+
+# cancel previous workflow jobs for PRs
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  check-python-deps:
+    runs-on: ubuntu-22.04
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+          submodules: recursive
+          fetch-depth: 1
+
+      - name: Setup Python
+        if: steps.check.outputs.python
+        uses: ./.github/actions/setup-backend/
+
+      - name: Run uv
+        if: steps.check.outputs.python
+        run: ./scripts/uv-pip-compile.sh
+
+      - name: Check for uncommitted changes
+        run: |
+          if [[ -n "$(git diff)" ]]; then
+            echo "ERROR: The pinned dependencies are not up-to-date."
+            echo "Please run './scripts/uv-pip-compile.sh' and commit the changes."
+            exit 1
+          else
+            echo "Pinned dependencies are up-to-date."
+          fi
@@ -19,7 +19,7 @@ concurrency:
 jobs:
   check_db_migration_conflict:
     name: Check DB migration conflict
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: read
       pull-requests: write
.github/workflows/codeql-analysis.yml (2 changes, vendored)

@@ -17,7 +17,7 @@ concurrency:
 jobs:
   analyze:
     name: Analyze
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       actions: read
       contents: read
.github/workflows/dependency-review.yml (40 changes, vendored)

@@ -5,19 +5,32 @@
 # Source repository: https://github.com/actions/dependency-review-action
 # Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
 name: "Dependency Review"
-on: [pull_request]
+on:
+  push:
+    branches:
+      - "master"
+      - "[0-9].[0-9]*"
+  pull_request:
+    types: [synchronize, opened, reopened, ready_for_review]
+
+# cancel previous workflow jobs for PRs
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read

 jobs:
   dependency-review:
-    runs-on: ubuntu-22.04
+    if: github.event_name == 'pull_request'
+    runs-on: ubuntu-24.04
     steps:
       - name: "Checkout Repository"
         uses: actions/checkout@v4
       - name: "Dependency Review"
         uses: actions/dependency-review-action@v4
+        continue-on-error: true
         with:
           fail-on-severity: critical
           # compatible/incompatible licenses addressed here: https://www.apache.org/legal/resolved.html
@@ -32,4 +45,25 @@ jobs:
           # license: https://applitools.com/legal/open-source-terms-of-use/
           # pkg:npm/node-forge@1.3.1
           # selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
-          allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor
+          allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1
+
+  python-dependency-liccheck:
+    runs-on: ubuntu-22.04
+    steps:
+      - name: "Checkout Repository"
+        uses: actions/checkout@v4
+
+      - name: Setup Python
+        uses: ./.github/actions/setup-backend/
+        with:
+          requirements-type: base
+
+      - name: "Set up liccheck"
+        run: |
+          uv pip install --system liccheck
+      - name: "Run liccheck"
+        run: |
+          # run the checks
+          liccheck -R output.txt
+          # Print the report
+          cat output.txt
.github/workflows/docker.yml (79 changes, vendored)

@@ -14,21 +14,22 @@ concurrency:
   cancel-in-progress: true

 jobs:
+
   setup_matrix:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     outputs:
       matrix_config: ${{ steps.set_matrix.outputs.matrix_config }}
     steps:
       - id: set_matrix
         run: |
-          MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
+          MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev", "lean"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
           echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT
           echo $GITHUB_OUTPUT

   docker-build:
     name: docker-build
     needs: setup_matrix
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       matrix:
         build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}
@@ -36,6 +37,7 @@ jobs:
     env:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+      IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}

     steps:

@@ -50,21 +52,13 @@ jobs:
         with:
           token: ${{ secrets.GITHUB_TOKEN }}

-      - name: Set up QEMU
+      - name: Setup Docker Environment
         if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
-        uses: docker/setup-buildx-action@v3
-
-      - name: Try to login to DockerHub
-        if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
-        continue-on-error: true
-        uses: docker/login-action@v3
+        uses: ./.github/actions/setup-docker
         with:
-          username: ${{ secrets.DOCKERHUB_USER }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          build: "true"

       - name: Setup supersetbot
         if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
@@ -79,12 +73,65 @@ jobs:
           # Single platform builds in pull_request context to speed things up
           if [ "${{ github.event_name }}" = "push" ]; then
             PLATFORM_ARG="--platform linux/arm64 --platform linux/amd64"
+            # can only --load images in single-platform builds
+            PUSH_OR_LOAD="--push"
           elif [ "${{ github.event_name }}" = "pull_request" ]; then
             PLATFORM_ARG="--platform linux/amd64"
+            PUSH_OR_LOAD="--load"
           fi

           supersetbot docker \
+            $PUSH_OR_LOAD \
             --preset ${{ matrix.build_preset }} \
             --context "$EVENT" \
             --context-ref "$RELEASE" $FORCE_LATEST \
+            --extra-flags "--build-arg INCLUDE_CHROMIUM=false --tag $IMAGE_TAG" \
             $PLATFORM_ARG
+
+      # in the context of push (using multi-platform build), we need to pull the image locally
+      - name: Docker pull
+        if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
+        run: docker pull $IMAGE_TAG
+
+      - name: Print docker stats
+        if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
+        run: |
+          echo "SHA: ${{ github.sha }}"
+          echo "IMAGE: $IMAGE_TAG"
+          docker images $IMAGE_TAG
+          docker history $IMAGE_TAG
+
+      - name: docker-compose sanity check
+        if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
+        shell: bash
+        run: |
+          export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
+          # This should reuse the CACHED image built in the previous steps
+          docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
+          docker compose up superset-init --exit-code-from superset-init
+
+  docker-compose-image-tag:
+    runs-on: ubuntu-24.04
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - name: Check for file changes
+        id: check
+        uses: ./.github/actions/change-detector/
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Setup Docker Environment
+        if: steps.check.outputs.docker
+        uses: ./.github/actions/setup-docker
+        with:
+          dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          build: "false"
+          install-docker-compose: "true"
+      - name: docker-compose sanity check
+        if: steps.check.outputs.docker
+        shell: bash
+        run: |
+          docker compose -f docker-compose-image-tag.yml up superset-init --exit-code-from superset-init
.github/workflows/embedded-sdk-release.yml (6 changes, vendored)

@@ -8,7 +8,7 @@ on:

 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -23,7 +23,7 @@ jobs:
   build:
     needs: config
     if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     defaults:
       run:
         working-directory: superset-embedded-sdk
@@ -31,7 +31,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: "20"
+          node-version-file: './superset-embedded-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
       - run: npm ci
       - run: npm run ci:release
.github/workflows/embedded-sdk-test.yml (4 changes, vendored)

@@ -13,7 +13,7 @@ concurrency:

 jobs:
   embedded-sdk-test:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     defaults:
       run:
         working-directory: superset-embedded-sdk
@@ -21,7 +21,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v4
         with:
-          node-version: "20"
+          node-version-file: './superset-embedded-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
       - run: npm ci
       - run: npm test
.github/workflows/ephemeral-env-pr-close.yml (4 changes, vendored)

@@ -6,7 +6,7 @@ on:

 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -22,7 +22,7 @@ jobs:
     needs: config
     if: needs.config.outputs.has-secrets
     name: Cleanup ephemeral envs
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       pull-requests: write
     steps:
264
.github/workflows/ephemeral-env.yml
vendored
264
.github/workflows/ephemeral-env.yml
vendored
@@ -1,132 +1,181 @@
|
|||||||
name: Ephemeral env workflow
|
name: Ephemeral env workflow
|
||||||
|
|
||||||
|
# Example manual trigger:
|
||||||
|
# gh workflow run ephemeral-env.yml --ref fix_ephemerals --field label_name="testenv-up" --field issue_number=666
|
||||||
|
|
||||||
on:
|
on:
|
||||||
issue_comment:
|
pull_request_target:
|
||||||
types: [created]
|
types:
|
||||||
|
- labeled
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
label_name:
|
||||||
|
description: 'Label name to simulate label-based /testenv trigger'
|
||||||
|
required: true
|
||||||
|
default: 'testenv-up'
|
||||||
|
issue_number:
|
||||||
|
description: 'Issue or PR number'
|
||||||
|
required: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
config:
|
ephemeral-env-label:
|
||||||
runs-on: "ubuntu-22.04"
|
|
||||||
if: github.event.issue.pull_request
|
|
||||||
outputs:
|
|
||||||
has-secrets: ${{ steps.check.outputs.has-secrets }}
|
|
||||||
steps:
|
|
||||||
- name: "Check for secrets"
|
|
||||||
id: check
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
|
|
||||||
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
ephemeral-env-comment:
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-label
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
     needs: config
     if: needs.config.outputs.has-secrets
-    name: Evaluate ephemeral env comment trigger (/testenv)
-    runs-on: ubuntu-22.04
+    name: Evaluate ephemeral env label trigger
+    runs-on: ubuntu-24.04
     permissions:
       pull-requests: write
     outputs:
-      slash-command: ${{ steps.eval-body.outputs.result }}
+      slash-command: ${{ steps.eval-label.outputs.result }}
       feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
+      sha: ${{ steps.get-sha.outputs.sha }}
+    env:
+      DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
+      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
     steps:
-      - name: Debug
-        run: |
-          echo "Comment on PR #${{ github.event.issue.number }} by ${{ github.event.issue.user.login }}, ${{ github.event.comment.author_association }}"
-      - name: Eval comment body for /testenv slash command
+      - name: Check for the "testenv-up" label
+        id: eval-label
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            LABEL_NAME="${{ github.event.inputs.label_name }}"
+          else
+            LABEL_NAME="${{ github.event.label.name }}"
+          fi
+          echo "Evaluating label: $LABEL_NAME"
+          if [[ "$LABEL_NAME" == "testenv-up" ]]; then
+            echo "result=up" >> $GITHUB_OUTPUT
+          else
+            echo "result=noop" >> $GITHUB_OUTPUT
+          fi
+      - name: Get event SHA
+        id: get-sha
+        if: steps.eval-label.outputs.result == 'up'
         uses: actions/github-script@v7
-        id: eval-body
         with:
-          result-encoding: string
+          github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
-            const pattern = /^\/testenv (up|down)/
-            const result = pattern.exec(context.payload.comment.body)
-            return result === null ? 'noop' : result[1]
-      - name: Eval comment body for feature flags
+            let prSha;
+            // If event is workflow_dispatch, use the issue_number from inputs
+            if (context.eventName === "workflow_dispatch") {
+              const prNumber = "${{ github.event.inputs.issue_number }}";
+              if (!prNumber) {
+                console.log("No PR number found.");
+                return;
+              }
+              // Fetch PR details using the provided issue_number
+              const { data: pr } = await github.rest.pulls.get({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                pull_number: prNumber
+              });
+              prSha = pr.head.sha;
+            } else {
+              // If it's not workflow_dispatch, use the PR head sha from the event
+              prSha = context.payload.pull_request.head.sha;
+            }
+            console.log(`PR SHA: ${prSha}`);
+            core.setOutput("sha", prSha);
+      - name: Looking for feature flags in PR description
         uses: actions/github-script@v7
         id: eval-feature-flags
+        if: steps.eval-label.outputs.result == 'up'
         with:
           script: |
+            const description = context.payload.pull_request
+              ? context.payload.pull_request.body || ''
+              : context.payload.inputs.pr_description || '';
             const pattern = /FEATURE_(\w+)=(\w+)/g;
             let results = [];
-            [...context.payload.comment.body.matchAll(pattern)].forEach(match => {
+            [...description.matchAll(pattern)].forEach(match => {
               const config = {
                 name: `SUPERSET_FEATURE_${match[1]}`,
                 value: match[2],
               };
               results.push(config);
             });
             return results;
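A minimal shell sketch of the FEATURE_(\w+)=(\w+) convention the step above scans for, assuming a PR description contains a line such as FEATURE_DASHBOARD_RBAC=true (the flag name is illustrative, not taken from this diff):

  # hypothetical PR-description line; any FEATURE_<NAME>=<VALUE> line is picked up
  echo "FEATURE_DASHBOARD_RBAC=true" \
    | sed -E 's/^FEATURE_([A-Za-z0-9_]+)=([A-Za-z0-9_]+)$/{"name":"SUPERSET_FEATURE_\1","value":"\2"}/'
  # prints: {"name":"SUPERSET_FEATURE_DASHBOARD_RBAC","value":"true"}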
-      - name: Limit to committers
-        if: >
-          steps.eval-body.outputs.result != 'noop' &&
-          github.event.comment.author_association != 'MEMBER' &&
-          github.event.comment.author_association != 'OWNER'
+      - name: Reply with confirmation comment
         uses: actions/github-script@v7
+        if: steps.eval-label.outputs.result == 'up'
         with:
-          github-token: ${{github.token}}
+          github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
-            const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.'
-            github.rest.issues.createComment({
-              issue_number: ${{ github.event.issue.number }},
+            const action = '${{ steps.eval-label.outputs.result }}';
+            const user = context.actor;
+            const runId = context.runId;
+            const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
+            const issueNumber = context.payload.pull_request
+              ? context.payload.pull_request.number
+              : context.payload.inputs.issue_number;
+            if (!issueNumber) {
+              throw new Error("Issue number is not available.");
+            }
+            const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).` +
+              ` Action: **${action}**.` +
+              ` More information on [how to use or configure ephemeral environments]` +
+              `(https://superset.apache.org/docs/contributing/howtos/#github-ephemeral-environments)`;
+            await github.rest.issues.createComment({
               owner: context.repo.owner,
               repo: context.repo.repo,
-              body: errMsg
-            })
-            core.setFailed(errMsg)
+              issue_number: issueNumber,
+              body,
+            });

   ephemeral-docker-build:
     concurrency:
-      group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build
+      group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-build
       cancel-in-progress: true
-    needs: ephemeral-env-comment
+    needs: ephemeral-env-label
+    if: needs.ephemeral-env-label.outputs.slash-command == 'up'
     name: ephemeral-docker-build
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - name: Get Info from comment
-        uses: actions/github-script@v7
-        id: get-pr-info
-        with:
-          script: |
-            const request = {
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              pull_number: ${{ github.event.issue.number }},
-            }
-            core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`)
-            const pr = await github.rest.pulls.get(request);
-            return pr.data;
-      - name: Debug
-        id: get-sha
-        run: |
-          echo "sha=${{ fromJSON(steps.get-pr-info.outputs.result).head.sha }}" >> $GITHUB_OUTPUT
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} : ${{steps.get-sha.outputs.sha}} )"
+      - name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
         uses: actions/checkout@v4
         with:
-          ref: ${{ steps.get-sha.outputs.sha }}
+          ref: ${{ needs.ephemeral-env-label.outputs.sha }}
           persist-credentials: false
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
+      - name: Setup Docker Environment
+        uses: ./.github/actions/setup-docker
+        with:
+          dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
+          dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+          build: "true"
+          install-docker-compose: "false"
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+      - name: Setup supersetbot
+        uses: ./.github/actions/setup-supersetbot/
       - name: Build ephemeral env image
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          ./scripts/build_docker.py \
-            "ci" \
-            "pull_request" \
-            --build_context_ref ${{ github.event.issue.number }}
+          supersetbot docker \
+            --push \
+            --load \
+            --preset ci \
+            --platform linux/amd64 \
+            --context-ref "$RELEASE" \
+            --extra-flags "--build-arg INCLUDE_CHROMIUM=false"
       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -144,16 +193,17 @@ jobs:
         env:
           ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
           ECR_REPOSITORY: superset-ci
-          IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
+          IMAGE_TAG: apache/superset:${{ needs.ephemeral-env-label.outputs.sha }}-ci
+          PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
         run: |
-          docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci
+          docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-$PR_NUMBER-ci
           docker push -a $ECR_REGISTRY/$ECR_REPOSITORY

   ephemeral-env-up:
-    needs: [ephemeral-env-comment, ephemeral-docker-build]
-    if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
+    needs: [ephemeral-env-label, ephemeral-docker-build]
+    if: needs.ephemeral-env-label.outputs.slash-command == 'up'
     name: Spin up an ephemeral environment
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: read
       pull-requests: write
@@ -177,26 +227,28 @@ jobs:
       - name: Check target image exists in ECR
         id: check-image
         continue-on-error: true
+        env:
+          PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
         run: |
           aws ecr describe-images \
             --registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
             --repository-name superset-ci \
-            --image-ids imageTag=pr-${{ github.event.issue.number }}-ci
+            --image-ids imageTag=pr-$PR_NUMBER-ci
       - name: Fail on missing container image
         if: steps.check-image.outcome == 'failure'
         uses: actions/github-script@v7
         with:
-          github-token: ${{github.token}}
+          github-token: ${{ github.token }}
           script: |
-            const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.'
+            const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
             github.rest.issues.createComment({
-              issue_number: ${{ github.event.issue.number }},
+              issue_number: ${{ github.event.inputs.issue_number || github.event.pull_request.number }},
               owner: context.repo.owner,
               repo: context.repo.repo,
               body: errMsg
-            })
-            core.setFailed(errMsg)
+            });
+            core.setFailed(errMsg);
       - name: Fill in the new image ID in the Amazon ECS task definition
         id: task-def
@@ -204,39 +256,39 @@ jobs:
         with:
           task-definition: .github/workflows/ecs-task-definition.json
           container-name: superset-ci
-          image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci
+          image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-ci
       - name: Update env vars in the Amazon ECS task definition
         run: |
-          cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-comment.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}
+          cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-label.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}
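A rough sketch of the jq merge performed by that step, assuming an illustrative task-definition file named task-def.json and a hand-written flags array standing in for the workflow's templated feature-flags output:

  FLAGS='[{"name": "SUPERSET_FEATURE_DASHBOARD_RBAC", "value": "true"}]'   # placeholder array
  cat <<< "$(jq ".containerDefinitions[0].environment += $FLAGS" < task-def.json)" > task-def.json
  # appends the flag objects to the first container definition's environment list in place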
       - name: Describe ECS service
         id: describe-services
         run: |
-          echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
+          echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
       - name: Create ECS service
-        if: steps.describe-services.outputs.active != 'true'
         id: create-service
+        if: steps.describe-services.outputs.active != 'true'
         env:
           ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
           ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
+          PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
         run: |
           aws ecs create-service \
             --cluster superset-ci \
-            --service-name pr-${{ github.event.issue.number }}-service \
+            --service-name pr-$PR_NUMBER-service \
             --task-definition superset-ci \
             --launch-type FARGATE \
             --desired-count 1 \
             --platform-version LATEST \
             --network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
-            --tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }}
+            --tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
       - name: Deploy Amazon ECS task definition
         id: deploy-task
         uses: aws-actions/amazon-ecs-deploy-task-definition@v2
         with:
           task-definition: ${{ steps.task-def.outputs.task-definition }}
-          service: pr-${{ github.event.issue.number }}-service
+          service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
           cluster: superset-ci
           wait-for-service-stability: true
           wait-for-minutes: 10
@@ -244,40 +296,38 @@ jobs:
       - name: List tasks
         id: list-tasks
         run: |
-          echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
+          echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
       - name: Get network interface
         id: get-eni
         run: |
-          echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT
+          echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value')" >> $GITHUB_OUTPUT
       - name: Get public IP
         id: get-ip
         run: |
           echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
       - name: Comment (success)
         if: ${{ success() }}
         uses: actions/github-script@v7
         with:
           github-token: ${{github.token}}
           script: |
+            const issue_number = context.payload.inputs?.issue_number || context.issue.number;
             github.rest.issues.createComment({
-              issue_number: ${{ github.event.issue.number }},
+              issue_number: issue_number,
               owner: context.repo.owner,
               repo: context.repo.repo,
-              body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
-            })
+              body: `@${{ github.actor }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are 'admin'/'admin'. Please allow several minutes for bootstrapping and startup.`
+            });
       - name: Comment (failure)
         if: ${{ failure() }}
         uses: actions/github-script@v7
         with:
           github-token: ${{github.token}}
           script: |
+            const issue_number = context.payload.inputs?.issue_number || context.issue.number;
             github.rest.issues.createComment({
-              issue_number: ${{ github.event.issue.number }},
+              issue_number: issue_number,
               owner: context.repo.owner,
               repo: context.repo.repo,
-              body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
+              body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
             })

.github/workflows/generate-FOSSA-report.yml (4 changes)
@@ -8,7 +8,7 @@ on:
 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -24,7 +24,7 @@ jobs:
     needs: config
     if: needs.config.outputs.has-secrets
     name: Generate Report
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4

@@ -11,7 +11,7 @@ on:
 jobs:
   validate-all-ghas:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout Repository
         uses: actions/checkout@v4

.github/workflows/issue_creation.yml (2 changes)
@@ -9,7 +9,7 @@ on:
 jobs:
   superbot-orglabel:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: read
       pull-requests: write

.github/workflows/labeler.yml (2 changes)
@@ -7,7 +7,7 @@ jobs:
     permissions:
       contents: read
       pull-requests: write
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - uses: actions/labeler@v5
         with:

.github/workflows/latest-release-tag.yml (2 changes)
@@ -6,7 +6,7 @@ on:
 jobs:
   latest-release:
     name: Add/update tag to new release
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: write

.github/workflows/license-check.yml (2 changes)
@@ -12,7 +12,7 @@ concurrency:
 jobs:
   license_check:
     name: License Check
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4

.github/workflows/no-hold-label.yml (2 changes)
@@ -11,7 +11,7 @@ concurrency:
 jobs:
   check-hold-label:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Check for 'hold' label
         uses: actions/github-script@v7

.github/workflows/pr-lint.yml (2 changes)
@@ -10,7 +10,7 @@ on:
 jobs:
   lint-check:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: read
       pull-requests: write

.github/workflows/pre-commit.yml (35 changes)
@@ -15,10 +15,10 @@ concurrency:
 jobs:
   pre-commit:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       matrix:
-        python-version: ["current", "next", "previous"]
+        python-version: ["current", "previous"]
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4
@@ -38,12 +38,39 @@ jobs:
           echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}"
           echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
           brew install norwoodj/tap/helm-docs
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+      - name: Install Frontend Dependencies
+        run: |
+          cd superset-frontend
+          npm ci
+      - name: Install Docs Dependencies
+        run: |
+          cd docs
+          yarn install --immutable
       - name: pre-commit
         run: |
           set +e # Don't exit immediately on failure
+          export SKIP=eslint-frontend,type-checking-frontend
           pre-commit run --all-files
-          if [ $? -ne 0 ] || ! git diff --quiet --exit-code; then
-            echo "❌ Pre-commit check failed."
+          PRE_COMMIT_EXIT_CODE=$?
+          git diff --quiet --exit-code
+          GIT_DIFF_EXIT_CODE=$?
+          if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
+            if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
+              echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
+            else
+              echo "❌ Git working directory is dirty."
+              echo "📌 This likely means that pre-commit made changes that were not committed."
+              echo "🔍 Modified files:"
+              git diff --name-only
+            fi
             echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
             echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
             exit 1
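A rough local equivalent of the reworked gate above, assuming pre-commit is already installed (the SKIP list mirrors the hooks the CI job skips):

  export SKIP=eslint-frontend,type-checking-frontend
  pre-commit run --all-files
  # a non-empty diff afterwards means hooks rewrote files that were not committed
  git diff --quiet --exit-code || git diff --name-only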

.github/workflows/prefer-typescript.yml (2 changes)
@@ -21,7 +21,7 @@ jobs:
   prefer_typescript:
     if: github.ref == 'ref/heads/master' && github.event_name == 'pull_request'
     name: Prefer TypeScript
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: read
       pull-requests: write

.github/workflows/release.yml (14 changes)
@@ -8,7 +8,7 @@ on:
 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -24,13 +24,7 @@ jobs:
     needs: config
     if: needs.config.outputs.has-secrets
     name: Bump version and publish package(s)
-    runs-on: ubuntu-22.04
-    strategy:
-      matrix:
-        node-version: [20]
+    runs-on: ubuntu-24.04
     steps:
       - uses: actions/checkout@v4
         with:
@@ -46,11 +40,11 @@ jobs:
           git fetch --prune --unshallow
           git tag -d `git tag | grep -E '^trigger-'`
-      - name: Use Node.js ${{ matrix.node-version }}
+      - name: Install Node.js
         if: env.HAS_TAGS
         uses: actions/setup-node@v4
         with:
-          node-version: ${{ matrix.node-version }}
+          node-version-file: './superset-frontend/.nvmrc'
       - name: Cache npm
         if: env.HAS_TAGS

@@ -6,7 +6,7 @@ on:
 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -21,12 +21,11 @@ jobs:
   cypress-applitools:
     needs: config
     if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       fail-fast: false
       matrix:
         browser: ["chrome"]
-        node: [20]
     env:
       SUPERSET_ENV: development
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -40,7 +39,7 @@ jobs:
       APPLITOOLS_BATCH_NAME: Superset Cypress
     services:
       postgres:
-        image: postgres:15-alpine
+        image: postgres:16-alpine
         env:
           POSTGRES_USER: superset
           POSTGRES_PASSWORD: superset
@@ -66,7 +65,7 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: ${{ matrix.node }}
+          node-version-file: './superset-frontend/.nvmrc'
       - name: Install npm dependencies
         uses: ./.github/actions/cached-dependencies
         with:

@@ -12,7 +12,7 @@ env:
 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -27,10 +27,7 @@ jobs:
   cron:
     needs: config
     if: needs.config.outputs.has-secrets
-    runs-on: ubuntu-22.04
-    strategy:
-      matrix:
-        node: [20]
+    runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4
@@ -41,7 +38,7 @@ jobs:
       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: ${{ matrix.node }}
+          node-version-file: './superset-frontend/.nvmrc'
       - name: Install eyes-storybook dependencies
         uses: ./.github/actions/cached-dependencies
         with:

.github/workflows/superset-cli.yml (4 changes)
@@ -15,7 +15,7 @@ concurrency:
 jobs:
   test-load-examples:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       PYTHONPATH: ${{ github.workspace }}
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -23,7 +23,7 @@ jobs:
       SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
     services:
       postgres:
-        image: postgres:15-alpine
+        image: postgres:16-alpine
         env:
           POSTGRES_USER: superset
           POSTGRES_PASSWORD: superset

.github/workflows/superset-docs-deploy.yml (10 changes)
@@ -12,7 +12,7 @@ on:
 jobs:
   config:
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     outputs:
       has-secrets: ${{ steps.check.outputs.has-secrets }}
     steps:
@@ -28,19 +28,21 @@ jobs:
     needs: config
     if: needs.config.outputs.has-secrets
     name: Build & Deploy
-    runs-on: "ubuntu-22.04"
+    runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4
         with:
           persist-credentials: false
           submodules: recursive
-      - name: Set up Node.js 20
+      - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          node-version-file: './docs/.nvmrc'
       - name: Setup Python
         uses: ./.github/actions/setup-backend/
+      - name: Update openapi docs
+        run: superset update_api_docs
       - uses: actions/setup-java@v4
         with:
           distribution: 'zulu'

.github/workflows/superset-docs-verify.yml (9 changes)
@@ -24,11 +24,10 @@ jobs:
       - uses: JustinBeckwith/linkinator-action@v1.11.0
         continue-on-error: true # This will make the job advisory (non-blocking, no red X)
         with:
-          paths: "**/*.md, **/*.mdx"
+          paths: "**/*.md, **/*.mdx, !superset-frontend/CHANGELOG.md"
           linksToSkip: >-
             ^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
             http://localhost:8088/,
-            docker/.env-non-dev,
             http://127.0.0.1:3000/,
             http://localhost:9001/,
             https://charts.bitnami.com/bitnami,
@@ -51,7 +50,7 @@ jobs:
             https://www.plaidcloud.com/
   build-deploy:
     name: Build & Deploy
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     defaults:
       run:
         working-directory: docs
@@ -61,10 +60,10 @@ jobs:
         with:
           persist-credentials: false
           submodules: recursive
-      - name: Set up Node.js 20
+      - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          node-version-file: './docs/.nvmrc'
       - name: yarn install
         run: |
           yarn install --check-cache

.github/workflows/superset-e2e.yml (16 changes)
@@ -28,6 +28,7 @@ concurrency:
 jobs:
   cypress-matrix:
+    # Somehow one test flakes on 24.04 for unknown reasons, this is the only GHA left on 22.04
     runs-on: ubuntu-22.04
     permissions:
       contents: read
@@ -41,6 +42,7 @@ jobs:
       matrix:
         parallel_id: [0, 1, 2, 3, 4, 5]
         browser: ["chrome"]
+        app_root: ["", "/app/prefix"]
     env:
       SUPERSET_ENV: development
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -48,11 +50,11 @@ jobs:
       PYTHONPATH: ${{ github.workspace }}
       REDIS_PORT: 16379
       GITHUB_TOKEN: ${{ github.token }}
-      # use the dashboard feature when running manually OR merging to master
-      USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true'|| (github.ref == 'refs/heads/master' && 'true') || 'false' }}
+      # Only use dashboard when explicitly requested via workflow_dispatch
+      USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || 'false' }}
     services:
       postgres:
-        image: postgres:15-alpine
+        image: postgres:16-alpine
         env:
           POSTGRES_USER: superset
           POSTGRES_PASSWORD: superset
@@ -108,7 +110,7 @@ jobs:
         if: steps.check.outputs.python || steps.check.outputs.frontend
         uses: actions/setup-node@v4
         with:
-          node-version: "20"
+          node-version-file: './superset-frontend/.nvmrc'
       - name: Install npm dependencies
         if: steps.check.outputs.python || steps.check.outputs.frontend
         uses: ./.github/actions/cached-dependencies
@@ -134,10 +136,10 @@ jobs:
           CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
           NODE_OPTIONS: "--max-old-space-size=4096"
         with:
-          run: cypress-run-all ${{ env.USE_DASHBOARD }}
+          run: cypress-run-all ${{ env.USE_DASHBOARD }} ${{ matrix.app_root }}
       - name: Upload Artifacts
         uses: actions/upload-artifact@v4
-        if: github.event_name == 'workflow_dispatch' && (steps.check.outputs.python || steps.check.outputs.frontend)
+        if: failure()
         with:
           path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
-          name: cypress-artifact-${{ github.run_id }}-${{ github.job }}
+          name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}

.github/workflows/superset-frontend.yml (196 changes)
@@ -1,4 +1,4 @@
-name: Frontend
+name: "Frontend Build CI (unit tests, linting & sanity checks)"
 on:
   push:
@@ -13,68 +13,168 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
   cancel-in-progress: true
+env:
+  TAG: apache/superset:GHA-${{ github.run_id }}
 jobs:
   frontend-build:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
+    outputs:
+      should-run: ${{ steps.check.outputs.frontend }}
     steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+      - name: Checkout Code
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-          submodules: recursive
-      - name: Check npm lock file version
-        run: ./scripts/ci_check_npm_lock_version.sh ./superset-frontend/package-lock.json
-      - name: Check for file changes
+      - name: Check for File Changes
         id: check
         uses: ./.github/actions/change-detector/
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
-      - name: Setup Node.js
+      - name: Build Docker Image
         if: steps.check.outputs.frontend
-        uses: actions/setup-node@v4
+        shell: bash
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          docker buildx build \
+            -t $TAG \
+            --cache-from=type=registry,ref=apache/superset-cache:3.10-slim-bookworm \
+            --target superset-node-ci \
+            .
+      - name: Save Docker Image as Artifact
+        if: steps.check.outputs.frontend
+        run: |
+          docker save $TAG | gzip > docker-image.tar.gz
+      - name: Upload Docker Image Artifact
+        if: steps.check.outputs.frontend
+        uses: actions/upload-artifact@v4
         with:
-          node-version: "20"
-      - name: Install dependencies
-        if: steps.check.outputs.frontend
-        uses: ./.github/actions/cached-dependencies
+          name: docker-image
+          path: docker-image.tar.gz
+
+  sharded-jest-tests:
+    needs: frontend-build
+    if: needs.frontend-build.outputs.should-run == 'true'
+    strategy:
+      matrix:
+        shard: [1, 2, 3, 4, 5, 6, 7, 8]
+      fail-fast: false
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Download Docker Image Artifact
+        uses: actions/download-artifact@v4
         with:
-          run: npm-install
-      - name: eslint
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend
-        run: |
-          npm run eslint -- . --quiet
-      - name: tsc
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend
-        run: |
-          npm run type
-      - name: Build plugins packages
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend
-        run: npm run plugins:build
-      - name: Build plugins Storybook
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend
-        run: npm run plugins:build-storybook
-      - name: superset-ui/core coverage
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend
-        run: |
-          npm run core:cover
-      - name: unit tests
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend
-        run: |
-          npm run test -- --coverage --silent
-      # todo: remove this step when fix generator as a project in root jest.config.js
-      - name: generator-superset unit tests
-        if: steps.check.outputs.frontend
-        working-directory: ./superset-frontend/packages/generator-superset
-        run: npm run test
-      - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+          name: docker-image
+      - name: Load Docker Image
+        run: docker load < docker-image.tar.gz
+      - name: npm run test with coverage
+        run: |
+          mkdir -p ${{ github.workspace }}/superset-frontend/coverage
+          docker run \
+            -v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \
+            --rm $TAG \
+            bash -c \
+            "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
+      - name: Upload Coverage Artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: coverage-artifacts-${{ matrix.shard }}
+          path: superset-frontend/coverage
+
+  report-coverage:
+    needs: [sharded-jest-tests]
+    if: needs.frontend-build.outputs.should-run == 'true'
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Download Coverage Artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: coverage-artifacts-*
+          path: coverage/
+      - name: Show Files
+        run: find coverage/
+      - name: Merge Code Coverage
+        run: npx nyc merge coverage/ merged-output/coverage-summary.json
+      - name: Upload Code Coverage
+        uses: codecov/codecov-action@v5
         with:
           flags: javascript
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
+          files: merged-output/coverage-summary.json
+          slug: apache/superset
+
+  core-cover:
+    needs: frontend-build
+    if: needs.frontend-build.outputs.should-run == 'true'
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Download Docker Image Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: docker-image
+      - name: Load Docker Image
+        run: docker load < docker-image.tar.gz
+      - name: superset-ui/core coverage
+        run: |
+          docker run --rm $TAG bash -c \
+            "npm run core:cover"
+
+  lint-frontend:
+    needs: frontend-build
+    if: needs.frontend-build.outputs.should-run == 'true'
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Download Docker Image Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: docker-image
+      - name: Load Docker Image
+        run: docker load < docker-image.tar.gz
+      - name: eslint
+        run: |
+          docker run --rm $TAG bash -c \
+            "npm i && npm run eslint -- . --quiet"
+      - name: tsc
+        run: |
+          docker run --rm $TAG bash -c \
+            "npm run type"
+
+  validate-frontend:
+    needs: frontend-build
+    if: needs.frontend-build.outputs.should-run == 'true'
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Download Docker Image Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: docker-image
+      - name: Load Docker Image
+        run: docker load < docker-image.tar.gz
+      - name: Build Plugins Packages
+        run: |
+          docker run --rm $TAG bash -c \
+            "npm run plugins:build"
+      - name: Build Plugins Storybook
+        run: |
+          docker run --rm $TAG bash -c \
+            "npm run plugins:build-storybook"
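A rough local sketch of the new sharded flow, assuming Docker is available and the repo root is the working directory (the tag below is a local placeholder for the workflow's TAG env var, and shard 3/8 is picked arbitrarily):

  TAG=apache/superset:GHA-local   # stands in for apache/superset:GHA-${{ github.run_id }}
  docker buildx build -t "$TAG" --target superset-node-ci .
  mkdir -p superset-frontend/coverage
  docker run --rm \
    -v "$PWD/superset-frontend/coverage:/app/superset-frontend/coverage" \
    "$TAG" bash -c \
    "npm run test -- --coverage --shard=3/8 --coverageReporters=json-summary"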

.github/workflows/superset-helm-lint.yml (6 changes)
@@ -1,4 +1,4 @@
-name: Lint and Test Charts
+name: "Helm: lint and test charts"
 on:
   pull_request:
@@ -13,7 +13,7 @@ concurrency:
 jobs:
   lint-test:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4
@@ -25,7 +25,7 @@ jobs:
       - name: Set up Helm
         uses: azure/setup-helm@v4
         with:
-          version: v3.5.4
+          version: v3.16.4
       - name: Setup Python
         uses: ./.github/actions/setup-backend/

.github/workflows/superset-helm-release.yml (88 changes)
@@ -1,4 +1,8 @@
-name: Release Charts
+# This workflow automates the release process for Helm charts.
+# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
+# allowing the changes to be reviewed and merged manually.
+
+name: "Helm: release charts"
 on:
   push:
@@ -7,18 +11,28 @@ on:
       - "[0-9].[0-9]*"
     paths:
       - "helm/**"
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "The branch, tag, or commit SHA to check out"
+        required: false
+        default: "master"
 jobs:
   release:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: write
+      pull-requests: write
+    env:
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+      - name: Checkout code
        uses: actions/checkout@v4
        with:
-          persist-credentials: false
+          ref: ${{ inputs.ref || github.ref_name }}
+          persist-credentials: true
           submodules: recursive
           fetch-depth: 0
@@ -35,11 +49,77 @@ jobs:
       - name: Add bitnami repo dependency
         run: helm repo add bitnami https://charts.bitnami.com/bitnami
+      - name: Fetch/list all tags
+        run: |
+          # Debugging tags
+          git fetch --tags --force
+          git tag -d superset-helm-chart-0.13.4 || true
+          echo "DEBUG TAGS"
+          git show-ref --tags
+      - name: Create unique pages branch name
+        id: vars
+        run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
+      - name: Force recreate branch from gh-pages
+        run: |
+          # Ensure a clean working directory
+          git reset --hard
+          git clean -fdx
+          git checkout -b local_gha_temp
+          git submodule update
+
+          # Fetch the latest gh-pages branch
+          git fetch origin gh-pages
+
+          # Check out and reset the target branch based on gh-pages
+          git checkout -B ${{ env.branch_name }} origin/gh-pages
+
+          # Remove submodules from the branch
+          git submodule deinit -f --all
+
+          # Force push to the remote branch
+          git push origin ${{ env.branch_name }} --force
+
+          # Return to the original branch
+          git checkout local_gha_temp
+      - name: Fetch/list all tags
+        run: |
+          git submodule update
+          cat .github/actions/chart-releaser-action/action.yml
       - name: Run chart-releaser
         uses: ./.github/actions/chart-releaser-action
         with:
+          version: v1.6.0
           charts_dir: helm
           mark_as_latest: false
+          pages_branch: ${{ env.branch_name }}
         env:
           CR_TOKEN: "${{ github.token }}"
           CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
+      - name: Open Pull Request
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const branchName = '${{ env.branch_name }}';
+            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+
+            if (!branchName) {
+              throw new Error("Branch name is not defined.");
+            }
+
+            const pr = await github.rest.pulls.create({
+              owner,
+              repo,
+              title: `Helm chart release for ${branchName}`,
+              head: branchName,
+              base: "gh-pages", // Adjust if the target branch is different
+              body: `This PR releases Helm charts to the gh-pages branch.`,
+            });
+
+            core.info(`Pull request created: ${pr.data.html_url}`);
+        env:
+          BRANCH_NAME: ${{ env.branch_name }}

@@ -15,7 +15,7 @@ concurrency:
 jobs:
   test-mysql:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       PYTHONPATH: ${{ github.workspace }}
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -68,16 +68,16 @@ jobs:
         run: |
           ./scripts/python_tests.sh
       - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           flags: python,mysql
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
   test-postgres:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       matrix:
-        python-version: ["current", "next", "previous"]
+        python-version: ["current", "previous"]
     env:
       PYTHONPATH: ${{ github.workspace }}
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -85,7 +85,7 @@ jobs:
       SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
     services:
       postgres:
-        image: postgres:15-alpine
+        image: postgres:16-alpine
         env:
           POSTGRES_USER: superset
           POSTGRES_PASSWORD: superset
@@ -129,14 +129,14 @@ jobs:
         run: |
           ./scripts/python_tests.sh
       - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           flags: python,postgres
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
   test-sqlite:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       PYTHONPATH: ${{ github.workspace }}
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -181,7 +181,7 @@ jobs:
         run: |
           ./scripts/python_tests.sh
       - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           flags: python,sqlite
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -16,7 +16,7 @@ concurrency:
 jobs:
   test-postgres-presto:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       PYTHONPATH: ${{ github.workspace }}
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -25,7 +25,7 @@ jobs:
       SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
     services:
       postgres:
-        image: postgres:15-alpine
+        image: postgres:16-alpine
         env:
           POSTGRES_USER: superset
           POSTGRES_PASSWORD: superset
@@ -77,14 +77,14 @@ jobs:
         run: |
           ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
       - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           flags: python,presto
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
   test-postgres-hive:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       PYTHONPATH: ${{ github.workspace }}
       SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -94,7 +94,7 @@ jobs:
       UPLOAD_FOLDER: /tmp/.superset/uploads/
     services:
       postgres:
-        image: postgres:15-alpine
+        image: postgres:16-alpine
         env:
           POSTGRES_USER: superset
           POSTGRES_PASSWORD: superset
@@ -142,9 +142,10 @@ jobs:
       - name: Python unit tests (PostgreSQL)
         if: steps.check.outputs.python
         run: |
+          pip install -e .[hive]
           ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
       - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           flags: python,hive
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -16,10 +16,10 @@ concurrency:
 jobs:
   unit-tests:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       matrix:
-        python-version: ["current", "next"]
+        python-version: ["previous", "current"]
     env:
       PYTHONPATH: ${{ github.workspace }}
     steps:
@@ -44,9 +44,9 @@ jobs:
           SUPERSET_TESTENV: true
           SUPERSET_SECRET_KEY: not-a-secret
         run: |
-          pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
+          pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear --maxfail=50
       - name: Upload code coverage
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           flags: python,unit
           token: ${{ secrets.CODECOV_TOKEN }}
6
.github/workflows/superset-translations.yml
vendored
6
.github/workflows/superset-translations.yml
vendored
@@ -15,7 +15,7 @@ concurrency:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
frontend-check-translations:
|
frontend-check-translations:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -33,7 +33,7 @@ jobs:
|
|||||||
if: steps.check.outputs.frontend
|
if: steps.check.outputs.frontend
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '18'
|
node-version-file: './superset-frontend/.nvmrc'
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
if: steps.check.outputs.frontend
|
if: steps.check.outputs.frontend
|
||||||
uses: ./.github/actions/cached-dependencies
|
uses: ./.github/actions/cached-dependencies
|
||||||
@@ -46,7 +46,7 @@ jobs:
|
|||||||
npm run build-translation
|
npm run build-translation
|
||||||
|
|
||||||
babel-extract:
|
babel-extract:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
2
.github/workflows/superset-websocket.yml
vendored
2
.github/workflows/superset-websocket.yml
vendored
@@ -18,7 +18,7 @@ concurrency:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
app-checks:
|
app-checks:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
2
.github/workflows/supersetbot.yml
vendored
2
.github/workflows/supersetbot.yml
vendored
@@ -15,7 +15,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
supersetbot:
|
supersetbot:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
if: >
|
if: >
|
||||||
github.event_name == 'workflow_dispatch' ||
|
github.event_name == 'workflow_dispatch' ||
|
||||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))
|
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))
|
||||||
|
|||||||
27
.github/workflows/tag-release.yml
vendored
27
.github/workflows/tag-release.yml
vendored
@@ -23,7 +23,7 @@ on:
|
|||||||
- 'false'
|
- 'false'
|
||||||
jobs:
|
jobs:
|
||||||
config:
|
config:
|
||||||
runs-on: "ubuntu-22.04"
|
runs-on: ubuntu-24.04
|
||||||
outputs:
|
outputs:
|
||||||
has-secrets: ${{ steps.check.outputs.has-secrets }}
|
has-secrets: ${{ steps.check.outputs.has-secrets }}
|
||||||
steps:
|
steps:
|
||||||
@@ -39,23 +39,26 @@ jobs:
|
|||||||
needs: config
|
needs: config
|
||||||
if: needs.config.outputs.has-secrets
|
if: needs.config.outputs.has-secrets
|
||||||
name: docker-release
|
name: docker-release
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
|
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v3
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Docker Environment
|
||||||
|
uses: ./.github/actions/setup-docker
|
||||||
|
with:
|
||||||
|
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
|
||||||
|
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
install-docker-compose: "false"
|
||||||
|
build: "true"
|
||||||
|
|
||||||
- name: Use Node.js 20
|
- name: Use Node.js 20
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
@@ -64,13 +67,6 @@ jobs:
|
|||||||
- name: Setup supersetbot
|
- name: Setup supersetbot
|
||||||
uses: ./.github/actions/setup-supersetbot/
|
uses: ./.github/actions/setup-supersetbot/
|
||||||
|
|
||||||
- name: Try to login to DockerHub
|
|
||||||
continue-on-error: true
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USER }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Execute custom Node.js script
|
- name: Execute custom Node.js script
|
||||||
env:
|
env:
|
||||||
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
|
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
|
||||||
@@ -91,6 +87,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
supersetbot docker \
|
supersetbot docker \
|
||||||
|
--push \
|
||||||
--preset ${{ matrix.build_preset }} \
|
--preset ${{ matrix.build_preset }} \
|
||||||
--context "$EVENT" \
|
--context "$EVENT" \
|
||||||
--context-ref "$RELEASE" $FORCE_LATEST \
|
--context-ref "$RELEASE" $FORCE_LATEST \
|
||||||
@@ -103,7 +100,7 @@ jobs:
|
|||||||
update-prs-with-release-info:
|
update-prs-with-release-info:
|
||||||
needs: config
|
needs: config
|
||||||
if: needs.config.outputs.has-secrets
|
if: needs.config.outputs.has-secrets
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|||||||
8
.github/workflows/tech-debt.yml
vendored
8
.github/workflows/tech-debt.yml
vendored
@@ -8,7 +8,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
config:
|
config:
|
||||||
runs-on: "ubuntu-22.04"
|
runs-on: ubuntu-24.04
|
||||||
outputs:
|
outputs:
|
||||||
has-secrets: ${{ steps.check.outputs.has-secrets }}
|
has-secrets: ${{ steps.check.outputs.has-secrets }}
|
||||||
steps:
|
steps:
|
||||||
@@ -23,7 +23,7 @@ jobs:
|
|||||||
process-and-upload:
|
process-and-upload:
|
||||||
needs: config
|
needs: config
|
||||||
if: needs.config.outputs.has-secrets
|
if: needs.config.outputs.has-secrets
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
name: Generate Reports
|
name: Generate Reports
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repository
|
- name: Checkout Repository
|
||||||
@@ -32,10 +32,10 @@ jobs:
|
|||||||
- name: Set up Node.js
|
- name: Set up Node.js
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '20'
|
node-version-file: './superset-frontend/.nvmrc'
|
||||||
|
|
||||||
- name: Install Dependencies
|
- name: Install Dependencies
|
||||||
run: npm install
|
run: npm ci
|
||||||
working-directory: ./superset-frontend
|
working-directory: ./superset-frontend
|
||||||
|
|
||||||
- name: Run Script
|
- name: Run Script
|
||||||
|
|||||||
2
.github/workflows/welcome-new-users.yml
vendored
2
.github/workflows/welcome-new-users.yml
vendored
@@ -6,7 +6,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
welcome:
|
welcome:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|
||||||
|
|||||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -21,6 +21,7 @@
|
|||||||
*.swp
|
*.swp
|
||||||
__pycache__
|
__pycache__
|
||||||
|
|
||||||
|
.aider*
|
||||||
.local
|
.local
|
||||||
.cache
|
.cache
|
||||||
.bento*
|
.bento*
|
||||||
@@ -50,7 +51,6 @@ env
|
|||||||
venv*
|
venv*
|
||||||
env_py3
|
env_py3
|
||||||
envpy3
|
envpy3
|
||||||
env36
|
|
||||||
local_config.py
|
local_config.py
|
||||||
/superset_config.py
|
/superset_config.py
|
||||||
/superset_text.yml
|
/superset_text.yml
|
||||||
@@ -66,7 +66,10 @@ superset-websocket/config.json
|
|||||||
*.js.map
|
*.js.map
|
||||||
node_modules
|
node_modules
|
||||||
npm-debug.log*
|
npm-debug.log*
|
||||||
superset/static/assets
|
superset/static/assets/*
|
||||||
|
!superset/static/assets/.gitkeep
|
||||||
|
superset/static/uploads/*
|
||||||
|
!superset/static/uploads/.gitkeep
|
||||||
superset/static/version_info.json
|
superset/static/version_info.json
|
||||||
superset-frontend/**/esm/*
|
superset-frontend/**/esm/*
|
||||||
superset-frontend/**/lib/*
|
superset-frontend/**/lib/*
|
||||||
@@ -104,6 +107,7 @@ ghostdriver.log
|
|||||||
testCSV.csv
|
testCSV.csv
|
||||||
.terser-plugin-cache/
|
.terser-plugin-cache/
|
||||||
apache-superset-*.tar.gz*
|
apache-superset-*.tar.gz*
|
||||||
|
apache_superset-*.tar.gz*
|
||||||
release.json
|
release.json
|
||||||
|
|
||||||
# Translation-related files
|
# Translation-related files
|
||||||
@@ -122,3 +126,4 @@ docker/*local*
|
|||||||
# Jest test report
|
# Jest test report
|
||||||
test-report.html
|
test-report.html
|
||||||
superset/static/stats/statistics.html
|
superset/static/stats/statistics.html
|
||||||
|
.aider*
|
||||||
|
|||||||
@@ -16,11 +16,11 @@
|
|||||||
#
|
#
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/MarcoGorelli/auto-walrus
|
- repo: https://github.com/MarcoGorelli/auto-walrus
|
||||||
rev: v0.2.2
|
rev: 0.3.4
|
||||||
hooks:
|
hooks:
|
||||||
- id: auto-walrus
|
- id: auto-walrus
|
||||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||||
rev: v1.3.0
|
rev: v1.15.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: mypy
|
- id: mypy
|
||||||
args: [--check-untyped-defs]
|
args: [--check-untyped-defs]
|
||||||
@@ -38,31 +38,49 @@ repos:
|
|||||||
types-paramiko,
|
types-paramiko,
|
||||||
types-Markdown,
|
types-Markdown,
|
||||||
]
|
]
|
||||||
- repo: https://github.com/peterdemin/pip-compile-multi
|
|
||||||
rev: v2.6.2
|
|
||||||
hooks:
|
|
||||||
- id: pip-compile-multi-verify
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.4.0
|
rev: v5.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-docstring-first
|
- id: check-docstring-first
|
||||||
- id: check-added-large-files
|
- id: check-added-large-files
|
||||||
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*
|
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
|
||||||
- id: check-yaml
|
- id: check-yaml
|
||||||
exclude: ^helm/superset/templates/
|
exclude: ^helm/superset/templates/
|
||||||
- id: debug-statements
|
- id: debug-statements
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
|
exclude: .*/lerna\.json$
|
||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
exclude: ^.*\.(snap)
|
exclude: ^.*\.(snap)
|
||||||
args: ["--markdown-linebreak-ext=md"]
|
args: ["--markdown-linebreak-ext=md"]
|
||||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||||
rev: v3.1.0 # Use the sha or tag you want to point at
|
rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
|
||||||
hooks:
|
hooks:
|
||||||
- id: prettier
|
- id: prettier
|
||||||
additional_dependencies:
|
additional_dependencies:
|
||||||
- prettier@3.3.3
|
- prettier@3.5.3
|
||||||
args: ["--ignore-path=./superset-frontend/.prettierignore"]
|
args: ["--ignore-path=./superset-frontend/.prettierignore"]
|
||||||
files: "superset-frontend"
|
files: "superset-frontend"
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: eslint-frontend
|
||||||
|
name: eslint (frontend)
|
||||||
|
entry: ./scripts/eslint.sh
|
||||||
|
language: system
|
||||||
|
pass_filenames: true
|
||||||
|
files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
|
||||||
|
- id: eslint-docs
|
||||||
|
name: eslint (docs)
|
||||||
|
entry: bash -c 'cd docs && FILES=$(echo "$@" | sed "s|docs/||g") && yarn eslint --fix --ext .js,.jsx,.ts,.tsx --quiet $FILES'
|
||||||
|
language: system
|
||||||
|
pass_filenames: true
|
||||||
|
files: ^docs/.*\.(js|jsx|ts|tsx)$
|
||||||
|
- id: type-checking-frontend
|
||||||
|
name: Type-Checking (Frontend)
|
||||||
|
entry: bash -c './scripts/check-type.js package=superset-frontend excludeDeclarationDir=cypress-base'
|
||||||
|
language: system
|
||||||
|
files: ^superset-frontend\/.*\.(js|jsx|ts|tsx)$
|
||||||
|
exclude: ^superset-frontend/cypress-base\/
|
||||||
|
require_serial: true
|
||||||
# blacklist unsafe functions like make_url (see #19526)
|
# blacklist unsafe functions like make_url (see #19526)
|
||||||
- repo: https://github.com/skorokithakis/blacklist-pre-commit-hook
|
- repo: https://github.com/skorokithakis/blacklist-pre-commit-hook
|
||||||
rev: e2f070289d8eddcaec0b580d3bde29437e7c8221
|
rev: e2f070289d8eddcaec0b580d3bde29437e7c8221
|
||||||
@@ -70,27 +88,15 @@ repos:
|
|||||||
- id: blacklist
|
- id: blacklist
|
||||||
args: ["--blacklisted-names=make_url", "--ignore=tests/"]
|
args: ["--blacklisted-names=make_url", "--ignore=tests/"]
|
||||||
- repo: https://github.com/norwoodj/helm-docs
|
- repo: https://github.com/norwoodj/helm-docs
|
||||||
rev: v1.11.0
|
rev: v1.14.2
|
||||||
hooks:
|
hooks:
|
||||||
- id: helm-docs
|
- id: helm-docs
|
||||||
files: helm
|
files: helm
|
||||||
|
verbose: false
|
||||||
|
args: ["--log-level", "error"]
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.4.0
|
rev: v0.9.7
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff
|
- id: ruff
|
||||||
args: [ --fix ]
|
args: [--fix]
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
- repo: local
|
|
||||||
hooks:
|
|
||||||
- id: pylint
|
|
||||||
name: pylint
|
|
||||||
entry: pylint
|
|
||||||
language: system
|
|
||||||
types: [python]
|
|
||||||
exclude: ^(tests/|superset/migrations/|scripts/|RELEASING/|docker/)
|
|
||||||
args:
|
|
||||||
[
|
|
||||||
"-rn", # Only display messages
|
|
||||||
"-sn", # Don't display the score
|
|
||||||
"--rcfile=.pylintrc",
|
|
||||||
]
|
|
||||||
|
|||||||
380
.pylintrc
380
.pylintrc
@@ -1,380 +0,0 @@
|
|||||||
#
|
|
||||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
||||||
# contributor license agreements. See the NOTICE file distributed with
|
|
||||||
# this work for additional information regarding copyright ownership.
|
|
||||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
||||||
# (the "License"); you may not use this file except in compliance with
|
|
||||||
# the License. You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
#
|
|
||||||
[MASTER]
|
|
||||||
|
|
||||||
# Specify a configuration file.
|
|
||||||
#rcfile=
|
|
||||||
|
|
||||||
# Python code to execute, usually for sys.path manipulation such as
|
|
||||||
# pygtk.require().
|
|
||||||
#init-hook=
|
|
||||||
|
|
||||||
# Add files or directories to the blacklist. They should be base names, not
|
|
||||||
# paths.
|
|
||||||
ignore=CVS,migrations
|
|
||||||
|
|
||||||
# Add files or directories matching the regex patterns to the blacklist. The
|
|
||||||
# regex matches against base names, not paths.
|
|
||||||
ignore-patterns=
|
|
||||||
|
|
||||||
# Pickle collected data for later comparisons.
|
|
||||||
persistent=yes
|
|
||||||
|
|
||||||
# List of plugins (as comma separated values of python modules names) to load,
|
|
||||||
# usually to register additional checkers.
|
|
||||||
load-plugins=superset.extensions.pylint
|
|
||||||
|
|
||||||
# Use multiple processes to speed up Pylint.
|
|
||||||
jobs=2
|
|
||||||
|
|
||||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
|
||||||
# active Python interpreter and may run arbitrary code.
|
|
||||||
unsafe-load-any-extension=no
|
|
||||||
|
|
||||||
# A comma-separated list of package or module names from where C extensions may
|
|
||||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
|
||||||
# run arbitrary code
|
|
||||||
extension-pkg-whitelist=pyarrow
|
|
||||||
|
|
||||||
|
|
||||||
[MESSAGES CONTROL]
|
|
||||||
|
|
||||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
|
||||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
|
|
||||||
confidence=
|
|
||||||
|
|
||||||
# Enable the message, report, category or checker with the given id(s). You can
|
|
||||||
# either give multiple identifier separated by comma (,) or put this option
|
|
||||||
# multiple time (only on the command line, not in the configuration file where
|
|
||||||
# it should appear only once). See also the "--disable" option for examples.
|
|
||||||
enable=
|
|
||||||
useless-suppression,
|
|
||||||
|
|
||||||
# Disable the message, report, category or checker with the given id(s). You
|
|
||||||
# can either give multiple identifiers separated by comma (,) or put this
|
|
||||||
# option multiple times (only on the command line, not in the configuration
|
|
||||||
# file where it should appear only once).You can also use "--disable=all" to
|
|
||||||
# disable everything first and then reenable specific checks. For example, if
|
|
||||||
# you want to run only the similarities checker, you can use "--disable=all
|
|
||||||
# --enable=similarities". If you want to run only the classes checker, but have
|
|
||||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
|
||||||
# --disable=W"
|
|
||||||
disable=
|
|
||||||
cyclic-import, # re-enable once this no longer raises false positives
|
|
||||||
missing-docstring,
|
|
||||||
duplicate-code,
|
|
||||||
line-too-long,
|
|
||||||
unspecified-encoding,
|
|
||||||
too-many-instance-attributes # re-enable once this no longer raises false positives
|
|
||||||
|
|
||||||
[REPORTS]
|
|
||||||
|
|
||||||
# Set the output format. Available formats are text, parseable, colorized, msvs
|
|
||||||
# (visual studio) and html. You can also give a reporter class, eg
|
|
||||||
# mypackage.mymodule.MyReporterClass.
|
|
||||||
output-format=text
|
|
||||||
|
|
||||||
# Tells whether to display a full report or only the messages
|
|
||||||
reports=yes
|
|
||||||
|
|
||||||
# Python expression which should return a note less than 10 (10 is the highest
|
|
||||||
# note). You have access to the variables errors warning, statement which
|
|
||||||
# respectively contain the number of errors / warnings messages and the total
|
|
||||||
# number of statements analyzed. This is used by the global evaluation report
|
|
||||||
# (RP0004).
|
|
||||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
|
||||||
|
|
||||||
# Template used to display messages. This is a python new-style format string
|
|
||||||
# used to format the message information. See doc for all details
|
|
||||||
#msg-template=
|
|
||||||
|
|
||||||
|
|
||||||
[BASIC]
|
|
||||||
|
|
||||||
# Good variable names which should always be accepted, separated by a comma
|
|
||||||
good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x,y
|
|
||||||
|
|
||||||
# Bad variable names which should always be refused, separated by a comma
|
|
||||||
bad-names=bar,baz,db,fd,foo,sesh,session,tata,toto,tutu
|
|
||||||
|
|
||||||
# Colon-delimited sets of names that determine each other's naming style when
|
|
||||||
# the name regexes allow several styles.
|
|
||||||
name-group=
|
|
||||||
|
|
||||||
# Include a hint for the correct naming format with invalid-name
|
|
||||||
include-naming-hint=no
|
|
||||||
|
|
||||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
|
||||||
# to this list to register other decorators that produce valid properties.
|
|
||||||
property-classes=
|
|
||||||
abc.abstractproperty,
|
|
||||||
sqlalchemy.ext.hybrid.hybrid_property
|
|
||||||
|
|
||||||
# Regular expression matching correct argument names
|
|
||||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression matching correct method names
|
|
||||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression matching correct variable names
|
|
||||||
variable-rgx=[a-z_][a-z0-9_]{1,30}$
|
|
||||||
|
|
||||||
# Regular expression matching correct inline iteration names
|
|
||||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
|
||||||
|
|
||||||
# Regular expression matching correct constant names
|
|
||||||
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
|
|
||||||
|
|
||||||
# Regular expression matching correct class names
|
|
||||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
|
||||||
|
|
||||||
# Regular expression matching correct class attribute names
|
|
||||||
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
|
||||||
|
|
||||||
# Regular expression matching correct module names
|
|
||||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
|
||||||
|
|
||||||
# Regular expression matching correct attribute names
|
|
||||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression matching correct function names
|
|
||||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
|
||||||
|
|
||||||
# Regular expression which should only match function or class names that do
|
|
||||||
# not require a docstring.
|
|
||||||
no-docstring-rgx=^_
|
|
||||||
|
|
||||||
# Minimum line length for functions/classes that require docstrings, shorter
|
|
||||||
# ones are exempt.
|
|
||||||
docstring-min-length=10
|
|
||||||
|
|
||||||
|
|
||||||
[ELIF]
|
|
||||||
|
|
||||||
# Maximum number of nested blocks for function / method body
|
|
||||||
max-nested-blocks=5
|
|
||||||
|
|
||||||
|
|
||||||
[FORMAT]
|
|
||||||
|
|
||||||
# Maximum number of characters on a single line.
|
|
||||||
max-line-length=100
|
|
||||||
|
|
||||||
# Regexp for a line that is allowed to be longer than the limit.
|
|
||||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
|
||||||
|
|
||||||
# Allow the body of an if to be on the same line as the test if there is no
|
|
||||||
# else.
|
|
||||||
single-line-if-stmt=no
|
|
||||||
|
|
||||||
# Maximum number of lines in a module
|
|
||||||
max-module-lines=1000
|
|
||||||
|
|
||||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
|
||||||
# tab).
|
|
||||||
indent-string=' '
|
|
||||||
|
|
||||||
# Number of spaces of indent required inside a hanging or continued line.
|
|
||||||
indent-after-paren=4
|
|
||||||
|
|
||||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
|
||||||
expected-line-ending-format=
|
|
||||||
|
|
||||||
|
|
||||||
[LOGGING]
|
|
||||||
|
|
||||||
# Logging modules to check that the string format arguments are in logging
|
|
||||||
# function parameter format
|
|
||||||
logging-modules=logging
|
|
||||||
|
|
||||||
|
|
||||||
[MISCELLANEOUS]
|
|
||||||
|
|
||||||
# List of note tags to take in consideration, separated by a comma.
|
|
||||||
notes=FIXME,XXX
|
|
||||||
|
|
||||||
|
|
||||||
[SIMILARITIES]
|
|
||||||
|
|
||||||
# Minimum lines number of a similarity.
|
|
||||||
min-similarity-lines=5
|
|
||||||
|
|
||||||
# Ignore comments when computing similarities.
|
|
||||||
ignore-comments=yes
|
|
||||||
|
|
||||||
# Ignore docstrings when computing similarities.
|
|
||||||
ignore-docstrings=yes
|
|
||||||
|
|
||||||
# Ignore imports when computing similarities.
|
|
||||||
ignore-imports=no
|
|
||||||
|
|
||||||
|
|
||||||
[SPELLING]
|
|
||||||
|
|
||||||
# Spelling dictionary name. Available dictionaries: none. To make it working
|
|
||||||
# install python-enchant package.
|
|
||||||
spelling-dict=
|
|
||||||
|
|
||||||
# List of comma separated words that should not be checked.
|
|
||||||
spelling-ignore-words=
|
|
||||||
|
|
||||||
# A path to a file that contains private dictionary; one word per line.
|
|
||||||
spelling-private-dict-file=
|
|
||||||
|
|
||||||
# Tells whether to store unknown words to indicated private dictionary in
|
|
||||||
# --spelling-private-dict-file option instead of raising a message.
|
|
||||||
spelling-store-unknown-words=no
|
|
||||||
|
|
||||||
|
|
||||||
[TYPECHECK]
|
|
||||||
|
|
||||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
|
||||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
|
||||||
ignore-mixin-members=yes
|
|
||||||
|
|
||||||
# List of module names for which member attributes should not be checked
|
|
||||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
|
||||||
# and thus existing member attributes cannot be deduced by static analysis. It
|
|
||||||
# supports qualified module names, as well as Unix pattern matching.
|
|
||||||
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
|
|
||||||
|
|
||||||
# List of class names for which member attributes should not be checked (useful
|
|
||||||
# for classes with dynamically set attributes). This supports the use of
|
|
||||||
# qualified names.
|
|
||||||
ignored-classes=contextlib.closing,optparse.Values,thread._local,_thread._local
|
|
||||||
|
|
||||||
# List of members which are set dynamically and missed by pylint inference
|
|
||||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
|
||||||
# expressions are accepted.
|
|
||||||
generated-members=
|
|
||||||
|
|
||||||
# List of decorators that produce context managers, such as
|
|
||||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
|
||||||
# produce valid context managers.
|
|
||||||
contextmanager-decorators=contextlib.contextmanager
|
|
||||||
|
|
||||||
|
|
||||||
[VARIABLES]
|
|
||||||
|
|
||||||
# Tells whether we should check for unused import in __init__ files.
|
|
||||||
init-import=no
|
|
||||||
|
|
||||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
|
||||||
# not used).
|
|
||||||
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
|
|
||||||
|
|
||||||
# List of additional names supposed to be defined in builtins. Remember that
|
|
||||||
# you should avoid to define new builtins when possible.
|
|
||||||
additional-builtins=
|
|
||||||
|
|
||||||
# List of strings which can identify a callback function by name. A callback
|
|
||||||
# name must start or end with one of those strings.
|
|
||||||
callbacks=cb_,_cb
|
|
||||||
|
|
||||||
# List of qualified module names which can have objects that can redefine
|
|
||||||
# builtins.
|
|
||||||
redefining-builtins-modules=six.moves,future.builtins
|
|
||||||
|
|
||||||
|
|
||||||
[CLASSES]
|
|
||||||
|
|
||||||
# List of method names used to declare (i.e. assign) instance attributes.
|
|
||||||
defining-attr-methods=__init__,__new__,setUp
|
|
||||||
|
|
||||||
# List of valid names for the first argument in a class method.
|
|
||||||
valid-classmethod-first-arg=cls
|
|
||||||
|
|
||||||
# List of valid names for the first argument in a metaclass class method.
|
|
||||||
valid-metaclass-classmethod-first-arg=mcs
|
|
||||||
|
|
||||||
# List of member names, which should be excluded from the protected access
|
|
||||||
# warning.
|
|
||||||
exclude-protected=_asdict,_fields,_replace,_source,_make
|
|
||||||
|
|
||||||
|
|
||||||
[DESIGN]
|
|
||||||
|
|
||||||
# Maximum number of arguments for function / method
|
|
||||||
max-args=5
|
|
||||||
|
|
||||||
# Argument names that match this expression will be ignored. Default to name
|
|
||||||
# with leading underscore
|
|
||||||
ignored-argument-names=_.*
|
|
||||||
|
|
||||||
# Maximum number of locals for function / method body
|
|
||||||
max-locals=15
|
|
||||||
|
|
||||||
# Maximum number of return / yield for function / method body
|
|
||||||
max-returns=10
|
|
||||||
|
|
||||||
# Maximum number of branch for function / method body
|
|
||||||
max-branches=15
|
|
||||||
|
|
||||||
# Maximum number of statements in function / method body
|
|
||||||
max-statements=50
|
|
||||||
|
|
||||||
# Maximum number of parents for a class (see R0901).
|
|
||||||
max-parents=7
|
|
||||||
|
|
||||||
# Maximum number of attributes for a class (see R0902).
|
|
||||||
max-attributes=8
|
|
||||||
|
|
||||||
# Minimum number of public methods for a class (see R0903).
|
|
||||||
min-public-methods=2
|
|
||||||
|
|
||||||
# Maximum number of public methods for a class (see R0904).
|
|
||||||
max-public-methods=20
|
|
||||||
|
|
||||||
# Maximum number of boolean expressions in a if statement
|
|
||||||
max-bool-expr=5
|
|
||||||
|
|
||||||
|
|
||||||
[IMPORTS]
|
|
||||||
|
|
||||||
# Deprecated modules which should not be used, separated by a comma
|
|
||||||
deprecated-modules=optparse
|
|
||||||
|
|
||||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
|
||||||
# given file (report RP0402 must not be disabled)
|
|
||||||
import-graph=
|
|
||||||
|
|
||||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
|
||||||
# not be disabled)
|
|
||||||
ext-import-graph=
|
|
||||||
|
|
||||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
|
||||||
# not be disabled)
|
|
||||||
int-import-graph=
|
|
||||||
|
|
||||||
# Force import order to recognize a module as part of the standard
|
|
||||||
# compatibility libraries.
|
|
||||||
known-standard-library=
|
|
||||||
|
|
||||||
# Force import order to recognize a module as part of a third party library.
|
|
||||||
known-third-party=enchant
|
|
||||||
|
|
||||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
|
||||||
# 3 compatible code, which means that the block might have code that exists
|
|
||||||
# only in one or another interpreter, leading to false positives when analysed.
|
|
||||||
analyse-fallback-blocks=no
|
|
||||||
|
|
||||||
|
|
||||||
[EXCEPTIONS]
|
|
||||||
|
|
||||||
# Exceptions that will emit a warning when being caught. Defaults to
|
|
||||||
# "Exception"
|
|
||||||
overgeneral-exceptions=builtins.Exception
|
|
||||||
@@ -70,6 +70,7 @@ google-sheets.svg
|
|||||||
ibm-db2.svg
|
ibm-db2.svg
|
||||||
postgresql.svg
|
postgresql.svg
|
||||||
snowflake.svg
|
snowflake.svg
|
||||||
|
ydb.svg
|
||||||
|
|
||||||
# docs-related
|
# docs-related
|
||||||
erd.puml
|
erd.puml
|
||||||
|
|||||||
50
CHANGELOG/4.1.1.md
Normal file
50
CHANGELOG/4.1.1.md
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
<!--
|
||||||
|
Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
or more contributor license agreements. See the NOTICE file
|
||||||
|
distributed with this work for additional information
|
||||||
|
regarding copyright ownership. The ASF licenses this file
|
||||||
|
to you under the Apache License, Version 2.0 (the
|
||||||
|
"License"); you may not use this file except in compliance
|
||||||
|
with the License. You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing,
|
||||||
|
software distributed under the License is distributed on an
|
||||||
|
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
KIND, either express or implied. See the License for the
|
||||||
|
specific language governing permissions and limitations
|
||||||
|
under the License.
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Change Log
|
||||||
|
|
||||||
|
### 4.1 (Fri Nov 15 22:13:57 2024 +0530)
|
||||||
|
|
||||||
|
**Database Migrations**
|
||||||
|
|
||||||
|
**Features**
|
||||||
|
|
||||||
|
**Fixes**
|
||||||
|
|
||||||
|
- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
|
||||||
|
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
|
||||||
|
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
|
||||||
|
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
|
||||||
|
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
|
||||||
|
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
|
||||||
|
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
|
||||||
|
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
|
||||||
|
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
|
||||||
|
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
|
||||||
|
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)
|
||||||
|
|
||||||
|
**Others**
|
||||||
|
|
||||||
|
- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
|
||||||
|
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
|
||||||
|
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
|
||||||
|
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
|
||||||
|
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
|
||||||
|
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
|
||||||
|
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)
|
||||||
83
CHANGELOG/4.1.2.md
Normal file
83
CHANGELOG/4.1.2.md
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
<!--
|
||||||
|
Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
or more contributor license agreements. See the NOTICE file
|
||||||
|
distributed with this work for additional information
|
||||||
|
regarding copyright ownership. The ASF licenses this file
|
||||||
|
to you under the Apache License, Version 2.0 (the
|
||||||
|
"License"); you may not use this file except in compliance
|
||||||
|
with the License. You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing,
|
||||||
|
software distributed under the License is distributed on an
|
||||||
|
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
KIND, either express or implied. See the License for the
|
||||||
|
specific language governing permissions and limitations
|
||||||
|
under the License.
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Change Log
|
||||||
|
|
||||||
|
### 4.1.2 (Fri Mar 7 13:28:05 2025 -0800)
|
||||||
|
|
||||||
|
**Database Migrations**
|
||||||
|
|
||||||
|
- [#32538](https://github.com/apache/superset/pull/32538) fix(migrations): Handle comparator None in old time comparison migration (@Antonio-RiveroMartnez)
|
||||||
|
- [#32155](https://github.com/apache/superset/pull/32155) fix(migrations): Handle no params in time comparison migration (@Antonio-RiveroMartnez)
|
||||||
|
- [#31185](https://github.com/apache/superset/pull/31185) fix: check for column before adding in migrations (@betodealmeida)
|
||||||
|
|
||||||
|
**Features**
|
||||||
|
|
||||||
|
- [#29974](https://github.com/apache/superset/pull/29974) feat(sqllab): Adds refresh button to table metadata in SQL Lab (@Usiel)
|
||||||
|
|
||||||
|
**Fixes**
|
||||||
|
|
||||||
|
- [#32515](https://github.com/apache/superset/pull/32515) fix(sqllab): Allow clear on schema and catalog (@justinpark)
|
||||||
|
- [#32500](https://github.com/apache/superset/pull/32500) fix: dashboard, chart and dataset import validation (@dpgaspar)
|
||||||
|
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
|
||||||
|
- [#31407](https://github.com/apache/superset/pull/31407) fix: Big Number side cut fixed (@fardin-developer)
|
||||||
|
- [#31480](https://github.com/apache/superset/pull/31480) fix(sunburst): Use metric label from verbose map (@gerbermichi)
|
||||||
|
- [#31427](https://github.com/apache/superset/pull/31427) fix(tags): clean up bulk create api and schema (@villebro)
|
||||||
|
- [#31334](https://github.com/apache/superset/pull/31334) fix(docs): add custom editUrl path for intro page (@dwgrossberg)
|
||||||
|
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
|
||||||
|
- [#31323](https://github.com/apache/superset/pull/31323) fix: Use clickhouse sqlglot dialect for YDB (@vgvoleg)
|
||||||
|
- [#31198](https://github.com/apache/superset/pull/31198) fix: add more clickhouse disallowed functions on config (@dpgaspar)
|
||||||
|
- [#31194](https://github.com/apache/superset/pull/31194) fix(embedded): Hide anchor links in embedded mode (@Vitor-Avila)
|
||||||
|
- [#31960](https://github.com/apache/superset/pull/31960) fix(sqllab): Missing allowHTML props in ResultTableExtension (@justinpark)
|
||||||
|
- [#31332](https://github.com/apache/superset/pull/31332) fix: prevent multiple pvm errors on migration (@eschutho)
|
||||||
|
- [#31437](https://github.com/apache/superset/pull/31437) fix(database import): Gracefully handle error to get catalog schemas (@Vitor-Avila)
|
||||||
|
- [#31173](https://github.com/apache/superset/pull/31173) fix: cache-warmup fails (@nsivarajan)
|
||||||
|
- [#30442](https://github.com/apache/superset/pull/30442) fix(fe/src/dashboard): optional chaining for possibly nullable parent attribute in LayoutItem type (@hainenber)
|
||||||
|
- [#31639](https://github.com/apache/superset/pull/31639) fix(sqllab): unable to update saved queries (@DamianPendrak)
|
||||||
|
- [#29898](https://github.com/apache/superset/pull/29898) fix: parse pandas pivot null values (@eschutho)
|
||||||
|
- [#31414](https://github.com/apache/superset/pull/31414) fix(Pivot Table): Fix column width to respect currency config (@Vitor-Avila)
|
||||||
|
- [#31335](https://github.com/apache/superset/pull/31335) fix(histogram): axis margin padding consistent with other graphs (@tatiana-cherne)
|
||||||
|
- [#31301](https://github.com/apache/superset/pull/31301) fix(AllEntitiesTable): show Tags (@alexandrusoare)
|
||||||
|
- [#31329](https://github.com/apache/superset/pull/31329) fix: pass string to `process_template` (@betodealmeida)
|
||||||
|
- [#31341](https://github.com/apache/superset/pull/31341) fix(pinot): remove query aliases from SELECT and ORDER BY clauses in Pinot (@yuribogomolov)
|
||||||
|
- [#31308](https://github.com/apache/superset/pull/31308) fix: annotations on horizontal bar chart (@DamianPendrak)
|
||||||
|
- [#31294](https://github.com/apache/superset/pull/31294) fix(sqllab): Remove update_saved_query_exec_info to reduce lag (@justinpark)
|
||||||
|
- [#30897](https://github.com/apache/superset/pull/30897) fix: Exception handling for SQL Lab views (@michael-s-molina)
|
||||||
|
- [#31199](https://github.com/apache/superset/pull/31199) fix(Databricks): Escape catalog and schema names in pre-queries (@Vitor-Avila)
|
||||||
|
- [#31265](https://github.com/apache/superset/pull/31265) fix(trino): db session error in handle cursor (@justinpark)
|
||||||
|
- [#31024](https://github.com/apache/superset/pull/31024) fix(dataset): use sqlglot for DML check (@betodealmeida)
|
||||||
|
- [#29885](https://github.com/apache/superset/pull/29885) fix: add mutator to get_columns_description (@eschutho)
|
||||||
|
- [#30821](https://github.com/apache/superset/pull/30821) fix: x axis title disappears when editing bar chart (@DamianPendrak)
|
||||||
|
- [#31181](https://github.com/apache/superset/pull/31181) fix: Time-series Line Chart Display unnecessary total (@michael-s-molina)
|
||||||
|
- [#31163](https://github.com/apache/superset/pull/31163) fix(Dashboard): Backward compatible shared_label_colors field (@geido)
|
||||||
|
- [#31156](https://github.com/apache/superset/pull/31156) fix: check orderby (@betodealmeida)
|
||||||
|
- [#31154](https://github.com/apache/superset/pull/31154) fix: Remove unwanted commit on Trino's handle_cursor (@michael-s-molina)
|
||||||
|
- [#31151](https://github.com/apache/superset/pull/31151) fix: Revert "feat(trino): Add functionality to upload data (#29164)" (@michael-s-molina)
|
||||||
|
- [#31031](https://github.com/apache/superset/pull/31031) fix(Dashboard): Ensure shared label colors are updated (@geido)
|
||||||
|
- [#30967](https://github.com/apache/superset/pull/30967) fix(release validation): scripts now support RSA and EDDSA keys. (@rusackas)
|
||||||
|
- [#30881](https://github.com/apache/superset/pull/30881) fix(Dashboard): Native & Cross-Filters Scoping Performance (@geido)
|
||||||
|
- [#30887](https://github.com/apache/superset/pull/30887) fix(imports): import query_context for imports with charts (@lindenh)
|
||||||
|
- [#31008](https://github.com/apache/superset/pull/31008) fix(explore): verified props is not updated (@justinpark)
|
||||||
|
- [#30646](https://github.com/apache/superset/pull/30646) fix(Dashboard): Retain colors when color scheme not set (@geido)
|
||||||
|
- [#30962](https://github.com/apache/superset/pull/30962) fix(Dashboard): Exclude edit param in async screenshot (@geido)
|
||||||
|
|
||||||
|
**Others**
|
||||||
|
|
||||||
|
- [#32043](https://github.com/apache/superset/pull/32043) chore: Skip the creation of secondary perms during catalog migrations (@Vitor-Avila)
|
||||||
|
- [#30865](https://github.com/apache/superset/pull/30865) docs: Updating 4.1 Release Notes (@yousoph)
|
||||||
329
Dockerfile
329
Dockerfile
@@ -18,46 +18,52 @@
|
|||||||
######################################################################
|
######################################################################
|
||||||
# Node stage to deal with static asset construction
|
# Node stage to deal with static asset construction
|
||||||
######################################################################
|
######################################################################
|
||||||
ARG PY_VER=3.10-slim-bookworm
|
ARG PY_VER=3.11.11-slim-bookworm
|
||||||
|
|
||||||
# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
|
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
|
||||||
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
|
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
|
||||||
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
|
|
||||||
|
|
||||||
ARG NPM_BUILD_CMD="build"
|
# Include translations in the final build
|
||||||
|
|
||||||
# Include translations in the final build. The default supports en only to
|
|
||||||
# reduce complexity and weight for those only using en
|
|
||||||
ARG BUILD_TRANSLATIONS="false"
|
ARG BUILD_TRANSLATIONS="false"
|
||||||
|
|
||||||
# Used by docker-compose to skip the frontend build,
|
######################################################################
|
||||||
# in dev we mount the repo and build the frontend inside docker
|
# superset-node-ci used as a base for building frontend assets and CI
|
||||||
ARG DEV_MODE="false"
|
######################################################################
|
||||||
|
FROM --platform=${BUILDPLATFORM} node:20-bookworm-slim AS superset-node-ci
|
||||||
|
ARG BUILD_TRANSLATIONS
|
||||||
|
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
|
||||||
|
ARG DEV_MODE="false" # Skip frontend build in dev mode
|
||||||
|
ENV DEV_MODE=${DEV_MODE}
|
||||||
|
|
||||||
# Include headless browsers? Allows for alerts, reports & thumbnails, but bloats the images
|
COPY docker/ /app/docker/
|
||||||
ARG INCLUDE_CHROMIUM="true"
|
# Arguments for build configuration
|
||||||
ARG INCLUDE_FIREFOX="false"
|
ARG NPM_BUILD_CMD="build"
|
||||||
|
|
||||||
# Somehow we need python3 + build-essential on this side of the house to install node-gyp
|
# Install system dependencies required for node-gyp
|
||||||
RUN apt-get update -qq \
|
RUN /app/docker/apt-install.sh build-essential python3 zstd
|
||||||
&& apt-get install \
|
|
||||||
-yqq --no-install-recommends \
|
|
||||||
build-essential \
|
|
||||||
python3 \
|
|
||||||
zstd
|
|
||||||
|
|
||||||
|
# Define environment variables for frontend build
|
||||||
ENV BUILD_CMD=${NPM_BUILD_CMD} \
|
ENV BUILD_CMD=${NPM_BUILD_CMD} \
|
||||||
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
|
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
|
||||||
# NPM ci first, as to NOT invalidate previous steps except for when package.json changes
|
|
||||||
|
|
||||||
RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \
|
# Run the frontend memory monitoring script
|
||||||
/frontend-mem-nag.sh
|
RUN /app/docker/frontend-mem-nag.sh
|
||||||
|
|
||||||
WORKDIR /app/superset-frontend
|
WORKDIR /app/superset-frontend
|
||||||
# Creating empty folders to avoid errors when running COPY later on
|
|
||||||
RUN mkdir -p /app/superset/static/assets
|
# Create necessary folders to avoid errors in subsequent steps
|
||||||
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
|
RUN mkdir -p /app/superset/static/assets \
|
||||||
--mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
|
/app/superset/translations
|
||||||
|
|
||||||
|
# Mount package files and install dependencies if not in dev mode
|
||||||
|
# NOTE: we mount packages and plugins as they are referenced in package.json as workspaces
|
||||||
|
# ideally we'd COPY only their package.json. Here npm ci will be cached as long
|
||||||
|
# as the full content of these folders don't change, yielding a decent cache reuse rate.
|
||||||
|
# Note that's it's not possible selectively COPY of mount using blobs.
|
||||||
|
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
|
||||||
|
--mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
|
||||||
|
--mount=type=cache,target=/root/.cache \
|
||||||
|
--mount=type=cache,target=/root/.npm \
|
||||||
if [ "$DEV_MODE" = "false" ]; then \
|
if [ "$DEV_MODE" = "false" ]; then \
|
||||||
npm ci; \
|
npm ci; \
|
||||||
else \
|
else \
|
||||||
@@ -66,161 +72,192 @@ RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json
|
|||||||
|
|
||||||
# Runs the webpack build process
|
# Runs the webpack build process
|
||||||
COPY superset-frontend /app/superset-frontend
|
COPY superset-frontend /app/superset-frontend
|
||||||
# This copies the .po files needed for translation
|
|
||||||
RUN mkdir -p /app/superset/translations
|
######################################################################
|
||||||
COPY superset/translations /app/superset/translations
|
# superset-node used for compile frontend assets
|
||||||
RUN if [ "$DEV_MODE" = "false" ]; then \
|
######################################################################
|
||||||
BUILD_TRANSLATIONS=$BUILD_TRANSLATIONS npm run ${BUILD_CMD}; \
|
FROM superset-node-ci AS superset-node
|
||||||
|
|
||||||
|
# Build the frontend if not in dev mode
|
||||||
|
RUN --mount=type=cache,target=/root/.npm \
|
||||||
|
if [ "$DEV_MODE" = "false" ]; then \
|
||||||
|
echo "Running 'npm run ${BUILD_CMD}'"; \
|
||||||
|
npm run ${BUILD_CMD}; \
|
||||||
else \
|
else \
|
||||||
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
|
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
|
||||||
fi
|
fi;
|
||||||
|
|
||||||
|
# Copy translation files
|
||||||
|
COPY superset/translations /app/superset/translations
|
||||||
|
|
||||||
# Compiles .json files from the .po files, then deletes the .po files
|
# Build the frontend if not in dev mode
|
||||||
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
|
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
|
||||||
npm run build-translation; \
|
npm run build-translation; \
|
||||||
else \
|
fi; \
|
||||||
echo "Skipping translations as requested by build flag"; \
|
rm -rf /app/superset/translations/*/*/*.po; \
|
||||||
fi
|
rm -rf /app/superset/translations/*/*/*.mo;
|
||||||
RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
|
|
||||||
RUN rm /app/superset/translations/messages.pot
|
|
||||||
|
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Base python layer
|
||||||
|
######################################################################
|
||||||
FROM python:${PY_VER} AS python-base
|
FROM python:${PY_VER} AS python-base
|
||||||
######################################################################
|
|
||||||
# Final lean image...
|
|
||||||
######################################################################
|
|
||||||
FROM python-base AS lean
|
|
||||||
|
|
||||||
# Include translations in the final build. The default supports en only to
|
ARG SUPERSET_HOME="/app/superset_home"
|
||||||
# reduce complexity and weight for those only using en
|
ENV SUPERSET_HOME=${SUPERSET_HOME}
|
||||||
ARG BUILD_TRANSLATIONS="false"
|
|
||||||
|
|
||||||
WORKDIR /app
|
RUN mkdir -p $SUPERSET_HOME
|
||||||
ENV LANG=C.UTF-8 \
|
RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
|
||||||
LC_ALL=C.UTF-8 \
|
&& chmod -R 1777 $SUPERSET_HOME \
|
||||||
SUPERSET_ENV=production \
|
&& chown -R superset:superset $SUPERSET_HOME
|
||||||
|
|
||||||
|
# Some bash scripts needed throughout the layers
|
||||||
|
COPY --chmod=755 docker/*.sh /app/docker/
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir --upgrade uv
|
||||||
|
|
||||||
|
# Using uv as it's faster/simpler than pip
|
||||||
|
RUN uv venv /app/.venv
|
||||||
|
ENV PATH="/app/.venv/bin:${PATH}"
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Python translation compiler layer
|
||||||
|
######################################################################
|
||||||
|
FROM python-base AS python-translation-compiler
|
||||||
|
|
||||||
|
ARG BUILD_TRANSLATIONS
|
||||||
|
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
|
||||||
|
|
||||||
|
# Install Python dependencies using docker/pip-install.sh
|
||||||
|
COPY requirements/translations.txt requirements/
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||||
|
. /app/.venv/bin/activate && /app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
|
||||||
|
|
||||||
|
COPY superset/translations/ /app/translations_mo/
|
||||||
|
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
|
||||||
|
pybabel compile -d /app/translations_mo | true; \
|
||||||
|
fi; \
|
||||||
|
rm -f /app/translations_mo/*/*/*.po; \
|
||||||
|
rm -f /app/translations_mo/*/*/*.json;
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Python APP common layer
|
||||||
|
######################################################################
|
||||||
|
FROM python-base AS python-common
|
||||||
|
|
||||||
|
ENV SUPERSET_HOME="/app/superset_home" \
|
||||||
|
HOME="/app/superset_home" \
|
||||||
|
SUPERSET_ENV="production" \
|
||||||
FLASK_APP="superset.app:create_app()" \
|
-    FLASK_APP="superset.app:create_app()" \
     PYTHONPATH="/app/pythonpath" \
-    SUPERSET_HOME="/app/superset_home" \
+    SUPERSET_PORT="8088"
-    SUPERSET_PORT=8088

-RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \
-    && useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
-    && apt-get update -qq && apt-get install -yqq --no-install-recommends \
+# Copy the entrypoints, make them executable in userspace
+COPY --chmod=755 docker/entrypoints /app/docker/entrypoints
+
+WORKDIR /app
+
+# Set up necessary directories and user
+RUN mkdir -p \
+    ${PYTHONPATH} \
+    superset/static \
+    requirements \
+    superset-frontend \
+    apache_superset.egg-info \
+    requirements \
+    && touch superset/static/version_info.json
+
+# Install Playwright and optionally setup headless browsers
+ARG INCLUDE_CHROMIUM="true"
+ARG INCLUDE_FIREFOX="false"
+RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
+    if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
+        uv pip install playwright && \
+        playwright install-deps && \
+        if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
+        if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
+    else \
+        echo "Skipping browser installation"; \
+    fi
+
+# Copy required files for Python build
+COPY pyproject.toml setup.py MANIFEST.in README.md ./
+COPY superset-frontend/package.json superset-frontend/
+COPY scripts/check-env.py scripts/
+
+# keeping for backward compatibility
+COPY --chmod=755 ./docker/entrypoints/run-server.sh /usr/bin/
+
+# Some debian libs
+RUN /app/docker/apt-install.sh \
     curl \
     libsasl2-dev \
     libsasl2-modules-gssapi-mit \
     libpq-dev \
     libecpg-dev \
-    libldap2-dev \
+    libldap2-dev
-    && touch superset/static/version_info.json \
-    && chown -R superset:superset ./* \
-    && rm -rf /var/lib/apt/lists/*

-COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
-# setup.py uses the version information in package.json
-COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
-COPY --chown=superset:superset requirements/base.txt requirements/
-COPY --chown=superset:superset scripts/check-env.py scripts/
-RUN --mount=type=cache,target=/root/.cache/pip \
-    apt-get update -qq && apt-get install -yqq --no-install-recommends \
-    build-essential \
-    && pip install --no-cache-dir --upgrade setuptools pip \
-    && pip install --no-cache-dir -r requirements/base.txt \
-    && apt-get autoremove -yqq --purge build-essential \
-    && rm -rf /var/lib/apt/lists/*
+# Copy compiled things from previous stages
+COPY --from=superset-node /app/superset/static/assets superset/static/assets

-# Copy the compiled frontend assets
-COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
+# TODO, when the next version comes out, use --exclude superset/translations
+COPY superset superset
+
+# TODO in the meantime, remove the .po files
+RUN rm superset/translations/*/*/*.po

-## Lastly, let's install superset itself
-COPY --chown=superset:superset superset superset
-RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install --no-cache-dir -e .
-
-# Copy the .json translations from the frontend layer
-COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations
-
-# Compile translations for the backend - this generates .mo files, then deletes the .po files
-COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
-RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
-        ./scripts/translations/generate_mo_files.sh \
-        && chown -R superset:superset superset/translations \
-        && rm superset/translations/messages.pot \
-        && rm superset/translations/*/LC_MESSAGES/*.po; \
-    else \
-        echo "Skipping translations as requested by build flag"; \
-    fi
-
-COPY --chmod=755 ./docker/run-server.sh /usr/bin/
-USER superset
-
-HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
+# Merging translations from backend and frontend stages
+COPY --from=superset-node /app/superset/translations superset/translations
+COPY --from=python-translation-compiler /app/translations_mo superset/translations
+
+HEALTHCHECK CMD /app/docker/docker-healthcheck.sh
+CMD ["/app/docker/entrypoints/run-server.sh"]
EXPOSE ${SUPERSET_PORT}

-CMD ["/usr/bin/run-server.sh"]
+######################################################################
+# Final lean image...
+######################################################################
+FROM python-common AS lean
+
+# Install Python dependencies using docker/pip-install.sh
+COPY requirements/base.txt requirements/
+RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
+    /app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
+# Install the superset package
+RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
+    uv pip install .
+RUN python -m compileall /app/superset
+
+USER superset

######################################################################
# Dev image...
######################################################################
-FROM lean AS dev
+FROM python-common AS dev

-USER root
-RUN apt-get update -qq \
-    && apt-get install -yqq --no-install-recommends \
-    libnss3 \
-    libdbus-glib-1-2 \
-    libgtk-3-0 \
-    libx11-xcb1 \
-    libasound2 \
-    libxtst6 \
+# Debian libs needed for dev
+RUN /app/docker/apt-install.sh \
     git \
     pkg-config \
-    && rm -rf /var/lib/apt/lists/*
+    default-libmysqlclient-dev

-RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install --no-cache-dir playwright
-RUN playwright install-deps
+# Copy development requirements and install them
+COPY requirements/*.txt requirements/
+# Install Python dependencies using docker/pip-install.sh
+RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
+    /app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
+# Install the superset package
+RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
+    uv pip install .

-RUN if [ "$INCLUDE_CHROMIUM" = "true" ]; then \
-        playwright install chromium; \
-    else \
-        echo "Skipping translations in dev mode"; \
-    fi
-
-# Install GeckoDriver WebDriver
-ARG GECKODRIVER_VERSION=v0.34.0 \
-    FIREFOX_VERSION=125.0.3
-
-RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
-        apt-get update -qq \
-        && apt-get install -yqq --no-install-recommends wget bzip2 \
-        && wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
-        && wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
-        && ln -s /opt/firefox/firefox /usr/local/bin/firefox \
-        && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \
-    fi
-
-# Installing mysql client os-level dependencies in dev image only because GPL
-RUN apt-get install -yqq --no-install-recommends \
-    default-libmysqlclient-dev \
-    && rm -rf /var/lib/apt/lists/*
-
-COPY --chown=superset:superset requirements/development.txt requirements/
-RUN --mount=type=cache,target=/root/.cache/pip \
-    apt-get update -qq && apt-get install -yqq --no-install-recommends \
-    build-essential \
-    && pip install --no-cache-dir -r requirements/development.txt \
-    && apt-get autoremove -yqq --purge build-essential \
-    && rm -rf /var/lib/apt/lists/*
+RUN uv pip install .[postgres]
+RUN python -m compileall /app/superset

USER superset

######################################################################
# CI image...
######################################################################
FROM lean AS ci

-COPY --chown=superset:superset --chmod=755 ./docker/*.sh /app/docker/
-CMD ["/app/docker/docker-ci.sh"]
+USER root
+RUN uv pip install .[postgres]
+USER superset
+CMD ["/app/docker/entrypoints/docker-ci.sh"]
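For context, a minimal sketch of how the stages and build arguments appearing in this Dockerfile diff might be exercised locally; the image tag is illustrative and not part of the change itself:

```bash
# Sketch only: build the lean production stage without bundling a headless browser.
# INCLUDE_CHROMIUM / INCLUDE_FIREFOX are the ARGs introduced above; the tag is illustrative.
docker build \
  --target lean \
  --build-arg INCLUDE_CHROMIUM=false \
  --build-arg INCLUDE_FIREFOX=false \
  -t superset-local:lean .

# The dev and ci stages introduced above can be selected the same way, e.g. --target dev.
```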
Makefile (3 changed lines)
@@ -87,9 +87,6 @@ format: py-format js-format
py-format: pre-commit
	pre-commit run black --all-files

-py-lint: pre-commit
-	pylint -j 0 superset
-
js-format:
	cd superset-frontend; npm run prettier
README.md (19 changed lines)
@@ -20,11 +20,11 @@ under the License.
# Superset

[](https://opensource.org/license/apache-2-0)
-[](https://github.com/apache/superset/tree/latest)
+[](https://github.com/apache/superset/releases/latest)
[](https://github.com/apache/superset/actions)
-[](https://badge.fury.io/py/apache-superset)
+[](https://badge.fury.io/py/apache_superset)
[](https://codecov.io/github/apache/superset)
-[](https://pypi.python.org/pypi/apache-superset)
+[](https://pypi.python.org/pypi/apache_superset)
[](http://bit.ly/join-superset-slack)
[](https://superset.apache.org)

@@ -72,8 +72,10 @@ Superset provides:
## Screenshots & Gifs

**Video Overview**

<!-- File hosted here https://github.com/apache/superset-site/raw/lfs/superset-video-4k.mp4 -->
-[superset-video-4k.webm](https://github.com/apache/superset/assets/812905/da036bc2-150c-4ee7-80f9-75e63210ff76)
+[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6)

<br/>

@@ -136,6 +138,8 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
+<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
+<img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
</p>

**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).

@@ -144,7 +148,7 @@ Want to add support for your datastore or data engine? Read more [here](https://

## Installation and Configuration

-[Extended documentation for Superset](https://superset.apache.org/docs/installation/docker-compose)
+Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) guide or learn about [the options for production deployments](https://superset.apache.org/docs/installation/architecture/).

## Get Involved

@@ -185,10 +189,12 @@ Understanding the Superset Points of View
- [Resources to master Superset by Preset](https://preset.io/resources/)

- Deploying Superset

  - [Official Docker image](https://hub.docker.com/r/apache/superset)
  - [Helm Chart](https://github.com/apache/superset/tree/master/helm/superset)

- Recordings of Past [Superset Community Events](https://preset.io/events)

  - [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/)
  - [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/)
  - [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/)

@@ -196,6 +202,7 @@ Understanding the Superset Points of View
  - [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/)

- Visualizations

  - [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/)
  - [Managing and Deploying Custom Viz Plugins](https://medium.com/nmc-techblog/apache-superset-manage-custom-viz-plugins-in-production-9fde1a708e55)
  - [Why Apache Superset is Betting on Apache ECharts](https://preset.io/blog/2021-4-1-why-echarts/)
@@ -30,12 +30,12 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
-    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium
+    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd

# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
-    curl -sL https://deb.nodesource.com/setup_18.x | bash -; \
+    curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
    apt-get install -y nodejs; \
    node --version;
RUN if ! which npm; then apt-get install -y npm; fi

@@ -50,13 +50,13 @@ ARG SUPERSET_RELEASE_RC_TARBALL
# Can fetch source from svn or copy tarball from local mounted directory
COPY $SUPERSET_RELEASE_RC_TARBALL ./
RUN tar -xvf *.tar.gz
-WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend
+WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend

RUN npm ci \
    && npm run build \
    && rm -rf node_modules

-WORKDIR /home/superset/apache-superset-$VERSION
+WORKDIR /home/superset/apache_superset-$VERSION
RUN pip install --upgrade setuptools pip \
    && pip install -r requirements/base.txt \
    && pip install --no-cache-dir .

@@ -64,7 +64,7 @@ RUN pip install --upgrade setuptools pip \
RUN flask fab babel-compile --target superset/translations

ENV PATH=/home/superset/superset/bin:$PATH \
-    PYTHONPATH=/home/superset/superset/:$PYTHONPATH \
+    PYTHONPATH=/home/superset/superset/ \
    SUPERSET_TESTENV=true
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
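Given the `ARG SUPERSET_RELEASE_RC_TARBALL` and the `$VERSION`-based paths shown above, building this validation image against a locally downloaded RC tarball would look roughly like the sketch below; the version string, file name, and tag are placeholders, and a `VERSION` build arg is assumed:

```bash
# Sketch only: the RC tarball is assumed to sit in the build context.
docker build \
  --build-arg VERSION=1.5.1rc1 \
  --build-arg SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz \
  -t superset-rc-from-tarball .
```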
@@ -29,13 +29,16 @@ RUN apt-get install -y apt-transport-https apt-utils

# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
-RUN apt-get install -y build-essential libssl-dev \
-    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium
+RUN apt-get install -y subversion build-essential libssl-dev \
+    libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd

# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
-RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \
-    && apt-get install -y nodejs
+RUN set -eux; \
+    curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
+    apt-get install -y nodejs; \
+    node --version;
+RUN if ! which npm; then apt-get install -y npm; fi

RUN mkdir -p /home/superset
RUN chown superset /home/superset

@@ -46,15 +49,13 @@ ARG VERSION
# Can fetch source from svn or copy tarball from local mounted directory
RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./
RUN tar -xvf *.tar.gz
-WORKDIR apache-superset-$VERSION
+WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend

-RUN cd superset-frontend \
-    && npm ci \
+RUN npm ci \
    && npm run build \
    && rm -rf node_modules

-WORKDIR /home/superset/apache-superset-$VERSION
+WORKDIR /home/superset/apache_superset-$VERSION
RUN pip install --upgrade setuptools pip \
    && pip install -r requirements/base.txt \
    && pip install --no-cache-dir .

@@ -62,6 +63,6 @@ RUN pip install --upgrade setuptools pip \
RUN flask fab babel-compile --target superset/translations

ENV PATH=/home/superset/superset/bin:$PATH \
-    PYTHONPATH=/home/superset/superset/:$PYTHONPATH
+    PYTHONPATH=/home/superset/superset/
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
@@ -123,10 +123,10 @@ SUPERSET_RC=1
SUPERSET_GITHUB_BRANCH=1.5
SUPERSET_PGP_FULLNAME=villebro@apache.org
SUPERSET_VERSION_RC=1.5.1rc1
-SUPERSET_RELEASE=apache-superset-1.5.1
+SUPERSET_RELEASE=apache_superset-1.5.1
-SUPERSET_RELEASE_RC=apache-superset-1.5.1rc1
+SUPERSET_RELEASE_RC=apache_superset-1.5.1rc1
-SUPERSET_RELEASE_TARBALL=apache-superset-1.5.1-source.tar.gz
+SUPERSET_RELEASE_TARBALL=apache_superset-1.5.1-source.tar.gz
-SUPERSET_RELEASE_RC_TARBALL=apache-superset-1.5.1rc1-source.tar.gz
+SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz
SUPERSET_TMP_ASF_SITE_PATH=/tmp/incubator-superset-site-1.5.1
-------------------------------
```

@@ -380,7 +380,7 @@ Official instructions:
https://www.apache.org/info/verification.html

We now have a handy script for anyone validating a release to use. The core of it is in this very folder, `verify_release.py`. Just make sure you have all three release files in the same directory (`{some version}.tar.gz`, `{some version}.tar.gz.asc` and `{some version}tar.gz.sha512`). Then you can pass this script the path to the `.gz` file like so:
-`python verify_release.py ~/path/tp/apache-superset-{version/candidate}-source.tar.gz`
+`python verify_release.py ~/path/tp/apache_superset-{version/candidate}-source.tar.gz`

If all goes well, you will see this result in your terminal:

@@ -452,10 +452,13 @@ cd ../

# Compile translations for the backend
-./scripts/translations/generate_po_files.sh
+./scripts/translations/generate_mo_files.sh

+# update build version number
+sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py
+
# build the python distribution
-python -m build
+python setup.py sdist
```

Publish to PyPI

@@ -467,7 +470,7 @@ while requesting access to push packages.

```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
-twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
+twine upload dist/apache_superset-${SUPERSET_VERSION}.tar.gz
```

Set your username to `__token__`
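Tying the verification instructions above together, a concrete run of `verify_release.py` would look roughly like this; the version string is purely illustrative:

```bash
# The three release artifacts are assumed to sit in the current directory (names illustrative).
ls
# apache_superset-2.1.0rc1-source.tar.gz
# apache_superset-2.1.0rc1-source.tar.gz.asc
# apache_superset-2.1.0rc1-source.tar.gz.sha512

python verify_release.py apache_superset-2.1.0rc1-source.tar.gz
```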
@@ -232,8 +232,7 @@ class GitChangeLog:
        for log in self._logs:
            yield {
                "pr_number": log.pr_number,
-                "pr_link": f"https://github.com/{SUPERSET_REPO}/pull/"
-                f"{log.pr_number}",
+                "pr_link": f"https://github.com/{SUPERSET_REPO}/pull/{log.pr_number}",
                "message": log.message,
                "time": log.time,
                "author": log.author,

@@ -272,14 +271,14 @@ class GitLogs:

    @staticmethod
    def _git_get_current_head() -> str:
-        output = os.popen("git status | head -1").read()
+        output = os.popen("git status | head -1").read()  # noqa: S605, S607
        match = re.match("(?:HEAD detached at|On branch) (.*)", output)
        if not match:
            return ""
        return match.group(1)

    def _git_checkout(self, git_ref: str) -> None:
-        os.popen(f"git checkout {git_ref}").read()
+        os.popen(f"git checkout {git_ref}").read()  # noqa: S605
        current_head = self._git_get_current_head()
        if current_head != git_ref:
            print(f"Could not checkout {git_ref}")

@@ -290,7 +289,7 @@ class GitLogs:
        current_git_ref = self._git_get_current_head()
        self._git_checkout(self._git_ref)
        output = (
-            os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')
+            os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')  # noqa: S605, S607
            .read()
            .split("\n")
        )

@@ -323,9 +322,9 @@ class BaseParameters:


def print_title(message: str) -> None:
-    print(f"{50*'-'}")
+    print(f"{50 * '-'}")
    print(message)
-    print(f"{50*'-'}")
+    print(f"{50 * '-'}")


@click.group()

@@ -349,14 +348,14 @@ def compare(base_parameters: BaseParameters) -> None:
    previous_logs = base_parameters.previous_logs
    current_logs = base_parameters.current_logs
    print_title(
-        f"Pull requests from " f"{current_logs.git_ref} not in {previous_logs.git_ref}"
+        f"Pull requests from {current_logs.git_ref} not in {previous_logs.git_ref}"
    )
    previous_diff_logs = previous_logs.diff(current_logs)
    for diff_log in previous_diff_logs:
        print(f"{diff_log}")

    print_title(
-        f"Pull requests from " f"{previous_logs.git_ref} not in {current_logs.git_ref}"
+        f"Pull requests from {previous_logs.git_ref} not in {current_logs.git_ref}"
    )
    current_diff_logs = current_logs.diff(previous_logs)
    for diff_log in current_diff_logs:
@@ -31,7 +31,7 @@ The official source release:
https://downloads.apache.org/{{ project_module }}/{{ version }}

The PyPI package:
-https://pypi.org/project/apache-superset/{{ version }}
+https://pypi.org/project/apache_superset/{{ version }}

The CHANGELOG for the release:
https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG/{{ version }}.md
@@ -31,7 +31,7 @@ except ModuleNotFoundError:
RECEIVER_EMAIL = "dev@superset.apache.org"
PROJECT_NAME = "Superset"
PROJECT_MODULE = "superset"
-PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."
+PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."  # noqa: E501


def string_comma_to_list(message: str) -> list[str]:
@@ -32,7 +32,7 @@ else
    SUPERSET_VERSION="${1}"
    SUPERSET_RC="${2}"
    SUPERSET_PGP_FULLNAME="${3}"
-    SUPERSET_RELEASE_RC_TARBALL="apache-superset-${SUPERSET_VERSION_RC}-source.tar.gz"
+    SUPERSET_RELEASE_RC_TARBALL="apache_superset-${SUPERSET_VERSION_RC}-source.tar.gz"
fi

SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${SUPERSET_RC}"
@@ -22,7 +22,7 @@ if [ -z "${SUPERSET_VERSION_RC}" ] || [ -z "${SUPERSET_SVN_DEV_PATH}" ] || [ -z
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
|
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
|
||||||
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
|
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
|
||||||
SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}"
|
SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}"
|
||||||
SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}"
|
SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}"
|
||||||
|
|||||||
@@ -50,8 +50,8 @@ else
|
|||||||
export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}"
|
export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}"
|
||||||
export SUPERSET_PGP_FULLNAME="${2}"
|
export SUPERSET_PGP_FULLNAME="${2}"
|
||||||
export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}"
|
export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}"
|
||||||
export SUPERSET_RELEASE=apache-superset-"${SUPERSET_VERSION}"
|
export SUPERSET_RELEASE=apache_superset-"${SUPERSET_VERSION}"
|
||||||
export SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
|
export SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
|
||||||
export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz
|
export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz
|
||||||
export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
|
export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
|
||||||
export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}"
|
export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}"
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ if [ -z "${SUPERSET_SVN_DEV_PATH}" ]; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then
|
if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then
|
||||||
SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
|
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
|
||||||
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
|
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
|
||||||
SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL}
|
SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL}
|
||||||
SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz
|
SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz
|
||||||
|
|||||||
@@ -38,7 +38,7 @@ get_pip_command() {
|
|||||||
PYTHON=$(get_python_command)
|
PYTHON=$(get_python_command)
|
||||||
PIP=$(get_pip_command)
|
PIP=$(get_pip_command)
|
||||||
|
|
||||||
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache-superset-x.x.xrcx-source.tar.gz`
|
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache_superset-x.x.xrcx-source.tar.gz`
|
||||||
RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz"
|
RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz"
|
||||||
|
|
||||||
# Install dependencies from requirements.txt if the file exists
|
# Install dependencies from requirements.txt if the file exists
|
||||||
|
|||||||
@@ -23,12 +23,12 @@ from typing import Optional
|
|||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`
|
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512` # noqa: E501
|
||||||
|
|
||||||
|
|
||||||
def get_sha512_hash(filename: str) -> str:
|
def get_sha512_hash(filename: str) -> str:
|
||||||
"""Run the shasum command on the file and return the SHA512 hash."""
|
"""Run the shasum command on the file and return the SHA512 hash."""
|
||||||
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)
|
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE) # noqa: S603, S607
|
||||||
sha512_hash = result.stdout.decode().split()[0]
|
sha512_hash = result.stdout.decode().split()[0]
|
||||||
return sha512_hash
|
return sha512_hash
|
||||||
|
|
||||||
@@ -43,7 +43,7 @@ def read_sha512_file(filename: str) -> str:
|
|||||||
|
|
||||||
|
|
||||||
def verify_sha512(filename: str) -> str:
|
def verify_sha512(filename: str) -> str:
|
||||||
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""
|
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file.""" # noqa: E501
|
||||||
sha512_hash = get_sha512_hash(filename)
|
sha512_hash = get_sha512_hash(filename)
|
||||||
sha512_file_content = read_sha512_file(filename)
|
sha512_file_content = read_sha512_file(filename)
|
||||||
|
|
||||||
@@ -53,14 +53,15 @@ def verify_sha512(filename: str) -> str:
|
|||||||
return "SHA failed"
|
return "SHA failed"
|
||||||
|
|
||||||
|
|
||||||
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file
|
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file # noqa: E501
|
||||||
|
|
||||||
|
|
||||||
def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
|
def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
|
||||||
"""Run the GPG verify command and extract RSA key and email address."""
|
"""Run the GPG verify command and extract RSA key and email address."""
|
||||||
asc_filename = filename + ".asc"
|
asc_filename = filename + ".asc"
|
||||||
result = subprocess.run(
|
result = subprocess.run( # noqa: S603
|
||||||
["gpg", "--verify", asc_filename, filename], capture_output=True
|
["gpg", "--verify", asc_filename, filename], # noqa: S607
|
||||||
|
capture_output=True, # noqa: S607
|
||||||
)
|
)
|
||||||
output = result.stderr.decode()
|
output = result.stderr.decode()
|
||||||
|
|
||||||
@@ -90,7 +91,7 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
|
|||||||
def verify_key(key: str, email: Optional[str]) -> str:
|
def verify_key(key: str, email: Optional[str]) -> str:
|
||||||
"""Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
|
"""Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
|
||||||
url = "https://downloads.apache.org/superset/KEYS"
|
url = "https://downloads.apache.org/superset/KEYS"
|
||||||
response = requests.get(url)
|
response = requests.get(url) # noqa: S113
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
if key not in response.text:
|
if key not in response.text:
|
||||||
return "RSA/EDDSA key not found on KEYS page"
|
return "RSA/EDDSA key not found on KEYS page"
|
||||||
|
|||||||
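The comments in `verify_release.py` describe manual equivalents of both checks; as a hedged sketch (the file name below is illustrative):

```bash
RELEASE=apache_superset-2.1.0rc1-source.tar.gz  # illustrative name

# Part 1 equivalent: compute the SHA512 digest and compare it with the published one.
shasum -a 512 "$RELEASE"
cat "${RELEASE}.sha512"

# Part 2 equivalent: verify the signature, then compare the signing key and email
# against https://downloads.apache.org/superset/KEYS
gpg --verify "${RELEASE}.asc" "$RELEASE"
```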
@@ -44,12 +44,11 @@ These features are **finished** but currently being tested. They are usable, but
|
|||||||
- ALLOW_FULL_CSV_EXPORT
|
- ALLOW_FULL_CSV_EXPORT
|
||||||
- CACHE_IMPERSONATION
|
- CACHE_IMPERSONATION
|
||||||
- CONFIRM_DASHBOARD_DIFF
|
- CONFIRM_DASHBOARD_DIFF
|
||||||
- DRILL_TO_DETAIL
|
|
||||||
- DYNAMIC_PLUGINS
|
- DYNAMIC_PLUGINS
|
||||||
|
- DATE_FORMAT_IN_EMAIL_SUBJECT: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports#commons)
|
||||||
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
|
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
|
||||||
- ESTIMATE_QUERY_COST
|
- ESTIMATE_QUERY_COST
|
||||||
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
|
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
|
||||||
- HORIZONTAL_FILTER_BAR
|
|
||||||
- IMPERSONATE_WITH_EMAIL_PREFIX
|
- IMPERSONATE_WITH_EMAIL_PREFIX
|
||||||
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
|
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
|
||||||
- RLS_IN_SQLLAB
|
- RLS_IN_SQLLAB
|
||||||
@@ -63,9 +62,8 @@ These features flags are **safe for production**. They have been tested and will
|
|||||||
[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"
|
[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"
|
||||||
|
|
||||||
### Flags on the path to feature launch and flag deprecation/removal
|
### Flags on the path to feature launch and flag deprecation/removal
|
||||||
|
|
||||||
- DASHBOARD_VIRTUALIZATION
|
- DASHBOARD_VIRTUALIZATION
|
||||||
- DRILL_BY
|
|
||||||
- DISABLE_LEGACY_DATASOURCE_EDITOR
|
|
||||||
|
|
||||||
### Flags retained for runtime configuration
|
### Flags retained for runtime configuration
|
||||||
|
|
||||||
@@ -79,6 +77,7 @@ independently. This new framework will also allow for non-boolean configurations
|
|||||||
- ALLOW_ADHOC_SUBQUERY
|
- ALLOW_ADHOC_SUBQUERY
|
||||||
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
|
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
|
||||||
- DATAPANEL_CLOSED_BY_DEFAULT
|
- DATAPANEL_CLOSED_BY_DEFAULT
|
||||||
|
- DRILL_BY
|
||||||
- DRUID_JOINS
|
- DRUID_JOINS
|
||||||
- EMBEDDABLE_CHARTS
|
- EMBEDDABLE_CHARTS
|
||||||
- EMBEDDED_SUPERSET
|
- EMBEDDED_SUPERSET
|
||||||
@@ -98,6 +97,6 @@ These features flags currently default to True and **will be removed in a future
|
|||||||
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
|
[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
|
||||||
|
|
||||||
- AVOID_COLORS_COLLISION
|
- AVOID_COLORS_COLLISION
|
||||||
- DASHBOARD_CROSS_FILTERS
|
- DRILL_TO_DETAIL
|
||||||
- ENABLE_JAVASCRIPT_CONTROLS
|
- ENABLE_JAVASCRIPT_CONTROLS
|
||||||
- KV_STORE
|
- KV_STORE
|
||||||
|
|||||||
@@ -25,8 +25,8 @@ all you have to do is file a simple PR [like this one](https://github.com/apache
|
|||||||
the categorization is inaccurate, please file a PR with your correction as well.
|
the categorization is inaccurate, please file a PR with your correction as well.
|
||||||
Join our growing community!
|
Join our growing community!
|
||||||
|
|
||||||
|
|
||||||
### Sharing Economy
|
### Sharing Economy
|
||||||
|
|
||||||
- [Airbnb](https://github.com/airbnb)
|
- [Airbnb](https://github.com/airbnb)
|
||||||
- [Faasos](https://faasos.com/) [@shashanksingh]
|
- [Faasos](https://faasos.com/) [@shashanksingh]
|
||||||
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
|
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
|
||||||
@@ -36,6 +36,7 @@ Join our growing community!
|
|||||||
- [Ontruck](https://www.ontruck.com/)
|
- [Ontruck](https://www.ontruck.com/)
|
||||||
|
|
||||||
### Financial Services
|
### Financial Services
|
||||||
|
|
||||||
- [Aktia Bank plc](https://www.aktia.com)
|
- [Aktia Bank plc](https://www.aktia.com)
|
||||||
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
|
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
|
||||||
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
|
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
|
||||||
@@ -43,43 +44,53 @@ Join our growing community!
|
|||||||
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
|
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
|
||||||
- [Clark.de](https://clark.de/)
|
- [Clark.de](https://clark.de/)
|
||||||
- [KarrotPay](https://www.daangnpay.com/)
|
- [KarrotPay](https://www.daangnpay.com/)
|
||||||
|
- [Remita](https://remita.net) [@mujibishola]
|
||||||
- [Taveo](https://www.taveo.com) [@codek]
|
- [Taveo](https://www.taveo.com) [@codek]
|
||||||
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
|
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
|
||||||
- [Wise](https://wise.com) [@koszti]
|
- [Wise](https://wise.com) [@koszti]
|
||||||
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
|
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
|
||||||
|
- [Cover Genius](https://covergenius.com/)
|
||||||
|
|
||||||
### Gaming
|
### Gaming
|
||||||
|
|
||||||
- [Popoko VM Games Studio](https://popoko.live)
|
- [Popoko VM Games Studio](https://popoko.live)
|
||||||
|
|
||||||
### E-Commerce
|
### E-Commerce
|
||||||
|
|
||||||
- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
|
- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
|
||||||
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
|
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
|
||||||
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
|
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
|
||||||
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
|
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
|
||||||
- [Fanatics](https://www.fanatics.com/) [@coderfender]
|
- [Fanatics](https://www.fanatics.com/) [@coderfender]
|
||||||
- [Fordeal](https://www.fordeal.com) [@Renkai]
|
- [Fordeal](https://www.fordeal.com) [@Renkai]
|
||||||
|
- [Fynd](https://www.fynd.com/) [@darpanjain07]
|
||||||
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
|
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
|
||||||
|
- [GoTo/Gojek](https://www.gojek.io/) [@gwthm-in]
|
||||||
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
|
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
|
||||||
- [Now](https://www.now.vn/) [@davidkohcw]
|
- [Now](https://www.now.vn/) [@davidkohcw]
|
||||||
- [Qunar](https://www.qunar.com/) [@flametest]
|
- [Qunar](https://www.qunar.com/) [@flametest]
|
||||||
- [Rakuten Viki](https://www.viki.com)
|
- [Rakuten Viki](https://www.viki.com)
|
||||||
- [Shopee](https://shopee.sg) [@xiaohanyu]
|
- [Shopee](https://shopee.sg) [@xiaohanyu]
|
||||||
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
|
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
|
||||||
|
- [ShopUp](https://www.shopup.org/) [@gwthm-in]
|
||||||
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
|
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
|
||||||
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
|
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
|
||||||
- [Utair](https://www.utair.ru) [@utair-digital]
|
- [Utair](https://www.utair.ru) [@utair-digital]
|
||||||
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
|
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
|
||||||
- [Zalando](https://www.zalando.com) [@dmigo]
|
- [Zalando](https://www.zalando.com) [@dmigo]
|
||||||
- [Zalora](https://www.zalora.com) [@ksaagariconic]
|
- [Zalora](https://www.zalora.com) [@ksaagariconic]
|
||||||
|
- [Zepto](https://www.zeptonow.com/) [@gwthm-in]
|
||||||
|
|
||||||
### Enterprise Technology
|
### Enterprise Technology
|
||||||
|
|
||||||
- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
|
- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
|
||||||
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
|
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
|
||||||
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
|
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
|
||||||
- [Astronomer](https://www.astronomer.io) [@ryw]
|
- [Astronomer](https://www.astronomer.io) [@ryw]
|
||||||
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
|
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
|
||||||
- [Caizin](https://caizin.com/) [@tejaskatariya]
|
- [Caizin](https://caizin.com/) [@tejaskatariya]
|
||||||
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
|
- [Canonical](https://canonical.com)
|
||||||
|
- [Careem](https://www.careem.com/) [@samraHanif0340]
|
||||||
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
|
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
|
||||||
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
|
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
|
||||||
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
|
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
|
||||||
@@ -90,6 +101,7 @@ Join our growing community!
|
|||||||
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
|
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
|
||||||
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
|
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
|
||||||
- [FBK - ICT center](https://ict.fbk.eu)
|
- [FBK - ICT center](https://ict.fbk.eu)
|
||||||
|
- [Formbricks](https://formbricks.com)
|
||||||
- [Gavagai](https://gavagai.io) [@gavagai-corp]
|
- [Gavagai](https://gavagai.io) [@gavagai-corp]
|
||||||
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
|
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
|
||||||
- [Hydrolix](https://www.hydrolix.io/)
|
- [Hydrolix](https://www.hydrolix.io/)
|
||||||
@@ -103,6 +115,7 @@ Join our growing community!
|
|||||||
- [Ona](https://ona.io) [@pld]
|
- [Ona](https://ona.io) [@pld]
|
||||||
- [Orange](https://www.orange.com) [@icsu]
|
- [Orange](https://www.orange.com) [@icsu]
|
||||||
- [Oslandia](https://oslandia.com)
|
- [Oslandia](https://oslandia.com)
|
||||||
|
- [Oxylabs](https://oxylabs.io/) [@rytis-ulys]
|
||||||
- [Peak AI](https://www.peak.ai/) [@azhar22k]
|
- [Peak AI](https://www.peak.ai/) [@azhar22k]
|
||||||
- [PeopleDoc](https://www.people-doc.com) [@rodo]
|
- [PeopleDoc](https://www.people-doc.com) [@rodo]
|
||||||
- [PlaidCloud](https://www.plaidcloud.com)
|
- [PlaidCloud](https://www.plaidcloud.com)
|
||||||
@@ -110,8 +123,11 @@ Join our growing community!
|
|||||||
- [PubNub](https://pubnub.com) [@jzucker2]
|
- [PubNub](https://pubnub.com) [@jzucker2]
|
||||||
- [ReadyTech](https://www.readytech.io)
|
- [ReadyTech](https://www.readytech.io)
|
||||||
- [Reward Gateway](https://www.rewardgateway.com)
|
- [Reward Gateway](https://www.rewardgateway.com)
|
||||||
|
- [RIADVICE](https://riadvice.tn) [@riadvice]
|
||||||
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
|
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
|
||||||
|
- [shipmnts](https://shipmnts.com)
|
||||||
- [Showmax](https://showmax.com) [@bobek]
|
- [Showmax](https://showmax.com) [@bobek]
|
||||||
|
- [SingleStore](https://www.singlestore.com/)
|
||||||
- [TechAudit](https://www.techaudit.info) [@ETselikov]
|
- [TechAudit](https://www.techaudit.info) [@ETselikov]
|
||||||
- [Tenable](https://www.tenable.com) [@dflionis]
|
- [Tenable](https://www.tenable.com) [@dflionis]
|
||||||
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
|
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
|
||||||
@@ -119,11 +135,14 @@ Join our growing community!
|
|||||||
- [Tobii](https://www.tobii.com/) [@dwa]
|
- [Tobii](https://www.tobii.com/) [@dwa]
|
||||||
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
|
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
|
||||||
- [Unvired](https://unvired.com) [@srinisubramanian]
|
- [Unvired](https://unvired.com) [@srinisubramanian]
|
||||||
|
- [Virtuoso QA](https://www.virtuosoqa.com)
|
||||||
- [Whale](https://whale.im)
|
- [Whale](https://whale.im)
|
||||||
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
|
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
|
||||||
|
- [WinWin Network马上赢](https://brandct.cn/) [@wenbinye]
|
||||||
- [Zeta](https://www.zeta.tech/) [@shaikidris]
|
- [Zeta](https://www.zeta.tech/) [@shaikidris]
|
||||||
|
|
||||||
### Media & Entertainment
|
### Media & Entertainment
|
||||||
|
|
||||||
- [6play](https://www.6play.fr) [@CoryChaplin]
|
- [6play](https://www.6play.fr) [@CoryChaplin]
|
||||||
- [bilibili](https://www.bilibili.com) [@Moinheart]
|
- [bilibili](https://www.bilibili.com) [@Moinheart]
|
||||||
- [BurdaForward](https://www.burda-forward.de/en/)
|
- [BurdaForward](https://www.burda-forward.de/en/)
|
||||||
@@ -136,8 +155,10 @@ Join our growing community!
|
|||||||
- [Zaihang](https://www.zaih.com/)
|
- [Zaihang](https://www.zaih.com/)
|
||||||
|
|
||||||
### Education
|
### Education
|
||||||
|
|
||||||
- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
|
- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
|
||||||
- [Brilliant.org](https://brilliant.org/)
|
- [Brilliant.org](https://brilliant.org/)
|
||||||
|
- [Open edX](https://openedx.org/)
|
||||||
- [Platzi.com](https://platzi.com/)
|
- [Platzi.com](https://platzi.com/)
|
||||||
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
|
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
|
||||||
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
|
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
|
||||||
@@ -146,6 +167,7 @@ Join our growing community!
|
|||||||
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]
|
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]
|
||||||
|
|
||||||
### Energy
|
### Energy
|
||||||
|
|
||||||
- [Airboxlab](https://foobot.io) [@antoine-galataud]
|
- [Airboxlab](https://foobot.io) [@antoine-galataud]
|
||||||
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
|
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
|
||||||
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
|
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
|
||||||
@@ -153,6 +175,7 @@ Join our growing community!
|
|||||||
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]
|
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]
|
||||||
|
|
||||||
### Healthcare
|
### Healthcare
|
||||||
|
|
||||||
- [Amino](https://amino.com) [@shkr]
|
- [Amino](https://amino.com) [@shkr]
|
||||||
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
|
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
|
||||||
- [Care](https://www.getcare.io/) [@alandao2021]
|
- [Care](https://www.getcare.io/) [@alandao2021]
|
||||||
@@ -165,29 +188,36 @@ Join our growing community!
|
|||||||
- [2070Health](https://2070health.com/)
|
- [2070Health](https://2070health.com/)
|
||||||
|
|
||||||
### HR / Staffing
|
### HR / Staffing
|
||||||
|
|
||||||
- [Swile](https://www.swile.co/) [@PaoloTerzi]
|
- [Swile](https://www.swile.co/) [@PaoloTerzi]
|
||||||
- [Symmetrics](https://www.symmetrics.fyi)
|
- [Symmetrics](https://www.symmetrics.fyi)
|
||||||
- [bluquist](https://bluquist.com/)
|
- [bluquist](https://bluquist.com/)
|
||||||
|
|
||||||
### Government / Non-Profit
|
### Government
|
||||||
|
|
||||||
- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
|
- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
|
||||||
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
|
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
|
||||||
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)
|
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)
|
||||||
|
|
||||||
### Travel
|
### Travel
|
||||||
|
|
||||||
- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
|
- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
|
||||||
|
- [HomeToGo](https://hometogo.com/) [@pedromartinsteenstrup]
|
||||||
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]
|
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]
|
||||||
|
|
||||||
### Others
|
### Others
|
||||||
|
|
||||||
- [10Web](https://10web.io/)
|
- [10Web](https://10web.io/)
|
||||||
- [AI inside](https://inside.ai/en/)
|
- [AI inside](https://inside.ai/en/)
|
||||||
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
|
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
|
||||||
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
|
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
|
||||||
|
- [Flowbird](https://flowbird.com) [@EmmanuelCbd]
|
||||||
- [GEOTAB](https://www.geotab.com) [@JZ6]
|
- [GEOTAB](https://www.geotab.com) [@JZ6]
|
||||||
- [Grassroot](https://www.grassrootinstitute.org/)
|
- [Grassroot](https://www.grassrootinstitute.org/)
|
||||||
- [Increff](https://www.increff.com/) [@ishansinghania]
|
- [Increff](https://www.increff.com/) [@ishansinghania]
|
||||||
- [komoot](https://www.komoot.com/) [@christophlingg]
|
- [komoot](https://www.komoot.com/) [@christophlingg]
|
||||||
- [Let's Roam](https://www.letsroam.com/)
|
- [Let's Roam](https://www.letsroam.com/)
|
||||||
|
- [Machrent SA](https://www.machrent.com/)
|
||||||
- [Onebeat](https://1beat.com/) [@GuyAttia]
|
- [Onebeat](https://1beat.com/) [@GuyAttia]
|
||||||
- [X](https://x.com/)
|
- [X](https://x.com/)
|
||||||
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
|
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
|
||||||
|
|||||||
@@ -43,8 +43,8 @@ under the License.
|
|||||||
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
|
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
|
||||||
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||||
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||||
| can this form get on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|
|
||||||
| can this form post on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|
|
||||||
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
|
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||||
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
|
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||||
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
|
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
|
||||||
@@ -65,7 +65,6 @@ under the License.
|
|||||||
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||||
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||||
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||||
| can function names on Database |:heavy_check_mark:|O|O|O|
|
|
||||||
| can csv upload on Database |:heavy_check_mark:|O|O|O|
|
| can csv upload on Database |:heavy_check_mark:|O|O|O|
|
||||||
| can excel upload on Database |:heavy_check_mark:|O|O|O|
|
| can excel upload on Database |:heavy_check_mark:|O|O|O|
|
||||||
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||||
@@ -76,7 +75,6 @@ under the License.
 | can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
 | can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-| can schemas access for csv upload on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
@@ -118,8 +116,6 @@ under the License.
 | menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-| menu access on Upload a CSV |:heavy_check_mark:|:heavy_check_mark:|O|O|
-| menu access on Upload Excel |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | menu access on SQL Lab |:heavy_check_mark:|O|O|:heavy_check_mark:|
@@ -129,13 +125,6 @@ under the License.
 | all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
 | all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
 | all query access on all_query_access |:heavy_check_mark:|O|O|O|
-| can edit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
-| can list on UserOAuthModelView |:heavy_check_mark:|O|O|O|
-| can show on UserOAuthModelView |:heavy_check_mark:|O|O|O|
-| can userinfo on UserOAuthModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-| can add on UserOAuthModelView |:heavy_check_mark:|O|O|O|
-| can delete on UserOAuthModelView |:heavy_check_mark:|O|O|O|
-| userinfoedit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
 | can write on DynamicPlugin |:heavy_check_mark:|O|O|O|
 | can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|
 | can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -192,7 +181,6 @@ under the License.
 | can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-| menu access on Upload a Columnar file |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 | can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
|||||||
28
UPDATING.md
@@ -23,13 +23,39 @@ This file documents any backwards-incompatible changes in Superset and
 assists people when migrating to a new version.

 ## Next

+- [33116](https://github.com/apache/superset/pull/33116) In ECharts series charts (e.g. Line, Area, Bar, etc.), the `x_axis_sort_series` and `x_axis_sort_series_ascending` form data items have been renamed to `x_axis_sort` and `x_axis_sort_asc` (see the migration sketch after this list).
+  There's a migration added that can potentially affect a significant number of existing charts.
+- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
+- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and the feature is now permanently removed.
+- [31959](https://github.com/apache/superset/pull/32000) Removes the `CSV_UPLOAD_MAX_SIZE` config; use your web server to control file upload size.
+- [31959](https://github.com/apache/superset/pull/31959) Removes the following data upload endpoints: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of new ones (details in the PR), and simplifies permissions.
+- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class (a hedged config sketch follows this list). See the config and docs for more info on setting up different executor profiles.
+- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0).
+- [31794](https://github.com/apache/superset/pull/31794) Removed the previously deprecated `DASHBOARD_CROSS_FILTERS` feature flag.
+- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration items to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
+- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts, with the exception of the Event Flow and Sankey Loop charts, which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
 - [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
 - [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat/docker-build-time.
 - [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
+- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. This only affects development workflows (not the release itself). It should cover the most important rules and be much faster, but some linting rules that were enforced before may not be enforced in exactly the same way as before.
+- [31413](https://github.com/apache/superset/pull/31413) Enable the `DATE_FORMAT_IN_EMAIL_SUBJECT` feature flag to allow users to specify a date format for the email subject, which will then be replaced with the actual date.
+- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
+- [31503](https://github.com/apache/superset/pull/31503) Deprecating Python 3.9.x support; 3.11 is now the recommended version and 3.10 is still supported over the Superset 5.0 lifecycle.
+- [29121](https://github.com/apache/superset/pull/29121) Removed the `css`, `position_json`, and `json_metadata` fields from the payload of the dashboard list endpoint (`GET api/v1/dashboard`) for performance reasons.
+- [29163](https://github.com/apache/superset/pull/29163) Removed the `SHARE_QUERIES_VIA_KV_STORE` and `KV_STORE` feature flags and changed the way Superset shares SQL Lab queries to use permalinks. The legacy `/kv` API was removed, but we still support legacy links in 5.0. In 6.0, only permalinks will be supported.
+- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
+- [30284](https://github.com/apache/superset/pull/30284) Deprecated `GLOBAL_ASYNC_QUERIES_REDIS_CONFIG` in favor of the new `GLOBAL_ASYNC_QUERIES_CACHE_BACKEND` configuration. To leverage Redis Sentinel, set `CACHE_TYPE` to `RedisSentinelCache`, or use `RedisCache` for standalone Redis (a hedged sketch follows this list).
+- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
+- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python environment. Most docker-based deployments will be affected, whether you derive one of the published images or have a custom bootstrap script that installs python libraries (drivers).
+- [32432](https://github.com/apache/superset/pull/31260) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
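The [33116] rename above amounts to two form-data key renames. The sketch below is purely illustrative (the helper and the sample payload are assumptions, not Superset APIs); it only shows what the rename looks like on a chart's `form_data` dict:

```python
# Minimal sketch of the #33116 key rename; the helper and sample payload are
# illustrative assumptions, not part of Superset itself.
RENAMED_KEYS = {
    "x_axis_sort_series": "x_axis_sort",
    "x_axis_sort_series_ascending": "x_axis_sort_asc",
}

def migrate_form_data(form_data: dict) -> dict:
    """Return a copy of form_data with the old sort keys renamed."""
    migrated = dict(form_data)
    for old_key, new_key in RENAMED_KEYS.items():
        if old_key in migrated:
            migrated[new_key] = migrated.pop(old_key)
    return migrated

# Example: an old-style payload for a timeseries line chart
old = {"viz_type": "echarts_timeseries_line", "x_axis_sort_series": "name", "x_axis_sort_series_ascending": True}
print(migrate_form_data(old))
# {'viz_type': 'echarts_timeseries_line', 'x_axis_sort': 'name', 'x_axis_sort_asc': True}
```

The migration mentioned in the entry should handle stored charts automatically; a helper like this is only relevant if you generate `form_data` payloads outside Superset.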
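For the [31844] executor changes, here is a minimal, hedged `superset_config.py` sketch. The three flag names come straight from the entry; the import path for `FixedExecutor` is an assumption, so check your version's `superset/config.py` for the authoritative location and accepted values:

```python
# superset_config.py -- a sketch only, not the authoritative configuration.
# FixedExecutor itself is named in the release note above; the import path
# below is an assumption and may differ between versions.
from superset.tasks.types import FixedExecutor  # assumed module path

# Run these async tasks as a fixed "admin" account rather than a per-request user.
ALERT_REPORTS_EXECUTORS = [FixedExecutor("admin")]
THUMBNAILS_EXECUTORS = [FixedExecutor("admin")]
CACHE_WARMUP_EXECUTORS = [FixedExecutor("admin")]
```

Deployments that previously relied on `THUMBNAILS_SELENIUM_USER` would move that username into a `FixedExecutor` along these lines.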
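And for the [30284] change, a hedged sketch of the new cache-backend configuration. Only `CACHE_TYPE` set to `"RedisSentinelCache"` (or `"RedisCache"`) is taken from the entry; the remaining keys follow Flask-Caching's usual Redis/Sentinel settings and are assumptions to adapt to your deployment:

```python
# superset_config.py -- illustrative values only; adjust hosts, ports and DB numbers.
GLOBAL_ASYNC_QUERIES_CACHE_BACKEND = {
    "CACHE_TYPE": "RedisSentinelCache",  # or "RedisCache" for standalone Redis
    "CACHE_KEY_PREFIX": "async-events-",  # assumed Flask-Caching-style key
    "CACHE_REDIS_SENTINELS": [("sentinel-1", 26379), ("sentinel-2", 26379)],
    "CACHE_REDIS_SENTINEL_MASTER": "mymaster",
    "CACHE_REDIS_DB": 4,
}
```

For standalone Redis, swapping in `"CACHE_TYPE": "RedisCache"` with the usual `CACHE_REDIS_HOST`/`CACHE_REDIS_PORT` keys would be the equivalent, again assuming Flask-Caching-style settings.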
 ### Potential Downtime

+## 4.1.2
+
+- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
+- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.

 ## 4.1.0

 - [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your
|
|||||||
@@ -22,9 +22,6 @@
|
|||||||
# unique random secure passwords and SECRET_KEY.
|
# unique random secure passwords and SECRET_KEY.
|
||||||
# -----------------------------------------------------------------------
|
# -----------------------------------------------------------------------
|
||||||
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
|
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
|
||||||
x-superset-depends-on: &superset-depends-on
|
|
||||||
- db
|
|
||||||
- redis
|
|
||||||
x-superset-volumes:
|
x-superset-volumes:
|
||||||
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
||||||
- ./docker:/app/docker
|
- ./docker:/app/docker
|
||||||
@@ -44,7 +41,7 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
image: postgres:15
|
image: postgres:16
|
||||||
container_name: superset_db
|
container_name: superset_db
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
@@ -64,8 +61,12 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- 8088:8088
|
- 8088:8088
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-init:
|
superset-init:
|
||||||
image: *superset-image
|
image: *superset-image
|
||||||
@@ -76,11 +77,18 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_started
|
||||||
|
redis:
|
||||||
|
condition: service_started
|
||||||
user: "root"
|
user: "root"
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
disable: true
|
disable: true
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-worker:
|
superset-worker:
|
||||||
image: *superset-image
|
image: *superset-image
|
||||||
@@ -92,7 +100,9 @@ services:
|
|||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
user: "root"
|
user: "root"
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
@@ -101,6 +111,8 @@ services:
|
|||||||
"CMD-SHELL",
|
"CMD-SHELL",
|
||||||
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
|
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
|
||||||
]
|
]
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-worker-beat:
|
superset-worker-beat:
|
||||||
image: *superset-image
|
image: *superset-image
|
||||||
@@ -112,11 +124,15 @@ services:
|
|||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
user: "root"
|
user: "root"
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
disable: true
|
disable: true
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
superset_home:
|
superset_home:
|
||||||
|
|||||||
@@ -21,9 +21,6 @@
|
|||||||
# create you own docker environment file (docker/.env) with your own
|
# create you own docker environment file (docker/.env) with your own
|
||||||
# unique random secure passwords and SECRET_KEY.
|
# unique random secure passwords and SECRET_KEY.
|
||||||
# -----------------------------------------------------------------------
|
# -----------------------------------------------------------------------
|
||||||
x-superset-depends-on: &superset-depends-on
|
|
||||||
- db
|
|
||||||
- redis
|
|
||||||
x-superset-volumes:
|
x-superset-volumes:
|
||||||
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
||||||
- ./docker:/app/docker
|
- ./docker:/app/docker
|
||||||
@@ -49,7 +46,7 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
image: postgres:15
|
image: postgres:16
|
||||||
container_name: superset_db
|
container_name: superset_db
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
@@ -70,8 +67,12 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- 8088:8088
|
- 8088:8088
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-init:
|
superset-init:
|
||||||
container_name: superset_init
|
container_name: superset_init
|
||||||
@@ -83,11 +84,18 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_started
|
||||||
|
redis:
|
||||||
|
condition: service_started
|
||||||
user: "root"
|
user: "root"
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
disable: true
|
disable: true
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-worker:
|
superset-worker:
|
||||||
build:
|
build:
|
||||||
@@ -100,7 +108,9 @@ services:
|
|||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
user: "root"
|
user: "root"
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
@@ -109,6 +119,8 @@ services:
|
|||||||
"CMD-SHELL",
|
"CMD-SHELL",
|
||||||
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
|
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
|
||||||
]
|
]
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-worker-beat:
|
superset-worker-beat:
|
||||||
build:
|
build:
|
||||||
@@ -121,11 +133,15 @@ services:
|
|||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
user: "root"
|
user: "root"
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
disable: true
|
disable: true
|
||||||
|
environment:
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
superset_home:
|
superset_home:
|
||||||
|
|||||||
@@ -22,10 +22,6 @@
|
|||||||
# unique random secure passwords and SECRET_KEY.
|
# unique random secure passwords and SECRET_KEY.
|
||||||
# -----------------------------------------------------------------------
|
# -----------------------------------------------------------------------
|
||||||
x-superset-user: &superset-user root
|
x-superset-user: &superset-user root
|
||||||
x-superset-depends-on: &superset-depends-on
|
|
||||||
- db
|
|
||||||
- redis
|
|
||||||
- superset-checks
|
|
||||||
x-superset-volumes: &superset-volumes
|
x-superset-volumes: &superset-volumes
|
||||||
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
||||||
- ./docker:/app/docker
|
- ./docker:/app/docker
|
||||||
@@ -33,17 +29,24 @@ x-superset-volumes: &superset-volumes
|
|||||||
- ./superset-frontend:/app/superset-frontend
|
- ./superset-frontend:/app/superset-frontend
|
||||||
- superset_home:/app/superset_home
|
- superset_home:/app/superset_home
|
||||||
- ./tests:/app/tests
|
- ./tests:/app/tests
|
||||||
|
|
||||||
x-common-build: &common-build
|
x-common-build: &common-build
|
||||||
context: .
|
context: .
|
||||||
target: dev
|
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
|
||||||
cache_from:
|
cache_from:
|
||||||
- apache/superset-cache:3.10-slim-bookworm
|
- apache/superset-cache:3.10-slim-bookworm
|
||||||
args:
|
args:
|
||||||
DEV_MODE: "true"
|
DEV_MODE: "true"
|
||||||
|
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
|
||||||
|
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
|
||||||
|
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
|
||||||
|
|
||||||
services:
|
services:
|
||||||
nginx:
|
nginx:
|
||||||
|
env_file:
|
||||||
|
- path: docker/.env # default
|
||||||
|
required: true
|
||||||
|
- path: docker/.env-local # optional override
|
||||||
|
required: false
|
||||||
image: nginx:latest
|
image: nginx:latest
|
||||||
container_name: superset_nginx
|
container_name: superset_nginx
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
@@ -53,6 +56,8 @@ services:
|
|||||||
- "host.docker.internal:host-gateway"
|
- "host.docker.internal:host-gateway"
|
||||||
volumes:
|
volumes:
|
||||||
- ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
- ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
||||||
|
- ./docker/nginx/templates:/etc/nginx/templates:ro
|
||||||
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:7
|
image: redis:7
|
||||||
container_name: superset_cache
|
container_name: superset_cache
|
||||||
@@ -68,7 +73,7 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
image: postgres:15
|
image: postgres:16
|
||||||
container_name: superset_db
|
container_name: superset_db
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
@@ -90,13 +95,18 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
ports:
|
ports:
|
||||||
- 8088:8088
|
- 8088:8088
|
||||||
|
# When in cypress-mode ->
|
||||||
|
- 8081:8081
|
||||||
extra_hosts:
|
extra_hosts:
|
||||||
- "host.docker.internal:host-gateway"
|
- "host.docker.internal:host-gateway"
|
||||||
user: *superset-user
|
user: *superset-user
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
environment:
|
environment:
|
||||||
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
|
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-websocket:
|
superset-websocket:
|
||||||
container_name: superset_websocket
|
container_name: superset_websocket
|
||||||
@@ -131,23 +141,6 @@ services:
|
|||||||
- REDIS_PORT=6379
|
- REDIS_PORT=6379
|
||||||
- REDIS_SSL=false
|
- REDIS_SSL=false
|
||||||
|
|
||||||
superset-checks:
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
target: python-base
|
|
||||||
cache_from:
|
|
||||||
- apache/superset-cache:3.10-slim-bookworm
|
|
||||||
container_name: superset_checks
|
|
||||||
command: ["/app/scripts/check-env.py"]
|
|
||||||
env_file:
|
|
||||||
- path: docker/.env # default
|
|
||||||
required: true
|
|
||||||
- path: docker/.env-local # optional override
|
|
||||||
required: false
|
|
||||||
user: *superset-user
|
|
||||||
healthcheck:
|
|
||||||
disable: true
|
|
||||||
|
|
||||||
superset-init:
|
superset-init:
|
||||||
build:
|
build:
|
||||||
<<: *common-build
|
<<: *common-build
|
||||||
@@ -158,11 +151,17 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_started
|
||||||
|
redis:
|
||||||
|
condition: service_started
|
||||||
user: *superset-user
|
user: *superset-user
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
environment:
|
environment:
|
||||||
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
|
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
|
||||||
|
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
healthcheck:
|
healthcheck:
|
||||||
disable: true
|
disable: true
|
||||||
|
|
||||||
@@ -175,11 +174,17 @@ services:
|
|||||||
# and build it on startup while firing docker-frontend.sh in dev mode, where
|
# and build it on startup while firing docker-frontend.sh in dev mode, where
|
||||||
# it'll mount and watch local files and rebuild as you update them
|
# it'll mount and watch local files and rebuild as you update them
|
||||||
DEV_MODE: "true"
|
DEV_MODE: "true"
|
||||||
|
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
|
||||||
environment:
|
environment:
|
||||||
# set this to false if you have perf issues running the npm i; npm run dev in-docker
|
# set this to false if you have perf issues running the npm i; npm run dev in-docker
|
||||||
# if you do so, you have to run this manually on the host, which should perform better!
|
# if you do so, you have to run this manually on the host, which should perform better!
|
||||||
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
|
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
|
||||||
|
NPM_RUN_PRUNE: false
|
||||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||||
|
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||||
|
superset: "http://superset:8088"
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
|
||||||
container_name: superset_node
|
container_name: superset_node
|
||||||
command: ["/app/docker/docker-frontend.sh"]
|
command: ["/app/docker/docker-frontend.sh"]
|
||||||
env_file:
|
env_file:
|
||||||
@@ -187,7 +192,6 @@ services:
|
|||||||
required: true
|
required: true
|
||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
depends_on: *superset-depends-on
|
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
|
|
||||||
superset-worker:
|
superset-worker:
|
||||||
@@ -202,8 +206,12 @@ services:
|
|||||||
required: false
|
required: false
|
||||||
environment:
|
environment:
|
||||||
CELERYD_CONCURRENCY: 2
|
CELERYD_CONCURRENCY: 2
|
||||||
|
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
user: *superset-user
|
user: *superset-user
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
extra_hosts:
|
extra_hosts:
|
||||||
@@ -225,11 +233,15 @@ services:
|
|||||||
- path: docker/.env-local # optional override
|
- path: docker/.env-local # optional override
|
||||||
required: false
|
required: false
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
- superset-worker
|
||||||
user: *superset-user
|
user: *superset-user
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
disable: true
|
disable: true
|
||||||
|
environment:
|
||||||
|
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
|
|
||||||
superset-tests-worker:
|
superset-tests-worker:
|
||||||
build:
|
build:
|
||||||
@@ -250,8 +262,11 @@ services:
|
|||||||
REDIS_RESULTS_DB: 3
|
REDIS_RESULTS_DB: 3
|
||||||
REDIS_HOST: localhost
|
REDIS_HOST: localhost
|
||||||
CELERYD_CONCURRENCY: 8
|
CELERYD_CONCURRENCY: 8
|
||||||
|
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
|
||||||
network_mode: host
|
network_mode: host
|
||||||
depends_on: *superset-depends-on
|
depends_on:
|
||||||
|
superset-init:
|
||||||
|
condition: service_completed_successfully
|
||||||
user: *superset-user
|
user: *superset-user
|
||||||
volumes: *superset-volumes
|
volumes: *superset-volumes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
|
|||||||
@@ -15,8 +15,11 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
# Allowing python to print() in docker
|
||||||
|
PYTHONUNBUFFERED=1
|
||||||
|
|
||||||
COMPOSE_PROJECT_NAME=superset
|
COMPOSE_PROJECT_NAME=superset
|
||||||
|
DEV_MODE=true
|
||||||
|
|
||||||
# database configurations (do not modify)
|
# database configurations (do not modify)
|
||||||
DATABASE_DB=superset
|
DATABASE_DB=superset
|
||||||
@@ -51,6 +54,7 @@ REDIS_HOST=redis
|
|||||||
REDIS_PORT=6379
|
REDIS_PORT=6379
|
||||||
|
|
||||||
FLASK_DEBUG=true
|
FLASK_DEBUG=true
|
||||||
|
SUPERSET_APP_ROOT="/"
|
||||||
SUPERSET_ENV=development
|
SUPERSET_ENV=development
|
||||||
SUPERSET_LOAD_EXAMPLES=yes
|
SUPERSET_LOAD_EXAMPLES=yes
|
||||||
CYPRESS_CONFIG=false
|
CYPRESS_CONFIG=false
|
||||||
@@ -59,7 +63,7 @@ MAPBOX_API_KEY=''
|
|||||||
|
|
||||||
# Make sure you set this to a unique secure random value on production
|
# Make sure you set this to a unique secure random value on production
|
||||||
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
|
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
|
||||||
|
|
||||||
ENABLE_PLAYWRIGHT=false
|
ENABLE_PLAYWRIGHT=false
|
||||||
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
|
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
|
||||||
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true
|
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true
|
||||||
|
SUPERSET_LOG_LEVEL=info
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ Don't forget to reload the page to take the new frontend into account though.

 ## Production

-It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development and uses [`./docker/.env-non-dev`](./.env-non-dev) which sets the variable `SUPERSET_ENV` to `production`.
+It is possible to run Superset in non-development mode by using [`docker-compose-non-dev.yml`](../docker-compose-non-dev.yml). This file excludes the volumes needed for development.

 ## Resource Constraints

|
|||||||
51
docker/apt-install.sh
Executable file
51
docker/apt-install.sh
Executable file
@@ -0,0 +1,51 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Ensure this script is run as root
|
||||||
|
if [[ $EUID -ne 0 ]]; then
|
||||||
|
echo "This script must be run as root" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for required arguments
|
||||||
|
if [[ $# -lt 1 ]]; then
|
||||||
|
echo "Usage: $0 <package1> [<package2> ...]" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Colors for better logging (optional)
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
RED='\033[0;31m'
|
||||||
|
RESET='\033[0m'
|
||||||
|
|
||||||
|
# Install packages with clean-up
|
||||||
|
echo -e "${GREEN}Updating package lists...${RESET}"
|
||||||
|
apt-get update -qq
|
||||||
|
|
||||||
|
echo -e "${GREEN}Installing packages: $@${RESET}"
|
||||||
|
apt-get install -yqq --no-install-recommends "$@"
|
||||||
|
|
||||||
|
echo -e "${GREEN}Autoremoving unnecessary packages...${RESET}"
|
||||||
|
apt-get autoremove -y
|
||||||
|
|
||||||
|
echo -e "${GREEN}Cleaning up package cache and metadata...${RESET}"
|
||||||
|
apt-get clean
|
||||||
|
rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* /tmp/* /var/tmp/*
|
||||||
|
|
||||||
|
echo -e "${GREEN}Installation and cleanup complete.${RESET}"
|
||||||
@@ -18,19 +18,43 @@
|
|||||||
|
|
||||||
set -eo pipefail
|
set -eo pipefail
|
||||||
|
|
||||||
|
# Make python interactive
|
||||||
|
if [ "$DEV_MODE" == "true" ]; then
|
||||||
|
if [ "$(whoami)" = "root" ] && command -v uv > /dev/null 2>&1; then
|
||||||
|
echo "Reinstalling the app in editable mode"
|
||||||
|
uv pip install -e .
|
||||||
|
fi
|
||||||
|
fi
|
||||||
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
|
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
|
||||||
|
PORT=${PORT:-8088}
|
||||||
# If Cypress run – overwrite the password for admin and export env variables
|
# If Cypress run – overwrite the password for admin and export env variables
|
||||||
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
||||||
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
|
|
||||||
export SUPERSET_TESTENV=true
|
export SUPERSET_TESTENV=true
|
||||||
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
|
export POSTGRES_DB=superset_cypress
|
||||||
|
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
|
||||||
|
PORT=8081
|
||||||
|
fi
|
||||||
|
if [[ "$DATABASE_DIALECT" == postgres* ]] && [ "$(whoami)" = "root" ]; then
|
||||||
|
# older images may not have the postgres dev requirements installed
|
||||||
|
echo "Installing postgres requirements"
|
||||||
|
if command -v uv > /dev/null 2>&1; then
|
||||||
|
# Use uv in newer images
|
||||||
|
uv pip install -e .[postgres]
|
||||||
|
else
|
||||||
|
# Use pip in older images
|
||||||
|
pip install -e .[postgres]
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
#
|
#
|
||||||
# Make sure we have dev requirements installed
|
# Make sure we have dev requirements installed
|
||||||
#
|
#
|
||||||
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
|
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
|
||||||
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
|
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
|
||||||
|
if command -v uv > /dev/null 2>&1; then
|
||||||
|
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
|
||||||
|
else
|
||||||
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
|
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
|
||||||
|
fi
|
||||||
else
|
else
|
||||||
echo "Skipping local overrides"
|
echo "Skipping local overrides"
|
||||||
fi
|
fi
|
||||||
@@ -48,7 +72,7 @@ case "${1}" in
|
|||||||
;;
|
;;
|
||||||
app)
|
app)
|
||||||
echo "Starting web app (using development server)..."
|
echo "Starting web app (using development server)..."
|
||||||
flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0
|
flask run -p $PORT --with-threads --reload --debugger --host=0.0.0.0
|
||||||
;;
|
;;
|
||||||
app-gunicorn)
|
app-gunicorn)
|
||||||
echo "Starting web app..."
|
echo "Starting web app..."
|
||||||
|
|||||||
28
docker/docker-entrypoint-initdb.d/cypress-init.sh
Executable file
28
docker/docker-entrypoint-initdb.d/cypress-init.sh
Executable file
@@ -0,0 +1,28 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing,
|
||||||
|
# software distributed under the License is distributed on an
|
||||||
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
# KIND, either express or implied. See the License for the
|
||||||
|
# specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------------
|
||||||
|
# Creates the examples database and respective user. This database location
|
||||||
|
# and access credentials are defined on the environment variables
|
||||||
|
# ------------------------------------------------------------------------
|
||||||
|
set -e
|
||||||
|
|
||||||
|
psql -v ON_ERROR_STOP=1 --username "${POSTGRES_USER}" <<-EOSQL
|
||||||
|
CREATE DATABASE superset_cypress;
|
||||||
|
EOSQL
|
||||||
@@ -27,11 +27,18 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
|
|||||||
echo "Building Superset frontend in dev mode inside docker container"
|
echo "Building Superset frontend in dev mode inside docker container"
|
||||||
cd /app/superset-frontend
|
cd /app/superset-frontend
|
||||||
|
|
||||||
|
if [ "$NPM_RUN_PRUNE" = "true" ]; then
|
||||||
|
echo "Running `npm run prune`"
|
||||||
|
npm run prune
|
||||||
|
fi
|
||||||
|
|
||||||
echo "Running `npm install`"
|
echo "Running `npm install`"
|
||||||
npm install
|
npm install
|
||||||
|
|
||||||
echo "Running frontend"
|
echo "Start webpack dev server"
|
||||||
npm run dev
|
# start the webpack dev server, serving dynamically at http://localhost:9000
|
||||||
|
# it proxies to the backend served at http://localhost:8088
|
||||||
|
npm run dev-server
|
||||||
|
|
||||||
else
|
else
|
||||||
echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
|
echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
|
||||||
|
|||||||
19
docker/docker-healthcheck.sh
Executable file
19
docker/docker-healthcheck.sh
Executable file
@@ -0,0 +1,19 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
curl -f "http://localhost:${SUPERSET_PORT}/${SUPERSET_APP_ROOT/\//}/health" || exit 1
|
||||||
@@ -30,24 +30,18 @@ fi
|
|||||||
|
|
||||||
echo_step() {
|
echo_step() {
|
||||||
cat <<EOF
|
cat <<EOF
|
||||||
|
|
||||||
######################################################################
|
######################################################################
|
||||||
|
|
||||||
|
|
||||||
Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
|
Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
|
||||||
|
|
||||||
|
|
||||||
######################################################################
|
######################################################################
|
||||||
|
|
||||||
EOF
|
EOF
|
||||||
}
|
}
|
||||||
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
|
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
|
||||||
# If Cypress run – overwrite the password for admin and export env variables
|
# If Cypress run – overwrite the password for admin and export env variables
|
||||||
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
||||||
ADMIN_PASSWORD="general"
|
ADMIN_PASSWORD="general"
|
||||||
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
|
|
||||||
export SUPERSET_TESTENV=true
|
export SUPERSET_TESTENV=true
|
||||||
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
|
export POSTGRES_DB=superset_cypress
|
||||||
|
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
|
||||||
fi
|
fi
|
||||||
# Initialize the database
|
# Initialize the database
|
||||||
echo_step "1" "Starting" "Applying DB migrations"
|
echo_step "1" "Starting" "Applying DB migrations"
|
||||||
@@ -56,12 +50,16 @@ echo_step "1" "Complete" "Applying DB migrations"
|
|||||||
|
|
||||||
# Create an admin user
|
# Create an admin user
|
||||||
echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )"
|
echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )"
|
||||||
superset fab create-admin \
|
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
||||||
|
superset load_test_users
|
||||||
|
else
|
||||||
|
superset fab create-admin \
|
||||||
--username admin \
|
--username admin \
|
||||||
--firstname Superset \
|
|
||||||
--lastname Admin \
|
|
||||||
--email admin@superset.com \
|
--email admin@superset.com \
|
||||||
--password "$ADMIN_PASSWORD"
|
--password "$ADMIN_PASSWORD" \
|
||||||
|
--firstname Superset \
|
||||||
|
--lastname Admin
|
||||||
|
fi
|
||||||
echo_step "2" "Complete" "Setting up admin user"
|
echo_step "2" "Complete" "Setting up admin user"
|
||||||
# Create default roles and permissions
|
# Create default roles and permissions
|
||||||
echo_step "3" "Starting" "Setting up roles and perms"
|
echo_step "3" "Starting" "Setting up roles and perms"
|
||||||
@@ -73,10 +71,9 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
|
|||||||
echo_step "4" "Starting" "Loading examples"
|
echo_step "4" "Starting" "Loading examples"
|
||||||
# If Cypress run which consumes superset_test_config – load required data for tests
|
# If Cypress run which consumes superset_test_config – load required data for tests
|
||||||
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
if [ "$CYPRESS_CONFIG" == "true" ]; then
|
||||||
superset load_test_users
|
|
||||||
superset load_examples --load-test-data
|
superset load_examples --load-test-data
|
||||||
else
|
else
|
||||||
superset load_examples --force
|
superset load_examples
|
||||||
fi
|
fi
|
||||||
echo_step "4" "Complete" "Loading examples"
|
echo_step "4" "Complete" "Loading examples"
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -23,4 +23,4 @@
|
|||||||
export SERVER_THREADS_AMOUNT=8
|
export SERVER_THREADS_AMOUNT=8
|
||||||
# start up the web server
|
# start up the web server
|
||||||
|
|
||||||
/usr/bin/run-server.sh
|
/app/docker/entrypoints/run-server.sh
|
||||||
@@ -90,38 +90,5 @@ http {
|
|||||||
|
|
||||||
client_max_body_size 10m;
|
client_max_body_size 10m;
|
||||||
|
|
||||||
upstream superset_app {
|
include /etc/nginx/conf.d/superset.conf;
|
||||||
server host.docker.internal:8088;
|
|
||||||
keepalive 100;
|
|
||||||
}
|
|
||||||
|
|
||||||
upstream superset_websocket {
|
|
||||||
server host.docker.internal:8080;
|
|
||||||
keepalive 100;
|
|
||||||
}
|
|
||||||
|
|
||||||
server {
|
|
||||||
listen 80 default_server;
|
|
||||||
server_name _;
|
|
||||||
|
|
||||||
location /ws {
|
|
||||||
proxy_pass http://superset_websocket;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection "Upgrade";
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
}
|
|
||||||
|
|
||||||
location / {
|
|
||||||
proxy_pass http://superset_app;
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-Host $host;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
port_in_redirect off;
|
|
||||||
proxy_connect_timeout 300;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
57
docker/nginx/templates/superset.conf.template
Normal file
57
docker/nginx/templates/superset.conf.template
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing,
|
||||||
|
# software distributed under the License is distributed on an
|
||||||
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
# KIND, either express or implied. See the License for the
|
||||||
|
# specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
upstream superset_app {
|
||||||
|
server host.docker.internal:8088;
|
||||||
|
keepalive 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
upstream superset_websocket {
|
||||||
|
server host.docker.internal:8080;
|
||||||
|
keepalive 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 80 default_server;
|
||||||
|
server_name _;
|
||||||
|
|
||||||
|
location /ws {
|
||||||
|
proxy_pass http://superset_websocket;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "Upgrade";
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ${SUPERSET_APP_ROOT}/static {
|
||||||
|
proxy_pass http://host.docker.internal:9000; # Proxy to superset-node
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
}
|
||||||
|
|
||||||
|
location ${SUPERSET_APP_ROOT} {
|
||||||
|
proxy_pass http://superset_app;
|
||||||
|
proxy_set_header Host $http_host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
port_in_redirect off;
|
||||||
|
proxy_connect_timeout 300;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
64
docker/pip-install.sh
Executable file
64
docker/pip-install.sh
Executable file
@@ -0,0 +1,64 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership.
|
||||||
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
# (the "License"); you may not use this file except in compliance with
|
||||||
|
# the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Default flag
|
||||||
|
REQUIRES_BUILD_ESSENTIAL=false
|
||||||
|
USE_CACHE=true
|
||||||
|
|
||||||
|
# Filter arguments
|
||||||
|
ARGS=()
|
||||||
|
for arg in "$@"; do
|
||||||
|
case "$arg" in
|
||||||
|
--requires-build-essential)
|
||||||
|
REQUIRES_BUILD_ESSENTIAL=true
|
||||||
|
;;
|
||||||
|
--no-cache)
|
||||||
|
USE_CACHE=false
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
ARGS+=("$arg")
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Install build-essential if required
|
||||||
|
if $REQUIRES_BUILD_ESSENTIAL; then
|
||||||
|
echo "Installing build-essential for package builds..."
|
||||||
|
apt-get update -qq \
|
||||||
|
&& apt-get install -yqq --no-install-recommends build-essential
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Choose whether to use pip cache
|
||||||
|
if $USE_CACHE; then
|
||||||
|
echo "Using pip cache..."
|
||||||
|
uv pip install "${ARGS[@]}"
|
||||||
|
else
|
||||||
|
echo "Disabling pip cache..."
|
||||||
|
uv pip install --no-cache-dir "${ARGS[@]}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove build-essential if it was installed
|
||||||
|
if $REQUIRES_BUILD_ESSENTIAL; then
|
||||||
|
echo "Removing build-essential to keep the image lean..."
|
||||||
|
apt-get autoremove -yqq --purge build-essential \
|
||||||
|
&& apt-get clean \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Python packages installed successfully."
|
||||||
@@ -22,6 +22,7 @@
|
|||||||
#
|
#
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
from celery.schedules import crontab
|
from celery.schedules import crontab
|
||||||
from flask_caching.backends.filesystemcache import FileSystemCache
|
from flask_caching.backends.filesystemcache import FileSystemCache
|
||||||
@@ -70,6 +71,7 @@ CACHE_CONFIG = {
|
|||||||
"CACHE_REDIS_DB": REDIS_RESULTS_DB,
|
"CACHE_REDIS_DB": REDIS_RESULTS_DB,
|
||||||
}
|
}
|
||||||
DATA_CACHE_CONFIG = CACHE_CONFIG
|
DATA_CACHE_CONFIG = CACHE_CONFIG
|
||||||
|
THUMBNAIL_CACHE_CONFIG = CACHE_CONFIG
|
||||||
|
|
||||||
|
|
||||||
class CeleryConfig:
|
class CeleryConfig:
|
||||||
@@ -99,11 +101,28 @@ CELERY_CONFIG = CeleryConfig
|
|||||||
|
|
||||||
FEATURE_FLAGS = {"ALERT_REPORTS": True}
|
FEATURE_FLAGS = {"ALERT_REPORTS": True}
|
||||||
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
|
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
|
||||||
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/
|
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
|
||||||
# The base URL for the email report hyperlinks.
|
# The base URL for the email report hyperlinks.
|
||||||
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
|
WEBDRIVER_BASEURL_USER_FRIENDLY = (
|
||||||
|
f"http://localhost:8888/{os.environ.get('SUPERSET_APP_ROOT', '/')}/"
|
||||||
|
)
|
||||||
SQLLAB_CTAS_NO_LIMIT = True
|
SQLLAB_CTAS_NO_LIMIT = True
|
||||||
|
|
||||||
|
log_level_text = os.getenv("SUPERSET_LOG_LEVEL", "INFO")
|
||||||
|
LOG_LEVEL = getattr(logging, log_level_text.upper(), logging.INFO)
|
||||||
|
|
||||||
|
if os.getenv("CYPRESS_CONFIG") == "true":
|
||||||
|
# When running the service as a cypress backend, we need to import the config
|
||||||
|
# located @ tests/integration_tests/superset_test_config.py
|
||||||
|
base_dir = os.path.dirname(__file__)
|
||||||
|
module_folder = os.path.abspath(
|
||||||
|
os.path.join(base_dir, "../../tests/integration_tests/")
|
||||||
|
)
|
||||||
|
sys.path.insert(0, module_folder)
|
||||||
|
from superset_test_config import * # noqa
|
||||||
|
|
||||||
|
sys.path.pop(0)
|
||||||
|
|
||||||
#
|
#
|
||||||
# Optionally import superset_config_docker.py (which will have been included on
|
# Optionally import superset_config_docker.py (which will have been included on
|
||||||
# the PYTHONPATH) in order to allow for local settings to be overridden
|
# the PYTHONPATH) in order to allow for local settings to be overridden
|
||||||
@@ -113,7 +132,7 @@ try:
|
|||||||
from superset_config_docker import * # noqa
|
from superset_config_docker import * # noqa
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]"
|
f"Loaded your Docker configuration at [{superset_config_docker.__file__}]"
|
||||||
)
|
)
|
||||||
except ImportError:
|
except ImportError:
|
||||||
logger.info("Using default Docker config...")
|
logger.info("Using default Docker config...")
|
||||||
@@ -1,3 +1,4 @@
+/* eslint-env node */
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
@@ -16,33 +17,31 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { t } from '@superset-ui/core';
-import { ControlPanelConfig } from '@superset-ui/chart-controls';
-
-const config: ControlPanelConfig = {
-  controlPanelSections: [
-    {
-      label: t('Query'),
-      expanded: true,
-      controlSetRows: [
-        ['groupby'],
-        ['metric'],
-        ['adhoc_filters'],
-        ['row_limit'],
-      ],
-    },
-    {
-      label: t('Chart Options'),
-      expanded: true,
-      controlSetRows: [['color_scheme']],
-    },
-  ],
-  controlOverrides: {
-    groupby: {
-      label: t('Source / Target'),
-      description: t('Choose a source and a target'),
-    },
-  },
-};
-
-export default config;
+module.exports = {
+  extends: [
+    'eslint:recommended',
+    'plugin:@typescript-eslint/recommended',
+    'plugin:react/recommended',
+    'plugin:prettier/recommended',
+  ],
+  parser: '@typescript-eslint/parser',
+  parserOptions: {
+    ecmaFeatures: {
+      jsx: true,
+    },
+    ecmaVersion: 2020,
+    sourceType: 'module',
+  },
+  plugins: ['@typescript-eslint', 'react', 'prettier'],
+  rules: {
+    'react/react-in-jsx-scope': 'off',
+    'react/prop-types': 'off',
+    '@typescript-eslint/explicit-module-boundary-types': 'off',
+  },
+  settings: {
+    react: {
+      version: 'detect',
+    },
+  },
+  ignorePatterns: ['build/**/*', '.docusaurus/**/*', 'node_modules/**/*'],
+};
@@ -1 +1 @@
-v20.16.0
+v20.18.3
@@ -18,6 +18,6 @@ under the License.
 -->
 
 This is the public documentation site for Superset, built using
-[Docusaurus 2](https://docusaurus.io/). See
+[Docusaurus 3](https://docusaurus.io/). See
 [CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
 contributing to documentation.
@@ -1,3 +1,4 @@
+/* eslint-env node */
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
@@ -63,6 +63,7 @@
 "Fiji",
 "Finland",
 "France",
+"France (with overseas)",
 "France (regions)",
 "French Polynesia",
 "Gabon",
@@ -85,6 +86,7 @@
 "Israel",
 "Italy",
 "Italy (regions)",
+"Ivory Coast",
 "Japan",
 "Jordan",
 "Kazakhstan",
@@ -142,6 +144,7 @@
 "Poland",
 "Portugal",
 "Qatar",
+"Republic Of Serbia",
 "Romania",
 "Russia",
 "Rwanda",
@@ -4,7 +4,6 @@ hide_title: true
 sidebar_position: 10
 ---
 
-import { Buffer } from 'buffer/index.js';
 import SwaggerUI from 'swagger-ui-react';
 import openapi from '/resources/openapi.json';
 import 'swagger-ui-react/swagger-ui.css';
@@ -25,6 +25,9 @@ Alerts and reports are disabled by default. To turn them on, you need to do some
 - At least one of those must be configured, depending on what you want to use:
   - emails: `SMTP_*` settings
   - Slack messages: `SLACK_API_TOKEN`
+- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/docs/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
+  - Use date codes from [strftime.org](https://strftime.org/) to create the email subject.
+  - If no date code is provided, the original string will be used as the email subject.
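As a minimal sketch, the two feature flags from the snippets above might be combined in `superset_config.py` as follows; the example subject string and the resulting date are illustrative, not taken from the diff:

```python
# Illustrative superset_config.py excerpt: enable date codes in report email subjects.
FEATURE_FLAGS = {
    "ALERT_REPORTS": True,
    "DATE_FORMAT_IN_EMAIL_SUBJECT": True,
}

# With the flag on, a report whose email subject is "Weekly sales %Y-%m-%d" would be
# sent as e.g. "Weekly sales 2024-05-03" (strftime codes replaced with the UTC send
# date); without any date code, the subject string is used unchanged.
```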
 
 ##### Disable dry-run mode
 
@@ -53,11 +56,14 @@ To send alerts and reports to Slack channels, you need to create a new Slack App
    - `incoming-webhook`
    - `files:write`
    - `chat:write`
+   - `channels:read`
+   - `groups:read`
 4. At the top of the "OAuth and Permissions" section, click "install to workspace".
 5. Select a default channel for your app and continue.
    (You can post to any channel by inviting your Superset app into that channel).
 6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token in the `SLACK_API_TOKEN` variable of your `superset_config.py`.
-7. Restart the service (or run `superset init`) to pull in the new configuration.
+7. Ensure the feature flag `ALERT_REPORT_SLACK_V2` is set to True in `superset_config.py`
+8. Restart the service (or run `superset init`) to pull in the new configuration.
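Taken together, the Slack-related pieces of `superset_config.py` might look roughly like this sketch; the token value is a placeholder, and only `SLACK_API_TOKEN` plus the two feature flags come from the steps above:

```python
# Illustrative superset_config.py excerpt for Slack alerts and reports.
FEATURE_FLAGS = {
    "ALERT_REPORTS": True,
    "ALERT_REPORT_SLACK_V2": True,  # required for the Slack flow described above
}
SLACK_API_TOKEN = "xoxb-..."  # Bot User OAuth Access Token copied from your Slack app
```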
 
 Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#' e.g. use `alerts` instead of `#alerts`.
 
@@ -86,6 +92,7 @@ You can find documentation about each field in the default `config.py` in the Gi
 You need to replace default values with your custom Redis, Slack and/or SMTP config.
 
 Superset uses Celery beat and Celery worker(s) to send alerts and reports.
 
 - The beat is the scheduler that tells the worker when to perform its tasks. This schedule is defined when you create the alert or report.
 - The worker will process the tasks that need to be performed when an alert or report is fired.
 
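A rough sketch of the Celery side of that configuration is shown below; the Redis URLs, task name, and schedule are assumptions for illustration, and the authoritative example lives in the docs and in Superset's `config.py`:

```python
# Illustrative Celery setup for alerts & reports (URLs and task names are assumptions).
from celery.schedules import crontab

class CeleryConfig:
    broker_url = "redis://superset_cache:6379/0"
    result_backend = "redis://superset_cache:6379/0"
    imports = ("superset.sql_lab", "superset.tasks.scheduler")
    worker_prefetch_multiplier = 10
    task_acks_late = True
    beat_schedule = {
        # Celery beat enqueues this task on the given schedule; the task checks which
        # alerts/reports are due and hands the actual work to the workers.
        "reports.scheduler": {
            "task": "reports.scheduler",
            "schedule": crontab(minute="*", hour="*"),
        },
    }

CELERY_CONFIG = CeleryConfig
```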
@@ -137,7 +144,7 @@ SLACK_API_TOKEN = "xoxb-"
 SMTP_HOST = "smtp.sendgrid.net"  # change to your host
 SMTP_PORT = 2525  # your port, e.g. 587
 SMTP_STARTTLS = True
-SMTP_SSL_SERVER_AUTH = True  # If your using an SMTP server with a valid certificate
+SMTP_SSL_SERVER_AUTH = True  # If you're using an SMTP server with a valid certificate
 SMTP_SSL = False
 SMTP_USER = "your_user"  # use the empty string "" if using an unauthenticated SMTP server
 SMTP_PASSWORD = "your_password"  # use the empty string "" if using an unauthenticated SMTP server
@@ -174,15 +181,13 @@ By default, Alerts and Reports are executed as the owner of the alert/report obj
 just change the config as follows (`admin` in this example):
 
 ```python
-from superset.tasks.types import ExecutorType
+from superset.tasks.types import FixedExecutor
 
-THUMBNAIL_SELENIUM_USER = 'admin'
-ALERT_REPORTS_EXECUTE_AS = [ExecutorType.SELENIUM]
+ALERT_REPORTS_EXECUTORS = [FixedExecutor("admin")]
 ```
 
 Please refer to `ExecutorType` in the codebase for other executor types.
 
 
 **Important notes**
 
 - Be mindful of the concurrency setting for celery (using `-c 4`). Selenium/webdriver instances can
@@ -194,7 +199,6 @@ Please refer to `ExecutorType` in the codebase for other executor types.
 - Adjust `WEBDRIVER_BASEURL` in your configuration file if celery workers can’t access Superset via
   its default value of `http://0.0.0.0:8080/`.
 
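For instance, in a docker compose style deployment the override might look like the sketch below; both hostnames are placeholders that depend on your own setup:

```python
# Illustrative override when workers reach the web app under a service hostname
# rather than the 0.0.0.0 default (adjust to your deployment).
WEBDRIVER_BASEURL = "http://superset_app:8088/"
WEBDRIVER_BASEURL_USER_FRIENDLY = "https://superset.example.com/"  # used in emailed links
```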
 It's also possible to specify a minimum interval between each report's execution through the config file:
 
 ``` python
@@ -300,6 +304,7 @@ One symptom of an invalid connection to an email server is receiving an error of
 Confirm via testing that your outbound email configuration is correct. Here is the simplest test, for an un-authenticated email SMTP email service running on port 25. If you are sending over SSL, for instance, study how [Superset's codebase sends emails](https://github.com/apache/superset/blob/master/superset/utils/core.py#L818) and then test with those commands and arguments.
 
 Start Python in your worker environment, replace all example values, and run:
 
 ```python
 import smtplib
 from email.mime.multipart import MIMEMultipart
@@ -321,6 +326,7 @@ mailserver.quit()
 This should send an email.
 
 Possible fixes:
 
 - Some cloud hosts disable outgoing unauthenticated SMTP email to prevent spam. For instance, [Azure blocks port 25 by default on some machines](https://learn.microsoft.com/en-us/azure/virtual-network/troubleshoot-outbound-smtp-connectivity). Enable that port or use another sending method.
 - Use another set of SMTP credentials that you verify works in this setup.
 
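For reference, here is a self-contained sketch of the kind of smoke test described above; every address, hostname, and port is a placeholder to replace with your own values:

```python
# Minimal outbound-SMTP smoke test using only the standard library (all values are placeholders).
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

msg = MIMEMultipart()
msg["Subject"] = "Superset SMTP test"
msg["From"] = "superset@example.com"
msg["To"] = "you@example.com"
msg.attach(MIMEText("If this arrives, outbound SMTP from the worker host works.", "plain"))

# Un-authenticated server on port 25; add starttls()/login() calls if your server requires them.
mailserver = smtplib.SMTP("smtp.example.com", 25)
mailserver.sendmail(msg["From"], [msg["To"]], msg.as_string())
mailserver.quit()
```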
Some files were not shown because too many files have changed in this diff.