Compare commits
1206 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
08b88fd4d2 | ||
|
|
2e172d77cf | ||
|
|
8fe1f8fb3f | ||
|
|
c4eba9e467 | ||
|
|
90d9616f2b | ||
|
|
803738436e | ||
|
|
f14c1bb593 | ||
|
|
31a0b6e5b0 | ||
|
|
27538386bc | ||
|
|
3b35ddf135 | ||
|
|
d5ab6c8d3d | ||
|
|
a4ecff4e23 | ||
|
|
19a0827d1f | ||
|
|
2d8a0cc6c9 | ||
|
|
2789385688 | ||
|
|
e965f95477 | ||
|
|
ad212272d1 | ||
|
|
6d37d97ba5 | ||
|
|
fdd42ef4b6 | ||
|
|
a616bf4082 | ||
|
|
a9e1e685ba | ||
|
|
d41418eaa0 | ||
|
|
1f8fccc0f9 | ||
|
|
75a2b4f610 | ||
|
|
133f98ad58 | ||
|
|
1a7ef4758b | ||
|
|
c77bab8160 | ||
|
|
724c3f48a4 | ||
|
|
073d56cb33 | ||
|
|
e4a95f9428 | ||
|
|
1b06140bde | ||
|
|
f8dcbf70c5 | ||
|
|
b9299d61ac | ||
|
|
2384ad4eb5 | ||
|
|
2b66eadee2 | ||
|
|
94d9337e0b | ||
|
|
a0621e10a8 | ||
|
|
b72d5b03dc | ||
|
|
914480ad3c | ||
|
|
ff2f85f39b | ||
|
|
9cf16a4ff2 | ||
|
|
b90c410c01 | ||
|
|
77d1e5d046 | ||
|
|
4bc5fe5495 | ||
|
|
2c72a7ae4f | ||
|
|
4b11f45f72 | ||
|
|
04ae004f43 | ||
|
|
29ef8c4af8 | ||
|
|
718230cdf2 | ||
|
|
8175e19f72 | ||
|
|
7b76356182 | ||
|
|
1c56319be4 | ||
|
|
36caca3244 | ||
|
|
5079b2aa95 | ||
|
|
cab8e7d22d | ||
|
|
85d137b20a | ||
|
|
a942f81dfd | ||
|
|
01043c9bf4 | ||
|
|
a9610e2886 | ||
|
|
5897d85f7a | ||
|
|
0367dce38b | ||
|
|
1ca1395382 | ||
|
|
2607e4be4d | ||
|
|
04680e5ff1 | ||
|
|
a7a6678d5c | ||
|
|
8069d6221d | ||
|
|
269f55c29a | ||
|
|
bca27b436b | ||
|
|
aecaa85905 | ||
|
|
7e36488f03 | ||
|
|
87c3e831a8 | ||
|
|
ee63ebc8ec | ||
|
|
5916291901 | ||
|
|
4b0f252170 | ||
|
|
9176a4072b | ||
|
|
0cb7c5e4a6 | ||
|
|
e182f7f962 | ||
|
|
23c98294bd | ||
|
|
22bdd9e324 | ||
|
|
b159e51787 | ||
|
|
d57012067b | ||
|
|
9364fb5b79 | ||
|
|
c49fb0aa9b | ||
|
|
b9af019567 | ||
|
|
e7f8143c3b | ||
|
|
c9e47f0bb3 | ||
|
|
686023c8dd | ||
|
|
d997a450cf | ||
|
|
9e053923d4 | ||
|
|
ef06a9d497 | ||
|
|
37205099db | ||
|
|
e498f2fcb6 | ||
|
|
f7c55270db | ||
|
|
0a6208296e | ||
|
|
bf4d3a0dff | ||
|
|
b227612f6e | ||
|
|
45686a1af6 | ||
|
|
82ed4878c4 | ||
|
|
6e1ec8347d | ||
|
|
f905726c24 | ||
|
|
69195f8d2d | ||
|
|
b4909f2d03 | ||
|
|
44e753d94d | ||
|
|
e4903e6dc6 | ||
|
|
d4e8d57fc4 | ||
|
|
281ae45495 | ||
|
|
ff4f9b4527 | ||
|
|
86f9087ea2 | ||
|
|
7cd9b85831 | ||
|
|
71e1eea9f4 | ||
|
|
1e79e9cd2a | ||
|
|
af7cdeba4d | ||
|
|
500e6256c0 | ||
|
|
e79d05fd77 | ||
|
|
fc85756c20 | ||
|
|
6081f7161a | ||
|
|
c21513fb8c | ||
|
|
ec752b1378 | ||
|
|
cf1d9ce1e6 | ||
|
|
6188d60fec | ||
|
|
dfc28f37eb | ||
|
|
23c834f04e | ||
|
|
c84211ec44 | ||
|
|
7d374428d3 | ||
|
|
3a2974f589 | ||
|
|
3ed8f5fc23 | ||
|
|
61755f0b7d | ||
|
|
0a3d2fccd4 | ||
|
|
0b40c8a26f | ||
|
|
81df7087db | ||
|
|
cb7c5aa70c | ||
|
|
5bc581fd44 | ||
|
|
5ee70b244b | ||
|
|
a26cf001c4 | ||
|
|
e02d35ed5c | ||
|
|
e98a1c3537 | ||
|
|
4404751a1d | ||
|
|
defe6789c0 | ||
|
|
823f306f24 | ||
|
|
72627b1761 | ||
|
|
1702b020be | ||
|
|
89f6ccc1c6 | ||
|
|
eff5952641 | ||
|
|
f10395b2f7 | ||
|
|
b2647567c0 | ||
|
|
028456572b | ||
|
|
84a7730f47 | ||
|
|
76a2f95231 | ||
|
|
9904593dc3 | ||
|
|
8f00e9e30b | ||
|
|
16ab696d7c | ||
|
|
1ce14df43d | ||
|
|
34d6618b2e | ||
|
|
abdd1d537f | ||
|
|
d9fda346cb | ||
|
|
6cbe0e6096 | ||
|
|
268edcfedd | ||
|
|
c5ddf57124 | ||
|
|
f9202ba179 | ||
|
|
17635e1a2b | ||
|
|
285197926e | ||
|
|
5466fab2a0 | ||
|
|
ed85032277 | ||
|
|
680e1cbb42 | ||
|
|
2d37dec5ff | ||
|
|
3f4c306bd6 | ||
|
|
ac432495d7 | ||
|
|
12fb7c1a62 | ||
|
|
feb15a30a2 | ||
|
|
eb0f3970cf | ||
|
|
b82d15af76 | ||
|
|
32b38ee2d6 | ||
|
|
1d702f2142 | ||
|
|
4ae77ba8af | ||
|
|
3c72e1f8fb | ||
|
|
4bfe08d7c3 | ||
|
|
3a7ed8d194 | ||
|
|
4d204b3b36 | ||
|
|
39ee33aeff | ||
|
|
831cd21737 | ||
|
|
a82bb588f4 | ||
|
|
a84bd5225c | ||
|
|
f0acc11249 | ||
|
|
fa35d7d2f4 | ||
|
|
e65aba3c46 | ||
|
|
fab7b1083b | ||
|
|
d9161fb76a | ||
|
|
85b18ff5e7 | ||
|
|
3a8af5d0b0 | ||
|
|
1c545d3a2d | ||
|
|
120a5d08f9 | ||
|
|
b586cb0ba7 | ||
|
|
ae2205aeb5 | ||
|
|
2e25fc4161 | ||
|
|
ba89b2d091 | ||
|
|
aee8438924 | ||
|
|
a6ba841e57 | ||
|
|
8643228b51 | ||
|
|
de869973c7 | ||
|
|
ac57780607 | ||
|
|
630604bc6b | ||
|
|
eb5d220b5e | ||
|
|
3f076b00cd | ||
|
|
514f9452f3 | ||
|
|
068c343be0 | ||
|
|
500455fc72 | ||
|
|
1b4f128f55 | ||
|
|
1a3a8daf49 | ||
|
|
7fce8eab3a | ||
|
|
b4c9402737 | ||
|
|
8459347bdc | ||
|
|
f7bf17290c | ||
|
|
d908e48d61 | ||
|
|
a3a4687ebf | ||
|
|
4d48d5d854 | ||
|
|
83e6807fa0 | ||
|
|
ba96984048 | ||
|
|
591e5ec32e | ||
|
|
690de862e8 | ||
|
|
35810ce2bf | ||
|
|
6c52f2ff72 | ||
|
|
d663bea5e6 | ||
|
|
1ea4521d0c | ||
|
|
c4153c0bbe | ||
|
|
ae8b249dc2 | ||
|
|
9500f0aae3 | ||
|
|
be3da6396f | ||
|
|
330926c167 | ||
|
|
cbcc00c929 | ||
|
|
d03b74f754 | ||
|
|
ec21d5af21 | ||
|
|
70c7315ae0 | ||
|
|
4fa1f0ab17 | ||
|
|
39e502faae | ||
|
|
0280bc52e0 | ||
|
|
dee47864c4 | ||
|
|
17623f71d4 | ||
|
|
7453131858 | ||
|
|
e822fb50d8 | ||
|
|
e2bca47421 | ||
|
|
7987cb794b | ||
|
|
7483e2c942 | ||
|
|
e6129eb492 | ||
|
|
b10aca2de1 | ||
|
|
02cbad59de | ||
|
|
ccb87d337c | ||
|
|
63a49983eb | ||
|
|
81dd622fdb | ||
|
|
9a49b1c41d | ||
|
|
b059506afa | ||
|
|
13c17e1526 | ||
|
|
8e3217a921 | ||
|
|
aed7c7436a | ||
|
|
7f3edad119 | ||
|
|
7fd9c82ae8 | ||
|
|
f3c7052f30 | ||
|
|
326d90a5e4 | ||
|
|
cccc47311b | ||
|
|
5c03167948 | ||
|
|
87b6d76c32 | ||
|
|
abfa03474c | ||
|
|
5bc734b2e5 | ||
|
|
814b70ffd8 | ||
|
|
1e18bfdea4 | ||
|
|
200b66d088 | ||
|
|
cbd01074ba | ||
|
|
1582fa1964 | ||
|
|
a9b6d11ade | ||
|
|
c4b6324e74 | ||
|
|
9432ea80be | ||
|
|
f412b4c158 | ||
|
|
547a3bf4e7 | ||
|
|
e97dc9d3cb | ||
|
|
efae14592e | ||
|
|
1d06495629 | ||
|
|
ffdfdb94ab | ||
|
|
8d7e97a26e | ||
|
|
f8b8f6a343 | ||
|
|
4967342362 | ||
|
|
9893847991 | ||
|
|
18e9640d99 | ||
|
|
58ea736ed6 | ||
|
|
b4bdc45a6b | ||
|
|
fa07b8d51b | ||
|
|
e121a8585e | ||
|
|
adef519583 | ||
|
|
08f09b4761 | ||
|
|
2a89c90e0b | ||
|
|
ce5fa379ec | ||
|
|
d4d4a9b1f1 | ||
|
|
d0b5b449b2 | ||
|
|
bad6938d1a | ||
|
|
48e28eff9b | ||
|
|
f87163413b | ||
|
|
52a9f2742b | ||
|
|
7f07fbefbc | ||
|
|
93660c6838 | ||
|
|
3ebadbcda9 | ||
|
|
3df3e0d681 | ||
|
|
4a3c09187a | ||
|
|
76f8d33d81 | ||
|
|
6cc6637454 | ||
|
|
d7f8a7fde3 | ||
|
|
80eb9c2c64 | ||
|
|
bd45e3b19a | ||
|
|
b866b33dee | ||
|
|
8994bdacbd | ||
|
|
f3b403d346 | ||
|
|
5ad4167512 | ||
|
|
ca67a7a4e9 | ||
|
|
64ef8b14b4 | ||
|
|
912c6f6231 | ||
|
|
6f1351fbbb | ||
|
|
f75dc0271d | ||
|
|
1fb8716231 | ||
|
|
ed212440b5 | ||
|
|
1528288b59 | ||
|
|
7c936e7f60 | ||
|
|
ff268a7526 | ||
|
|
e9804aedff | ||
|
|
e95132ddc3 | ||
|
|
bb0f69d074 | ||
|
|
645de384e3 | ||
|
|
15ecdeb3ba | ||
|
|
04ea3addc4 | ||
|
|
7e64f2e988 | ||
|
|
40fbf1c761 | ||
|
|
a85968eadb | ||
|
|
efc63669a6 | ||
|
|
076f9cd095 | ||
|
|
fdbc936dc9 | ||
|
|
18e459e19e | ||
|
|
064363df78 | ||
|
|
f8cc05b54e | ||
|
|
9baca6758d | ||
|
|
b39d165913 | ||
|
|
bc3ad64619 | ||
|
|
0bd2ac5353 | ||
|
|
03e2af8bd9 | ||
|
|
82b85d1d6c | ||
|
|
17c7ca239a | ||
|
|
ef59b6b650 | ||
|
|
d1a7a7b85c | ||
|
|
3d72eb475a | ||
|
|
b50489eb96 | ||
|
|
8773e32cd6 | ||
|
|
f438ccbcb1 | ||
|
|
f829b486d1 | ||
|
|
06e52e600e | ||
|
|
f0636b8748 | ||
|
|
c629282ec4 | ||
|
|
7d934e7246 | ||
|
|
8efcaeb768 | ||
|
|
cf0b670932 | ||
|
|
3949d39478 | ||
|
|
5718d6bbaf | ||
|
|
f3146ef6f9 | ||
|
|
255ea69977 | ||
|
|
9af34ba51c | ||
|
|
1cf634afa2 | ||
|
|
d7fc364ff4 | ||
|
|
aebd089ca5 | ||
|
|
ae7f163372 | ||
|
|
ed9f56448f | ||
|
|
ccd5fd44cf | ||
|
|
c5252d0f43 | ||
|
|
ede1432936 | ||
|
|
c3c9ceb1cc | ||
|
|
6fe93e18c7 | ||
|
|
c988080990 | ||
|
|
a26e65f418 | ||
|
|
e66f68d36c | ||
|
|
cb4d934ba5 | ||
|
|
6962c76412 | ||
|
|
b0d25a154a | ||
|
|
e22aecb0d1 | ||
|
|
32bd827b25 | ||
|
|
e399a8c613 | ||
|
|
b90d8e32f1 | ||
|
|
c81026ddb1 | ||
|
|
dd72048320 | ||
|
|
ad604aed09 | ||
|
|
31b7b9a6a0 | ||
|
|
745784fc97 | ||
|
|
fdee06bbf2 | ||
|
|
816c517f0f | ||
|
|
49f24d128b | ||
|
|
8223729e1e | ||
|
|
2d6b9422c6 | ||
|
|
3c0e85e2c0 | ||
|
|
490c707eb6 | ||
|
|
7c1b56f3a9 | ||
|
|
3e9f797949 | ||
|
|
f3de758363 | ||
|
|
90e46cb39c | ||
|
|
fe77534c03 | ||
|
|
1f135e41cd | ||
|
|
147c12dddf | ||
|
|
3dfdde130a | ||
|
|
255a36c280 | ||
|
|
66f646ac66 | ||
|
|
e53f3032bb | ||
|
|
3b4cd812ae | ||
|
|
ac5da46fb2 | ||
|
|
497a6f1df9 | ||
|
|
1fd08a5912 | ||
|
|
9676f02497 | ||
|
|
42dd64e413 | ||
|
|
c3ab796734 | ||
|
|
a782d623f0 | ||
|
|
48b88e5241 | ||
|
|
a47a512808 | ||
|
|
aff7a82664 | ||
|
|
2d237fe2ef | ||
|
|
c944c61747 | ||
|
|
0c8b24378d | ||
|
|
90ba6ee6a0 | ||
|
|
8d877e8a35 | ||
|
|
c5b1eb7f5b | ||
|
|
46d60880eb | ||
|
|
0c36827368 | ||
|
|
2b1bb35c5c | ||
|
|
d3824bbb38 | ||
|
|
670ba5d32e | ||
|
|
64c91ec9e3 | ||
|
|
1fda6f0745 | ||
|
|
f8e596b9d8 | ||
|
|
254645773c | ||
|
|
e79adbbc5f | ||
|
|
75e69f02e8 | ||
|
|
6fc837db51 | ||
|
|
2923a125db | ||
|
|
527572c3eb | ||
|
|
a7ba6e4a5d | ||
|
|
e31ad22f50 | ||
|
|
813ed6018d | ||
|
|
0454ef3726 | ||
|
|
afedcdf0d8 | ||
|
|
9f3aeb22d9 | ||
|
|
59268e978a | ||
|
|
b9a2fa4015 | ||
|
|
3c8577b853 | ||
|
|
0aa3d2a818 | ||
|
|
fbd0d46e8a | ||
|
|
c4e056929d | ||
|
|
84e59a11f1 | ||
|
|
9fcd5d67e4 | ||
|
|
ccf505a480 | ||
|
|
d1d1c49009 | ||
|
|
232a5c392e | ||
|
|
9c6248f3ba | ||
|
|
6841697917 | ||
|
|
81817309d3 | ||
|
|
025ef5a0f1 | ||
|
|
9a4e4d0443 | ||
|
|
c9c6bcaabe | ||
|
|
144f516700 | ||
|
|
c17ffc1e9c | ||
|
|
0d4137d21e | ||
|
|
0c5db55d55 | ||
|
|
bd4a4c2753 | ||
|
|
f399fcd624 | ||
|
|
08e40e2d78 | ||
|
|
2898f9d379 | ||
|
|
dfea8df7c9 | ||
|
|
a5320a0f37 | ||
|
|
b68084b9ac | ||
|
|
3b24d7df83 | ||
|
|
b3107bb603 | ||
|
|
08b7e891a7 | ||
|
|
57421d14d0 | ||
|
|
0cf0860a3d | ||
|
|
327c052456 | ||
|
|
033ba2cb66 | ||
|
|
cc36428260 | ||
|
|
6da68ab271 | ||
|
|
be01851ef7 | ||
|
|
20915457ff | ||
|
|
2385cd445b | ||
|
|
0429e842b5 | ||
|
|
f68189b54e | ||
|
|
dcf83031d1 | ||
|
|
ef7e9dd336 | ||
|
|
ae0655028f | ||
|
|
166c576c94 | ||
|
|
7190cf8e77 | ||
|
|
2ef9bfed20 | ||
|
|
0191fa58c8 | ||
|
|
4c3313b01c | ||
|
|
5278b53218 | ||
|
|
4ea770068b | ||
|
|
91bd38a851 | ||
|
|
163f4e359c | ||
|
|
90592d3e3d | ||
|
|
62fcdf2a92 | ||
|
|
48821b5101 | ||
|
|
3b129253a3 | ||
|
|
48760849ec | ||
|
|
9c1ca07c40 | ||
|
|
774ad45efb | ||
|
|
299e9ce6b8 | ||
|
|
219f33f0d1 | ||
|
|
58a704b84c | ||
|
|
b58cfbcb91 | ||
|
|
1e325d9645 | ||
|
|
ad5a4389a2 | ||
|
|
e4fba0ffb7 | ||
|
|
e584a9673f | ||
|
|
b888802e05 | ||
|
|
25c599d040 | ||
|
|
fb866a937b | ||
|
|
aa95e03eb9 | ||
|
|
cf1d0f38ad | ||
|
|
fca982c609 | ||
|
|
747bf80474 | ||
|
|
4f7fd65c8b | ||
|
|
6045063e78 | ||
|
|
0ec9cd4ad2 | ||
|
|
4268513653 | ||
|
|
7654eef110 | ||
|
|
b301ba1f57 | ||
|
|
95509f2000 | ||
|
|
49ab09101b | ||
|
|
40d9e15126 | ||
|
|
a141695b2b | ||
|
|
d01e67a159 | ||
|
|
c34df3eea4 | ||
|
|
56bcbb0f8e | ||
|
|
51f1aa3106 | ||
|
|
d7e419127c | ||
|
|
27fab0d54f | ||
|
|
091e93c831 | ||
|
|
bb6b2da267 | ||
|
|
7abe2d5eee | ||
|
|
7b015faae9 | ||
|
|
e834154030 | ||
|
|
256a521bf1 | ||
|
|
a626f994bf | ||
|
|
76dda688b1 | ||
|
|
163a6a20e5 | ||
|
|
3d136aa0a4 | ||
|
|
ff3057de5d | ||
|
|
7045018d86 | ||
|
|
a55f963e52 | ||
|
|
8dfe2b70b2 | ||
|
|
fdbb569c3e | ||
|
|
cc3e63f1de | ||
|
|
a48e246aa0 | ||
|
|
6eba6cac0b | ||
|
|
bd706ebbd1 | ||
|
|
abbf138cfb | ||
|
|
de346a3eba | ||
|
|
99e1de58bc | ||
|
|
5344a80535 | ||
|
|
0b09a74d37 | ||
|
|
e21745ac9d | ||
|
|
593861eac6 | ||
|
|
53dead9c29 | ||
|
|
f64e2ba7d5 | ||
|
|
80515d2a92 | ||
|
|
4d72afb54b | ||
|
|
06fcaa3095 | ||
|
|
de88764e93 | ||
|
|
9051e1f3e2 | ||
|
|
ba93e6a2d1 | ||
|
|
7e5e229f48 | ||
|
|
b9915e7ecf | ||
|
|
3e51c61dbf | ||
|
|
591e512327 | ||
|
|
fc5db474b7 | ||
|
|
c083aec8e8 | ||
|
|
1ce3b81d01 | ||
|
|
89cd10b3ce | ||
|
|
712297480c | ||
|
|
6ddccaaa9b | ||
|
|
1dcf2c4326 | ||
|
|
f0a8ea644b | ||
|
|
fb6ef26a24 | ||
|
|
00b34d7fbd | ||
|
|
8329ea2b9b | ||
|
|
da0a87a735 | ||
|
|
7aeca39c46 | ||
|
|
e1751c065c | ||
|
|
e5151cb915 | ||
|
|
24e3c7f89a | ||
|
|
987cb9978d | ||
|
|
274d21795f | ||
|
|
0d2c2b0681 | ||
|
|
1eff48facb | ||
|
|
17bd7512ff | ||
|
|
16141ecb94 | ||
|
|
34f381bc25 | ||
|
|
24292dbc11 | ||
|
|
24a2f5b8f0 | ||
|
|
0e6f754af9 | ||
|
|
fb85f008fa | ||
|
|
85fb4a933d | ||
|
|
d3ace6d63f | ||
|
|
205eed8350 | ||
|
|
5bf40e2256 | ||
|
|
7c28e4eace | ||
|
|
a882f7a55f | ||
|
|
9aba77db74 | ||
|
|
c9e5fbb09b | ||
|
|
65f25a1e5a | ||
|
|
e0dd5d9d1d | ||
|
|
737db25b6e | ||
|
|
bea702269c | ||
|
|
d78da8a9bf | ||
|
|
ed8153ff8b | ||
|
|
f7ce100180 | ||
|
|
72291d65c9 | ||
|
|
0d3f57a78d | ||
|
|
04fb0e2c2d | ||
|
|
155fec0a6b | ||
|
|
d90044cd52 | ||
|
|
62bd4eb211 | ||
|
|
677c427b16 | ||
|
|
52b0716571 | ||
|
|
04b662ea11 | ||
|
|
db052b17ea | ||
|
|
e300273e71 | ||
|
|
c5f2eafc90 | ||
|
|
90e4d6469d | ||
|
|
1e7773eb16 | ||
|
|
3c89c8cc46 | ||
|
|
74086dae2b | ||
|
|
66403f1876 | ||
|
|
3a4cd3ae24 | ||
|
|
4ffc1f613e | ||
|
|
dfbba84400 | ||
|
|
77864e6cf4 | ||
|
|
4d12251806 | ||
|
|
0c9f9b695b | ||
|
|
a4a2bf7ae9 | ||
|
|
d0a04cde49 | ||
|
|
69685b9dcc | ||
|
|
b308a3eb4e | ||
|
|
bfa40bd360 | ||
|
|
b0e2904c24 | ||
|
|
ce506bdf65 | ||
|
|
922cc037bf | ||
|
|
8252ada1f9 | ||
|
|
a2d2f8bb8c | ||
|
|
7c5f61d6a6 | ||
|
|
841e18a08c | ||
|
|
dbc7fef7f5 | ||
|
|
cbfe3cb2dc | ||
|
|
5fcd25def1 | ||
|
|
fe3f5f69ae | ||
|
|
2395fbbdaa | ||
|
|
960b26c7a2 | ||
|
|
28ac3504d6 | ||
|
|
ecc00bdd26 | ||
|
|
e7946451d6 | ||
|
|
9b34600c8e | ||
|
|
884610861b | ||
|
|
d9bd3d6460 | ||
|
|
38375be5c3 | ||
|
|
91d951ac42 | ||
|
|
d79a45ff32 | ||
|
|
818251fc85 | ||
|
|
75abd1f44a | ||
|
|
f55df3b18b | ||
|
|
5dbfdefae8 | ||
|
|
e5584440ce | ||
|
|
d5e9d5d045 | ||
|
|
3208a014ff | ||
|
|
874c12ad2d | ||
|
|
22d8075c53 | ||
|
|
baebba1159 | ||
|
|
04748b4cda | ||
|
|
a471afe206 | ||
|
|
5d0a01d0d0 | ||
|
|
9e1272e97c | ||
|
|
aeebd8840d | ||
|
|
55d3b012e5 | ||
|
|
a6e1e18244 | ||
|
|
46d7a925bb | ||
|
|
5929ab7689 | ||
|
|
fffb7b500a | ||
|
|
cb14640a82 | ||
|
|
d65054e015 | ||
|
|
5d5060eca6 | ||
|
|
9ff351532a | ||
|
|
59a6f447ec | ||
|
|
1887b5e934 | ||
|
|
33758bfff5 | ||
|
|
2d5beb1f91 | ||
|
|
5fd0e7d028 | ||
|
|
ef0c4be067 | ||
|
|
ac3aba7c7d | ||
|
|
0fdb57a181 | ||
|
|
3e7b5df287 | ||
|
|
3cd16cf368 | ||
|
|
a58adc862e | ||
|
|
7d88f80a9b | ||
|
|
09be02f70a | ||
|
|
50fcdd3a34 | ||
|
|
dee36491c5 | ||
|
|
903612ac6c | ||
|
|
ce705054fa | ||
|
|
c589616883 | ||
|
|
58309f275f | ||
|
|
edf4e4f24e | ||
|
|
b3e0b5b586 | ||
|
|
68802989bc | ||
|
|
70887d72e2 | ||
|
|
1922225042 | ||
|
|
0bdc3010d8 | ||
|
|
1df37e6e4d | ||
|
|
e9ed416654 | ||
|
|
03c42b5b87 | ||
|
|
e055e6d2c2 | ||
|
|
29780821e8 | ||
|
|
f10ee13901 | ||
|
|
cdfc4a35b2 | ||
|
|
eb762c8bfe | ||
|
|
83abfef830 | ||
|
|
54137ad023 | ||
|
|
4be6bfafaa | ||
|
|
9ba6d489f3 | ||
|
|
af4bd40853 | ||
|
|
84fa0d1940 | ||
|
|
938e13a429 | ||
|
|
dc364daffd | ||
|
|
1cadfecd4b | ||
|
|
5b26667fd5 | ||
|
|
899caf9449 | ||
|
|
e6063f2ddf | ||
|
|
46486f82d9 | ||
|
|
0089762955 | ||
|
|
2bd60c0747 | ||
|
|
db6cd21504 | ||
|
|
f40499e550 | ||
|
|
67a85b9831 | ||
|
|
cb3384b3b2 | ||
|
|
ac51a30f98 | ||
|
|
91fe02cdc8 | ||
|
|
76042be7c3 | ||
|
|
d3f55a0905 | ||
|
|
efaef8fe09 | ||
|
|
8757a24d89 | ||
|
|
63785f039c | ||
|
|
d6689ee700 | ||
|
|
787daf6005 | ||
|
|
23aeee5a9c | ||
|
|
70c6cad0e3 | ||
|
|
6b1bf3b395 | ||
|
|
f5216f6047 | ||
|
|
15654a3082 | ||
|
|
baff0cba38 | ||
|
|
c4ee098bb7 | ||
|
|
612b8ca3d7 | ||
|
|
c43a9fd554 | ||
|
|
6c68a21e4f | ||
|
|
db02b33e09 | ||
|
|
2df6baa7a7 | ||
|
|
fc7bd63039 | ||
|
|
959a09cc92 | ||
|
|
366ecefbaa | ||
|
|
a2b30f35fc | ||
|
|
8bceda8134 | ||
|
|
bae1067015 | ||
|
|
116dca3e6f | ||
|
|
b448394077 | ||
|
|
09f407f553 | ||
|
|
0479118efc | ||
|
|
c93411b1e7 | ||
|
|
31283f1424 | ||
|
|
93c6597cf4 | ||
|
|
4446c745a8 | ||
|
|
38e90fe309 | ||
|
|
f5489467e5 | ||
|
|
ab0bc5a3fa | ||
|
|
412634cb57 | ||
|
|
a0ddbb9ec9 | ||
|
|
a803705ddc | ||
|
|
75a358c616 | ||
|
|
493ba18362 | ||
|
|
5e4fca4ea4 | ||
|
|
d289783b67 | ||
|
|
ac84fc2b65 | ||
|
|
40b3d3b3ef | ||
|
|
50a9e13f9b | ||
|
|
66bff01b45 | ||
|
|
9691234b7e | ||
|
|
ddeabdd048 | ||
|
|
3ed45ab98c | ||
|
|
ca08e7051e | ||
|
|
1fb21b8b45 | ||
|
|
c581ea8661 | ||
|
|
f19d1958c5 | ||
|
|
9c99be510b | ||
|
|
7a08cdcb1c | ||
|
|
f24ddfd467 | ||
|
|
23a8ea5636 | ||
|
|
2c04d3c250 | ||
|
|
337454b646 | ||
|
|
b7f46ebe75 | ||
|
|
10773f96a7 | ||
|
|
b97a8275d4 | ||
|
|
081bdca71e | ||
|
|
62959ca38b | ||
|
|
fe68bc31c3 | ||
|
|
3d2c791ff1 | ||
|
|
d40ce52139 | ||
|
|
122891c29b | ||
|
|
c1d9918abe | ||
|
|
d93b1fc686 | ||
|
|
02c5cac26f | ||
|
|
6566377740 | ||
|
|
db6b2f3ae1 | ||
|
|
c31210b96d | ||
|
|
dcc6f2a18f | ||
|
|
0c0666caa0 | ||
|
|
243eeadfd6 | ||
|
|
9ba5b49d8a | ||
|
|
c870bd414e | ||
|
|
4b01e92509 | ||
|
|
513a090cdc | ||
|
|
abe79d1427 | ||
|
|
b81968dc20 | ||
|
|
66cc546a30 | ||
|
|
6e899ac55f | ||
|
|
6b52384024 | ||
|
|
0a1d8db357 | ||
|
|
6f68ddb505 | ||
|
|
4f59abf189 | ||
|
|
5c441f4ddb | ||
|
|
be023aba8d | ||
|
|
53990201bc | ||
|
|
37783d685f | ||
|
|
25fdcaca8b | ||
|
|
ce6e7c1359 | ||
|
|
91167665b1 | ||
|
|
f374345860 | ||
|
|
d3b50cb92e | ||
|
|
a58194bdb0 | ||
|
|
5f3484ac59 | ||
|
|
e14b74fdbf | ||
|
|
56f28859b7 | ||
|
|
f3cdb3b787 | ||
|
|
b35f6b0a94 | ||
|
|
5574cfef59 | ||
|
|
c3015583ce | ||
|
|
7cc2c930ed | ||
|
|
2662bf19df | ||
|
|
62e3fe2345 | ||
|
|
7d25d171e2 | ||
|
|
c5859c7254 | ||
|
|
dd7b4b8310 | ||
|
|
93551a65b8 | ||
|
|
7eafbabe65 | ||
|
|
43dd948476 | ||
|
|
75e7f2d22c | ||
|
|
26662eed9e | ||
|
|
121b1d0951 | ||
|
|
398036d77e | ||
|
|
59d5fcf88c | ||
|
|
1f8e48b374 | ||
|
|
7bf19b1232 | ||
|
|
1590b8c7e5 | ||
|
|
22522fc05f | ||
|
|
c9b59fab1f | ||
|
|
69152e087a | ||
|
|
652e572b56 | ||
|
|
65c89f54dc | ||
|
|
edf5c0e83b | ||
|
|
a4abbfe126 | ||
|
|
7b28bcef15 | ||
|
|
8042ac876e | ||
|
|
82bc907088 | ||
|
|
e2b572d9e2 | ||
|
|
e71596dc45 | ||
|
|
c3be58db43 | ||
|
|
3d77a12aa9 | ||
|
|
36deb8da71 | ||
|
|
05ee8c0e36 | ||
|
|
5ca55a5585 | ||
|
|
1b330a8c55 | ||
|
|
696678c981 | ||
|
|
20aec3cfca | ||
|
|
4ded37e71e | ||
|
|
0674ed846c | ||
|
|
3107152f5b | ||
|
|
5b19528662 | ||
|
|
357773c631 | ||
|
|
5e43d074c3 | ||
|
|
08bdcd52b8 | ||
|
|
0b8522be50 | ||
|
|
c02a7fe763 | ||
|
|
dcd5bdeb00 | ||
|
|
562b4f0415 | ||
|
|
6160a3fdff | ||
|
|
0779da6d24 | ||
|
|
740624ba01 | ||
|
|
2969cc9993 | ||
|
|
9a8c3a0447 | ||
|
|
e817382efd | ||
|
|
422d1feb3e | ||
|
|
2b0cb2b0a5 | ||
|
|
705d09d3d0 | ||
|
|
9114d86ecd | ||
|
|
ad4a950b56 | ||
|
|
bd480e0c6b | ||
|
|
af3415b040 | ||
|
|
b4a96bd840 | ||
|
|
9d8d421384 | ||
|
|
f6ffc00748 | ||
|
|
e35016f07d | ||
|
|
af8e2523a8 | ||
|
|
492df94b2a | ||
|
|
b62f7e2820 | ||
|
|
5cc2fc157c | ||
|
|
266c049f2d | ||
|
|
4e848c8cb5 | ||
|
|
efff1ac4a1 | ||
|
|
fc64a75fbd | ||
|
|
dd9f431b6f | ||
|
|
c894c54d00 | ||
|
|
4d349c7885 | ||
|
|
675b819e0a | ||
|
|
09f1083c50 | ||
|
|
47be3ef3ea | ||
|
|
9dd7778597 | ||
|
|
efffa925ed | ||
|
|
fa9bc92c95 | ||
|
|
227c66c2c5 | ||
|
|
e91bc9dfcc | ||
|
|
bc29035bda | ||
|
|
f10e453c9b | ||
|
|
d4b59b36a8 | ||
|
|
4f644cd0ca | ||
|
|
ed2935ec69 | ||
|
|
ea72c6b018 | ||
|
|
2df6ab36bf | ||
|
|
10ea63557a | ||
|
|
55e462d90b | ||
|
|
73393925c0 | ||
|
|
f9852bc807 | ||
|
|
6e1901e8e8 | ||
|
|
87582962d9 | ||
|
|
3de2698657 | ||
|
|
ec1f0221cd | ||
|
|
4d900c9ee1 | ||
|
|
3a758900eb | ||
|
|
1ea7178d17 | ||
|
|
c85c9988df | ||
|
|
34f68073a2 | ||
|
|
acc880c4df | ||
|
|
557b557503 | ||
|
|
3018356588 | ||
|
|
ede4dffcb7 | ||
|
|
cad392eb76 | ||
|
|
0296158100 | ||
|
|
b2a4692a02 | ||
|
|
2fbadea9e3 | ||
|
|
dc05be36a6 | ||
|
|
dac0d1d0dc | ||
|
|
459f7160ac | ||
|
|
aff524d843 | ||
|
|
3a91667e92 | ||
|
|
3e0d3584f7 | ||
|
|
1e47d6fb41 | ||
|
|
d5ba88b407 | ||
|
|
ce1e18b31b | ||
|
|
ec84aa7577 | ||
|
|
8b4d72cf32 | ||
|
|
85e6e65a47 | ||
|
|
7cad3655f5 | ||
|
|
b9e7f292c3 | ||
|
|
fc85034c60 | ||
|
|
f5e3d0cc02 | ||
|
|
fe377e8b94 | ||
|
|
5bb87138e9 | ||
|
|
579e58206e | ||
|
|
172b6ce892 | ||
|
|
0cc8eff1c3 | ||
|
|
3b023e5eaa | ||
|
|
615d8f1624 | ||
|
|
b4409ace21 | ||
|
|
dbee6aca1f | ||
|
|
acfe62eaf7 | ||
|
|
527a8af060 | ||
|
|
a5a931a670 | ||
|
|
2f05efaf12 | ||
|
|
83ef8a2e12 | ||
|
|
c564881867 | ||
|
|
b16930f35d | ||
|
|
2d910e3f07 | ||
|
|
daa1420c8e | ||
|
|
cea310e50b | ||
|
|
fcdd5c6752 | ||
|
|
2ace73e9a1 | ||
|
|
80cfb08794 | ||
|
|
1edc2b91cf | ||
|
|
1f58e18b6f | ||
|
|
f2bf316058 | ||
|
|
9cd38fa1ed | ||
|
|
edb0111775 | ||
|
|
de4f9e8d1a | ||
|
|
461e41cd61 | ||
|
|
716406198e | ||
|
|
68592aeddf | ||
|
|
b927ff6eef | ||
|
|
ce50e6e4fe | ||
|
|
167ed33bba | ||
|
|
0ee1abf31a | ||
|
|
6a0a1af67e | ||
|
|
f85481d51b | ||
|
|
00b6b0ac68 | ||
|
|
1546b1ae71 | ||
|
|
1e94498d9d | ||
|
|
0f7189b859 | ||
|
|
a6e0f1b75a | ||
|
|
543c22bb50 | ||
|
|
07e067cf0b | ||
|
|
6c256a34a9 | ||
|
|
6b2eb04a73 | ||
|
|
898d80ba38 | ||
|
|
ea8e4ad05b | ||
|
|
27aeac6859 | ||
|
|
8da371e324 | ||
|
|
0c59fe933d | ||
|
|
e169c67760 | ||
|
|
3a5a927dc6 | ||
|
|
2d419e4253 | ||
|
|
87869a29c9 | ||
|
|
544211f5ec | ||
|
|
f6ac95e2dd | ||
|
|
63bef2f844 | ||
|
|
4a8cd04de6 | ||
|
|
85806624db | ||
|
|
1ac2273984 | ||
|
|
a8c29c4ffe | ||
|
|
31af01c4f2 | ||
|
|
b1bba96d04 | ||
|
|
c5c730224e | ||
|
|
7441cf7d39 | ||
|
|
45c72d25df | ||
|
|
3fff631b32 | ||
|
|
bfa2891b23 | ||
|
|
5715f52fef | ||
|
|
1f2126f463 | ||
|
|
27ed0b37bf | ||
|
|
cdbd2f8507 | ||
|
|
e46ba2b4a4 | ||
|
|
1c338ba742 | ||
|
|
2b7673ad5d | ||
|
|
2f27353015 | ||
|
|
1b8c3f420a | ||
|
|
a3a070855c | ||
|
|
e84c6393b8 | ||
|
|
7413dd9f4b | ||
|
|
9cbd667eb7 | ||
|
|
37fb56c61c | ||
|
|
404a94cadb | ||
|
|
0807a8d016 | ||
|
|
4a9888157e | ||
|
|
83fbdcceac | ||
|
|
b070ef5fdb | ||
|
|
7d380dcd14 | ||
|
|
a15dbd992d | ||
|
|
52c5d235af | ||
|
|
495f6460a4 | ||
|
|
a96024d0e7 | ||
|
|
99b84d2909 | ||
|
|
24728b8b47 | ||
|
|
9750e49df8 | ||
|
|
bf31783d0c | ||
|
|
87eacf88c3 | ||
|
|
1dbfb99ead | ||
|
|
ff4020ea73 | ||
|
|
0ce7fc18a8 | ||
|
|
470a6e9d76 | ||
|
|
fc74fbeeaa | ||
|
|
9c6a5793b9 | ||
|
|
49b6b38741 | ||
|
|
a385ee9e97 | ||
|
|
f0917c62f2 | ||
|
|
94d20168da | ||
|
|
2d866e3ffa | ||
|
|
5d94d7067e | ||
|
|
7323f4c2ab | ||
|
|
eca6dfef6a | ||
|
|
761462ef93 | ||
|
|
98e83255e6 | ||
|
|
cbf3562a6f | ||
|
|
a2c41bbace | ||
|
|
2a12a3c702 | ||
|
|
2ab6a411f4 | ||
|
|
14ed10bdb0 | ||
|
|
49e6fd5bfb | ||
|
|
af872fa4d4 | ||
|
|
cec4cf014c | ||
|
|
783ad703d0 | ||
|
|
222671675c | ||
|
|
9a62d94630 | ||
|
|
c3edc6e24b | ||
|
|
119b0c55e9 | ||
|
|
c14c7edc5e | ||
|
|
e3b296c558 | ||
|
|
c2d29fb54b | ||
|
|
7aab8b0ae3 | ||
|
|
861a3bd4ae | ||
|
|
9bc7ad9cd5 | ||
|
|
a1e3fc1c23 | ||
|
|
242869db3a | ||
|
|
8924bb79e7 | ||
|
|
a0d103dac3 | ||
|
|
d52b299df8 | ||
|
|
092432f04f | ||
|
|
ea8e6634d6 | ||
|
|
3e6f90cf72 | ||
|
|
16731056ed | ||
|
|
0712894353 | ||
|
|
36fad803ed | ||
|
|
6732f01cb7 | ||
|
|
bb04e6fcfa | ||
|
|
007ee88d33 | ||
|
|
7a5bb94754 | ||
|
|
e06a0cd89b | ||
|
|
b6cba13293 | ||
|
|
d929bbfe30 | ||
|
|
bf67d64708 | ||
|
|
92aa1a6124 | ||
|
|
733ab8014b | ||
|
|
6aaa49f0bf | ||
|
|
638f27c2df | ||
|
|
84a3b55912 | ||
|
|
552d46479b | ||
|
|
fa9c066ffe | ||
|
|
e1e20b8757 | ||
|
|
2fb94a89e2 | ||
|
|
7a9604a3c9 | ||
|
|
e099088012 | ||
|
|
34e107e7d3 | ||
|
|
2254a4d0b4 | ||
|
|
9f7486f402 | ||
|
|
699602d1c5 | ||
|
|
2993ff1d75 | ||
|
|
afb3c24d5a | ||
|
|
8ef730b5fe | ||
|
|
866cfe5279 | ||
|
|
68c2eab6b9 | ||
|
|
aeda5bd260 | ||
|
|
a95cd71456 | ||
|
|
34d0dd9d6e | ||
|
|
401d9afd54 | ||
|
|
74edb936a5 | ||
|
|
c1558578d7 | ||
|
|
3597fdb7f8 | ||
|
|
43f2a379a1 | ||
|
|
69702e3a19 | ||
|
|
eb0655cf85 | ||
|
|
d8864bc92b | ||
|
|
89fc9d7c80 | ||
|
|
76aa9f7e10 | ||
|
|
abd0974897 | ||
|
|
c4e943a24f | ||
|
|
a3106bcb3d | ||
|
|
b045075a96 | ||
|
|
09d597f3ad | ||
|
|
9d4c3d83d0 | ||
|
|
95580a004f | ||
|
|
723f90755e | ||
|
|
324205f77a | ||
|
|
0a40d8ce8f | ||
|
|
168a25239e | ||
|
|
7eef46e941 | ||
|
|
50da4f8c07 | ||
|
|
2d0ebeae1b | ||
|
|
1a16491971 | ||
|
|
7f4f250970 | ||
|
|
25acb78071 | ||
|
|
e822d5a1b7 | ||
|
|
32fc0ff6d0 | ||
|
|
94dde075b3 | ||
|
|
65e92327ab | ||
|
|
0be02e67a5 | ||
|
|
7327c97e4c | ||
|
|
03b21dcf0a | ||
|
|
dc98c6739f | ||
|
|
fcb870728d | ||
|
|
7919428a1e | ||
|
|
3496a80f5a | ||
|
|
56b917a5c2 | ||
|
|
18c43aaea2 | ||
|
|
c43fc38f69 | ||
|
|
c07f0ab9c7 | ||
|
|
1c429b27bc | ||
|
|
b7019ad4f3 | ||
|
|
84e8f741ae | ||
|
|
e3a9b393c2 | ||
|
|
16aba517e4 | ||
|
|
205928e6df | ||
|
|
39ce4aa049 | ||
|
|
cef4a8296a | ||
|
|
b370ef0229 | ||
|
|
6b80f5bb35 | ||
|
|
bdae570a69 | ||
|
|
face5245a9 | ||
|
|
db1ed2a765 | ||
|
|
10982dec3c | ||
|
|
6825e75681 |
@@ -5,6 +5,11 @@ engines:
|
||||
enabled: false
|
||||
eslint:
|
||||
enabled: true
|
||||
checks:
|
||||
import/extensions:
|
||||
enabled: false
|
||||
import/no-extraneous-dependencies:
|
||||
enabled: false
|
||||
config:
|
||||
config: superset/assets/.eslintrc
|
||||
pep8:
|
||||
@@ -27,7 +32,6 @@ exclude_paths:
|
||||
- "**.gz"
|
||||
- "env/"
|
||||
- "tests/"
|
||||
- "superset/ascii_art.py"
|
||||
- "superset/assets/images/"
|
||||
- "superset/assets/vendor/"
|
||||
- "superset/assets/node_modules/"
|
||||
|
||||
@@ -1 +1 @@
|
||||
repo_token: eESbYiv4An6KEvjpmguDs4L7YkubXbqn1
|
||||
repo_token: 4P9MpvLrZfJKzHdGZsdV3MzO43OZJgYFn
|
||||
|
||||
8
.gitignore
vendored
@@ -24,10 +24,16 @@ app.db
|
||||
*.bak
|
||||
.idea
|
||||
*.sqllite
|
||||
.vscode
|
||||
.python-version
|
||||
|
||||
# Node.js, webpack artifacts
|
||||
*.entry.js
|
||||
*.js.map
|
||||
node_modules
|
||||
npm-debug.log
|
||||
npm-debug.log*
|
||||
yarn.lock
|
||||
superset/assets/version_info.json
|
||||
|
||||
# IntelliJ
|
||||
*.iml
|
||||
|
||||
@@ -17,7 +17,6 @@ pep8:
|
||||
ignore-paths:
|
||||
- docs
|
||||
- superset/migrations/env.py
|
||||
- superset/ascii_art.py
|
||||
ignore-patterns:
|
||||
- ^example/doc_.*\.py$
|
||||
- (^|/)docs(/|$)
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
[pycodestyle]
|
||||
max-line-length = 90
|
||||
407
.pylintrc
Normal file
@@ -0,0 +1,407 @@
|
||||
[MASTER]
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
# Use multiple processes to speed up Pylint.
|
||||
jobs=1
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Allow optimization of some AST trees. This will activate a peephole AST
|
||||
# optimizer, which will apply various small optimizations. For instance, it can
|
||||
# be used to obtain the result of joining multiple strings with the addition
|
||||
# operator. Joining a lot of strings can lead to a maximum recursion error in
|
||||
# Pylint and this flag can prevent that. It has one side effect, the resulting
|
||||
# AST will be different than the one from reality. This option is deprecated
|
||||
# and it will be removed in Pylint 2.0.
|
||||
optimize-ast=no
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
|
||||
confidence=
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
#enable=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=standarderror-builtin,long-builtin,dict-view-method,intern-builtin,suppressed-message,no-absolute-import,unpacking-in-except,apply-builtin,delslice-method,indexing-exception,old-raise-syntax,print-statement,cmp-builtin,reduce-builtin,useless-suppression,coerce-method,input-builtin,cmp-method,raw_input-builtin,nonzero-method,backtick,basestring-builtin,setslice-method,reload-builtin,oct-method,map-builtin-not-iterating,execfile-builtin,old-octal-literal,zip-builtin-not-iterating,buffer-builtin,getslice-method,metaclass-assignment,xrange-builtin,long-suffix,round-builtin,range-builtin-not-iterating,next-method-called,dict-iter-method,parameter-unpacking,unicode-builtin,unichr-builtin,import-star-module-level,raising-string,filter-builtin-not-iterating,old-ne-operator,using-cmp-argument,coerce-builtin,file-builtin,old-division,hex-method,invalid-unary-operand-type
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||
# (visual studio) and html. You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Put messages in a separate file for each module / package specified on the
|
||||
# command line instead of printing them on stdout. Reports (if any) will be
|
||||
# written in a file name "pylint_global.[txt|html]". This option is deprecated
|
||||
# and it will be removed in Pylint 2.0.
|
||||
files-output=no
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=yes
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note). You have access to the variables errors warning, statement which
|
||||
# respectively contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details
|
||||
#msg-template=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=i,j,k,ex,Run,_,d,e,v,o,l,x,ts
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,bar,baz,toto,tutu,tata,d,fd
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=no
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct argument names
|
||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for argument names
|
||||
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct method names
|
||||
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for method names
|
||||
method-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct variable names
|
||||
variable-rgx=[a-z_][a-z0-9_]{1,30}$
|
||||
|
||||
# Naming hint for variable names
|
||||
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct inline iteration names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Naming hint for inline iteration names
|
||||
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Regular expression matching correct constant names
|
||||
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
|
||||
|
||||
# Naming hint for constant names
|
||||
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Regular expression matching correct class names
|
||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Naming hint for class names
|
||||
class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Regular expression matching correct class attribute names
|
||||
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Naming hint for class attribute names
|
||||
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Regular expression matching correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Naming hint for module names
|
||||
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression matching correct attribute names
|
||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for attribute names
|
||||
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct function names
|
||||
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for function names
|
||||
function-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=10
|
||||
|
||||
|
||||
[ELIF]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=90
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled. `dict-
|
||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
|
||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
|
||||
# `empty-line` allows space-only lines.
|
||||
no-space-check=trailing-comma,dict-separator
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1000
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,XXX,TODO
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=no
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it working
|
||||
# install python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to indicated private dictionary in
|
||||
# --spelling-private-dict-file option instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis. It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,_cb
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,future.builtins
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=5
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore
|
||||
ignored-argument-names=_.*
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of boolean expressions in a if statement
|
||||
max-bool-expr=5
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=optparse
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled)
|
||||
import-graph=
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "Exception"
|
||||
overgeneral-exceptions=Exception
|
||||
21
.travis.yml
@@ -1,38 +1,31 @@
|
||||
|
||||
language: python
|
||||
services:
|
||||
- redis-server
|
||||
addons:
|
||||
code_climate:
|
||||
repo_token: 5f3a06c425eef7be4b43627d7d07a3e46c45bdc07155217825ff7c49cb6a470c
|
||||
apt:
|
||||
sources:
|
||||
- deadsnakes
|
||||
packages:
|
||||
- python3.5
|
||||
cache:
|
||||
directories:
|
||||
- $HOME/.wheelhouse/
|
||||
env:
|
||||
global:
|
||||
- TRAVIS_CACHE=$HOME/.travis_cache/
|
||||
- TRAVIS_NODE_VERSION="5.11"
|
||||
matrix:
|
||||
- TOX_ENV=flake8
|
||||
- TOX_ENV=javascript
|
||||
- TOX_ENV=pylint
|
||||
- TOX_ENV=py34-postgres
|
||||
- TOX_ENV=py34-sqlite
|
||||
- TOX_ENV=py27-mysql
|
||||
- TOX_ENV=py27-sqlite
|
||||
before_install:
|
||||
- npm install -g npm@'>=3.9.5'
|
||||
before_script:
|
||||
- mysql -e 'drop database if exists superset; create database superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci' -u root
|
||||
- mysql -u root -e "DROP DATABASE IF EXISTS superset; CREATE DATABASE superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci"
|
||||
- mysql -u root -e "CREATE USER 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluserpassword';"
|
||||
- mysql -u root -e "GRANT ALL ON superset.* TO 'mysqluser'@'localhost';"
|
||||
- psql -c 'create database superset;' -U postgres
|
||||
- psql -c "CREATE USER postgresuser WITH PASSWORD 'pguserpassword';" -U postgres
|
||||
- psql -U postgres -c "CREATE DATABASE superset;"
|
||||
- psql -U postgres -c "CREATE USER postgresuser WITH PASSWORD 'pguserpassword';"
|
||||
- export PATH=${PATH}:/tmp/hive/bin
|
||||
install:
|
||||
- pip install --upgrade pip
|
||||
- pip install tox tox-travis
|
||||
- rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION
|
||||
- npm install
|
||||
script: tox -e $TOX_ENV
|
||||
|
||||
1208
CHANGELOG.md
84
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,84 @@
|
||||
# Code of Conduct
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
A primary goal of Apache Superset is to be inclusive to the largest number of contributors, with the most varied and diverse backgrounds possible. As such, we are committed to providing a friendly, safe and welcoming environment for all, regardless of gender, sexual orientation, ability, ethnicity, socioeconomic status, and religion (or lack thereof).
|
||||
|
||||
This code of conduct outlines our expectations for all those who participate in our community, as well as the consequences for unacceptable behavior.
|
||||
|
||||
We invite all those who participate in Apache Superset to help us create safe and positive experiences for everyone.
|
||||
|
||||
## 2. Open Source Citizenship
|
||||
|
||||
A supplemental goal of this Code of Conduct is to increase open source citizenship by encouraging participants to recognize and strengthen the relationships between our actions and their effects on our community.
|
||||
|
||||
Communities mirror the societies in which they exist and positive action is essential to counteract the many forms of inequality and abuses of power that exist in society.
|
||||
|
||||
If you see someone who is making an extra effort to ensure our community is welcoming, friendly, and encourages all participants to contribute to the fullest extent, we want to know.
|
||||
|
||||
## 3. Expected Behavior
|
||||
|
||||
The following behaviors are expected and requested of all community members:
|
||||
|
||||
* Participate in an authentic and active way. In doing so, you contribute to the health and longevity of this community.
|
||||
* Exercise consideration and respect in your speech and actions.
|
||||
* Attempt collaboration before conflict.
|
||||
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
|
||||
* Be mindful of your surroundings and of your fellow participants. Alert community leaders if you notice a dangerous situation, someone in distress, or violations of this Code of Conduct, even if they seem inconsequential.
|
||||
* Remember that community event venues may be shared with members of the public; please be respectful to all patrons of these locations.
|
||||
|
||||
## 4. Unacceptable Behavior
|
||||
|
||||
The following behaviors are considered harassment and are unacceptable within our community:
|
||||
|
||||
* Violence, threats of violence or violent language directed against another person.
|
||||
* Sexist, racist, homophobic, transphobic, ableist or otherwise discriminatory jokes and language.
|
||||
* Posting or displaying sexually explicit or violent material.
|
||||
* Posting or threatening to post other people’s personally identifying information ("doxing").
|
||||
* Personal insults, particularly those related to gender, sexual orientation, race, religion, or disability.
|
||||
* Inappropriate photography or recording.
|
||||
* Inappropriate physical contact. You should have someone’s consent before touching them.
|
||||
* Unwelcome sexual attention. This includes, sexualized comments or jokes; inappropriate touching, groping, and unwelcomed sexual advances.
|
||||
* Deliberate intimidation, stalking or following (online or in person).
|
||||
* Advocating for, or encouraging, any of the above behavior.
|
||||
* Sustained disruption of community events, including talks and presentations.
|
||||
|
||||
## 5. Consequences of Unacceptable Behavior
|
||||
|
||||
Unacceptable behavior from any community member, including sponsors and those with decision-making authority, will not be tolerated.
|
||||
|
||||
Anyone asked to stop unacceptable behavior is expected to comply immediately.
|
||||
|
||||
If a community member engages in unacceptable behavior, the community organizers may take any action they deem appropriate, up to and including a temporary ban or permanent expulsion from the community without warning (and without refund in the case of a paid event).
|
||||
|
||||
## 6. Reporting Guidelines
|
||||
|
||||
If you are subject to or witness unacceptable behavior, or have any other concerns, please notify a community organizer as soon as possible at dev@superset.incubator.apache.org.
|
||||
|
||||
|
||||
|
||||
Additionally, community organizers are available to help community members engage with local law enforcement or to otherwise help those experiencing unacceptable behavior feel safe. In the context of in-person events, organizers will also provide escorts as desired by the person experiencing distress.
|
||||
|
||||
## 7. Addressing Grievances
|
||||
|
||||
If you feel you have been falsely or unfairly accused of violating this Code of Conduct, you should notify Apache with a concise description of your grievance. Your grievance will be handled in accordance with our existing governing policies.
|
||||
|
||||
|
||||
|
||||
## 8. Scope
|
||||
|
||||
We expect all community participants (contributors, paid or otherwise; sponsors; and other guests) to abide by this Code of Conduct in all community venues–online and in-person–as well as in all one-on-one communications pertaining to community business.
|
||||
|
||||
This code of conduct and its related procedures also applies to unacceptable behavior occurring outside the scope of community activities when such behavior has the potential to adversely affect the safety and well-being of community members.
|
||||
|
||||
## 9. Contact info
|
||||
|
||||
dev@superset.incubator.apache.org
|
||||
|
||||
## 10. License and attribution
|
||||
|
||||
This Code of Conduct is distributed under a [Creative Commons Attribution-ShareAlike license](http://creativecommons.org/licenses/by-sa/3.0/).
|
||||
|
||||
Portions of text derived from the [Django Code of Conduct](https://www.djangoproject.com/conduct/) and the [Geek Feminism Anti-Harassment Policy](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy).
|
||||
|
||||
Retrieved on November 22, 2016 from [http://citizencodeofconduct.org/](http://citizencodeofconduct.org/)
|
||||
233
CONTRIBUTING.md
@@ -18,6 +18,9 @@ If you are reporting a bug, please include:
|
||||
troubleshooting.
|
||||
- Detailed steps to reproduce the bug.
|
||||
|
||||
When you post python stack traces please quote them using
|
||||
[markdown blocks](https://help.github.com/articles/creating-and-highlighting-code-blocks/).
|
||||
|
||||
### Fix Bugs
|
||||
|
||||
Look through the GitHub issues for bugs. Anything tagged with "bug" is
|
||||
@@ -26,7 +29,7 @@ open to whoever wants to implement it.
|
||||
### Implement Features
|
||||
|
||||
Look through the GitHub issues for features. Anything tagged with
|
||||
"feature" is open to whoever wants to implement it.
|
||||
"feature" or "starter_task" is open to whoever wants to implement it.
|
||||
|
||||
### Documentation
|
||||
|
||||
@@ -46,26 +49,135 @@ If you are proposing a feature:
|
||||
implement.
|
||||
- Remember that this is a volunteer-driven project, and that
|
||||
contributions are welcome :)
|
||||
|
||||
### Questions
|
||||
|
||||
## Latest Documentation
|
||||
There is a dedicated [tag](https://stackoverflow.com/questions/tagged/apache-superset) on [stackoverflow](https://stackoverflow.com/). Please use it when asking questions.
|
||||
|
||||
Latest documentation and tutorial are available [here](http://airbnb.io/superset)
|
||||
## Pull Request Guidelines
|
||||
|
||||
Before you submit a pull request from your forked repo, check that it
|
||||
meets these guidelines:
|
||||
|
||||
1. The pull request should include tests, either as doctests,
|
||||
unit tests, or both.
|
||||
2. If the pull request adds functionality, the docs should be updated
|
||||
   as part of the same PR. Doc strings are often sufficient; make
|
||||
sure to follow the sphinx compatible standards.
|
||||
3. The pull request should work for Python 2.7, and ideally python 3.4+.
|
||||
``from __future__ import`` will be required in every `.py` file soon.
|
||||
4. Code will be reviewed by re-running the unittests, flake8 and syntax
|
||||
should be as rigorous as the core Python project.
|
||||
5. Please rebase and resolve all conflicts before submitting.
|
||||
6. If you are asked to update your pull request with some changes there's
|
||||
no need to create a new one. Push your changes to the same branch.
|
||||
|
||||
## Documentation
|
||||
|
||||
The latest documentation and tutorial are available [here](https://superset.incubator.apache.org/).
|
||||
|
||||
Contributing to the official documentation is relatively easy, once you've setup
|
||||
your environment and done an edit end-to-end. The docs can be found in the
|
||||
`docs/` subdirectory of the repository, and are written in the
|
||||
[reStructuredText format](https://en.wikipedia.org/wiki/ReStructuredText) (.rst).
|
||||
If you've written Markdown before, you'll find the reStructuredText format familiar.
|
||||
|
||||
Superset uses [Sphinx](http://www.sphinx-doc.org/en/1.5.1/) to convert the rst files
|
||||
in `docs/` to the final HTML output users see.
|
||||
|
||||
Before you start changing the docs, you'll want to
|
||||
[fork the Superset project on Github](https://help.github.com/articles/fork-a-repo/).
|
||||
Once that new repository has been created, clone it on your local machine:
|
||||
|
||||
git clone git@github.com:your_username/incubator-superset.git
|
||||
|
||||
At this point, you may also want to create a
|
||||
[Python virtual environment](http://docs.python-guide.org/en/latest/dev/virtualenvs/)
|
||||
to manage the Python packages you're about to install:
|
||||
|
||||
virtualenv superset-dev
|
||||
source superset-dev/bin/activate
|
||||
|
||||
Finally, to make changes to the rst files and build the docs using Sphinx,
|
||||
you'll need to install a handful of dependencies from the repo you cloned:
|
||||
|
||||
cd incubator-superset
|
||||
pip install -r dev-reqs-for-docs.txt
|
||||
|
||||
To get the feel for how to edit and build the docs, let's edit a file, build
|
||||
the docs and see our changes in action. First, you'll want to
|
||||
[create a new branch](https://git-scm.com/book/en/v2/Git-Branching-Basic-Branching-and-Merging)
|
||||
to work on your changes:
|
||||
|
||||
git checkout -b changes-to-docs
|
||||
|
||||
Now, go ahead and edit one of the files under `docs/`, say `docs/tutorial.rst`
|
||||
- change it however you want. Check out the
|
||||
[ReStructuredText Primer](http://docutils.sourceforge.net/docs/user/rst/quickstart.html)
|
||||
for a reference on the formatting of the rst files.
|
||||
|
||||
Once you've made your changes, run this command from the root of the Superset
|
||||
repo to convert the docs into HTML:
|
||||
|
||||
python setup.py build_sphinx
|
||||
|
||||
You'll see a lot of output as Sphinx handles the conversion. After it's done, the
|
||||
HTML Sphinx generated should be in `docs/_build/html`. Go ahead and navigate there
|
||||
and start a simple web server so we can check out the docs in a browser:
|
||||
|
||||
cd docs/_build/html
|
||||
python -m SimpleHTTPServer
|
||||
|
||||
This will start a small Python web server listening on port 8000. Point your
|
||||
browser to [http://localhost:8000/](http://localhost:8000/), find the file
|
||||
you edited earlier, and check out your changes!
|
||||
|
||||
If you've made a change you'd like to contribute to the actual docs, just commit
|
||||
your code, push your new branch to Github:
|
||||
|
||||
git add docs/tutorial.rst
|
||||
git commit -m 'Awesome new change to tutorial'
|
||||
git push origin changes-to-docs
|
||||
|
||||
Then, [open a pull request](https://help.github.com/articles/about-pull-requests/).
|
||||
|
||||
If you're adding new images to the documentation, you'll notice that the images
|
||||
referenced in the rst, e.g.
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_01_sources_database.png
|
||||
|
||||
aren't actually included in that directory. _Instead_, you'll want to add and commit
|
||||
images (and any other static assets) to the _superset/assets/images_ directory.
|
||||
When the docs are being pushed to [Apache Superset (incubating)](https://superset.incubator.apache.org/), images
|
||||
will be moved from there to the _\_static/img_ directory, just like they're referenced
|
||||
in the docs.
|
||||
|
||||
For example, the image referenced above actually lives in
|
||||
|
||||
superset/assets/images/tutorial
|
||||
|
||||
Since the image is moved during the documentation build process, the docs reference the
|
||||
image in
|
||||
|
||||
_static/img/tutorial
|
||||
|
||||
instead.
|
||||
|
||||
## Setting up a Python development environment
|
||||
|
||||
Check the [OS dependencies](http://airbnb.io/superset/installation.html#os-dependencies) before following these steps.
|
||||
Check the [OS dependencies](https://superset.incubator.apache.org/installation.html#os-dependencies) before following these steps.
|
||||
|
||||
# fork the repo on GitHub and then clone it
|
||||
# alternatively you may want to clone the main repo but that won't work
|
||||
# so well if you are planning on sending PRs
|
||||
# git clone git@github.com:airbnb/superset.git
|
||||
# git clone git@github.com:apache/incubator-superset.git
|
||||
|
||||
# [optional] setup a virtual env and activate it
|
||||
virtualenv env
|
||||
source env/bin/activate
|
||||
|
||||
# install for development
|
||||
python setup.py develop
|
||||
pip install -e .
|
||||
|
||||
# Create an admin user
|
||||
fabmanager create-admin --app superset
|
||||
@@ -91,7 +203,7 @@ While these may be phased out over time, these packages are currently not
|
||||
managed with npm.
|
||||
|
||||
### Node/npm versions
|
||||
Make sure you are using recent versions of node and npm. No problems have been found with node>=5.10 and npm>=3.9.
|
||||
Make sure you are using recent versions of node and npm. No problems have been found with node>=5.10 and 4.0. > npm>=3.9.
|
||||
|
||||
### Using npm to generate bundled files
|
||||
|
||||
@@ -115,8 +227,13 @@ To install third party libraries defined in `package.json`, run the
|
||||
following within the `superset/assets/` directory which will install them in a
|
||||
new `node_modules/` folder within `assets/`.
|
||||
|
||||
```
|
||||
npm install
|
||||
```bash
|
||||
# from the root of the repository, move to where our JS package.json lives
|
||||
cd superset/assets/
|
||||
# install yarn, a replacement for `npm install` that is faster and more deterministic
|
||||
npm install -g yarn
|
||||
# run yarn to fetch all the dependencies
|
||||
yarn
|
||||
```
|
||||
|
||||
To parse and generate bundled files for superset, run either of the
|
||||
@@ -124,6 +241,9 @@ following commands. The `dev` flag will keep the npm script running and
|
||||
re-run it upon any changes within the assets directory.
|
||||
|
||||
```
|
||||
# Copies a conf file from the frontend to the backend
|
||||
npm run sync-backend
|
||||
|
||||
# Compiles the production / optimized js & css
|
||||
npm run prod
|
||||
|
||||
@@ -141,9 +261,20 @@ npm run dev
|
||||
|
||||
## Testing
|
||||
|
||||
Python tests can be run with:
|
||||
Before running python unit tests, please setup local testing environment:
|
||||
```
|
||||
pip install -r dev-reqs.txt
|
||||
```
|
||||
|
||||
All python tests can be run with:
|
||||
|
||||
./run_tests.sh
|
||||
|
||||
Alternatively, you can run a specific test with:
|
||||
|
||||
./run_specific_test.sh tests.core_tests:CoreTests.test_function_name
|
||||
|
||||
Note that before running specific tests, you have to both setup the local testing environment and run all tests.
|
||||
|
||||
We use [Mocha](https://mochajs.org/), [Chai](http://chaijs.com/) and [Enzyme](http://airbnb.io/enzyme/) to test Javascript. Tests can be run with:
|
||||
|
||||
@@ -155,9 +286,8 @@ We use [Mocha](https://mochajs.org/), [Chai](http://chaijs.com/) and [Enzyme](ht
|
||||
|
||||
Lint the project with:
|
||||
|
||||
# for python changes
|
||||
flake8 changes tests
|
||||
flake8 changes superset
|
||||
# for python
|
||||
flake8
|
||||
|
||||
# for javascript
|
||||
npm run lint
|
||||
@@ -202,23 +332,6 @@ The `variables.less` and `bootswatch.less` files that ship with Superset are der
|
||||
[Bootswatch](https://bootswatch.com) and thus extend Bootstrap. Modify variables in these files directly, or
|
||||
swap them out entirely with the equivalent files from other Bootswatch (themes)[https://github.com/thomaspark/bootswatch.git]
|
||||
|
||||
## Pull Request Guidelines
|
||||
|
||||
Before you submit a pull request from your forked repo, check that it
|
||||
meets these guidelines:
|
||||
|
||||
1. The pull request should include tests, either as doctests,
|
||||
unit tests, or both.
|
||||
2. If the pull request adds functionality, the docs should be updated
|
||||
   as part of the same PR. Doc strings are often sufficient; make
|
||||
sure to follow the sphinx compatible standards.
|
||||
3. The pull request should work for Python 2.6, 2.7, and ideally python 3.3.
|
||||
``from __future__ import`` will be required in every `.py` file soon.
|
||||
4. Code will be reviewed by re-running the unittests, flake8 and syntax
|
||||
should be as rigorous as the core Python project.
|
||||
5. Please rebase and resolve all conflicts before submitting.
|
||||
|
||||
|
||||
## Translations
|
||||
|
||||
We use [Babel](http://babel.pocoo.org/en/latest/) to translate Superset. The
|
||||
@@ -227,6 +340,8 @@ key is to instrument the strings that need translation using
|
||||
a module, all you have to do is to `_("Wrap your strings")` using the
|
||||
underscore `_` "function".
|
||||
|
||||
We use `import {t, tn, TCT} from locales;` in js, JSX file, locales is in `./superset/assets/javascripts/` directory.
|
||||
|
||||
To enable changing language in your environment, you can simply add the
|
||||
`LANGUAGES` parameter to your `superset_config.py`. Having more than one
|
||||
options here will add a language selection dropdown on the right side of the
|
||||
@@ -239,14 +354,15 @@ navigation bar.
|
||||
}
|
||||
|
||||
As per the [Flask AppBuilder documentation] about translation, to create a
|
||||
new language dictionary, run the following command:
|
||||
new language dictionary, run the following command (where `es` is replaced with
|
||||
the language code for your target language):
|
||||
|
||||
pybabel init -i ./babel/messages.pot -d superset/translations -l es
|
||||
pybabel init -i superset/translations/messages.pot -d superset/translations -l es
|
||||
|
||||
Then it's a matter of running the statement below to gather all stings that
|
||||
Then it's a matter of running the statement below to gather all strings that
|
||||
need translation
|
||||
|
||||
fabmanager babel-extract --target superset/translations/
|
||||
fabmanager babel-extract --target superset/translations/ --output superset/translations/messages.pot --config superset/translations/babel.cfg -k _ -k __ -k t -k tn -k tct
|
||||
|
||||
You can then translate the strings gathered in files located under
|
||||
`superset/translation`, where there's one per language. For the translations
|
||||
@@ -254,6 +370,19 @@ to take effect, they need to be compiled using this command:
|
||||
|
||||
fabmanager babel-compile --target superset/translations/
|
||||
|
||||
In the case of JS translation, we need to convert the PO file into a JSON file, and we need the global download of the npm package po2json.
|
||||
We need to be compiled using this command:
|
||||
|
||||
npm install po2json -g
|
||||
|
||||
Execute this command to convert the en PO file into a json file:
|
||||
|
||||
po2json -d superset -f jed1.x superset/translations/en/LC_MESSAGES/messages.po superset/translations/en/LC_MESSAGES/messages.json
|
||||
|
||||
If you get errors running `po2json`, you might be running the ubuntu package with the same
|
||||
name rather than the nodejs package (they have a different format for the arguments). You
|
||||
need to be running the nodejs version, and so if there is a conflict you may need to point
|
||||
directly at `/usr/local/bin/po2json` rather than just `po2json`.
|
||||
|
||||
## Adding new datasources
|
||||
|
||||
@@ -270,3 +399,39 @@ to take effect, they need to be compiled using this command:
|
||||
`ADDITIONAL_MODULE_DS_MAP = {'superset.my_models': ['MyDatasource', 'MyOtherDatasource']}`
|
||||
|
||||
This means it'll register MyDatasource and MyOtherDatasource in superset.my_models module in the source registry.
|
||||
|
||||
## Creating a new visualization type
|
||||
|
||||
Here's an example as a Github PR with comments that describe what the
|
||||
different sections of the code do:
|
||||
https://github.com/apache/incubator-superset/pull/3013
|
||||
|
||||
## Refresh documentation website
|
||||
|
||||
Every once in a while we want to compile the documentation and publish it.
|
||||
Here's how to do it.
|
||||
|
||||
.. code::
|
||||
|
||||
# install doc dependencies
|
||||
pip install -r dev-reqs-for-docs.txt
|
||||
|
||||
# build the docs
|
||||
python setup.py build_sphinx
|
||||
|
||||
# copy html files to temp folder
|
||||
cp -r docs/_build/html/ /tmp/tmp_superset_docs/
|
||||
|
||||
# clone the docs repo
|
||||
cd ~/
|
||||
git clone https://git-wip-us.apache.org/repos/asf/incubator-superset-site.git
|
||||
|
||||
# copy
|
||||
cp -r /tmp/tmp_superset_docs/ ~/incubator-superset-site.git/
|
||||
|
||||
# commit and push to `asf-site` branch
|
||||
cd ~/incubator-superset-site.git/
|
||||
git checkout asf-site
|
||||
git add .
|
||||
git commit -a -m "New doc version"
|
||||
git push origin master
|
||||
|
||||
15
INTHEWILD.md
@@ -1,15 +0,0 @@
|
||||
Please use [pull requests](https://github.com/airbnb/superset/pull/new/master)
|
||||
to add your organization and/or project to this document!
|
||||
|
||||
Organizations
|
||||
----------
|
||||
- [Airbnb](https://github.com/airbnb)
|
||||
- [GfK Data Lab] (http://datalab.gfk.com)
|
||||
- [Maieutical Labs] (https://cloudschooling.it)
|
||||
- [Shopkick] (https://www.shopkick.com)
|
||||
- [Amino] (https://amino.com)
|
||||
- [Faasos] (http://faasos.com/)
|
||||
|
||||
Projects
|
||||
----------
|
||||
- None we know of yet
|
||||
14
MANIFEST.in
@@ -1,9 +1,9 @@
|
||||
recursive-include superset/templates *
|
||||
recursive-include superset/static *
|
||||
recursive-exclude superset/static/assets/node_modules *
|
||||
recursive-include superset/static/assets/node_modules/font-awesome *
|
||||
recursive-exclude superset/static/docs *
|
||||
recursive-exclude superset/static/spec *
|
||||
recursive-exclude tests *
|
||||
recursive-include superset/data *
|
||||
recursive-include superset/migrations *
|
||||
recursive-include superset/static *
|
||||
recursive-exclude superset/static/docs *
|
||||
recursive-exclude superset/static/spec *
|
||||
recursive-exclude superset/static/assets/node_modules *
|
||||
recursive-include superset/templates *
|
||||
recursive-include superset/translations *
|
||||
recursive-exclude tests *
|
||||
|
||||
180
README.md
@@ -1,20 +1,23 @@
|
||||
Superset
|
||||
=========
|
||||
|
||||
[](https://travis-ci.org/airbnb/superset)
|
||||
[](https://travis-ci.org/apache/incubator-superset)
|
||||
[](https://badge.fury.io/py/superset)
|
||||
[](https://coveralls.io/github/airbnb/superset?branch=master)
|
||||
[](https://codeclimate.com/github/airbnb/superset/coverage)
|
||||
[](https://landscape.io/github/airbnb/superset/master)
|
||||
[](https://codeclimate.com/github/airbnb/superset)
|
||||
[](https://coveralls.io/github/apache/incubator-superset?branch=master)
|
||||
[](https://pypi.python.org/pypi/superset)
|
||||
[](https://requires.io/github/airbnb/superset/requirements/?branch=master)
|
||||
[](https://gitter.im/airbnb/superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](http://airbnb.io/superset/)
|
||||
[](https://david-dm.org/airbnb/superset?path=superset/assets)
|
||||
[](https://requires.io/github/apache/incubator-superset/requirements/?branch=master)
|
||||
[](https://gitter.im/airbnb/superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://superset.incubator.apache.org)
|
||||
[](https://david-dm.org/apache/incubator-superset?path=superset/assets)
|
||||
|
||||
**Superset** is a data exploration platform designed to be visual, intuitive
|
||||
and interactive.
|
||||
<img
|
||||
src="https://cloud.githubusercontent.com/assets/130878/20946612/49a8a25c-bbc0-11e6-8314-10bef902af51.png"
|
||||
alt="Superset"
|
||||
width="500"
|
||||
/>
|
||||
|
||||
**Apache Superset** (incubating) is a modern, enterprise-ready
|
||||
business intelligence web application
|
||||
|
||||
[this project used to be named **Caravel**, and **Panoramix** in the past]
|
||||
|
||||
@@ -23,41 +26,49 @@ Screenshots & Gifs
|
||||
------------------
|
||||
|
||||
**View Dashboards**
|
||||
|
||||

|
||||
|
||||
<br/>
|
||||
|
||||
**View/Edit a Slice**
|
||||
|
||||

|
||||
|
||||
<br/>
|
||||
|
||||
**Query and Visualize with SQL Lab**
|
||||
|
||||

|
||||
|
||||
<br/>
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
Superset
|
||||
---------
|
||||
Superset's main goal is to make it easy to slice, dice and visualize data.
|
||||
It empowers users to perform **analytics at the speed of thought**.
|
||||
Apache Superset
|
||||
---------------
|
||||
Apache Superset is a data exploration and visualization web application.
|
||||
|
||||
Superset provides:
|
||||
* A quick way to intuitively visualize datasets by allowing users to create
|
||||
and share interactive dashboards
|
||||
* A rich set of visualizations to analyze your data, as well as a flexible
|
||||
way to extend the capabilities
|
||||
* An intuitive interface to explore and visualize datasets, and
|
||||
create interactive dashboards.
|
||||
* A wide array of beautiful visualizations to showcase your data.
|
||||
* Easy, code-free, user flows to drill down and slice and dice the data
|
||||
underlying exposed dashboards. The dashboards and charts acts as a starting
|
||||
point for deeper analysis.
|
||||
* A state of the art SQL editor/IDE exposing a rich metadata browser, and
|
||||
an easy workflow to create visualizations out of any result set.
|
||||
* An extensible, high granularity security model allowing intricate rules
|
||||
on who can access which features, and integration with major
|
||||
authentication providers (database, OpenID, LDAP, OAuth & REMOTE_USER
|
||||
through Flask AppBuiler)
|
||||
* A simple semantic layer, allowing to control how data sources are
|
||||
displayed in the UI, by defining which fields should show up in
|
||||
which dropdown and which aggregation and function (metrics) are
|
||||
made available to the user
|
||||
on who can access which product features and datasets.
|
||||
Integration with major
|
||||
authentication backends (database, OpenID, LDAP, OAuth, REMOTE_USER, ...)
|
||||
* A lightweight semantic layer, allowing to control how data sources are
|
||||
exposed to the user by defining dimensions and metrics
|
||||
* Out of the box support for most SQL-speaking databases
|
||||
* Deep integration with Druid allows for Superset to stay blazing fast while
|
||||
slicing and dicing large, realtime datasets
|
||||
* Fast loading dashboards with configurable caching
|
||||
@@ -66,15 +77,41 @@ Superset provides:
|
||||
Database Support
|
||||
----------------
|
||||
|
||||
Superset was originally designed on top of Druid.io, but quickly broadened
|
||||
its scope to support other databases through the use of SQLAlchemy, a Python
|
||||
Superset speaks many SQL dialects through SQLAlchemy, a Python
|
||||
ORM that is compatible with
|
||||
[most common databases](http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html).
|
||||
|
||||
Superset can be used to visualize data out of most databases:
|
||||
* MySQL
|
||||
* Postgres
|
||||
* Vertica
|
||||
* Oracle
|
||||
* Microsoft SQL Server
|
||||
* SQLite
|
||||
* Greenplum
|
||||
* Firebird
|
||||
* MariaDB
|
||||
* Sybase
|
||||
* IBM DB2
|
||||
* Exasol
|
||||
* MonetDB
|
||||
* Snowflake
|
||||
* Redshift
|
||||
* **more!** look for the availability of a SQLAlchemy dialect for your database
|
||||
to find out whether it will work with Superset
|
||||
|
||||
What is Druid?
|
||||
-------------
|
||||
From their website at http://druid.io
|
||||
|
||||
Druid!
|
||||
------
|
||||
|
||||
On top of having the ability to query your relational databases,
|
||||
Superset has ships with deep integration with Druid (a real time distributed
|
||||
column-store). When querying Druid,
|
||||
Superset can query humongous amounts of data on top of real time dataset.
|
||||
Note that Superset does not require Druid in any way to function, it's simply
|
||||
another database backend that it can query.
|
||||
|
||||
Here's a description of Druid from the http://druid.io website:
|
||||
|
||||
*Druid is an open-source analytics data store designed for
|
||||
business intelligence (OLAP) queries on event data. Druid provides low
|
||||
@@ -87,7 +124,60 @@ power analytic dashboards and applications.*
|
||||
Installation & Configuration
|
||||
----------------------------
|
||||
|
||||
[See in the documentation](http://airbnb.io/superset/installation.html)
|
||||
[See in the documentation](https://superset.incubator.apache.org/installation.html)
|
||||
|
||||
|
||||
Resources
|
||||
-------------
|
||||
* [Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org/)
|
||||
* [Gitter (live chat) Channel](https://gitter.im/airbnb/superset)
|
||||
* [Docker image](https://hub.docker.com/r/amancevice/superset/) (community contributed)
|
||||
* [Slides from Strata (March 2016)](https://drive.google.com/open?id=0B5PVE0gzO81oOVJkdF9aNkJMSmM)
|
||||
* [Stackoverflow tag](https://stackoverflow.com/questions/tagged/apache-superset)
|
||||
* [DEPRECATED Google Group](https://groups.google.com/forum/#!forum/airbnb_superset)
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Interested in contributing? Casual hacking? Check out
|
||||
[Contributing.MD](https://github.com/airbnb/superset/blob/master/CONTRIBUTING.md)
|
||||
|
||||
|
||||
Who uses Apache Superset (incubating)?
|
||||
--------------------------------------
|
||||
|
||||
Here's a list of organizations who have taken the time to send a PR to let
|
||||
the world know they are using Superset. Join our growing community!
|
||||
|
||||
- [AiHello](https://www.aihello.com)
|
||||
- [Airbnb](https://github.com/airbnb)
|
||||
- [Amino](https://amino.com)
|
||||
- [Brilliant.org](https://brilliant.org/)
|
||||
- [Capital Service S.A.](http://capitalservice.pl)
|
||||
- [Clark.de](http://clark.de/)
|
||||
- [Digit Game Studios](https://www.digitgaming.com/)
|
||||
- [Douban](https://www.douban.com/)
|
||||
- [Endress+Hauser](http://www.endress.com/)
|
||||
- [FBK - ICT center](http://ict.fbk.eu)
|
||||
- [Faasos](http://faasos.com/)
|
||||
- [GfK Data Lab](http://datalab.gfk.com)
|
||||
- [Konfío](http://konfio.mx)
|
||||
- [Lyft](https://www.lyft.com/)
|
||||
- [Maieutical Labs](https://cloudschooling.it)
|
||||
- [Ona](https://ona.io)
|
||||
- [Pronto Tools](http://www.prontotools.io)
|
||||
- [Qunar](https://www.qunar.com/)
|
||||
- [Shopee](https://shopee.sg)
|
||||
- [Shopkick](https://www.shopkick.com)
|
||||
- [Tails.com](https://tails.com)
|
||||
- [Tobii](http://www.tobii.com/)
|
||||
- [Tooploox](https://www.tooploox.com/)
|
||||
- [Twitter](https://twitter.com/)
|
||||
- [Udemy](https://www.udemy.com/)
|
||||
- [VIPKID](https://www.vipkid.com.cn/)
|
||||
- [Yahoo!](https://yahoo.com/)
|
||||
- [Zalando](https://www.zalando.com)
|
||||
|
||||
|
||||
More screenshots
|
||||
@@ -108,29 +198,3 @@ More screenshots
|
||||

|
||||
|
||||

|
||||
|
||||
|
||||
Resources
|
||||
-------------
|
||||
* [Superset Google Group](https://groups.google.com/forum/#!forum/airbnb_superset)
|
||||
* [Gitter (live chat) Channel](https://gitter.im/airbnb/superset)
|
||||
* [Docker image](https://hub.docker.com/r/amancevice/superset/) (community contributed)
|
||||
* [Slides from Strata (March 2016)](https://drive.google.com/open?id=0B5PVE0gzO81oOVJkdF9aNkJMSmM)
|
||||
|
||||
|
||||
Tip of the Hat
|
||||
--------------
|
||||
|
||||
Superset would not be possible without these great frameworks / libs
|
||||
|
||||
* Flask App Builder - Allowing us to focus on building the app quickly while
|
||||
getting the foundation for free
|
||||
* The Flask ecosystem - Simply amazing. So much Plug, easy play.
|
||||
* NVD3 - One of the best charting libraries out there
|
||||
* Much more, check out the `install_requires` section in the [setup.py](https://github.com/airbnb/superset/blob/master/setup.py) file!
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Interested in contributing? Casual hacking? Check out [Contributing.MD](https://github.com/airbnb/superset/blob/master/CONTRIBUTING.md)
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
[ignore: superset/assets/node_modules/**]
|
||||
[python: superset/**.py]
|
||||
[jinja2: superset/**/templates/**.html]
|
||||
encoding = utf-8
|
||||
1809
babel/messages.pot
3
dev-reqs-for-docs.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
sphinx
|
||||
sphinx-rtd-theme
|
||||
sphinxcontrib.youtube
|
||||
10
dev-reqs.txt
@@ -1,11 +1,15 @@
|
||||
codeclimate-test-reporter
|
||||
coveralls
|
||||
flake8
|
||||
flask_cors
|
||||
ipdb
|
||||
mock
|
||||
mysqlclient
|
||||
nose
|
||||
psycopg2
|
||||
pylint
|
||||
pyyaml
|
||||
sphinx
|
||||
sphinx-rtd-theme
|
||||
sphinxcontrib.youtube
|
||||
redis
|
||||
statsd
|
||||
# Also install everything we need to build Sphinx docs
|
||||
-r dev-reqs-for-docs.txt
|
||||
|
||||
1
docs/_build/html/README.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
Folder containing the sphinx-generated documentation
|
||||
@@ -51,7 +51,7 @@ source_suffix = '.rst'
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = "Superset's documentation"
|
||||
project = "Apache Superset"
|
||||
copyright = None
|
||||
author = u'Maxime Beauchemin'
|
||||
|
||||
@@ -119,10 +119,7 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
html_theme_options = {
|
||||
'collapse_navigation': False,
|
||||
'display_version': False,
|
||||
}
|
||||
html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
145
docs/faq.rst
@@ -45,15 +45,31 @@ visualizations.
|
||||
https://github.com/airbnb/superset/issues?q=label%3Aexample+is%3Aclosed
|
||||
|
||||
|
||||
Can I upload and visualize csv data?
|
||||
------------------------------------
|
||||
|
||||
Yes, using the ``Upload a CSV`` button under the ``Sources``
|
||||
menu item. This brings up a form that allows you specify required information. After creating the table from CSV, it can then be loaded like any other on the ``Sources -> Tables``page.
|
||||
|
||||
|
||||
Why are my queries timing out?
|
||||
------------------------------
|
||||
|
||||
If you are seeing timeouts (504 Gateway Time-out) when running queries,
|
||||
it's because the web server is timing out web requests. If you want to
|
||||
increase the default (50), you can specify the timeout when starting the
|
||||
web server with the ``-t`` flag, which is expressed in seconds.
|
||||
There are many reasons may cause long query timing out.
|
||||
|
||||
``superset runserver -t 300``
|
||||
|
||||
- For running long query from Sql Lab, by default Superset allows it run as long as 6 hours before it being killed by celery. If you want to increase the time for running query, you can specify the timeout in configuration. For example:
|
||||
|
||||
``SQLLAB_ASYNC_TIME_LIMIT_SEC = 60 * 60 * 6``
|
||||
|
||||
|
||||
- Superset is running on gunicorn web server, which may time out web requests. If you want to increase the default (50), you can specify the timeout when starting the web server with the ``-t`` flag, which is expressed in seconds.
|
||||
|
||||
``superset runserver -t 300``
|
||||
|
||||
- If you are seeing timeouts (504 Gateway Time-out) when loading dashboard or explore slice, you are probably behind gateway or proxy server (such as Nginx). If it did not receive a timely response from Superset server (which is processing long queries), these web servers will send 504 status code to clients directly. Superset has a client-side timeout limit to address this issue. If query didn't come back within clint-side timeout (60 seconds by default), Superset will display warning message to avoid gateway timeout message. If you have a longer gateway timeout limit, you can change the timeout settings in ``superset_config.py``:
|
||||
|
||||
``SUPERSET_WEBSERVER_TIMEOUT = 60``
|
||||
|
||||
|
||||
Why is the map not visible in the mapbox visualization?
|
||||
@@ -78,6 +94,11 @@ The widget also has a checkbox ``Date Filter``, which enables time filtering
|
||||
capabilities to your dashboard. After checking the box and refreshing, you'll
|
||||
see a ``from`` and a ``to`` dropdown show up.
|
||||
|
||||
By default, the filtering will be applied to all the slices that are built
|
||||
on top of a datasource that shares the column name that the filter is based
|
||||
on. It's also a requirement for that column to be checked as "filterable"
|
||||
in the column tab of the table editor.
|
||||
|
||||
But what about if you don't want certain widgets to get filtered on your
|
||||
dashboard? You can do that by editing your dashboard, and in the form,
|
||||
edit the ``JSON Metadata`` field, more specifically the
|
||||
@@ -85,7 +106,7 @@ edit the ``JSON Metadata`` field, more specifically the
|
||||
never be affected by any dashboard level filtering.
|
||||
|
||||
|
||||
..code::
|
||||
..code:: json
|
||||
|
||||
{
|
||||
"filter_immune_slices": [324, 65, 92],
|
||||
@@ -93,7 +114,8 @@ never be affected by any dashboard level filtering.
|
||||
"filter_immune_slice_fields": {
|
||||
"177": ["country_name", "__from", "__to"],
|
||||
"32": ["__from", "__to"]
|
||||
}
|
||||
},
|
||||
"timed_refresh_immune_slices": [324]
|
||||
}
|
||||
|
||||
In the json blob above, slices 324, 65 and 92 won't be affected by any
|
||||
@@ -110,12 +132,115 @@ But what happens with filtering when dealing with slices coming from
|
||||
different tables or databases? If the column name is shared, the filter will
|
||||
be applied, it's as simple as that.
|
||||
|
||||
|
||||
How to limit the timed refresh on a dashboard?
|
||||
----------------------------------------------
|
||||
By default, the dashboard timed refresh feature allows you to automatically re-query every slice
|
||||
on a dashboard according to a set schedule. Sometimes, however, you won't want all of the slices
|
||||
to be refreshed - especially if some data is slow moving, or run heavy queries. To exclude specific
|
||||
slices from the timed refresh process, add the ``timed_refresh_immune_slices`` key to the dashboard
|
||||
``JSON Metadata`` field:
|
||||
|
||||
..code:: json
|
||||
|
||||
{
|
||||
"filter_immune_slices": [],
|
||||
"expanded_slices": {},
|
||||
"filter_immune_slice_fields": {},
|
||||
"timed_refresh_immune_slices": [324]
|
||||
}
|
||||
|
||||
In the example above, if a timed refresh is set for the dashboard, then every slice except 324 will
|
||||
be automatically re-queried on schedule.
|
||||
|
||||
Slice refresh will also be staggered over the specified period. You can turn off this staggering
|
||||
by setting the ``stagger_refresh`` to ``false`` and modify the stagger period by setting
|
||||
``stagger_time`` to a value in milliseconds in the ``JSON Metadata`` field:
|
||||
|
||||
..code:: json
|
||||
|
||||
{
|
||||
"stagger_refresh": false,
|
||||
"stagger_time": 2500
|
||||
}
|
||||
|
||||
Here, the entire dashboard will refresh at once if periodic refresh is on. The stagger time of
|
||||
2.5 seconds is ignored.
|
||||
|
||||
Why does fabmanager or superset freezed/hung/not responding when started (my home directory is NFS mounted)?
|
||||
-----------------------------------------------------------------------------------------
|
||||
superset creates and uses an sqlite database at ``~/.superset/superset.db``. Sqlite is known to `don't work well if used on NFS`__ due to broken file locking implementation on NFS.
|
||||
By default, superset creates and uses an sqlite database at ``~/.superset/superset.db``. Sqlite is known to `don't work well if used on NFS`__ due to broken file locking implementation on NFS.
|
||||
|
||||
__ https://www.sqlite.org/lockingv3.html
|
||||
|
||||
One work around is to create a symlink from ~/.superset to a directory located on a non-NFS partition.
|
||||
You can override this path using the ``SUPERSET_HOME`` environment variable.
|
||||
|
||||
Another work around is to change where superset stores the sqlite database by adding ``SQLALCHEMY_DATABASE_URI = 'sqlite:////new/localtion/superset.db'`` in superset_config.py (create the file if needed), then adding the directory where superset_config.py lives to PYTHONPATH environment variable (e.g. ``export PYTHONPATH=/opt/logs/sandbox/airbnb/``).
|
||||
Another work around is to change where superset stores the sqlite database by adding ``SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db'`` in superset_config.py (create the file if needed), then adding the directory where superset_config.py lives to PYTHONPATH environment variable (e.g. ``export PYTHONPATH=/opt/logs/sandbox/airbnb/``).
|
||||
|
||||
What if the table schema changed?
|
||||
---------------------------------
|
||||
|
||||
Table schemas evolve, and Superset needs to reflect that. It's pretty common
|
||||
in the life cycle of a dashboard to want to add a new dimension or metric.
|
||||
To get Superset to discover your new columns, all you have to do is to
|
||||
go to ``Menu -> Sources -> Tables``, click the ``edit`` icon next to the
|
||||
table who's schema has changed, and hit ``Save`` from the ``Detail`` tab.
|
||||
Behind the scene, the new columns will get merged it. Following this,
|
||||
you may want to
|
||||
re-edit the table afterwards to configure the ``Column`` tab, check the
|
||||
appropriate boxes and save again.
|
||||
|
||||
How do I go about developing a new visualization type?
|
||||
------------------------------------------------------
|
||||
Here's an example as a Github PR with comments that describe what the
|
||||
different sections of the code do:
|
||||
https://github.com/airbnb/superset/pull/3013
|
||||
|
||||
What database engine can I use as a backend for Superset?
|
||||
---------------------------------------------------------
|
||||
|
||||
To clarify, the *database backend* is an OLTP database used by Superset to store its internal
|
||||
information like your list of users, slices and dashboard definitions.
|
||||
|
||||
Superset is tested using Mysql, Postgresql and Sqlite for its backend. It's recommended you
|
||||
install Superset on one of these database server for production.
|
||||
|
||||
Using a column-store, non-OLTP databases like Vertica, Redshift or Presto as a database backend simply won't work as these databases are not designed for this type of workload. Installation on Oracle, Microsoft SQL Server, or other OLTP databases may work but isn't tested.
|
||||
|
||||
Please note that pretty much any databases that have a SqlAlchemy integration should work perfectly fine as a datasource for Superset, just not as the OLTP backend.
|
||||
|
||||
How can i configure OAuth authentication and authorization?
|
||||
-----------------------------------------------------------
|
||||
|
||||
You can take a look at this Flask-AppBuilder `configuration example
|
||||
<https://github.com/dpgaspar/Flask-AppBuilder/blob/master/examples/oauth/config.py>`_.
|
||||
|
||||
How can I set a default filter on my dashboard?
|
||||
-----------------------------------------------
|
||||
|
||||
Easy. Simply apply the filter and save the dashboard while the filter
|
||||
is active.
|
||||
|
||||
How do I get Superset to refresh the schema of my table?
|
||||
--------------------------------------------------------
|
||||
|
||||
When adding columns to a table, you can have Superset detect and merge the
|
||||
new columns in by using the "Refresh Metadata" action in the
|
||||
``Source -> Tables`` page. Simply check the box next to the tables
|
||||
you want the schema refreshed, and click ``Actions -> Refresh Metadata``.
|
||||
|
||||
Is there a way to force the use specific colors?
|
||||
------------------------------------------------
|
||||
|
||||
It is possible on a per-dashboard basis by providing a mapping of
|
||||
labels to colors in the ``JSON Metadata`` attribute using the
|
||||
``label_colors`` key.
|
||||
|
||||
..code:: json
|
||||
|
||||
{
|
||||
"label_colors": {
|
||||
"Girls": "#FF69B4",
|
||||
"Boys": "#ADD8E6"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,6 +49,9 @@ Gallery
|
||||
.. image:: _static/img/viz_thumbnails/big_number_total.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/bullet.png
|
||||
:scale: 25 %
|
||||
|
||||
.. image:: _static/img/viz_thumbnails/dist_bar.png
|
||||
:scale: 25 %
|
||||
|
||||
|
||||
103
docs/import_export_datasources.rst
Normal file
@@ -0,0 +1,103 @@
|
||||
Importing and Exporting Datasources
|
||||
===================================
|
||||
|
||||
The superset cli allows you to import and export datasources from and to YAML.
|
||||
Datasources include both databases and druid clusters. The data is expected to be organized in the following hierarchy: ::
|
||||
|
||||
.
|
||||
├──databases
|
||||
| ├──database_1
|
||||
| | ├──table_1
|
||||
| | | ├──columns
|
||||
| | | | ├──column_1
|
||||
| | | | ├──column_2
|
||||
| | | | └──... (more columns)
|
||||
| | | └──metrics
|
||||
| | | ├──metric_1
|
||||
| | | ├──metric_2
|
||||
| | | └──... (more metrics)
|
||||
| | └── ... (more tables)
|
||||
| └── ... (more databases)
|
||||
└──druid_clusters
|
||||
├──cluster_1
|
||||
| ├──datasource_1
|
||||
| | ├──columns
|
||||
| | | ├──column_1
|
||||
| | | ├──column_2
|
||||
| | | └──... (more columns)
|
||||
| | └──metrics
|
||||
| | ├──metric_1
|
||||
| | ├──metric_2
|
||||
| | └──... (more metrics)
|
||||
| └── ... (more datasources)
|
||||
└── ... (more clusters)
|
||||
|
||||
|
||||
Exporting Datasources to YAML
|
||||
-----------------------------
|
||||
You can print your current datasources to stdout by running: ::
|
||||
|
||||
superset export_datasources
|
||||
|
||||
|
||||
To save your datasources to a file run: ::
|
||||
|
||||
superset export_datasources -f <filename>
|
||||
|
||||
|
||||
By default, default (null) values will be omitted. Use the ``-d`` flag to include them.
|
||||
If you want back references to be included (e.g. a column to include the table id
|
||||
it belongs to) use the ``-b`` flag.
|
||||
|
||||
Alternatively you can export datasources using the UI: ::
|
||||
|
||||
1. Open **Sources** -> **Databases** to export all tables associated to a single or multiple databases. (**Tables** for one or more tables, **Druid Clusters** for clusters, **Druid Datasources** for datasources)
|
||||
2. Select the items you would like to export
|
||||
3. Click **Actions** -> **Export to YAML**
|
||||
4. If you want to import an item that you exported through the UI, you will need to nest it inside its parent element, e.g. a `database` needs to be nested under `databases` a `table` needs to be nested inside a `database` element.
|
||||
|
||||
Exporting the complete supported YAML schema
|
||||
--------------------------------------------
|
||||
In order to obtain an exhaustive list of all fields you can import using the YAML import run: ::
|
||||
|
||||
superset export_datasource_schema
|
||||
|
||||
Again, you can use the ``-b`` flag to include back references.
|
||||
|
||||
|
||||
Importing Datasources from YAML
|
||||
-------------------------------
|
||||
In order to import datasources from a YAML file(s), run: ::
|
||||
|
||||
superset import_datasources -p <path or filename>
|
||||
|
||||
If you supply a path all files ending with ``*.yaml`` or ``*.yml`` will be parsed.
|
||||
You can apply additional flags e.g.: ::
|
||||
|
||||
superset import_datasources -p <path> -r
|
||||
|
||||
Will search the supplied path recursively.
|
||||
|
||||
The sync flag ``-s`` takes parameters in order to sync the supplied elements with
|
||||
your file. Be careful this can delete the contents of your meta database. Example:
|
||||
|
||||
superset import_datasources -p <path / filename> -s columns,metrics
|
||||
|
||||
This will sync all ``metrics`` and ``columns`` for all datasources found in the
|
||||
``<path / filename>`` in the Superset meta database. This means columns and metrics
|
||||
not specified in YAML will be deleted. If you would add ``tables`` to ``columns,metrics``
|
||||
those would be synchronised as well.
|
||||
|
||||
|
||||
If you don't supply the sync flag (``-s``) importing will only add and update (override) fields.
|
||||
E.g. you can add a ``verbose_name`` to the the column ``ds`` in the table ``random_time_series`` from the example datasets
|
||||
by saving the following YAML to file and then running the ``import_datasources`` command. ::
|
||||
|
||||
databases:
|
||||
- database_name: main
|
||||
tables:
|
||||
- table_name: random_time_series
|
||||
columns:
|
||||
- column_name: ds
|
||||
verbose_name: datetime
|
||||
|
||||
@@ -1,33 +1,48 @@
|
||||
Superset's documentation
|
||||
''''''''''''''''''''''''
|
||||
.. image:: _static/img/s.png
|
||||
|
||||
Apache Superset (incubating)
|
||||
''''''''''''''''''''''''''''
|
||||
|
||||
Apache Superset (incubating) is a modern, enterprise-ready business
|
||||
intelligence web application
|
||||
|
||||
Superset is a data exploration platform designed to be visual, intuitive
|
||||
and interactive.
|
||||
|
||||
----------------
|
||||
|
||||
.. warning:: This project was originally named Panoramix, was renamed to
|
||||
Caravel in March 2016, and is currently named Superset as of November 2016
|
||||
|
||||
.. important::
|
||||
|
||||
**Disclaimer**: Apache Superset is an effort undergoing incubation at The
|
||||
Apache Software Foundation (ASF), sponsored by the Apache Incubator.
|
||||
Incubation is required of all newly accepted projects until a further
|
||||
review indicates that the infrastructure, communications, and
|
||||
decision making process have stabilized in a manner consistent with
|
||||
other successful ASF projects. While incubation status is not
|
||||
necessarily a reflection of the completeness or stability of
|
||||
the code, it does indicate that the project has yet to be fully
|
||||
endorsed by the ASF.
|
||||
|
||||
Overview
|
||||
=======================================
|
||||
========
|
||||
|
||||
Features
|
||||
---------
|
||||
--------
|
||||
|
||||
- A rich set of data visualizations, integrated from some of the best
|
||||
visualization libraries
|
||||
- Create and share simple dashboards
|
||||
- An extensible, high-granularity security/permission model allowing
|
||||
intricate rules on who can access individual features and the dataset
|
||||
- A rich set of data visualizations
|
||||
- An easy-to-use interface for exploring and visualizing data
|
||||
- Create and share dashboards
|
||||
- Enterprise-ready authentication with integration with major authentication
|
||||
providers (database, OpenID, LDAP, OAuth & REMOTE_USER through
|
||||
Flask AppBuilder)
|
||||
- An extensible, high-granularity security/permission model allowing
|
||||
intricate rules on who can access individual features and the dataset
|
||||
- A simple semantic layer, allowing users to control how data sources are
|
||||
displayed in the UI by defining which fields should show up in which
|
||||
drop-down and which aggregation and function metrics are made available
|
||||
to the user
|
||||
- Integration with most RDBMS through SqlAlchemy
|
||||
- Integration with most SQL-speaking RDBMS through SQLAlchemy
|
||||
- Deep integration with Druid.io
|
||||
|
||||
------
|
||||
@@ -46,7 +61,7 @@ Features
|
||||
|
||||
|
||||
Contents
|
||||
---------
|
||||
--------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
@@ -55,6 +70,7 @@ Contents
|
||||
tutorial
|
||||
security
|
||||
sqllab
|
||||
visualization
|
||||
videos
|
||||
gallery
|
||||
druid
|
||||
|
||||
@@ -4,9 +4,36 @@ Installation & Configuration
|
||||
Getting Started
|
||||
---------------
|
||||
|
||||
Superset is tested using Python 2.7 and Python 3.4+. Python 3 is the recommended version,
|
||||
Python 2.6 won't be supported.
|
||||
Superset is tested against Python ``2.7`` and Python ``3.4``.
|
||||
Airbnb currently uses 2.7.* in production. We do not plan on supporting
|
||||
Python ``2.6``.
|
||||
|
||||
Cloud-native!
|
||||
-------------
|
||||
|
||||
Superset is designed to be highly available. It is
|
||||
"cloud-native" as it has been designed to scale out in large,
|
||||
distributed environments, and works well inside containers.
|
||||
While you can easily
|
||||
test drive Superset on a modest setup or simply on your laptop,
|
||||
there's virtually no limit around scaling out the platform.
|
||||
Superset is also cloud-native in the sense that it is
|
||||
flexible and lets you choose your web server (Gunicorn, Nginx, Apache),
|
||||
your metadata database engine (MySQL, Postgres, MariaDB, ...),
|
||||
your message queue (Redis, RabbitMQ, SQS, ...),
|
||||
your results backend (S3, Redis, Memcached, ...), your caching layer
|
||||
(memcached, Redis, ...), works well with services like NewRelic, StatsD and
|
||||
DataDog, and has the ability to run analytic workloads against
|
||||
most popular database technologies.
|
||||
|
||||
Superset is battle tested in large environments with hundreds
|
||||
of concurrent users. Airbnb's production environment runs inside
|
||||
Kubernetes and serves 600+ daily active users viewing over 100K charts a
|
||||
day.
|
||||
|
||||
The Superset web server and the Superset Celery workers (optional)
|
||||
are stateless, so you can scale out by running on as many servers
|
||||
as needed.
|
||||
|
||||
OS dependencies
|
||||
---------------
|
||||
@@ -26,16 +53,22 @@ the required dependencies are installed: ::
|
||||
|
||||
sudo apt-get install build-essential libssl-dev libffi-dev python-dev python-pip libsasl2-dev libldap2-dev
|
||||
|
||||
**Ubuntu 16.04** If you have python3.5 installed alongside with python2.7, as is default on **Ubuntu 16.04 LTS**, run this command also
|
||||
|
||||
sudo apt-get install build-essential libssl-dev libffi-dev python3.5-dev python-pip libsasl2-dev libldap2-dev
|
||||
|
||||
otherwise the build of ``cryptography`` fails.
|
||||
|
||||
For **Fedora** and **RHEL-derivatives**, the following command will ensure
|
||||
that the required dependencies are installed: ::
|
||||
|
||||
sudo yum upgrade python-setuptools
|
||||
sudo yum install gcc libffi-devel python-devel python-pip python-wheel openssl-devel libsasl2-devel openldap-devel
|
||||
sudo yum install gcc gcc-c++ libffi-devel python-devel python-pip python-wheel openssl-devel libsasl2-devel openldap-devel
|
||||
|
||||
**OSX**, system python is not recommended. brew's python also ships with pip ::
|
||||
|
||||
brew install pkg-config libffi openssl python
|
||||
env LDFLAGS="-L$(brew --prefix openssl)/lib" CFLAGS="-I$(brew --prefix openssl)/include" pip install cryptography
|
||||
env LDFLAGS="-L$(brew --prefix openssl)/lib" CFLAGS="-I$(brew --prefix openssl)/include" pip install cryptography==1.9
|
||||
|
||||
**Windows** isn't officially supported at this point, but if you want to
|
||||
attempt it, download `get-pip.py <https://bootstrap.pypa.io/get-pip.py>`_, and run ``python get-pip.py`` which may need admin access. Then run the following: ::
|
||||
@@ -80,7 +113,7 @@ Follow these few simple steps to install Superset.::
|
||||
# Install superset
|
||||
pip install superset
|
||||
|
||||
# Create an admin user
|
||||
# Create an admin user (you will be prompted to set username, first and last name before setting a password)
|
||||
fabmanager create-admin --app superset
|
||||
|
||||
# Initialize the database
|
||||
@@ -92,8 +125,8 @@ Follow these few simple steps to install Superset.::
|
||||
# Create default roles and permissions
|
||||
superset init
|
||||
|
||||
# Start the web server on port 8088
|
||||
superset runserver -p 8088
|
||||
# Start the web server on port 8088, use -p to bind to another port
|
||||
superset runserver
|
||||
|
||||
# To start a development web server, use the -d switch
|
||||
# superset runserver -d
|
||||
@@ -106,10 +139,61 @@ the credential you entered while creating the admin account, and navigate to
|
||||
your datasources for Superset to be aware of, and they should show up in
|
||||
`Menu -> Datasources`, from where you can start playing with your data!
|
||||
|
||||
Please note that *gunicorn*, Superset default application server, does not
|
||||
work on Windows so you need to use the development web server.
|
||||
The development web server though is not intended to be used on production systems
|
||||
so better use a supported platform that can run *gunicorn*.
|
||||
A proper WSGI HTTP Server
|
||||
-------------------------
|
||||
|
||||
While you can setup Superset to run on Nginx or Apache, many use
|
||||
Gunicorn, preferably in **async mode**, which allows for impressive
|
||||
concurrency and is fairly easy to install and configure. Please
|
||||
refer to the
|
||||
documentation of your preferred technology to set up this Flask WSGI
|
||||
application in a way that works well in your environment.
|
||||
|
||||
While the `superset runserver` command acts as a quick wrapper
|
||||
around `gunicorn`, it doesn't expose all the options you may need,
|
||||
so you'll want to craft your own `gunicorn` command in your production
|
||||
environment. Here's an **async** setup known to work well: ::
|
||||
|
||||
gunicorn \
|
||||
-w 10 \
|
||||
-k gevent \
|
||||
--timeout 120 \
|
||||
-b 0.0.0.0:6666 \
|
||||
--limit-request-line 0 \
|
||||
--limit-request-field_size 0 \
|
||||
--statsd-host localhost:8125 \
|
||||
superset:app
|
||||
|
||||
Refer to the
|
||||
[Gunicorn documentation](http://docs.gunicorn.org/en/stable/design.html)
|
||||
for more information.
|
||||
|
||||
Note that *gunicorn* does not
|
||||
work on Windows so the `superset runserver` command is not expected to work
|
||||
in that context. Also note that the development web
|
||||
server (`superset runserver -d`) is not intended for production use.
|
||||
|
||||
Flask-AppBuilder Permissions
|
||||
----------------------------
|
||||
|
||||
By default every time the Flask-AppBuilder (FAB) app is initialized the
|
||||
permissions and views are added automatically to the backend and associated with
|
||||
the ‘Admin’ role. The issue however is when you are running multiple concurrent
|
||||
workers this creates a lot of contention and race conditions when defining
|
||||
permissions and views.
|
||||
|
||||
To alleviate this issue, the automatic updating of permissions can be disabled
|
||||
by setting the :envvar:`SUPERSET_UPDATE_PERMS` environment variable to `0`.
|
||||
The value `1` enables it, `0` disables it. Note if undefined the functionality
|
||||
is enabled to maintain backwards compatibility.
|
||||
|
||||
In a production environment initialization could take on the following form:
|
||||
|
||||
export SUPERSET_UPDATE_PERMS=1
|
||||
superset init
|
||||
|
||||
export SUPERSET_UPDATE_PERMS=0
|
||||
gunicorn -w 10 ... superset:app
|
||||
|
||||
Configuration behind a load balancer
|
||||
------------------------------------
|
||||
@@ -124,6 +208,11 @@ If the load balancer is inserting X-Forwarded-For/X-Forwarded-Proto headers, you
|
||||
should set `ENABLE_PROXY_FIX = True` in the superset config file to extract and use
|
||||
the headers.
|
||||
|
||||
In case that the reverse proxy is used for providing ssl encryption,
|
||||
an explicit definition of the `X-Forwarded-Proto` may be required.
|
||||
For the Apache webserver this can be set as follows: ::
|
||||
|
||||
RequestHeader set X-Forwarded-Proto "https"
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
@@ -155,7 +244,9 @@ of the parameters you can copy / paste in that configuration module: ::
|
||||
SQLALCHEMY_DATABASE_URI = 'sqlite:////path/to/superset.db'
|
||||
|
||||
# Flask-WTF flag for CSRF
|
||||
CSRF_ENABLED = True
|
||||
WTF_CSRF_ENABLED = True
|
||||
# Add endpoints that need to be exempt from CSRF protection
|
||||
WTF_CSRF_EXEMPT_LIST = []
|
||||
|
||||
# Set this API key to enable Mapbox visualizations
|
||||
MAPBOX_API_KEY = ''
|
||||
@@ -171,6 +262,11 @@ Please make sure to change:
|
||||
* *SQLALCHEMY_DATABASE_URI*, by default it is stored at *~/.superset/superset.db*
|
||||
* *SECRET_KEY*, to a long random string
|
||||
|
||||
In case you need to exempt endpoints from CSRF, e.g. you are running a custom
|
||||
auth postback endpoint, you can add them to *WTF_CSRF_EXEMPT_LIST*
|
||||
|
||||
WTF_CSRF_EXEMPT_LIST = ['']
|
||||
|
||||
Database dependencies
|
||||
---------------------
|
||||
|
||||
@@ -195,7 +291,7 @@ Here's a list of some of the recommended packages.
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| sqlite | | ``sqlite://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Redshift | ``pip install sqlalchemy-redshift`` | ``redshift+psycopg2://`` |
|
||||
| Redshift | ``pip install sqlalchemy-redshift`` | ``postgresql+psycopg2://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| MSSQL | ``pip install pymssql`` | ``mssql://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
@@ -205,12 +301,33 @@ Here's a list of some of the recommended packages.
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Greenplum | ``pip install psycopg2`` | ``postgresql+psycopg2://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Athena | ``pip install "PyAthenaJDBC>1.0.9"``| ``awsathena+jdbc://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Vertica | ``pip install | ``vertica+vertica_python://`` |
|
||||
| | sqlalchemy-vertica-python`` | |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| ClickHouse | ``pip install | ``clickhouse://`` |
|
||||
| | sqlalchemy-clickhouse`` | |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
| Kylin | ``pip install kylinpy`` | ``kylin://`` |
|
||||
+---------------+-------------------------------------+-------------------------------------------------+
|
||||
|
||||
Note that many other databases are supported, the main criteria being the
|
||||
existence of a functional SqlAlchemy dialect and Python driver. Googling
|
||||
the keyword ``sqlalchemy`` in addition to a keyword that describes the
|
||||
database you want to connect to should get you to the right place.
|
||||
|
||||
(AWS) Athena
|
||||
------------
|
||||
|
||||
The connection string for Athena looks like this ::
|
||||
|
||||
awsathena+jdbc://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{schema_name}?s3_staging_dir={s3_staging_dir}&...
|
||||
|
||||
Where you need to escape/encode at least the s3_staging_dir, i.e., ::
|
||||
|
||||
s3://... -> s3%3A//...
|
||||
|
||||
|
||||
Caching
|
||||
-------
|
||||
@@ -222,9 +339,11 @@ complies with the Flask-Cache specifications.
|
||||
|
||||
Flask-Cache supports multiple caching backends (Redis, Memcached,
|
||||
SimpleCache (in-memory), or the local filesystem). If you are going to use
|
||||
Memcached please use the pylibmc client library as python-memcached does
|
||||
Memcached please use the `pylibmc` client library as `python-memcached` does
|
||||
not handle storing binary data correctly. If you use Redis, please install
|
||||
[python-redis](https://pypi.python.org/pypi/redis).
|
||||
the `redis <https://pypi.python.org/pypi/redis>`_ Python package: ::
|
||||
|
||||
pip install redis
|
||||
|
||||
For setting your timeouts, this is done in the Superset metadata and goes
|
||||
up the "timeout searchpath", from your slice configuration, to your
|
||||
@@ -258,6 +377,24 @@ on top of the **database**. For Superset to connect to a specific schema,
|
||||
there's a **schema** parameter you can set in the table form.
|
||||
|
||||
|
||||
External Password store for SQLAlchemy connections
|
||||
--------------------------------------------------
|
||||
It is possible to use an external store for your database passwords. This is
|
||||
useful if you are running a custom secret distribution framework and do not wish
|
||||
to store secrets in Superset's meta database.
|
||||
|
||||
Example:
|
||||
Write a function that takes a single argument of type ``sqla.engine.url`` and returns
|
||||
the password for the given connection string. Then set ``SQLALCHEMY_CUSTOM_PASSWORD_STORE``
|
||||
in your config file to point to that function. ::
|
||||
|
||||
def example_lookup_password(url):
|
||||
secret = <<get password from external framework>>
|
||||
return 'secret'
|
||||
|
||||
SQLALCHEMY_CUSTOM_PASSWORD_STORE = example_lookup_password
|
||||
|
||||
|
||||
SSL Access to databases
|
||||
-----------------------
|
||||
This example worked with a MySQL database that requires SSL. The configuration
|
||||
@@ -279,10 +416,10 @@ Druid
|
||||
-----
|
||||
|
||||
* From the UI, enter the information about your clusters in the
|
||||
``Admin->Clusters`` menu by hitting the + sign.
|
||||
`Sources -> Druid Clusters` menu by hitting the + sign.
|
||||
|
||||
* Once the Druid cluster connection information is entered, hit the
|
||||
``Admin->Refresh Metadata`` menu item to populate
|
||||
`Sources -> Refresh Druid Metadata` menu item to populate
|
||||
|
||||
* Navigate to your datasources
|
||||
|
||||
@@ -291,7 +428,7 @@ metadata from your Druid cluster(s)
|
||||
|
||||
|
||||
CORS
|
||||
-----
|
||||
----
|
||||
|
||||
The extra CORS Dependency must be installed:
|
||||
|
||||
@@ -304,6 +441,30 @@ The following keys in `superset_config.py` can be specified to configure CORS:
|
||||
* ``ENABLE_CORS``: Must be set to True in order to enable CORS
|
||||
* ``CORS_OPTIONS``: options passed to Flask-CORS (`documentation <http://flask-cors.corydolphin.com/en/latest/api.html#extension>`)
|
||||
|
||||
|
||||
MIDDLEWARE
|
||||
----------
|
||||
|
||||
Superset allows you to add your own middleware. To add your own middleware, update the ``ADDITIONAL_MIDDLEWARE`` key in
|
||||
your `superset_config.py`. ``ADDITIONAL_MIDDLEWARE`` should be a list of your additional middleware classes.
|
||||
|
||||
For example, to use AUTH_REMOTE_USER from behind a proxy server like nginx, you have to add a simple middleware class to
|
||||
add the value of ``HTTP_X_PROXY_REMOTE_USER`` (or any other custom header from the proxy) to Gunicorn's ``REMOTE_USER``
|
||||
environment variable: ::
|
||||
|
||||
class RemoteUserMiddleware(object):
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
def __call__(self, environ, start_response):
|
||||
user = environ.pop('HTTP_X_PROXY_REMOTE_USER', None)
|
||||
environ['REMOTE_USER'] = user
|
||||
return self.app(environ, start_response)
|
||||
|
||||
ADDITIONAL_MIDDLEWARE = [RemoteUserMiddleware, ]
|
||||
|
||||
*Adapted from http://flask.pocoo.org/snippets/69/*
|
||||
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
@@ -316,28 +477,79 @@ Upgrading should be as straightforward as running::
|
||||
SQL Lab
|
||||
-------
|
||||
SQL Lab is a powerful SQL IDE that works with all SQLAlchemy compatible
|
||||
databases out there. By default, queries are run in a web request, and
|
||||
databases. By default, queries are executed in the scope of a web
|
||||
request so they
|
||||
may eventually timeout as queries exceed the maximum duration of a web
|
||||
request in your environment, whether it'd be a reverse proxy or the Superset
|
||||
server itself.
|
||||
|
||||
In the modern analytics world, it's not uncommon to run large queries that
|
||||
run for minutes or hours.
|
||||
On large analytic databases, it's common to run queries that
|
||||
execute for minutes or hours.
|
||||
To enable support for long running queries that
|
||||
execute beyond the typical web request's timeout (30-60 seconds), it is
|
||||
necessary to deploy an asynchronous backend, which consist of one or many
|
||||
Superset worker, which is implemented as a Celery worker, and a Celery
|
||||
broker for which we recommend using Redis or RabbitMQ.
|
||||
necessary to configure an asynchronous backend for Superset which consists of:
|
||||
|
||||
It's also preferable to setup an async result backend as a key value store
|
||||
that can hold the long-running query results for a period of time. More
|
||||
details to come as to how to set this up here soon.
|
||||
* one or many Superset workers (which are implemented as Celery workers), and
|
||||
can be started with the ``superset worker`` command, run
|
||||
``superset worker --help`` to view the related options
|
||||
* a celery broker (message queue) for which we recommend using Redis
|
||||
or RabbitMQ
|
||||
* a results backend that defines where the worker will persist the query
|
||||
results
|
||||
|
||||
SQL Lab supports templating in queries, and it's possible to override
|
||||
Configuring Celery requires defining a ``CELERY_CONFIG`` in your
|
||||
``superset_config.py``. Both the worker and web server processes should
|
||||
have the same configuration.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class CeleryConfig(object):
|
||||
BROKER_URL = 'redis://localhost:6379/0'
|
||||
CELERY_IMPORTS = ('superset.sql_lab', )
|
||||
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
|
||||
CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}
|
||||
|
||||
CELERY_CONFIG = CeleryConfig
|
||||
|
||||
To setup a result backend, you need to pass an instance of a derivative
|
||||
of ``werkzeug.contrib.cache.BaseCache`` to the ``RESULTS_BACKEND``
|
||||
configuration key in your ``superset_config.py``. It's possible to use
|
||||
Memcached, Redis, S3 (https://pypi.python.org/pypi/s3werkzeugcache),
|
||||
memory or the file system (in a single server-type setup or for testing),
|
||||
or to write your own caching interface. Your ``superset_config.py`` may
|
||||
look something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# On S3
|
||||
from s3cache.s3cache import S3Cache
|
||||
S3_CACHE_BUCKET = 'foobar-superset'
|
||||
S3_CACHE_KEY_PREFIX = 'sql_lab_result'
|
||||
RESULTS_BACKEND = S3Cache(S3_CACHE_BUCKET, S3_CACHE_KEY_PREFIX)
|
||||
|
||||
# On Redis
|
||||
from werkzeug.contrib.cache import RedisCache
|
||||
RESULTS_BACKEND = RedisCache(
|
||||
host='localhost', port=6379, key_prefix='superset_results')
|
||||
|
||||
Note that it's important that all the worker nodes and web servers in
|
||||
the Superset cluster share a common metadata database.
|
||||
This means that SQLite will not work in this context since it has
|
||||
limited support for concurrency and
|
||||
typically lives on the local file system.
|
||||
|
||||
Also note that SQL Lab supports Jinja templating in queries, and that it's
|
||||
possible to overload
|
||||
the default Jinja context in your environment by defining the
|
||||
``JINJA_CONTEXT_ADDONS`` in your superset configuration. Objects referenced
|
||||
in this dictionary are made available for users to use in their SQL.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
JINJA_CONTEXT_ADDONS = {
|
||||
'my_crazy_macro': lambda x: x*2,
|
||||
}
|
||||
|
||||
|
||||
Making your own build
|
||||
---------------------
|
||||
@@ -348,7 +560,49 @@ your environment.::
|
||||
|
||||
# assuming $SUPERSET_HOME as the root of the repo
|
||||
cd $SUPERSET_HOME/superset/assets
|
||||
npm install
|
||||
npm run prod
|
||||
yarn
|
||||
yarn run build
|
||||
cd $SUPERSET_HOME
|
||||
python setup.py install
|
||||
|
||||
|
||||
Blueprints
|
||||
----------
|
||||
|
||||
`Blueprints are Flask's reusable apps <http://flask.pocoo.org/docs/0.12/blueprints/>`_.
|
||||
Superset allows you to specify an array of Blueprints
|
||||
in your ``superset_config`` module. Here's
|
||||
an example on how this can work with a simple Blueprint. By doing
|
||||
so, you can expect Superset to serve a page that says "OK"
|
||||
at the ``/simple_page`` url. This can allow you to run other things such
|
||||
as custom data visualization applications alongside Superset, on the
|
||||
same server.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from flask import Blueprint
|
||||
simple_page = Blueprint('simple_page', __name__,
|
||||
template_folder='templates')
|
||||
@simple_page.route('/', defaults={'page': 'index'})
|
||||
@simple_page.route('/<page>')
|
||||
def show(page):
|
||||
return "Ok"
|
||||
|
||||
BLUEPRINTS = [simple_page]
|
||||
|
||||
StatsD logging
|
||||
--------------
|
||||
|
||||
Superset is instrumented to log events to StatsD if desired. Most endpoints hit
|
||||
are logged as well as key events like query start and end in SQL Lab.
|
||||
|
||||
To setup StatsD logging, it's a matter of configuring the logger in your
|
||||
``superset_config.py``.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from superset.stats_logger import StatsdStatsLogger
|
||||
STATS_LOGGER = StatsdStatsLogger(host='localhost', port=8125, prefix='superset')
|
||||
|
||||
Note that it's also possible to implement your own logger by deriving
|
||||
``superset.stats_logger.BaseStatsLogger``.
|
||||
|
||||
@@ -3,13 +3,14 @@ Security
|
||||
Security in Superset is handled by Flask AppBuilder (FAB). FAB is a
|
||||
"Simple and rapid application development framework, built on top of Flask.".
|
||||
FAB provides authentication, user management, permissions and roles.
|
||||
|
||||
Please read its `Security documentation
|
||||
<http://flask-appbuilder.readthedocs.io/en/latest/security.html>`_.
|
||||
|
||||
Provided Roles
|
||||
--------------
|
||||
Superset ships with a set of roles that are handled by Superset itself.
|
||||
You can assume that these roles will stay up-to-date as Superset evolves.
|
||||
Even though it's possible for ``Admin`` usrs to do so, it is not recommended
|
||||
Even though it's possible for ``Admin`` users to do so, it is not recommended
|
||||
that you alter these roles in any way by removing
|
||||
or adding permissions to them as these roles will be re-synchronized to
|
||||
their original values as you run your next ``superset init`` command.
|
||||
@@ -35,7 +36,7 @@ own. Alpha users can add and alter data sources.
|
||||
Gamma
|
||||
"""""
|
||||
Gamma have limited access. They can only consume data coming from data sources
|
||||
they have been giving access to through another complementary role.
|
||||
they have been given access to through another complementary role.
|
||||
They only have access to view the slices and
|
||||
dashboards made from data sources that they have access to. Currently Gamma
|
||||
users are not able to alter or add data sources. We assume that they are
|
||||
@@ -50,6 +51,17 @@ The ``sql_lab`` role grants access to SQL Lab. Note that while ``Admin``
|
||||
users have access to all databases by default, both ``Alpha`` and ``Gamma``
|
||||
users need to be given access on a per database basis.
|
||||
|
||||
Public
|
||||
""""""
|
||||
It's possible to allow logged out users to access some Superset features.
|
||||
|
||||
By setting ``PUBLIC_ROLE_LIKE_GAMMA = True`` in your ``superset_config.py``,
|
||||
you grant public role the same set of permissions as for the GAMMA role.
|
||||
This is useful if one wants to enable anonymous users to view
|
||||
dashboards. Explicit grant on specific datasets is still required, meaning
|
||||
that you need to edit the ``Public`` role and add the Public data sources
|
||||
to the role manually.
|
||||
|
||||
|
||||
Managing Gamma per data source access
|
||||
-------------------------------------
|
||||
|
||||
@@ -13,9 +13,11 @@ Feature Overview
|
||||
visualization capabilities
|
||||
- Browse database metadata: tables, columns, indexes, partitions
|
||||
- Support for long-running queries
|
||||
|
||||
- uses the `Celery distributed queue <http://www.python.org/>`_
|
||||
to dispatch query handling to workers
|
||||
- supports defining a "results backend" to persist query results
|
||||
|
||||
- A search engine to find queries executed in the past
|
||||
- Supports templating using the
|
||||
`Jinja templating language <http://jinja.pocoo.org/docs/dev/>`_
|
||||
@@ -46,15 +48,25 @@ Available macros
|
||||
|
||||
We expose certain modules from Python's standard library in
|
||||
Superset's Jinja context:
|
||||
|
||||
- ``time``: ``time``
|
||||
- ``datetime``: ``datetime.datetime``
|
||||
- ``uuid``: ``uuid``
|
||||
- ``random``: ``random``
|
||||
- ``relativedelta``: ``dateutil.relativedelta.relativedelta``
|
||||
- more to come!
|
||||
|
||||
`Jinja's builtin filters <http://jinja.pocoo.org/docs/dev/templates/>`_ can be also be applied where needed.
|
||||
|
||||
|
||||
.. autoclass:: superset.jinja_context.PrestoTemplateProcessor
|
||||
:members:
|
||||
|
||||
.. autofunction:: superset.jinja_context.url_param
|
||||
|
||||
Extending macros
|
||||
''''''''''''''''
|
||||
|
||||
As mentioned in the `Installation & Configuration <https://superset.incubator.apache.org/installation.html#installation-configuration>`_ documentation,
|
||||
it's possible for administrators to expose more macros in their
|
||||
environment using the configuration variable ``JINJA_CONTEXT_ADDONS``.
|
||||
All objects referenced in this dictionary will become available for users
|
||||
to integrate in their queries in **SQL Lab**.
|
||||
|
||||
@@ -1,100 +1,308 @@
|
||||
Tutorial
|
||||
========
|
||||
Tutorial for Superset Administrators
|
||||
====================================
|
||||
|
||||
This basic linear tutorial will take you through connecting to a database,
|
||||
adding a table, creating a slice and a dashboard. First you'll need to tell
|
||||
Superset where to find the database you want to
|
||||
query. First go to the database menu
|
||||
This tutorial targets a Superset administrator: someone configuring Superset
|
||||
for an organization on behalf of users. We'll show you how to connect Superset
|
||||
to a new database and configure a table in that database for analysis. You'll
|
||||
also explore the data you've exposed and add a visualization to a dashboard
|
||||
so that you get a feel for the end-to-end user experience.
|
||||
|
||||
.. image:: _static/img/tutorial/db_menu.png
|
||||
:scale: 30 %
|
||||
Connecting to a new database
|
||||
----------------------------
|
||||
|
||||
Now click on the ``+`` button to add a new entry
|
||||
We assume you already have a database configured and can connect to it from the
|
||||
instance on which you’re running Superset. If you’re just testing Superset and
|
||||
want to explore sample data, you can load some
|
||||
`sample PostgreSQL datasets <https://wiki.postgresql.org/wiki/Sample_Databases>`_
|
||||
into a fresh DB, or configure the
|
||||
`example weather data <https://github.com/dylburger/noaa-ghcn-weather-data>`_
|
||||
we use here.
|
||||
|
||||
.. image:: _static/img/tutorial/db_plus.png
|
||||
:scale: 30 %
|
||||
Under the **Sources** menu, select the *Databases* option:
|
||||
|
||||
Fill in an arbitrary reference name for the database, and you SQLAlchemy
|
||||
URI. To figure out how to construct your URI, check out the
|
||||
`SQLAlchemy documentation <http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html>`_.
|
||||
Then you can test your connection. If it works, you'll see a positive popup
|
||||
and list of the tables that SQLAlchemy has found for that URI.
|
||||
.. image:: _static/img/tutorial/tutorial_01_sources_database.png
|
||||
:scale: 70%
|
||||
|
||||
.. image:: _static/img/tutorial/db_added.png
|
||||
:scale: 30 %
|
||||
On the resulting page, click on the green plus sign, near the top right:
|
||||
|
||||
Once your database has been added, it's time to add your table. Navigate
|
||||
using the navigation bar at the top to ``Sources -> Tables`` and click the
|
||||
plus (``+``) sign there (similar to the one ).
|
||||
.. image:: _static/img/tutorial/tutorial_02_add_database.png
|
||||
:scale: 70%
|
||||
|
||||
Now enter the name of the table in the ``Table Name`` textbox, and select
|
||||
the database you just created in the ``Database`` dropdown, hit save. At this
|
||||
moment, Superset fetched the column names, their data types and tries to guess
|
||||
which fields are metrics in dimensions. From the list view, edit the table
|
||||
that you just created by clicking the tiny pen icon.
|
||||
You can configure a number of advanced options on this page, but for
|
||||
this walkthrough, you’ll only need to do **two things**:
|
||||
|
||||
.. image:: _static/img/tutorial/pen.png
|
||||
:scale: 30 %
|
||||
1. Name your database connection:
|
||||
|
||||
Now you're in the table editor, click on the "List Table Column" tab,
|
||||
showing you the list of columns in your table as well as their data types.
|
||||
.. image:: _static/img/tutorial/tutorial_03_database_name.png
|
||||
:scale: 70%
|
||||
|
||||
.. image:: _static/img/tutorial/matrix.png
|
||||
:scale: 30 %
|
||||
2. Provide the SQLAlchemy Connection URI and test the connection:
|
||||
|
||||
Click the checkboxes here that inform Superset how your columns should be
|
||||
shown in the explore view, and which metrics should be created. Make sure
|
||||
to inform Superset about your date columns. You could also create
|
||||
"SQL expression" columns here, or metrics in that tab as aggregate expressions,
|
||||
but let's not do that just yet. Hit ``save``.
|
||||
.. image:: _static/img/tutorial/tutorial_04_sqlalchemy_connection_string.png
|
||||
:scale: 70%
|
||||
|
||||
You should now be back in the ``Table List`` view. Click on the name of the
|
||||
table you just created. You enter the "Explore" view for your table.
|
||||
This example shows the connection string for our test weather database.
|
||||
As noted in the text below the URI, you should refer to the SQLAlchemy
|
||||
documentation on
|
||||
`creating new connection URIs <http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#database-urls>`_
|
||||
for your target database.
|
||||
|
||||
.. image:: _static/img/tutorial/explore.png
|
||||
:scale: 30 %
|
||||
Click the **Test Connection** button to confirm things work end to end.
|
||||
Once Superset can successfully connect and authenticate, you should see
|
||||
a popup like this:
|
||||
|
||||
The next step is to create a Slice. First, make sure to use a time filter
|
||||
that is relevant.
|
||||
.. image:: _static/img/tutorial/tutorial_05_connection_popup.png
|
||||
:scale: 50%
|
||||
|
||||
.. note::
|
||||
Moreover, you should also see the list of tables Superset can read from
|
||||
the schema you’re connected to, at the bottom of the page:
|
||||
|
||||
You can use some "natural language time expressions"
|
||||
either as relative (as in ``now``, ``4 weeks ago``, or ``1 year ago``) as well
|
||||
as hard date or time expressions (as in ``3015``, ``3016-01-01`` or
|
||||
``May``).
|
||||
.. image:: _static/img/tutorial/tutorial_06_list_of_tables.png
|
||||
:scale: 70%
|
||||
|
||||
Alter the form's option and click ``Query`` until you get to an interesting
|
||||
cut of data, and click ``SAVE AS``, enter a name, and you just created your first
|
||||
slice.
|
||||
If the connection looks good, save the configuration by clicking the **Save**
|
||||
button at the bottom of the page:
|
||||
|
||||
.. image:: _static/img/tutorial/created.png
|
||||
:scale: 30 %
|
||||
.. image:: _static/img/tutorial/tutorial_07_save_button.png
|
||||
:scale: 70%
|
||||
|
||||
This slice is now accessible in the slice list from the
|
||||
``Menu -> Slices`` at any time. Note that this view is easily filterable and
|
||||
searchable.
|
||||
Adding a new table
|
||||
------------------
|
||||
|
||||
.. image:: _static/img/tutorial/search.png
|
||||
:scale: 30 %
|
||||
Now that you’ve configured a database, you’ll need to add specific tables
|
||||
to Superset that you’d like to query.
|
||||
|
||||
Now let's create a dashboard. A dashboard is simply a collection of slices
|
||||
with metadata around their sizes, positions, CSS style and a few other things.
|
||||
Navigate to the dashboard list view ``Menu -> Dashboard`` and click the plus
|
||||
(``+``) sign. In the form, enter a name and pick the slice you just created.
|
||||
Under the **Sources** menu, select the *Tables* option:
|
||||
|
||||
.. image:: _static/img/tutorial/new_dash.png
|
||||
:scale: 30 %
|
||||
.. image:: _static/img/tutorial/tutorial_08_sources_tables.png
|
||||
:scale: 70%
|
||||
|
||||
Hit ``Save``, you should be back in ``Menu -> Dashboard``. Now enter your
|
||||
new dashboard.
|
||||
On the resulting page, click on the green plus sign, near the top left:
|
||||
|
||||
.. image:: _static/img/tutorial/in_new_dash.png
|
||||
:scale: 30 %
|
||||
.. image:: _static/img/tutorial/tutorial_09_add_new_table.png
|
||||
:scale: 70%
|
||||
|
||||
Here you are. You can now resize and move the different slice(s), style them
|
||||
in the CSS modal window, and save right from here. For now, renaming the
|
||||
dashboard or adding on a new slice is done through the dashboard edit view,
|
||||
which is the same form as you used when you originally created the dashboard,
|
||||
and is accessible by clicking the ``edit`` pen icon from the dashboard list
|
||||
view (``Menu -> Dashboards``)
|
||||
You only need a few pieces of information to add a new table to Superset:
|
||||
|
||||
* The name of the table
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_10_table_name.png
|
||||
:scale: 70%
|
||||
|
||||
* The target database from the **Database** drop-down menu (i.e. the one
|
||||
you just added above)
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_11_choose_db.png
|
||||
:scale: 70%
|
||||
|
||||
* Optionally, the database schema. If the table exists in the “default” schema
|
||||
(e.g. the *public* schema in PostgreSQL or Redshift), you can leave the schema
|
||||
field blank.
|
||||
|
||||
Click on the **Save** button to save the configuration:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_07_save_button.png
|
||||
:scale: 70%
|
||||
|
||||
When redirected back to the list of tables, you should see a message indicating
|
||||
that your table was created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_12_table_creation_success_msg.png
|
||||
:scale: 70%
|
||||
|
||||
This message also directs you to edit the table configuration. We’ll edit a limited
|
||||
portion of the configuration now - just to get you started - and leave the rest for
|
||||
a more advanced tutorial.
|
||||
|
||||
Click on the edit button next to the table you’ve created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_13_edit_table_config.png
|
||||
:scale: 70%
|
||||
|
||||
On the resulting page, click on the **List Table Column** tab. Here, you’ll define the
|
||||
way you can use specific columns of your table when exploring your data. We’ll run
|
||||
through these options to describe their purpose:
|
||||
|
||||
* If you want users to group metrics by a specific field, mark it as **Groupable**.
|
||||
* If you need to filter on a specific field, mark it as **Filterable**.
|
||||
* Is this field something you’d like to get the distinct count of? Check the **Count
|
||||
Distinct** box.
|
||||
* Is this a metric you want to sum, or get basic summary statistics for? The **Sum**,
|
||||
**Min**, and **Max** columns will help.
|
||||
* The **is temporal** field should be checked for any date or time fields. We’ll cover
|
||||
how this manifests itself in analyses in a moment.
|
||||
|
||||
Here’s how we’ve configured fields for the weather data. Even for measures like the
|
||||
weather measurements (precipitation, snowfall, etc.), it’s ideal to group and filter
|
||||
by these values:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_14_field_config.png
|
||||
|
||||
As with the configurations above, click the **Save** button to save these settings.
|
||||
|
||||
Exploring your data
|
||||
-------------------
|
||||
|
||||
To start exploring your data, simply click on the table name you just created in
|
||||
the list of available tables:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_15_click_table_name.png
|
||||
|
||||
By default, you’ll be presented with a Table View:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_16_datasource_chart_type.png
|
||||
|
||||
Let’s walk through a basic query to get the count of all records in our table.
|
||||
First, we’ll need to change the **Since** filter to capture the range of our data.
|
||||
You can use simple phrases to apply these filters, like "3 years ago":
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_17_choose_time_range.png
|
||||
|
||||
The upper limit for time, the **Until** filter, defaults to "now", which may or may
|
||||
not be what you want.
|
||||
|
||||
Look for the Metrics section under the **GROUP BY** header, and start typing "Count"
|
||||
- you’ll see a list of metrics matching what you type:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_18_choose_metric.png
|
||||
|
||||
Select the *COUNT(\*)* metric, then click the green **Query** button near the top
|
||||
of the explore:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_19_click_query.png
|
||||
|
||||
You’ll see your results in the table:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_20_count_star_result.png
|
||||
|
||||
Let’s group this by the *weather_description* field to get the count of records by
|
||||
the type of weather recorded by adding it to the *Group by* section:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_21_group_by.png
|
||||
|
||||
and run the query:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_22_group_by_result.png
|
||||
|
||||
Let’s find a more useful data point: the top 10 times and places that recorded the
|
||||
highest temperature in 2015.
|
||||
|
||||
We replace *weather_description* with *latitude*, *longitude* and *measurement_date* in the
|
||||
*Group by* section:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_23_group_by_more_dimensions.png
|
||||
|
||||
And replace *COUNT(\*)* with *max__measurement_flag*:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_24_max_metric.png
|
||||
|
||||
The *max__measurement_flag* metric was created when we checked the box under **Max** and
|
||||
next to the *measurement_flag* field, indicating that this field was numeric and that
|
||||
we wanted to find its maximum value when grouped by specific fields.
|
||||
|
||||
In our case, *measurement_flag* is the value of the measurement taken, which clearly
|
||||
depends on the type of measurement (the researchers recorded different values for
|
||||
precipitation and temperature). Therefore, we must filter our query only on records
|
||||
where the *weather_description* is equal to "Maximum temperature", which we do in
|
||||
the **Filters** section at the bottom of the explore:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_25_max_temp_filter.png
|
||||
|
||||
Finally, since we only care about the top 10 measurements, we limit our results to
|
||||
10 records using the *Row limit* option under the **Options** header:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_26_row_limit.png
|
||||
|
||||
We click **Query** and get the following results:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_27_top_10_max_temps.png
|
||||
|
||||
In this dataset, the maximum temperature is recorded in tenths of a degree Celsius.
|
||||
The top value of 1370, measured in the middle of Nevada, is equal to 137 C, or roughly
|
||||
278 degrees F. It’s unlikely this value was correctly recorded. We’ve already been able
|
||||
to investigate some outliers with Superset, but this just scratches the surface of what
|
||||
we can do.
|
||||
|
||||
You may want to do a couple more things with this measure:
|
||||
|
||||
* The default formatting shows values like 1.37k, which may be difficult for some
|
||||
users to read. It’s likely you may want to see the full, comma-separated value.
|
||||
You can change the formatting of any measure by editing its config (*Edit Table
|
||||
Config > List Sql Metric > Edit Metric > D3Format*)
|
||||
* Moreover, you may want to see the temperature measurements in plain degrees C,
|
||||
not tenths of a degree. Or you may want to convert the temperature to degrees
|
||||
Fahrenheit. You can change the SQL that gets executed agains the database, baking
|
||||
the logic into the measure itself (*Edit Table Config > List Sql Metric > Edit
|
||||
Metric > SQL Expression*)
|
||||
|
||||
For now, though, let’s create a better visualization of these data and add it to
|
||||
a dashboard.
|
||||
|
||||
We change the Chart Type to "Distribution - Bar Chart":
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_28_bar_chart.png
|
||||
|
||||
Our filter on Maximum temperature measurements was retained, but the query and
|
||||
formatting options are dependent on the chart type, so you’ll have to set the
|
||||
values again:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_29_bar_chart_series_metrics.png
|
||||
|
||||
You should note the extensive formatting options for this chart: the ability to
|
||||
set axis labels, margins, ticks, etc. To make the data presentable to a broad
|
||||
audience, you’ll want to apply many of these to slices that end up in dashboards.
|
||||
For now, though, we run our query and get the following chart:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_30_bar_chart_results.png
|
||||
:scale: 70%
|
||||
|
||||
Creating a slice and dashboard
|
||||
------------------------------
|
||||
|
||||
This view might be interesting to researchers, so let’s save it. In Superset,
|
||||
a saved query is called a **Slice**.
|
||||
|
||||
To create a slice, click the **Save as** button near the top-left of the
|
||||
explore:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_19_click_query.png
|
||||
|
||||
A popup should appear, asking you to name the slice, and optionally add it to a
|
||||
dashboard. Since we haven’t yet created any dashboards, we can create one and
|
||||
immediately add our slice to it. Let’s do it:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_31_save_slice_to_dashboard.png
|
||||
:scale: 70%
|
||||
|
||||
Click Save, which will direct you back to your original query. We see that
|
||||
our slice and dashboard were successfully created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_32_save_slice_confirmation.png
|
||||
:scale: 70%
|
||||
|
||||
Let’s check out our new dashboard. We click on the **Dashboards** menu:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_33_dashboard.png
|
||||
|
||||
and find the dashboard we just created:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_34_weather_dashboard.png
|
||||
|
||||
Things seemed to have worked - our slice is here!
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_35_slice_on_dashboard.png
|
||||
:scale: 70%
|
||||
|
||||
But it’s a bit smaller than we might like. Luckily, you can adjust the size
|
||||
of slices in a dashboard by clicking, holding and dragging the bottom-right
|
||||
corner to your desired dimensions:
|
||||
|
||||
.. image:: _static/img/tutorial/tutorial_36_adjust_dimensions.gif
|
||||
:scale: 120%
|
||||
|
||||
After adjusting the size, you’ll be asked to click on the icon near the
|
||||
top-right of the dashboard to save the new configuration.
|
||||
|
||||
Congrats! You’ve successfully linked, analyzed, and visualized data in Superset.
|
||||
There are a wealth of other table configuration and visualization options, so
|
||||
please start exploring and creating slices and dashboards of your own.
|
||||
|
||||
1765
docs/visualization.rst
Normal file
7
gen_changelog.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
# requires github-changes, run
|
||||
# `npm install -g github-changes`
|
||||
# requires $GITHUB_TOKEN to be set
|
||||
|
||||
# usage: ./github-changes 0.20.0 0.20.1
|
||||
# will overwrites the local CHANGELOG.md, somehow you need to merge it in
|
||||
github-changes -o apache -r incubator-superset --token $GITHUB_TOKEN --between-tags $1...$2
|
||||
2
pylint-errors.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/bash
|
||||
pylint superset --errors-only
|
||||
6
pypi_push.sh
Normal file → Executable file
@@ -1,7 +1,7 @@
|
||||
# first bump up package.json manually, commit and tag
|
||||
rm superset/assets/dist/*
|
||||
cd superset/assets/
|
||||
rm build/*
|
||||
npm run prod
|
||||
npm run build
|
||||
cd ../..
|
||||
python setup.py register
|
||||
python setup.py sdist upload
|
||||
|
||||
|
||||
@@ -7,7 +7,8 @@ rm -f .coverage
|
||||
export SUPERSET_CONFIG=tests.superset_test_config
|
||||
set -e
|
||||
superset/bin/superset db upgrade
|
||||
superset/bin/superset db upgrade # running twice on purpose as a test
|
||||
superset/bin/superset version -v
|
||||
python setup.py nosetests
|
||||
coveralls
|
||||
if [ "$CI" = "true" ] ; then
|
||||
coveralls
|
||||
fi
|
||||
|
||||
51
scripts/permissions_cleanup.py
Normal file
@@ -0,0 +1,51 @@
|
||||
from collections import defaultdict
|
||||
|
||||
from superset import sm
|
||||
|
||||
|
||||
def cleanup_permissions():
|
||||
# 1. Clean up duplicates.
|
||||
pvms = sm.get_session.query(sm.permissionview_model).all()
|
||||
print('# of permission view menues is: {}'.format(len(pvms)))
|
||||
pvms_dict = defaultdict(list)
|
||||
for pvm in pvms:
|
||||
pvms_dict[(pvm.permission, pvm.view_menu)].append(pvm)
|
||||
duplicates = [v for v in pvms_dict.values() if len(v) > 1]
|
||||
len(duplicates)
|
||||
|
||||
for pvm_list in duplicates:
|
||||
first_prm = pvm_list[0]
|
||||
roles = set(first_prm.role)
|
||||
for pvm in pvm_list[1:]:
|
||||
roles = roles.union(pvm.role)
|
||||
sm.get_session.delete(pvm)
|
||||
first_prm.roles = list(roles)
|
||||
sm.get_session.commit()
|
||||
|
||||
pvms = sm.get_session.query(sm.permissionview_model).all()
|
||||
print('STage 1: # of permission view menues is: {}'.format(len(pvms)))
|
||||
|
||||
# 2. Clean up None permissions or view menues
|
||||
pvms = sm.get_session.query(sm.permissionview_model).all()
|
||||
for pvm in pvms:
|
||||
if not (pvm.view_menu and pvm.permission):
|
||||
sm.get_session.delete(pvm)
|
||||
sm.get_session.commit()
|
||||
|
||||
pvms = sm.get_session.query(sm.permissionview_model).all()
|
||||
print('Stage 2: # of permission view menues is: {}'.format(len(pvms)))
|
||||
|
||||
# 3. Delete empty permission view menues from roles
|
||||
roles = sm.get_session.query(sm.role_model).all()
|
||||
for role in roles:
|
||||
role.permissions = [p for p in role.permissions if p]
|
||||
sm.get_session.commit()
|
||||
|
||||
# 4. Delete empty roles from permission view menues
|
||||
pvms = sm.get_session.query(sm.permissionview_model).all()
|
||||
for pvm in pvms:
|
||||
pvm.role = [r for r in pvm.role if r]
|
||||
sm.get_session.commit()
|
||||
|
||||
|
||||
cleanup_permissions()
|
||||
@@ -23,6 +23,3 @@ detailed-errors=1
|
||||
with-coverage=1
|
||||
nocapture=1
|
||||
cover-package=superset
|
||||
|
||||
[pycodestyle]
|
||||
max-line-length=90
|
||||
|
||||
86
setup.py
@@ -1,7 +1,8 @@
|
||||
import imp
|
||||
import os
|
||||
import json
|
||||
from setuptools import setup, find_packages
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||
PACKAGE_DIR = os.path.join(BASE_DIR, 'superset', 'static', 'assets')
|
||||
@@ -9,42 +10,76 @@ PACKAGE_FILE = os.path.join(PACKAGE_DIR, 'package.json')
|
||||
with open(PACKAGE_FILE) as package_file:
|
||||
version_string = json.load(package_file)['version']
|
||||
|
||||
|
||||
def get_git_sha():
|
||||
try:
|
||||
s = str(subprocess.check_output(['git', 'rev-parse', 'HEAD']))
|
||||
return s.strip()
|
||||
except Exception:
|
||||
return ''
|
||||
|
||||
|
||||
GIT_SHA = get_git_sha()
|
||||
version_info = {
|
||||
'GIT_SHA': GIT_SHA,
|
||||
'version': version_string,
|
||||
}
|
||||
print('-==-' * 15)
|
||||
print('VERSION: ' + version_string)
|
||||
print('GIT SHA: ' + GIT_SHA)
|
||||
print('-==-' * 15)
|
||||
|
||||
with open(os.path.join(PACKAGE_DIR, 'version_info.json'), 'w') as version_file:
|
||||
json.dump(version_info, version_file)
|
||||
|
||||
|
||||
setup(
|
||||
name='superset',
|
||||
description=(
|
||||
"A interactive data visualization platform build on SqlAlchemy "
|
||||
"and druid.io"),
|
||||
'A interactive data visualization platform build on SqlAlchemy '
|
||||
'and druid.io'),
|
||||
version=version_string,
|
||||
packages=find_packages(),
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
scripts=['superset/bin/superset'],
|
||||
install_requires=[
|
||||
'celery==3.1.23',
|
||||
'cryptography==1.5.3',
|
||||
'flask-appbuilder==1.8.1',
|
||||
'boto3>=1.4.6',
|
||||
'celery==4.1.0',
|
||||
'colorama==0.3.9',
|
||||
'cryptography==1.9',
|
||||
'flask==0.12.2',
|
||||
'flask-appbuilder==1.9.6',
|
||||
'flask-cache==0.13.1',
|
||||
'flask-migrate==1.5.1',
|
||||
'flask-script==2.0.5',
|
||||
'flask-testing==0.5.0',
|
||||
'flask-sqlalchemy==2.0',
|
||||
'flask-migrate==2.1.1',
|
||||
'flask-script==2.0.6',
|
||||
'flask-sqlalchemy==2.1',
|
||||
'flask-testing==0.7.1',
|
||||
'flask-wtf==0.14.2',
|
||||
'flower==0.9.2',
|
||||
'future>=0.16.0, <0.17',
|
||||
'python-geohash==0.8.5',
|
||||
'humanize==0.5.1',
|
||||
'gunicorn==19.6.0',
|
||||
'markdown==2.6.6',
|
||||
'pandas==0.18.1',
|
||||
'gunicorn==19.7.1',
|
||||
'idna==2.6',
|
||||
'markdown==2.6.11',
|
||||
'pandas==0.22.0',
|
||||
'parsedatetime==2.0.0',
|
||||
'pydruid==0.3.0',
|
||||
'PyHive>=0.2.1',
|
||||
'python-dateutil==2.5.3',
|
||||
'requests==2.10.0',
|
||||
'simplejson==3.8.2',
|
||||
'six==1.10.0',
|
||||
'sqlalchemy==1.0.13',
|
||||
'sqlalchemy-utils==0.32.7',
|
||||
'sqlparse==0.1.19',
|
||||
'pathlib2==2.3.0',
|
||||
'polyline==1.3.2',
|
||||
'pydruid==0.4.0',
|
||||
'PyHive>=0.4.0',
|
||||
'python-dateutil==2.6.1',
|
||||
'pyyaml>=3.11',
|
||||
'requests==2.18.4',
|
||||
'simplejson==3.13.2',
|
||||
'six==1.11.0',
|
||||
'sqlalchemy==1.2.2',
|
||||
'sqlalchemy-utils==0.32.21',
|
||||
'sqlparse==0.2.4',
|
||||
'thrift>=0.9.3',
|
||||
'thrift-sasl>=0.2.1',
|
||||
'werkzeug==0.11.10',
|
||||
'unidecode>=0.04.21',
|
||||
],
|
||||
extras_require={
|
||||
'cors': ['Flask-Cors>=2.0.0'],
|
||||
@@ -54,6 +89,7 @@ setup(
|
||||
'coverage',
|
||||
'mock',
|
||||
'nose',
|
||||
'redis',
|
||||
],
|
||||
author='Maxime Beauchemin',
|
||||
author_email='maximebeauchemin@gmail.com',
|
||||
|
||||
@@ -4,40 +4,97 @@ from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
import os
|
||||
|
||||
from flask import Flask, redirect
|
||||
from flask_appbuilder import SQLA, AppBuilder, IndexView
|
||||
from flask_appbuilder import AppBuilder, IndexView, SQLA
|
||||
from flask_appbuilder.baseviews import expose
|
||||
from flask_cache import Cache
|
||||
from flask_migrate import Migrate
|
||||
from superset.source_registry import SourceRegistry
|
||||
from flask_wtf.csrf import CSRFProtect
|
||||
from werkzeug.contrib.fixers import ProxyFix
|
||||
from superset import utils
|
||||
|
||||
from superset.connectors.connector_registry import ConnectorRegistry
|
||||
from superset import utils, config # noqa
|
||||
|
||||
APP_DIR = os.path.dirname(__file__)
|
||||
CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config')
|
||||
|
||||
if not os.path.exists(config.DATA_DIR):
|
||||
os.makedirs(config.DATA_DIR)
|
||||
|
||||
with open(APP_DIR + '/static/assets/backendSync.json', 'r') as f:
|
||||
frontend_config = json.load(f)
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config.from_object(CONFIG_MODULE)
|
||||
conf = app.config
|
||||
|
||||
#################################################################
|
||||
# Handling manifest file logic at app start
|
||||
#################################################################
|
||||
MANIFEST_FILE = APP_DIR + '/static/assets/dist/manifest.json'
|
||||
manifest = {}
|
||||
|
||||
|
||||
def parse_manifest_json():
|
||||
global manifest
|
||||
try:
|
||||
with open(MANIFEST_FILE, 'r') as f:
|
||||
manifest = json.load(f)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def get_manifest_file(filename):
|
||||
if app.debug:
|
||||
parse_manifest_json()
|
||||
return '/static/assets/dist/' + manifest.get(filename, '')
|
||||
|
||||
|
||||
parse_manifest_json()
|
||||
|
||||
|
||||
@app.context_processor
|
||||
def get_js_manifest():
|
||||
return dict(js_manifest=get_manifest_file)
|
||||
|
||||
|
||||
#################################################################
|
||||
|
||||
for bp in conf.get('BLUEPRINTS'):
|
||||
try:
|
||||
print("Registering blueprint: '{}'".format(bp.name))
|
||||
app.register_blueprint(bp)
|
||||
except Exception as e:
|
||||
print('blueprint registration failed')
|
||||
logging.exception(e)
|
||||
|
||||
if conf.get('SILENCE_FAB'):
|
||||
logging.getLogger('flask_appbuilder').setLevel(logging.ERROR)
|
||||
|
||||
if not app.debug:
|
||||
# In production mode, add log handler to sys.stderr.
|
||||
app.logger.addHandler(logging.StreamHandler())
|
||||
app.logger.setLevel(logging.INFO)
|
||||
logging.getLogger('pyhive.presto').setLevel(logging.INFO)
|
||||
|
||||
db = SQLA(app)
|
||||
|
||||
if conf.get('WTF_CSRF_ENABLED'):
|
||||
csrf = CSRFProtect(app)
|
||||
csrf_exempt_list = conf.get('WTF_CSRF_EXEMPT_LIST', [])
|
||||
for ex in csrf_exempt_list:
|
||||
csrf.exempt(ex)
|
||||
|
||||
utils.pessimistic_connection_handling(db.engine.pool)
|
||||
utils.pessimistic_connection_handling(db.engine)
|
||||
|
||||
cache = Cache(app, config=app.config.get('CACHE_CONFIG'))
|
||||
cache = utils.setup_cache(app, conf.get('CACHE_CONFIG'))
|
||||
tables_cache = utils.setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))
|
||||
|
||||
migrate = Migrate(app, db, directory=APP_DIR + "/migrations")
|
||||
migrate = Migrate(app, db, directory=APP_DIR + '/migrations')
|
||||
|
||||
# Logging configuration
|
||||
logging.basicConfig(format=app.config.get('LOG_FORMAT'))
|
||||
@@ -45,10 +102,11 @@ logging.getLogger().setLevel(app.config.get('LOG_LEVEL'))
|
||||
|
||||
if app.config.get('ENABLE_TIME_ROTATE'):
|
||||
logging.getLogger().setLevel(app.config.get('TIME_ROTATE_LOG_LEVEL'))
|
||||
handler = TimedRotatingFileHandler(app.config.get('FILENAME'),
|
||||
when=app.config.get('ROLLOVER'),
|
||||
interval=app.config.get('INTERVAL'),
|
||||
backupCount=app.config.get('BACKUP_COUNT'))
|
||||
handler = TimedRotatingFileHandler(
|
||||
app.config.get('FILENAME'),
|
||||
when=app.config.get('ROLLOVER'),
|
||||
interval=app.config.get('INTERVAL'),
|
||||
backupCount=app.config.get('BACKUP_COUNT'))
|
||||
logging.getLogger().addHandler(handler)
|
||||
|
||||
if app.config.get('ENABLE_CORS'):
|
||||
@@ -58,32 +116,59 @@ if app.config.get('ENABLE_CORS'):
|
||||
if app.config.get('ENABLE_PROXY_FIX'):
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app)
|
||||
|
||||
if app.config.get('ENABLE_CHUNK_ENCODING'):
|
||||
|
||||
class ChunkedEncodingFix(object):
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
# Setting wsgi.input_terminated tells werkzeug.wsgi to ignore
|
||||
# content-length and read the stream till the end.
|
||||
if environ.get('HTTP_TRANSFER_ENCODING', '').lower() == u'chunked':
|
||||
environ['wsgi.input_terminated'] = True
|
||||
return self.app(environ, start_response)
|
||||
|
||||
app.wsgi_app = ChunkedEncodingFix(app.wsgi_app)
|
||||
|
||||
if app.config.get('UPLOAD_FOLDER'):
|
||||
try:
|
||||
os.makedirs(app.config.get('UPLOAD_FOLDER'))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
for middleware in app.config.get('ADDITIONAL_MIDDLEWARE'):
|
||||
app.wsgi_app = middleware(app.wsgi_app)
|
||||
|
||||
|
||||
class MyIndexView(IndexView):
|
||||
@expose('/')
|
||||
def index(self):
|
||||
return redirect('/superset/welcome')
|
||||
|
||||
|
||||
appbuilder = AppBuilder(
|
||||
app, db.session,
|
||||
app,
|
||||
db.session,
|
||||
base_template='superset/base.html',
|
||||
indexview=MyIndexView,
|
||||
security_manager_class=app.config.get("CUSTOM_SECURITY_MANAGER"))
|
||||
security_manager_class=app.config.get('CUSTOM_SECURITY_MANAGER'),
|
||||
update_perms=utils.get_update_perms_flag(),
|
||||
)
|
||||
|
||||
sm = appbuilder.sm
|
||||
|
||||
get_session = appbuilder.get_session
|
||||
results_backend = app.config.get("RESULTS_BACKEND")
|
||||
results_backend = app.config.get('RESULTS_BACKEND')
|
||||
|
||||
# Registering sources
|
||||
module_datasource_map = app.config.get("DEFAULT_MODULE_DS_MAP")
|
||||
module_datasource_map.update(app.config.get("ADDITIONAL_MODULE_DS_MAP"))
|
||||
SourceRegistry.register_sources(module_datasource_map)
|
||||
module_datasource_map = app.config.get('DEFAULT_MODULE_DS_MAP')
|
||||
module_datasource_map.update(app.config.get('ADDITIONAL_MODULE_DS_MAP'))
|
||||
ConnectorRegistry.register_sources(module_datasource_map)
|
||||
|
||||
from superset import views, config # noqa
|
||||
# Hook that provides administrators a handle on the Flask APP
|
||||
# after initialization
|
||||
flask_app_mutator = app.config.get('FLASK_APP_MUTATOR')
|
||||
if flask_app_mutator:
|
||||
flask_app_mutator(app)
|
||||
|
||||
from superset import views # noqa
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
error = (
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMNNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMM8OI++=~~~~~~=+?IODMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMD$~~~~~~~~~~~~~~~~~~~~~~~=$MMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMN8?:~~~~~~~~~~~~~~~~~~~~~~~~~~=+8NMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMO=~~~~~~~~~~~~~~~~~+I??~~~~~~~~~~~~~+DMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMNI~~~~~~~~~~~~~~~~~~IIIII=~~~~~~~~~~~~~~=NMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMM+=~~~~~~~~~~~~~~~~~~~=III+~~~~~~~~~~~~~~~~~?8MMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMM?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+++=~~~~8MMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMI=~~~~~~~~~~~~~~~~~~~~~~~~~III?I~~~~~~~~,:++++++~~8MMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMN7~~~~~~~~~~~~~~~~==+=~~~~~~=IIIII~~~~~~:. ..:=++=~=MMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMO=~~~~~~~~~~~~~~~~+++=~~~~~~~~??I?I~~~~~~. ...,~~~~IMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMM~~~~~~~~~~~~~~~~~+++:,~~~~~~~~~~~?=~~~~~:. ..~~~~~OMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMM$=~~~~~~~~~~~~~~~=++:.. ..~~~~~~~~~~~~~~~~,. . . :~~~~~OMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMM~~~~~~~~~~~~~~~~+++,. .~~~~~~~~~~~~~~~.. .. . .~~~~~=OMMMMMMMMMM\n"+
|
||||
"MMMMMMMM?~~~~~~~~~~~~~~~=+~. .~~~~~~~~~~~~~~. ,MMMMM,=~~~~~~NMMMMMMMMM\n"+
|
||||
"MMMMMMMN~~~~~~~~~~~~~~~~~,. .,~~~~~~~~~~~~~.. ZMMM,+Z:~~~~~~$MMMMMMMMM\n"+
|
||||
"MMMMMM8?~~~~~~~~~~~~~~~~~.. ..~~~~~~~~~~~~~:. DMMM,+D~~~~~~~~IMMMMMMMM\n"+
|
||||
"MMMMMMI~~~~~~~~~~~~~~~~~~.. :MMMO~~~~~~~~~~~~~~~,.. ?MMMMMI~~~~~~~~~MMMMMMMM\n"+
|
||||
"MMMMMM=~~~~~~~~~~~~~~~~~~.. MMM+=M:~~~~~~~~~~~~~:. .:IM$~~~~~~~~~~~8MMMMMMM\n"+
|
||||
"MMMMMD~~~~~~~~~~~~~~~~~~~:. MMM:,M:~~~~~~~~~~~~~~~.......:~~~~~~~~~~$MMMMMMM\n"+
|
||||
"MMMMMI~~~~~~~~~~~~~~~~~~~~, MMMMMM~~~~~~~~~~~~~~~~~~,..:~~~~~~~~~~~~+MMMMMMM\n"+
|
||||
"MMMMD+~~~~~~~~~~~~~~~~~~~~~. $MMMM$~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=MMMMMMM\n"+
|
||||
"MMMM8~~~~~~~~~~~~~~~~~~~~~~:. . .:~~~~~~,..:. .=~~~~~~~~~~~~~~~~~~~~MMMMMMM\n"+
|
||||
"MMMMO~~~~~~~~~~~~~~~~~~~~~~~:, .:~~~~~=8.. .+ . =8ZI~~~~~~~~~~~~~~~~=MMMMMMM\n"+
|
||||
"MMMMZ=~~~~~~~~~~~~~~~~~~~~~~~~:,,,:~~~~~~IZ8:. .O....888?~~~~~~~~~~~~~~~+MMMMMMM\n"+
|
||||
"MMMMO=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~?888=...I~I88888O?~~~~~~~~~~~~~~7MMMMMMM\n"+
|
||||
"MMMMO~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~Z888OO88888888888O?~~~~~~~~~~~~~OMMMMMMM\n"+
|
||||
"MMMMD+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=8888888888888888888~~~~~~~~~~~~+MMMMMMMM\n"+
|
||||
"MMMMM7~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~?8888888888888888888?~~~~~~~~~~=$MMMMMMMM\n"+
|
||||
"MMMMMD~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$8888888888888888888O~~~~~~~~~~8MMMMMMMMM\n"+
|
||||
"MMMMMN=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+Z88888888888888888ZZ7=~~~~~~~~?MMMMMMMMMM\n"+
|
||||
"MMMMMMZ=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+Z88888888Z7I===~~~~~~~~~~~~~=OMMMMMMMMMMM\n"+
|
||||
"MMMMMMN$~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$88888O7?=~~~~~~~~~~~~~~~~~~OMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMM?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~I8OZ+~~~~~~~~~~~~~~~~~~~~=DMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMM8=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+$+=~~~~~~~~~~~~~~~~~~~~+MMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMD7~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$DMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMM?~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~=$OMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMD7=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~+ZMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMZ7=~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~78MMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMM8OI=~~~~~~~~~~~~~~~~~~~=+?ZDNMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMNDZ7?++~=~==~+?IONMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM\n"+
|
||||
"MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM")
|
||||
|
||||
stacktrace="""
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
=======================================================================================================
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
___ ___ ___
|
||||
( ) ( ) ( )
|
||||
.--. | |_ .---. .--. | | ___ | |_ ___ .-. .---. .--. .--.
|
||||
/ _ \ ( __) / .-, \ / \ | | ( ) ( __) ( ) \ / .-, \ / \ / \\
|
||||
. .' `. ; | | (__) ; | | .-. ; | | ' / | | | ' .-. ; (__) ; | | .-. ; | .-. ;
|
||||
| ' | | | | ___ .'` | | |(___) | |,' / | | ___ | / (___) .'` | | |(___) | | | |
|
||||
_\_`.(___) | |( ) / .'| | | | | . '. | |( ) | | / .'| | | | | |/ |
|
||||
( ). '. | | | | | / | | | | ___ | | `. \ | | | | | | | / | | | | ___ | ' _.'
|
||||
| | `\ | | ' | | ; | ; | | '( ) | | \ \ | ' | | | | ; | ; | | '( ) | .'.-.
|
||||
; '._,' ' ' `-' ; ' `-' | ' `-' | | | \ . ' `-' ; | | ' `-' | ' `-' | ' `-' /
|
||||
'.___.' `.__. `.__.'_. `.__,' (___ ) (___) `.__. (___) `.__.'_. `.__,' `.__.'
|
||||
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
=======================================================================================================
|
||||
-------------------------------------------------------------------------------------------------------
|
||||
"""
|
||||
|
||||
boat = """\
|
||||
+ +
|
||||
)`.).
|
||||
)``)``) .~~
|
||||
).-'.-')|)
|
||||
|-).-).-'_'-/
|
||||
~~~\ `o-o-o' /~~~~
|
||||
~~~'---.____/~~~"""
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"presets" : ["airbnb", "es2015", "react"],
|
||||
"presets" : ["airbnb"],
|
||||
}
|
||||
|
||||
@@ -7,3 +7,4 @@ node_modules/*
|
||||
node_modules*/*
|
||||
stylesheets/*
|
||||
vendor/*
|
||||
docs/*
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
"experimentalObjectRestSpread": true
|
||||
}
|
||||
},
|
||||
"globals": {
|
||||
"document": true,
|
||||
},
|
||||
"rules": {
|
||||
"prefer-template": 0,
|
||||
"new-cap": 0,
|
||||
@@ -14,5 +17,26 @@
|
||||
"func-names": 0,
|
||||
"react/jsx-no-bind": 0,
|
||||
"no-confusing-arrow": 0,
|
||||
|
||||
"jsx-a11y/no-static-element-interactions": 0,
|
||||
"jsx-a11y/anchor-has-content": 0,
|
||||
"react/require-default-props": 0,
|
||||
"no-plusplus": 0,
|
||||
"no-mixed-operators": 0,
|
||||
"no-continue": 0,
|
||||
"no-bitwise": 0,
|
||||
"no-undef": 0,
|
||||
"no-multi-assign": 0,
|
||||
"react/no-array-index-key": 0,
|
||||
"no-restricted-properties": 0,
|
||||
"no-prototype-builtins": 0,
|
||||
"jsx-a11y/href-no-hash": 0,
|
||||
"react/forbid-prop-types": 0,
|
||||
"class-methods-use-this": 0,
|
||||
"import/no-named-as-default": 0,
|
||||
"import/prefer-default-export": 0,
|
||||
"react/no-unescaped-entities": 0,
|
||||
"react/no-unused-prop-types": 0,
|
||||
"react/no-string-refs": 0,
|
||||
}
|
||||
}
|
||||
|
||||
2977
superset/assets/backendSync.json
Normal file
45
superset/assets/branding/FullLockupWithText.svg
Normal file
@@ -0,0 +1,45 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="210px" height="202px" viewBox="0 0 210 202" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<!-- Generator: Sketch 42 (36781) - http://www.bohemiancoding.com/sketch -->
|
||||
<title>Full Lockup With Text</title>
|
||||
<desc>Created with Sketch.</desc>
|
||||
<defs>
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="path-1"></path>
|
||||
<mask id="mask-2" maskContentUnits="userSpaceOnUse" maskUnits="objectBoundingBox" x="-5" y="-5" width="122.183871" height="65.65">
|
||||
<rect x="-5" y="-5" width="122.183871" height="65.65" fill="white"></rect>
|
||||
<use xlink:href="#path-1" fill="black"></use>
|
||||
</mask>
|
||||
</defs>
|
||||
<g id="Main" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Superset" transform="translate(-177.000000, -812.000000)">
|
||||
<g id="Full-Lockup-With-Text" transform="translate(177.000000, 812.000000)">
|
||||
<g id="Group-7" transform="translate(0.500000, 0.500000)">
|
||||
<g id="Group-17">
|
||||
<g id="Group-6-Copy">
|
||||
<g id="Group" fill="#484848">
|
||||
<path d="M0,80.85 C8.56021505,80.85 15.0179211,84.6 15.0179211,94.8 L15.0179211,116.1 C15.0179211,135.9 25.9810036,145.2 44.1526882,145.2 L48.8082437,145.2 L48.8082437,130.05 L44.3028674,130.05 C36.944086,130.05 31.8379928,126.6 31.8379928,116.55 L31.8379928,91.5 C31.8379928,81.9 26.281362,75 16.6698925,72.6 C26.281362,70.05 31.8379928,63.3 31.8379928,53.7 L31.8379928,28.5 C31.8379928,18.45 36.944086,15.15 44.3028674,15.15 L48.8082437,15.15 L48.8082437,0 L44.1526882,0 C25.9810036,0 15.0179211,9.15 15.0179211,28.95 L15.0179211,50.25 C15.0179211,60.45 8.56021505,64.2 0,64.2 L0,80.85 Z" id="{-copy-4"></path>
|
||||
<path d="M160.691756,80.85 C169.251971,80.85 175.709677,84.6 175.709677,94.8 L175.709677,116.1 C175.709677,135.9 186.67276,145.2 204.844444,145.2 L209.5,145.2 L209.5,130.05 L204.994624,130.05 C197.635842,130.05 192.529749,126.6 192.529749,116.55 L192.529749,91.5 C192.529749,81.9 186.973118,75 177.361649,72.6 C186.973118,70.05 192.529749,63.3 192.529749,53.7 L192.529749,28.5 C192.529749,18.45 197.635842,15.15 204.994624,15.15 L209.5,15.15 L209.5,0 L204.844444,0 C186.67276,0 175.709677,9.15 175.709677,28.95 L175.709677,50.25 C175.709677,60.45 169.251971,64.2 160.691756,64.2 L160.691756,80.85 Z" id="{-copy-5" transform="translate(185.095878, 72.600000) rotate(-180.000000) translate(-185.095878, -72.600000) "></path>
|
||||
<path d="M104.67491,86.25 C95.8143369,95.85 87.8548387,100.65 77.6426523,100.65 C60.9727599,100.65 48.8082437,88.95 48.8082437,72.9 C48.8082437,56.85 60.9727599,45 77.6426523,45 C87.7046595,45 96.7154122,50.25 105.125448,59.85 C113.685663,50.4 122.546237,45 132.157706,45 C148.827599,45 160.992115,56.85 160.992115,72.9 C160.992115,88.95 148.827599,100.65 132.157706,100.65 C121.94552,100.65 113.235125,95.85 104.67491,86.25 Z M77.9430108,62.1 C70.8845878,62.1 66.6795699,66.9 66.6795699,73.05 C66.6795699,79.2 70.8845878,83.85 77.9430108,83.85 C83.8,83.85 89.0562724,79.35 94.0121864,73.35 C88.755914,66.9 83.9501792,62.1 77.9430108,62.1 Z M131.857348,83.85 C126.000358,83.85 121.044444,79.2 115.788172,73.05 C121.194624,66.6 125.850179,62.1 131.857348,62.1 C138.915771,62.1 143.120789,66.9 143.120789,73.05 C143.120789,79.2 138.915771,83.85 131.857348,83.85 Z" id="∞"></path>
|
||||
</g>
|
||||
<rect id="Bottom" fill="#FFFFFF" transform="translate(116.947287, 85.695730) rotate(-320.000000) translate(-116.947287, -85.695730) " x="107.936535" y="73.3847709" width="18.0215054" height="24.6219184"></rect>
|
||||
<rect id="Top" fill="#FFFFFF" transform="translate(91.942412, 61.695730) rotate(-320.000000) translate(-91.942412, -61.695730) " x="82.9316592" y="49.3847709" width="18.0215054" height="24.6219184"></rect>
|
||||
</g>
|
||||
<text id="Superset-Copy" font-family="Roboto-Black, Roboto" font-size="37.5" font-weight="700" letter-spacing="0.500625014" fill="#484848">
|
||||
<tspan x="26.281362" y="192.625">Superset</tspan>
|
||||
</text>
|
||||
</g>
|
||||
</g>
|
||||
<g id="Group-10" transform="translate(49.500000, 45.500000)">
|
||||
<g id="WORK-SPACE">
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="∞-copy-2" fill="#484848"></path>
|
||||
<path d="M35.3031737,7.82736301 L54.6231734,7.82736301 C54.6231734,7.82736301 54.1382597,11.9130391 54.1201021,16.2068622 C54.1019446,20.5006853 53.079701,24.1223631 53.079701,24.1223631 L35.3031737,24.1223631 L35.3031737,7.82736301 Z" id="Path" fill="#00D1C1" transform="translate(44.963174, 15.974863) rotate(-50.000000) translate(-44.963174, -15.974863) "></path>
|
||||
<rect id="Path-Copy" fill="#00D1C1" transform="translate(67.518574, 40.130521) rotate(-50.000000) translate(-67.518574, -40.130521) " x="57.8585742" y="31.9830205" width="19.3199997" height="16.2950001"></rect>
|
||||
<path d="M29.134767,17.1 C35.1419355,17.1 39.9476703,21.9 45.2039427,28.35 C40.2480287,34.35 34.9917563,38.85 29.134767,38.85 C22.0763441,38.85 17.8713262,34.2 17.8713262,28.05 C17.8713262,21.9 22.0763441,17.1 29.134767,17.1 Z" id="Path" fill="#FFFFFF"></path>
|
||||
<path d="M83.0491039,38.85 C77.1921147,38.85 72.2362007,34.2 66.9799283,28.05 C72.3863799,21.6 77.0419355,17.1 83.0491039,17.1 C90.1075269,17.1 94.3125448,21.9 94.3125448,28.05 C94.3125448,34.2 90.1075269,38.85 83.0491039,38.85 Z" id="Path" fill="#FFFFFF"></path>
|
||||
</g>
|
||||
<use id="∞-copy-2" stroke="#FFFFFF" mask="url(#mask-2)" stroke-width="10" xlink:href="#path-1"></use>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 6.6 KiB |
BIN
superset/assets/branding/FullLockupWithText@2x.png
Normal file
|
After Width: | Height: | Size: 29 KiB |
34
superset/assets/branding/FullLockupWithoutText@1x.svg
Normal file
@@ -0,0 +1,34 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="210px" height="146px" viewBox="0 0 210 146" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<!-- Generator: Sketch 42 (36781) - http://www.bohemiancoding.com/sketch -->
|
||||
<title>Full Lockup Without Text@1x</title>
|
||||
<desc>Created with Sketch.</desc>
|
||||
<defs>
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="path-1"></path>
|
||||
<mask id="mask-2" maskContentUnits="userSpaceOnUse" maskUnits="objectBoundingBox" x="-5" y="-5" width="122.183871" height="65.65">
|
||||
<rect x="-5" y="-5" width="122.183871" height="65.65" fill="white"></rect>
|
||||
<use xlink:href="#path-1" fill="black"></use>
|
||||
</mask>
|
||||
</defs>
|
||||
<g id="Main" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Superset" transform="translate(-177.000000, -466.000000)">
|
||||
<g id="Full-Lockup-Without-Text" transform="translate(177.000000, 466.000000)">
|
||||
<path d="M0.5,81.35 C9.06021505,81.35 15.5179211,85.1 15.5179211,95.3 L15.5179211,116.6 C15.5179211,136.4 26.4810036,145.7 44.6526882,145.7 L49.3082437,145.7 L49.3082437,130.55 L44.8028674,130.55 C37.444086,130.55 32.3379928,127.1 32.3379928,117.05 L32.3379928,92 C32.3379928,82.4 26.781362,75.5 17.1698925,73.1 C26.781362,70.55 32.3379928,63.8 32.3379928,54.2 L32.3379928,29 C32.3379928,18.95 37.444086,15.65 44.8028674,15.65 L49.3082437,15.65 L49.3082437,0.5 L44.6526882,0.5 C26.4810036,0.5 15.5179211,9.65 15.5179211,29.45 L15.5179211,50.75 C15.5179211,60.95 9.06021505,64.7 0.5,64.7 L0.5,81.35 Z" id="{-copy-4" fill="#484848"></path>
|
||||
<path d="M161.191756,81.35 C169.751971,81.35 176.209677,85.1 176.209677,95.3 L176.209677,116.6 C176.209677,136.4 187.17276,145.7 205.344444,145.7 L210,145.7 L210,130.55 L205.494624,130.55 C198.135842,130.55 193.029749,127.1 193.029749,117.05 L193.029749,92 C193.029749,82.4 187.473118,75.5 177.861649,73.1 C187.473118,70.55 193.029749,63.8 193.029749,54.2 L193.029749,29 C193.029749,18.95 198.135842,15.65 205.494624,15.65 L210,15.65 L210,0.5 L205.344444,0.5 C187.17276,0.5 176.209677,9.65 176.209677,29.45 L176.209677,50.75 C176.209677,60.95 169.751971,64.7 161.191756,64.7 L161.191756,81.35 Z" id="{-copy-5" fill="#484848" transform="translate(185.595878, 73.100000) rotate(-180.000000) translate(-185.595878, -73.100000) "></path>
|
||||
<path d="M105.366667,86.75 C96.5060932,96.35 88.546595,101.15 78.3344086,101.15 C61.6645161,101.15 49.5,89.45 49.5,73.4 C49.5,57.35 61.6645161,45.5 78.3344086,45.5 C88.3964158,45.5 97.4071685,50.75 105.817204,60.35 C114.377419,50.9 123.237993,45.5 132.849462,45.5 C149.519355,45.5 161.683871,57.35 161.683871,73.4 C161.683871,89.45 149.519355,101.15 132.849462,101.15 C122.637276,101.15 113.926882,96.35 105.366667,86.75 Z M78.634767,62.6 C71.5763441,62.6 67.3713262,67.4 67.3713262,73.55 C67.3713262,79.7 71.5763441,84.35 78.634767,84.35 C84.4917563,84.35 89.7480287,79.85 94.7039427,73.85 C89.4476703,67.4 84.6419355,62.6 78.634767,62.6 Z M132.549104,84.35 C126.692115,84.35 121.736201,79.7 116.479928,73.55 C121.88638,67.1 126.541935,62.6 132.549104,62.6 C139.607527,62.6 143.812545,67.4 143.812545,73.55 C143.812545,79.7 139.607527,84.35 132.549104,84.35 Z" id="∞" fill="#484848"></path>
|
||||
<rect id="Bottom" fill="#FFFFFF" transform="translate(117.815969, 86.222742) rotate(-320.000000) translate(-117.815969, -86.222742) " x="108.805216" y="73.9117829" width="18.0215054" height="24.6219184"></rect>
|
||||
<polygon id="Top" fill="#FFFFFF" transform="translate(93.488745, 61.141018) rotate(-320.000000) translate(-93.488745, -61.141018) " points="84.477992 50.894853 102.499497 50.894853 102.499497 71.3871824 84.5471936 70.9586997"></polygon>
|
||||
<g id="Group-10" transform="translate(49.500000, 45.500000)">
|
||||
<g id="WORK-SPACE">
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="∞-copy-2" fill="#484848"></path>
|
||||
<path d="M35.3031737,7.82736301 L54.6231734,7.82736301 C54.6231734,7.82736301 54.1382597,11.9130391 54.1201021,16.2068622 C54.1019446,20.5006853 53.079701,24.1223631 53.079701,24.1223631 L35.3031737,24.1223631 L35.3031737,7.82736301 Z" id="Path" fill="#00D1C1" transform="translate(44.963174, 15.974863) rotate(-50.000000) translate(-44.963174, -15.974863) "></path>
|
||||
<rect id="Path-Copy" fill="#00D1C1" transform="translate(67.518574, 40.130521) rotate(-50.000000) translate(-67.518574, -40.130521) " x="57.8585742" y="31.9830205" width="19.3199997" height="16.2950001"></rect>
|
||||
<path d="M29.134767,17.1 C35.1419355,17.1 39.9476703,21.9 45.2039427,28.35 C40.2480287,34.35 34.9917563,38.85 29.134767,38.85 C22.0763441,38.85 17.8713262,34.2 17.8713262,28.05 C17.8713262,21.9 22.0763441,17.1 29.134767,17.1 Z" id="Path" fill="#FFFFFF"></path>
|
||||
<path d="M83.0491039,38.85 C77.1921147,38.85 72.2362007,34.2 66.9799283,28.05 C72.3863799,21.6 77.0419355,17.1 83.0491039,17.1 C90.1075269,17.1 94.3125448,21.9 94.3125448,28.05 C94.3125448,34.2 90.1075269,38.85 83.0491039,38.85 Z" id="Path" fill="#FFFFFF"></path>
|
||||
</g>
|
||||
<use id="∞-copy-2" stroke="#FFFFFF" mask="url(#mask-2)" stroke-width="10" xlink:href="#path-1"></use>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 6.1 KiB |
BIN
superset/assets/branding/FullLockupWithoutText@2x.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
35
superset/assets/branding/Horizontal.svg
Normal file
@@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="350px" height="66px" viewBox="0 0 350 66" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<!-- Generator: Sketch 42 (36781) - http://www.bohemiancoding.com/sketch -->
|
||||
<title>Horizontal</title>
|
||||
<desc>Created with Sketch.</desc>
|
||||
<defs>
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="path-1"></path>
|
||||
<mask id="mask-2" maskContentUnits="userSpaceOnUse" maskUnits="objectBoundingBox" x="-5" y="-5" width="122.183871" height="65.65">
|
||||
<rect x="-5" y="-5" width="122.183871" height="65.65" fill="white"></rect>
|
||||
<use xlink:href="#path-1" fill="black"></use>
|
||||
</mask>
|
||||
</defs>
|
||||
<g id="Main" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Superset" transform="translate(-319.000000, -195.000000)">
|
||||
<g id="Horizontal" transform="translate(324.000000, 200.000000)">
|
||||
<g id="Group-3">
|
||||
<text id="Superset" font-family="Roboto-Black, Roboto" font-size="50" font-weight="700" letter-spacing="0.670000017" fill="#484848">
|
||||
<tspan x="137" y="46">Superse</tspan>
|
||||
<tspan x="328.335508" y="46">t</tspan>
|
||||
</text>
|
||||
<g id="Group-10-Copy-8">
|
||||
<g id="WORK-SPACE">
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="∞-copy-2" fill="#484848"></path>
|
||||
<path d="M35.3031737,7.82736301 L54.6231734,7.82736301 C54.6231734,7.82736301 54.1382597,11.9130391 54.1201021,16.2068622 C54.1019446,20.5006853 53.079701,24.1223631 53.079701,24.1223631 L35.3031737,24.1223631 L35.3031737,7.82736301 Z" id="Path" fill="#00D1C1" transform="translate(44.963174, 15.974863) rotate(-50.000000) translate(-44.963174, -15.974863) "></path>
|
||||
<rect id="Path-Copy" fill="#00D1C1" transform="translate(67.518574, 40.130521) rotate(-50.000000) translate(-67.518574, -40.130521) " x="57.8585742" y="31.9830205" width="19.3199997" height="16.2950001"></rect>
|
||||
<path d="M29.134767,17.1 C35.1419355,17.1 39.9476703,21.9 45.2039427,28.35 C40.2480287,34.35 34.9917563,38.85 29.134767,38.85 C22.0763441,38.85 17.8713262,34.2 17.8713262,28.05 C17.8713262,21.9 22.0763441,17.1 29.134767,17.1 Z" id="Path" fill="#FFFFFF"></path>
|
||||
<path d="M83.0491039,38.85 C77.1921147,38.85 72.2362007,34.2 66.9799283,28.05 C72.3863799,21.6 77.0419355,17.1 83.0491039,17.1 C90.1075269,17.1 94.3125448,21.9 94.3125448,28.05 C94.3125448,34.2 90.1075269,38.85 83.0491039,38.85 Z" id="Path" fill="#FFFFFF"></path>
|
||||
</g>
|
||||
<use id="∞-copy-2" stroke="#FFFFFF" mask="url(#mask-2)" stroke-width="10" xlink:href="#path-1"></use>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.6 KiB |
BIN
superset/assets/branding/Horizontal@2x.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
superset/assets/branding/SoloMark.png
Normal file
|
After Width: | Height: | Size: 12 KiB |
27
superset/assets/branding/SoloMark@1x.svg
Normal file
@@ -0,0 +1,27 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="123px" height="66px" viewBox="0 0 123 66" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<!-- Generator: Sketch 42 (36781) - http://www.bohemiancoding.com/sketch -->
|
||||
<title>Solo Mark@1x</title>
|
||||
<desc>Created with Sketch.</desc>
|
||||
<defs>
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="path-1"></path>
|
||||
<mask id="mask-2" maskContentUnits="userSpaceOnUse" maskUnits="objectBoundingBox" x="-5" y="-5" width="122.183871" height="65.65">
|
||||
<rect x="-5" y="-5" width="122.183871" height="65.65" fill="white"></rect>
|
||||
<use xlink:href="#path-1" fill="black"></use>
|
||||
</mask>
|
||||
</defs>
|
||||
<g id="Main" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Superset" transform="translate(-787.000000, -557.000000)">
|
||||
<g id="Solo-Mark" transform="translate(792.000000, 562.000000)">
|
||||
<g id="WORK-SPACE">
|
||||
<path d="M55.8666667,41.25 C64.4268817,50.85 73.137276,55.65 83.3494624,55.65 C100.019355,55.65 112.183871,43.95 112.183871,27.9 C112.183871,11.85 100.019355,0 83.3494624,0 C73.7379928,0 64.8774194,5.4 56.3172043,14.85 C47.9071685,5.25 38.8964158,0 28.8344086,0 C12.1645161,0 -2.84217094e-14,11.85 -2.84217094e-14,27.9 C-2.84217094e-14,43.95 12.1645161,55.65 28.8344086,55.65 C39.046595,55.65 47.0060932,50.85 55.8666667,41.25 Z" id="∞-copy-2" fill="#484848"></path>
|
||||
<path d="M35.3031737,7.82736301 L54.6231734,7.82736301 C54.6231734,7.82736301 54.1382597,11.9130391 54.1201021,16.2068622 C54.1019446,20.5006853 53.079701,24.1223631 53.079701,24.1223631 L35.3031737,24.1223631 L35.3031737,7.82736301 Z" id="Path" fill="#00D1C1" transform="translate(44.963174, 15.974863) rotate(-50.000000) translate(-44.963174, -15.974863) "></path>
|
||||
<rect id="Path-Copy" fill="#00D1C1" transform="translate(67.518574, 40.130521) rotate(-50.000000) translate(-67.518574, -40.130521) " x="57.8585742" y="31.9830205" width="19.3199997" height="16.2950001"></rect>
|
||||
<path d="M29.134767,17.1 C35.1419355,17.1 39.9476703,21.9 45.2039427,28.35 C40.2480287,34.35 34.9917563,38.85 29.134767,38.85 C22.0763441,38.85 17.8713262,34.2 17.8713262,28.05 C17.8713262,21.9 22.0763441,17.1 29.134767,17.1 Z" id="Path" fill="#FFFFFF"></path>
|
||||
<path d="M83.0491039,38.85 C77.1921147,38.85 72.2362007,34.2 66.9799283,28.05 C72.3863799,21.6 77.0419355,17.1 83.0491039,17.1 C90.1075269,17.1 94.3125448,21.9 94.3125448,28.05 C94.3125448,34.2 90.1075269,38.85 83.0491039,38.85 Z" id="Path" fill="#FFFFFF"></path>
|
||||
</g>
|
||||
<use id="∞-copy-2" stroke="#FFFFFF" mask="url(#mask-2)" stroke-width="10" xlink:href="#path-1"></use>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.2 KiB |
|
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 6.8 KiB |
BIN
superset/assets/images/s.png
Normal file
|
After Width: | Height: | Size: 12 KiB |
BIN
superset/assets/images/superset-logo@2x.png
Normal file
|
After Width: | Height: | Size: 4.0 KiB |
|
Before Width: | Height: | Size: 13 KiB |
|
Before Width: | Height: | Size: 190 KiB |
|
Before Width: | Height: | Size: 245 KiB |
|
Before Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 33 KiB |
|
Before Width: | Height: | Size: 133 KiB |
|
Before Width: | Height: | Size: 128 KiB |
|
Before Width: | Height: | Size: 141 KiB |
|
Before Width: | Height: | Size: 49 KiB |
|
Before Width: | Height: | Size: 17 KiB |
|
Before Width: | Height: | Size: 106 KiB |
BIN
superset/assets/images/tutorial/tutorial_01_sources_database.png
Normal file
|
After Width: | Height: | Size: 19 KiB |
BIN
superset/assets/images/tutorial/tutorial_02_add_database.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
superset/assets/images/tutorial/tutorial_03_database_name.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
|
After Width: | Height: | Size: 52 KiB |
BIN
superset/assets/images/tutorial/tutorial_05_connection_popup.png
Normal file
|
After Width: | Height: | Size: 82 KiB |
BIN
superset/assets/images/tutorial/tutorial_06_list_of_tables.png
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
superset/assets/images/tutorial/tutorial_07_save_button.png
Normal file
|
After Width: | Height: | Size: 8.0 KiB |
BIN
superset/assets/images/tutorial/tutorial_08_sources_tables.png
Normal file
|
After Width: | Height: | Size: 18 KiB |
BIN
superset/assets/images/tutorial/tutorial_09_add_new_table.png
Normal file
|
After Width: | Height: | Size: 18 KiB |
BIN
superset/assets/images/tutorial/tutorial_10_table_name.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
superset/assets/images/tutorial/tutorial_11_choose_db.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
|
After Width: | Height: | Size: 32 KiB |
|
After Width: | Height: | Size: 32 KiB |
BIN
superset/assets/images/tutorial/tutorial_14_field_config.png
Normal file
|
After Width: | Height: | Size: 60 KiB |
BIN
superset/assets/images/tutorial/tutorial_15_click_table_name.png
Normal file
|
After Width: | Height: | Size: 7.7 KiB |
|
After Width: | Height: | Size: 14 KiB |
|
After Width: | Height: | Size: 11 KiB |
BIN
superset/assets/images/tutorial/tutorial_18_choose_metric.png
Normal file
|
After Width: | Height: | Size: 12 KiB |
BIN
superset/assets/images/tutorial/tutorial_19_click_query.png
Normal file
|
After Width: | Height: | Size: 5.6 KiB |
|
After Width: | Height: | Size: 5.2 KiB |
BIN
superset/assets/images/tutorial/tutorial_21_group_by.png
Normal file
|
After Width: | Height: | Size: 6.7 KiB |
BIN
superset/assets/images/tutorial/tutorial_22_group_by_result.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
|
After Width: | Height: | Size: 8.0 KiB |
BIN
superset/assets/images/tutorial/tutorial_24_max_metric.png
Normal file
|
After Width: | Height: | Size: 6.6 KiB |
BIN
superset/assets/images/tutorial/tutorial_25_max_temp_filter.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
superset/assets/images/tutorial/tutorial_26_row_limit.png
Normal file
|
After Width: | Height: | Size: 4.8 KiB |
BIN
superset/assets/images/tutorial/tutorial_27_top_10_max_temps.png
Normal file
|
After Width: | Height: | Size: 49 KiB |
BIN
superset/assets/images/tutorial/tutorial_28_bar_chart.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
|
After Width: | Height: | Size: 20 KiB |
|
After Width: | Height: | Size: 74 KiB |
|
After Width: | Height: | Size: 33 KiB |
|
After Width: | Height: | Size: 24 KiB |
BIN
superset/assets/images/tutorial/tutorial_33_dashboard.png
Normal file
|
After Width: | Height: | Size: 5.1 KiB |
|
After Width: | Height: | Size: 6.5 KiB |