Compare commits

...

111 Commits

Author SHA1 Message Date
GitHub Action
bdd0304c32 chore(🦾): bump python sqlalchemy-utils subpackage(s) 2024-12-02 17:42:13 +00:00
Vitor Avila
d66ac9f3f4 fix(Databricks): Escape catalog and schema names in pre-queries (#31199) 2024-12-02 14:00:00 -03:00
Kamil Gabryjelski
06fb330569 perf: Optimize DashboardPage and SyncDashboardState (#31244) 2024-12-02 16:10:02 +01:00
Kamil Gabryjelski
ce0e06a935 perf: Optimize native filters and cross filters (#31243) 2024-12-02 15:42:34 +01:00
Kamil Gabryjelski
5006f97f70 perf: Optimize dashboard grid components (#31240) 2024-12-02 15:05:29 +01:00
Kamil Gabryjelski
24d001e498 perf: Optimize Dashboard components (#31242) 2024-12-02 15:04:39 +01:00
Kamil Gabryjelski
eab888c63a perf: Optimize dashboard chart-related components (#31241) 2024-12-02 15:04:29 +01:00
alexandrusoare
3d3c09d299 chore(Tooltip): Upgrade Tooltip to Ant Design 5 (#31182)
Co-authored-by: Diego Pucci <diegopucci.me@gmail.com>
2024-11-30 12:37:40 +02:00
Damian Pendrak
97dde8c485 fix: x axis title disappears when editing bar chart (#30821) 2024-11-29 16:09:03 +01:00
Vitor Avila
14682b9054 fix(embedded): Hide anchor links in embedded mode (#31194) 2024-11-29 12:06:44 -03:00
Michael S. Molina
93ba8e16c3 refactor: Creates the VizType enum (#31193) 2024-11-29 10:05:02 -03:00
Michael S. Molina
dbcb473040 fix: Time-series Line Chart Display unnecessary total (#31181) 2024-11-27 10:29:09 -08:00
scottduszy
f0811c8863 docs: update slack alert instructions to work with V2 slack API (#31165) 2024-11-27 10:19:18 -08:00
github-actions[bot]
0166db9663 chore(🦾): bump python sqlglot 23.6.3 -> 23.15.8 (#28461)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-26 15:48:38 -08:00
github-actions[bot]
c26f073134 chore(🦾): bump python pyparsing 3.1.2 -> 3.2.0 (#31171)
Co-authored-by: GitHub Action <action@github.com>
2024-11-26 15:48:13 -08:00
Beto Dealmeida
45668e31fc feat: make sure to quote formulas on Excel export (#31166) 2024-11-26 18:16:44 -05:00
Maxime Beauchemin
529aed5da1 chore(deps): cap async_timeout<5.0.0 (#31170) 2024-11-26 14:12:04 -08:00
Beto Dealmeida
09802acf0d refactor: remove more sqlparse (#31032) 2024-11-26 17:01:07 -05:00
github-actions[bot]
9224051b80 chore(🦾): bump python importlib-metadata 7.1.0 -> 8.5.0 (#31126)
Co-authored-by: GitHub Action <action@github.com>
2024-11-26 13:39:08 -08:00
Maxime Beauchemin
fd9d3301f6 chore: deprecate tox in favor of act (#29382) 2024-11-26 13:27:37 -08:00
Beto Dealmeida
68499a1199 feat: purge OAuth2 tokens when DB changes (#31164) 2024-11-26 15:57:01 -05:00
Geido
f077323e6f fix(Dashboard): Backward compatible shared_label_colors field (#31163) 2024-11-26 20:34:06 +02:00
Beto Dealmeida
7f2e752796 fix: check orderby (#31156) 2024-11-26 10:15:06 -05:00
Maxime Beauchemin
97683ec052 fix: helm chart deploy to open PRs to now-protected gh-pages branch (#31155) 2024-11-25 18:38:12 -08:00
github-actions[bot]
73164c61ad chore(🦾): bump python billiard 4.2.0 -> 4.2.1 (#31109)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:11:49 -08:00
github-actions[bot]
564c168420 chore(🦾): bump python flask-limiter 3.7.0 -> 3.8.0 (#31138)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:11:21 -08:00
github-actions[bot]
95f4fe0cb8 chore(🦾): bump python mako 1.3.5 -> 1.3.6 (#31140)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:10:58 -08:00
github-actions[bot]
bbc6d374ea chore(🦾): bump python celery subpackage(s) (#31127)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:10:23 -08:00
github-actions[bot]
316da5e5f5 chore(🦾): bump python humanize 4.9.0 -> 4.11.0 (#31128)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:10:05 -08:00
github-actions[bot]
e2b9b8e9fd chore(🦾): bump python simplejson 3.19.2 -> 3.19.3 (#31129)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:09:49 -08:00
github-actions[bot]
7154b8d40f chore(🦾): bump python numexpr 2.10.1 -> 2.10.2 (#31130)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:09:32 -08:00
github-actions[bot]
fcb3ff3a41 chore(🦾): bump python slack-sdk 3.27.2 -> 3.33.4 (#31132)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:09:12 -08:00
github-actions[bot]
342cfc41ec chore(🦾): bump python pyopenssl 24.1.0 -> 24.2.1 (#31133)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:08:54 -08:00
github-actions[bot]
aa7d3b0f96 chore(🦾): bump python dnspython 2.6.1 -> 2.7.0 (#31135)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:06:53 -08:00
github-actions[bot]
3e28bd2cfa chore(🦾): bump python zstandard 0.22.0 -> 0.23.0 (#31136)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:06:35 -08:00
github-actions[bot]
cc1eec69df chore(🦾): bump python limits 3.12.0 -> 3.13.0 (#31137)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:06:18 -08:00
github-actions[bot]
3fa0de4293 chore(🦾): bump python flask-jwt-extended 4.6.0 -> 4.7.1 (#31139)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 17:05:59 -08:00
github-actions[bot]
2ad8af71b5 chore(🦾): bump python gunicorn 22.0.0 -> 23.0.0 (#31125)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 16:52:11 -08:00
github-actions[bot]
b648cc1168 chore(🦾): bump python zipp 3.19.0 -> 3.21.0 (#31124)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 16:50:22 -08:00
github-actions[bot]
f24bf873bf chore(🦾): bump python flask-compress 1.15 -> 1.17 (#31123)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 16:50:06 -08:00
github-actions[bot]
e0a5033596 chore(🦾): bump python dill 0.3.8 -> 0.3.9 (#31108)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 16:49:31 -08:00
github-actions[bot]
ef14d58c64 chore(🦾): bump python email-validator 2.1.1 -> 2.2.0 (#31116)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 16:48:15 -08:00
Michael S. Molina
547a4adef5 fix: Remove unwanted commit on Trino's handle_cursor (#31154) 2024-11-25 17:42:31 -03:00
Evan Rusackas
5256a2f194 chore(asf): add gh-pages to protected branches (#31153) 2024-11-25 13:31:52 -07:00
github-actions[bot]
0560c2615d chore(🦾): bump python async-timeout 4.0.3 -> 5.0.1 (#31122)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 10:18:23 -08:00
Michael S. Molina
ff282492a1 fix: Revert "feat(trino): Add functionality to upload data (#29164)" (#31151) 2024-11-25 15:16:28 -03:00
github-actions[bot]
312dc1c749 chore(🦾): bump python prompt-toolkit 3.0.44 -> 3.0.48 (#31121)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 10:05:13 -08:00
github-actions[bot]
1e26c34758 chore(🦾): bump python sqlparse 0.5.0 -> 0.5.2 (#31119)
Co-authored-by: GitHub Action <action@github.com>
2024-11-25 10:04:43 -08:00
Maxime Beauchemin
decaba72c3 fix: try to re-enable gh-pages (#31152) 2024-11-25 09:58:30 -08:00
Maxime Beauchemin
7e8c77e636 fix: touch helm/ folder to trigger doc deploy in CI (#31148) 2024-11-25 09:08:34 -08:00
alexandrusoare
ba99980cf4 refactor(List): Upgrade List from antdesign4 to antdesign5 (#30963) 2024-11-25 16:44:17 +02:00
github-actions[bot]
c62f722f99 chore(🦾): bump python mysqlclient 2.2.4 -> 2.2.6 (#31113)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:36:20 -08:00
github-actions[bot]
3fd23508bc chore(🦾): bump python grpcio-status subpackage(s) (#31114)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:34:54 -08:00
github-actions[bot]
9ff9e0299b chore(🦾): bump python cycler 0.11.0 -> 0.12.1 (#31112)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:34:35 -08:00
github-actions[bot]
6488ced3d3 chore(🦾): bump python croniter 2.0.5 -> 5.0.1 (#31091)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:33:02 -08:00
github-actions[bot]
9a2be95159 chore(🦾): bump python google-auth 2.29.0 -> 2.36.0 (#31107)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:25:23 -08:00
github-actions[bot]
ef4e03c9fe chore(🦾): bump python psutil 6.0.0 -> 6.1.0 (#31106)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:25:05 -08:00
github-actions[bot]
ca2f0288e5 chore(🦾): bump python dnspython 2.6.1 -> 2.7.0 (#31105)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:24:46 -08:00
github-actions[bot]
ca63760a4b chore(🦾): bump python markdown 3.6 -> 3.7 (#31102)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:23:37 -08:00
github-actions[bot]
83924f7e10 chore(🦾): bump python pluggy 1.4.0 -> 1.5.0 (#31101)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:23:18 -08:00
github-actions[bot]
c4a56c3f6e chore(🦾): bump python sqloxide 0.1.43 -> 0.1.51 (#31100)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:22:51 -08:00
github-actions[bot]
cf134ab3aa chore(🦾): bump python wheel 0.43.0 -> 0.45.1 (#31099)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:22:28 -08:00
github-actions[bot]
043c585008 chore(🦾): bump python pyproject-api 1.6.1 -> 1.8.0 (#31098)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:22:08 -08:00
github-actions[bot]
0d346d4414 chore(🦾): bump python pytest-cov 5.0.0 -> 6.0.0 (#31096)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:21:50 -08:00
github-actions[bot]
9067371234 chore(🦾): bump python chardet 5.1.0 -> 5.2.0 (#31094)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:21:22 -08:00
github-actions[bot]
40fe05c5e2 chore(🦾): bump python jsonpath-ng 1.6.1 -> 1.7.0 (#31093)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:20:26 -08:00
github-actions[bot]
e3bdfb5def chore(🦾): bump python sshtunnel subpackage(s) (#31092)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 22:20:11 -08:00
github-actions[bot]
55f0713a2f chore(🦾): bump python mako 1.3.5 -> 1.3.6 (#31097)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-24 22:19:16 -08:00
github-actions[bot]
5aee59cc3a chore(🦾): bump python tomlkit 0.12.5 -> 0.13.2 (#31090)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:28:45 -08:00
github-actions[bot]
94d3774d9e chore(🦾): bump python isodate 0.6.1 -> 0.7.2 (#31087)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:28:28 -08:00
github-actions[bot]
b665254f39 chore(🦾): bump python db-dtypes 1.2.0 -> 1.3.1 (#31082)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:28:11 -08:00
github-actions[bot]
4dc8cce8e8 chore(🦾): bump python trino 0.328.0 -> 0.330.0 (#31081)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:27:52 -08:00
github-actions[bot]
d206a20ce7 chore(🦾): bump python certifi 2024.2.2 -> 2024.8.30 (#31089)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:27:18 -08:00
github-actions[bot]
6fcc282a4e chore(🦾): bump python pydata-google-auth 1.7.0 -> 1.9.0 (#31088)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:26:58 -08:00
github-actions[bot]
93c35a7ba5 chore(🦾): bump python pyproject-hooks 1.0.0 -> 1.2.0 (#31086)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:26:36 -08:00
github-actions[bot]
9dfa8d5f8f chore(🦾): bump python sqlalchemy-bigquery 1.11.0 -> 1.12.0 (#31085)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:26:14 -08:00
github-actions[bot]
87504056fe chore(🦾): bump python kiwisolver 1.4.5 -> 1.4.7 (#31084)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:25:47 -08:00
github-actions[bot]
429c18f9e8 chore(🦾): bump python coverage subpackage(s) (#31083)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:25:33 -08:00
github-actions[bot]
5bddc81f60 chore(🦾): bump python cfgv 3.3.1 -> 3.4.0 (#31077)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:25:12 -08:00
github-actions[bot]
9837b4a61e chore(🦾): bump python fonttools 4.51.0 -> 4.55.0 (#31075)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:24:49 -08:00
github-actions[bot]
454f143661 chore(🦾): bump python pyasn1-modules 0.4.0 -> 0.4.1 (#31076)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:03:01 -08:00
github-actions[bot]
7376dfc6e9 chore(🦾): bump python pyhive subpackage(s) (#31079)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 21:00:56 -08:00
github-actions[bot]
838d47d578 chore(🦾): bump python google-cloud-core 2.3.2 -> 2.4.1 (#31078)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 20:57:49 -08:00
github-actions[bot]
14e81d0a9a chore(🦾): bump python sqlalchemy-utils subpackage(s) (#31048)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-24 20:45:06 -08:00
github-actions[bot]
f68c2b2454 chore(🦾): bump python amqp 5.2.0 -> 5.3.1 (#31073)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 20:44:29 -08:00
github-actions[bot]
814c3dfecc chore(🦾): bump python cachetools 5.3.3 -> 5.5.0 (#31071)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 20:44:05 -08:00
github-actions[bot]
b8aade776b chore(🦾): bump python kombu 5.3.7 -> 5.4.2 (#31074)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 20:43:26 -08:00
github-actions[bot]
e092e6002d chore(🦾): bump python pyyaml 6.0.1 -> 6.0.2 (#31066)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 18:10:24 -08:00
github-actions[bot]
673754d16e chore(🦾): bump python tqdm 4.66.4 -> 4.67.1 (#31068)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 18:10:06 -08:00
github-actions[bot]
27deeb2f51 chore(🦾): bump python proto-plus 1.22.2 -> 1.25.0 (#31069)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 18:09:38 -08:00
github-actions[bot]
9a7a84c7a0 chore(🦾): bump python importlib-resources 6.4.0 -> 6.4.5 (#31067)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 18:09:13 -08:00
github-actions[bot]
a3d2588313 chore(🦾): bump python apispec subpackage(s) (#31062)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 17:56:17 -08:00
github-actions[bot]
5c87fee282 chore(🦾): bump python deprecated 1.2.14 -> 1.2.15 (#31056)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 17:55:07 -08:00
github-actions[bot]
b24323d500 chore(🦾): bump python pre-commit 3.7.1 -> 4.0.1 (#31050)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-24 17:34:36 -08:00
github-actions[bot]
824aca85d0 chore(🦾): bump python charset-normalizer 3.3.2 -> 3.4.0 (#31064)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 17:31:13 -08:00
github-actions[bot]
1e4098a29e chore(🦾): bump python ruff 0.4.5 -> 0.8.0 (#31001)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-24 17:30:48 -08:00
github-actions[bot]
3aa8f32ca9 chore(🦾): bump python googleapis-common-protos 1.63.0 -> 1.66.0 (#31049)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-24 17:15:24 -08:00
github-actions[bot]
bf42ea70ba chore(🦾): bump python cron-descriptor 1.4.3 -> 1.4.5 (#31046)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 16:31:38 -08:00
github-actions[bot]
d69da5f0f5 chore(🦾): bump python flask-wtf 1.2.1 -> 1.2.2 (#31052)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 16:23:35 -08:00
Rida KEJJI
078257dd1b docs: updated the install process in pypi.mdx (#31044)
Co-authored-by: Rida KEJJI <42012627+ShameGod@users.noreply.github.com>
2024-11-24 16:23:17 -08:00
github-actions[bot]
8c1c2570b3 chore(🦾): bump python nh3 0.2.17 -> 0.2.18 (#31054)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 16:22:15 -08:00
github-actions[bot]
a80803566d chore(🦾): bump python marshmallow 3.21.2 -> 3.23.1 (#31045)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 16:21:48 -08:00
github-actions[bot]
f551f5b7b6 chore(🦾): bump python idna 3.7 -> 3.10 (#31041)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 13:38:14 -08:00
github-actions[bot]
1978cde4f1 chore(🦾): bump python pyjwt 2.8.0 -> 2.10.0 (#31042)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 13:37:57 -08:00
github-actions[bot]
c5f6cc6382 chore(🦾): bump python et-xmlfile 1.1.0 -> 2.0.0 & remove pyhive[hive] from requirements/development.in (#31040)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2024-11-24 13:35:50 -08:00
Birk Skyum
e9e2c0bee8 chore(legacy-plugin-chart-map-box): replace viewport-mercator-project with @math.gl/web-mercator (#30651) 2024-11-24 20:00:26 +00:00
github-actions[bot]
33a9817388 chore(🦾): bump python pandas subpackage(s) (#31004)
Co-authored-by: GitHub Action <action@github.com>
2024-11-24 10:05:59 -08:00
Geido
91301bcd5b fix(Dashboard): Ensure shared label colors are updated (#31031) 2024-11-23 16:39:40 +02:00
Maxime Beauchemin
67ad7da5cc fix: ephemeral environments missing env var (#31035) 2024-11-22 17:39:34 -08:00
Maxime Beauchemin
e0deb704f9 feat: make ephemeral env use supersetbot + deprecate build_docker.py (#30870) 2024-11-22 14:19:08 -08:00
Kamil Gabryjelski
abf3790ea6 chore: Cleanup code related to MetadataBar, fix types (#31030) 2024-11-22 16:02:13 +01:00
314 changed files with 5200 additions and 5135 deletions

View File

@@ -53,6 +53,9 @@ github:
merge: false merge: false
rebase: false rebase: false
ghp_branch: gh-pages
ghp_path: /
protected_branches: protected_branches:
master: master:
required_status_checks: required_status_checks:
@@ -88,3 +91,10 @@ github:
required_approving_review_count: 1 required_approving_review_count: 1
required_signatures: false required_signatures: false
gh-pages:
required_pull_request_reviews:
dismiss_stale_reviews: false
require_code_owner_reviews: true
required_approving_review_count: 1
required_signatures: false

1
.gitattributes vendored
View File

@@ -1,2 +1,3 @@
docker/**/*.sh text eol=lf docker/**/*.sh text eol=lf
*.svg binary *.svg binary
*.ipynb binary

View File

@@ -42,7 +42,7 @@ runs:
- name: Install dependencies - name: Install dependencies
run: | run: |
if [ "${{ inputs.install-superset }}" = "true" ]; then if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev build-essential
pip install --upgrade pip setuptools wheel pip install --upgrade pip setuptools wheel
if [ "${{ inputs.requirements-type }}" = "dev" ]; then if [ "${{ inputs.requirements-type }}" = "dev" ]; then
pip install -r requirements/development.txt pip install -r requirements/development.txt

View File

@@ -14,6 +14,12 @@ on:
required: true required: true
description: Max number of PRs to open (0 for no limit) description: Max number of PRs to open (0 for no limit)
default: 5 default: 5
extra-flags:
required: false
default: --only-base
description: Additional flags to pass to the bump-python command
#schedule:
# - cron: '0 0 * * *' # Runs daily at midnight UTC
jobs: jobs:
bump-python-package: bump-python-package:
@@ -59,10 +65,13 @@ jobs:
GROUP_OPT="-g ${{ github.event.inputs.group }}" GROUP_OPT="-g ${{ github.event.inputs.group }}"
fi fi
EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
supersetbot bump-python \ supersetbot bump-python \
--verbose \ --verbose \
--use-current-repo \ --use-current-repo \
--include-subpackages \ --include-subpackages \
--limit ${{ github.event.inputs.limit }} \ --limit ${{ github.event.inputs.limit }} \
$PACKAGE_OPT \ $PACKAGE_OPT \
$GROUP_OPT $GROUP_OPT \
$EXTRA_FLAGS

View File

@@ -1,30 +1,25 @@
name: Ephemeral env workflow name: Ephemeral env workflow
# Example manual trigger: gh workflow run ephemeral-env.yml --ref fix_ephemerals --field comment_body="/testenv up" --field issue_number=666
on: on:
issue_comment: issue_comment:
types: [created] types: [created]
workflow_dispatch:
inputs:
comment_body:
description: 'Comment body to simulate /testenv command'
required: true
default: '/testenv up'
issue_number:
description: 'Issue or PR number'
required: true
jobs: jobs:
config:
runs-on: "ubuntu-22.04"
if: github.event.issue.pull_request
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
ephemeral-env-comment: ephemeral-env-comment:
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true cancel-in-progress: true
needs: config
if: needs.config.outputs.has-secrets
name: Evaluate ephemeral env comment trigger (/testenv) name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
permissions: permissions:
@@ -44,18 +39,18 @@ jobs:
with: with:
result-encoding: string result-encoding: string
script: | script: |
const pattern = /^\/testenv (up|down)/ const pattern = /^\/testenv (up|down)/;
const result = pattern.exec(context.payload.comment.body) const result = pattern.exec('${{ github.event.inputs.comment_body || github.event.comment.body }}');
return result === null ? 'noop' : result[1] return result === null ? 'noop' : result[1];
- name: Eval comment body for feature flags - name: Looking for feature flags
uses: actions/github-script@v7 uses: actions/github-script@v7
id: eval-feature-flags id: eval-feature-flags
with: with:
script: | script: |
const pattern = /FEATURE_(\w+)=(\w+)/g; const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = []; let results = [];
[...context.payload.comment.body.matchAll(pattern)].forEach(match => { [...'${{ github.event.inputs.comment_body || github.event.comment.body }}'.matchAll(pattern)].forEach(match => {
const config = { const config = {
name: `SUPERSET_FEATURE_${match[1]}`, name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2], value: match[2],
@@ -67,24 +62,48 @@ jobs:
- name: Limit to committers - name: Limit to committers
if: > if: >
steps.eval-body.outputs.result != 'noop' && steps.eval-body.outputs.result != 'noop' &&
github.event_name == 'issue_comment' &&
github.event.comment.author_association != 'MEMBER' && github.event.comment.author_association != 'MEMBER' &&
github.event.comment.author_association != 'OWNER' github.event.comment.author_association != 'OWNER'
uses: actions/github-script@v7 uses: actions/github-script@v7
with: with:
github-token: ${{github.token}} github-token: ${{ github.token }}
script: | script: |
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.' const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.';
github.rest.issues.createComment({ github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }}, issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner, owner: context.repo.owner,
repo: context.repo.repo, repo: context.repo.repo,
body: errMsg body: errMsg
}) });
core.setFailed(errMsg) core.setFailed(errMsg);
- name: Reply with confirmation comment
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const issueNumber = ${{ github.event.inputs.issue_number || github.event.issue.number }};
const user = '${{ github.event.comment.user.login || github.actor }}';
const action = '${{ steps.eval-body.outputs.result }}';
const runId = context.runId;
const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
const body = action === 'noop'
? `@${user} No ephemeral environment action detected. Please use '/testenv up' or '/testenv down'. [View workflow run](${workflowUrl}).`
: `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;
if (action !== 'noop') {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
}
ephemeral-docker-build: ephemeral-docker-build:
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-build
cancel-in-progress: true cancel-in-progress: true
needs: ephemeral-env-comment needs: ephemeral-env-comment
name: ephemeral-docker-build name: ephemeral-docker-build
@@ -98,9 +117,9 @@ jobs:
const request = { const request = {
owner: context.repo.owner, owner: context.repo.owner,
repo: context.repo.repo, repo: context.repo.repo,
pull_number: ${{ github.event.issue.number }}, pull_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
} };
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`) core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`);
const pr = await github.rest.pulls.get(request); const pr = await github.rest.pulls.get(request);
return pr.data; return pr.data;
@@ -121,12 +140,17 @@ jobs:
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Build ephemeral env image - name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: | run: |
./scripts/build_docker.py \ supersetbot docker \
"ci" \ --preset ci \
"pull_request" \ --platform linux/amd64 \
--build_context_ref ${{ github.event.issue.number }} --context-ref "$RELEASE"
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4 uses: aws-actions/configure-aws-credentials@v4
@@ -146,7 +170,7 @@ jobs:
ECR_REPOSITORY: superset-ci ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
run: | run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY docker push -a $ECR_REGISTRY/$ECR_REPOSITORY
ephemeral-env-up: ephemeral-env-up:
@@ -181,22 +205,22 @@ jobs:
aws ecr describe-images \ aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \ --registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \ --repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.issue.number }}-ci --image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
- name: Fail on missing container image - name: Fail on missing container image
if: steps.check-image.outcome == 'failure' if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7 uses: actions/github-script@v7
with: with:
github-token: ${{github.token}} github-token: ${{ github.token }}
script: | script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.' const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
github.rest.issues.createComment({ github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }}, issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
owner: context.repo.owner, owner: context.repo.owner,
repo: context.repo.repo, repo: context.repo.repo,
body: errMsg body: errMsg
}) });
core.setFailed(errMsg) core.setFailed(errMsg);
- name: Fill in the new image ID in the Amazon ECS task definition - name: Fill in the new image ID in the Amazon ECS task definition
id: task-def id: task-def
@@ -204,7 +228,7 @@ jobs:
with: with:
task-definition: .github/workflows/ecs-task-definition.json task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
- name: Update env vars in the Amazon ECS task definition - name: Update env vars in the Amazon ECS task definition
run: | run: |
@@ -213,30 +237,29 @@ jobs:
- name: Describe ECS service - name: Describe ECS service
id: describe-services id: describe-services
run: | run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service - name: Create ECS service
if: steps.describe-services.outputs.active != 'true'
id: create-service id: create-service
if: steps.describe-services.outputs.active != 'true'
env: env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974 ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91 ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
run: | run: |
aws ecs create-service \ aws ecs create-service \
--cluster superset-ci \ --cluster superset-ci \
--service-name pr-${{ github.event.issue.number }}-service \ --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
--task-definition superset-ci \ --task-definition superset-ci \
--launch-type FARGATE \ --launch-type FARGATE \
--desired-count 1 \ --desired-count 1 \
--platform-version LATEST \ --platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \ --network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }} --tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition - name: Deploy Amazon ECS task definition
id: deploy-task id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2 uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with: with:
task-definition: ${{ steps.task-def.outputs.task-definition }} task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.issue.number }}-service service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
cluster: superset-ci cluster: superset-ci
wait-for-service-stability: true wait-for-service-stability: true
wait-for-minutes: 10 wait-for-minutes: 10
@@ -244,18 +267,15 @@ jobs:
- name: List tasks - name: List tasks
id: list-tasks id: list-tasks
run: | run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface - name: Get network interface
id: get-eni id: get-eni
run: | run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name==\"networkInterfaceId\")) | .[0] | .value')" >> $GITHUB_OUTPUT
- name: Get public IP - name: Get public IP
id: get-ip id: get-ip
run: | run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success) - name: Comment (success)
if: ${{ success() }} if: ${{ success() }}
uses: actions/github-script@v7 uses: actions/github-script@v7
@@ -263,12 +283,11 @@ jobs:
github-token: ${{github.token}} github-token: ${{github.token}}
script: | script: |
github.rest.issues.createComment({ github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }}, issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
owner: context.repo.owner, owner: context.repo.owner,
repo: context.repo.repo, repo: context.repo.repo,
body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.' body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
}) })
- name: Comment (failure) - name: Comment (failure)
if: ${{ failure() }} if: ${{ failure() }}
uses: actions/github-script@v7 uses: actions/github-script@v7
@@ -276,8 +295,8 @@ jobs:
github-token: ${{github.token}} github-token: ${{github.token}}
script: | script: |
github.rest.issues.createComment({ github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }}, issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
owner: context.repo.owner, owner: context.repo.owner,
repo: context.repo.repo, repo: context.repo.repo,
body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.' body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
}) })

View File

@@ -1,4 +1,4 @@
name: Lint and Test Charts name: "Helm: lint and test charts"
on: on:
pull_request: pull_request:

View File

@@ -1,4 +1,8 @@
name: Release Charts # This workflow automates the release process for Helm charts.
# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
# allowing the changes to be reviewed and merged manually.
name: "Helm: release charts"
on: on:
push: push:
@@ -7,18 +11,28 @@ on:
- "[0-9].[0-9]*" - "[0-9].[0-9]*"
paths: paths:
- "helm/**" - "helm/**"
workflow_dispatch:
inputs:
ref:
description: "The branch, tag, or commit SHA to check out"
required: false
default: "master"
jobs: jobs:
release: release:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
permissions: permissions:
contents: write contents: write
pull-requests: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps: steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - name: Checkout code
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
persist-credentials: false ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
submodules: recursive submodules: recursive
fetch-depth: 0 fetch-depth: 0
@@ -35,11 +49,77 @@ jobs:
- name: Add bitnami repo dependency - name: Add bitnami repo dependency
run: helm repo add bitnami https://charts.bitnami.com/bitnami run: helm repo add bitnami https://charts.bitnami.com/bitnami
- name: Fetch/list all tags
run: |
# Debugging tags
git fetch --tags --force
git tag -d superset-helm-chart-0.13.4 || true
echo "DEBUG TAGS"
git show-ref --tags
- name: Create unique pages branch name
id: vars
run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
- name: Force recreate branch from gh-pages
run: |
# Ensure a clean working directory
git reset --hard
git clean -fdx
git checkout -b local_gha_temp
git submodule update
# Fetch the latest gh-pages branch
git fetch origin gh-pages
# Check out and reset the target branch based on gh-pages
git checkout -B ${{ env.branch_name }} origin/gh-pages
# Remove submodules from the branch
git submodule deinit -f --all
# Force push to the remote branch
git push origin ${{ env.branch_name }} --force
# Return to the original branch
git checkout local_gha_temp
- name: Fetch/list all tags
run: |
git submodule update
cat .github/actions/chart-releaser-action/action.yml
- name: Run chart-releaser - name: Run chart-releaser
uses: ./.github/actions/chart-releaser-action uses: ./.github/actions/chart-releaser-action
with: with:
version: v1.6.0
charts_dir: helm charts_dir: helm
mark_as_latest: false mark_as_latest: false
pages_branch: ${{ env.branch_name }}
env: env:
CR_TOKEN: "${{ github.token }}" CR_TOKEN: "${{ github.token }}"
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}" CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
- name: Open Pull Request
uses: actions/github-script@v7
with:
script: |
const branchName = '${{ env.branch_name }}';
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
if (!branchName) {
throw new Error("Branch name is not defined.");
}
const pr = await github.rest.pulls.create({
owner,
repo,
title: `Helm chart release for ${branchName}`,
head: branchName,
base: "gh-pages", // Adjust if the target branch is different
body: `This PR releases Helm charts to the gh-pages branch.`,
});
core.info(`Pull request created: ${pr.data.html_url}`);
env:
BRANCH_NAME: ${{ env.branch_name }}

View File

@@ -142,6 +142,7 @@ jobs:
- name: Python unit tests (PostgreSQL) - name: Python unit tests (PostgreSQL)
if: steps.check.outputs.python if: steps.check.outputs.python
run: | run: |
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow' ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage - name: Upload code coverage
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v4

View File

@@ -16,11 +16,11 @@
# #
repos: repos:
- repo: https://github.com/MarcoGorelli/auto-walrus - repo: https://github.com/MarcoGorelli/auto-walrus
rev: v0.2.2 rev: 0.3.4
hooks: hooks:
- id: auto-walrus - id: auto-walrus
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.3.0 rev: v1.13.0
hooks: hooks:
- id: mypy - id: mypy
args: [--check-untyped-defs] args: [--check-untyped-defs]
@@ -39,11 +39,11 @@ repos:
types-Markdown, types-Markdown,
] ]
- repo: https://github.com/peterdemin/pip-compile-multi - repo: https://github.com/peterdemin/pip-compile-multi
rev: v2.6.2 rev: v2.6.4
hooks: hooks:
- id: pip-compile-multi-verify - id: pip-compile-multi-verify
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0 rev: v5.0.0
hooks: hooks:
- id: check-docstring-first - id: check-docstring-first
- id: check-added-large-files - id: check-added-large-files
@@ -56,7 +56,7 @@ repos:
exclude: ^.*\.(snap) exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"] args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/pre-commit/mirrors-prettier - repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0 # Use the sha or tag you want to point at rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
hooks: hooks:
- id: prettier - id: prettier
additional_dependencies: additional_dependencies:
@@ -70,12 +70,12 @@ repos:
- id: blacklist - id: blacklist
args: ["--blacklisted-names=make_url", "--ignore=tests/"] args: ["--blacklisted-names=make_url", "--ignore=tests/"]
- repo: https://github.com/norwoodj/helm-docs - repo: https://github.com/norwoodj/helm-docs
rev: v1.11.0 rev: v1.14.2
hooks: hooks:
- id: helm-docs - id: helm-docs
files: helm files: helm
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.0 rev: v0.8.0
hooks: hooks:
- id: ruff - id: ruff
args: [ --fix ] args: [ --fix ]

View File

@@ -115,7 +115,7 @@ RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache
libldap2-dev \ libldap2-dev \
&& touch superset/static/version_info.json \ && touch superset/static/version_info.json \
&& chown -R superset:superset ./* \ && chown -R superset:superset ./* \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./ COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json # setup.py uses the version information in package.json
@@ -128,7 +128,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
&& pip install --no-cache-dir --upgrade setuptools pip \ && pip install --no-cache-dir --upgrade setuptools pip \
&& pip install --no-cache-dir -r requirements/base.txt \ && pip install --no-cache-dir -r requirements/base.txt \
&& apt-get autoremove -yqq --purge build-essential \ && apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
# Copy the compiled frontend assets # Copy the compiled frontend assets
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
@@ -177,7 +177,7 @@ RUN apt-get update -qq \
libxtst6 \ libxtst6 \
git \ git \
pkg-config \ pkg-config \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/cache/apt/archives/* /var/lib/apt/lists/*
RUN --mount=type=cache,target=/root/.cache/pip \ RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir playwright pip install --no-cache-dir playwright
@@ -199,13 +199,13 @@ RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \ && wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \ && wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \ && ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \ && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* /var/cache/apt/archives/*; \
fi fi
# Installing mysql client os-level dependencies in dev image only because GPL # Installing mysql client os-level dependencies in dev image only because GPL
RUN apt-get install -yqq --no-install-recommends \ RUN apt-get install -yqq --no-install-recommends \
default-libmysqlclient-dev \ default-libmysqlclient-dev \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
COPY --chown=superset:superset requirements/development.txt requirements/ COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \ RUN --mount=type=cache,target=/root/.cache/pip \
@@ -213,7 +213,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
build-essential \ build-essential \
&& pip install --no-cache-dir -r requirements/development.txt \ && pip install --no-cache-dir -r requirements/development.txt \
&& apt-get autoremove -yqq --purge build-essential \ && apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
USER superset USER superset
###################################################################### ######################################################################

View File

@@ -25,7 +25,6 @@ x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on x-superset-depends-on: &superset-depends-on
- db - db
- redis - redis
- superset-checks
x-superset-volumes: &superset-volumes x-superset-volumes: &superset-volumes
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker - ./docker:/app/docker
@@ -131,23 +130,6 @@ services:
- REDIS_PORT=6379 - REDIS_PORT=6379
- REDIS_SSL=false - REDIS_SSL=false
superset-checks:
build:
context: .
target: python-base
cache_from:
- apache/superset-cache:3.10-slim-bookworm
container_name: superset_checks
command: ["/app/scripts/check-env.py"]
env_file:
- path: docker/.env # default
required: true
- path: docker/.env-local # optional override
required: false
user: *superset-user
healthcheck:
disable: true
superset-init: superset-init:
build: build:
<<: *common-build <<: *common-build
@@ -179,6 +161,7 @@ services:
# set this to false if you have perf issues running the npm i; npm run dev in-docker # set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better! # if you do so, you have to run this manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}" SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
container_name: superset_node container_name: superset_node
command: ["/app/docker/docker-frontend.sh"] command: ["/app/docker/docker-frontend.sh"]

View File

@@ -27,6 +27,11 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Building Superset frontend in dev mode inside docker container" echo "Building Superset frontend in dev mode inside docker container"
cd /app/superset-frontend cd /app/superset-frontend
if [ "$NPM_RUN_PRUNE" = "true" ]; then
echo "Running `npm run prune`"
npm run prune
fi
echo "Running `npm install`" echo "Running `npm install`"
npm install npm install

View File

@@ -53,11 +53,14 @@ To send alerts and reports to Slack channels, you need to create a new Slack App
- `incoming-webhook` - `incoming-webhook`
- `files:write` - `files:write`
- `chat:write` - `chat:write`
- `channels:read`
- `groups:read`
4. At the top of the "OAuth and Permissions" section, click "install to workspace". 4. At the top of the "OAuth and Permissions" section, click "install to workspace".
5. Select a default channel for your app and continue. 5. Select a default channel for your app and continue.
(You can post to any channel by inviting your Superset app into that channel). (You can post to any channel by inviting your Superset app into that channel).
6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token in the `SLACK_API_TOKEN` variable of your `superset_config.py`. 6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token in the `SLACK_API_TOKEN` variable of your `superset_config.py`.
7. Restart the service (or run `superset init`) to pull in the new configuration. 7. Ensure the feature flag `ALERT_REPORT_SLACK_V2` is set to True in `superset_config.py`
8. Restart the service (or run `superset init`) to pull in the new configuration.
Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#' e.g. use `alerts` instead of `#alerts`. Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#' e.g. use `alerts` instead of `#alerts`.

View File

@@ -455,17 +455,6 @@ pre-commit install
A series of checks will now run when you make a git commit. A series of checks will now run when you make a git commit.
Alternatively, it is possible to run pre-commit via tox:
```bash
tox -e pre-commit
```
Or by running pre-commit manually:
```bash
pre-commit run --all-files
```
## Linting ## Linting
@@ -474,8 +463,7 @@ pre-commit run --all-files
We use [Pylint](https://pylint.org/) for linting which can be invoked via: We use [Pylint](https://pylint.org/) for linting which can be invoked via:
```bash ```bash
# for python pylint
tox -e pylint
``` ```
In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled. In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
@@ -502,39 +490,108 @@ If using the eslint extension with vscode, put the following in your workspace `
] ]
``` ```
## GitHub Actions and `act`
:::tip
`act` compatibility of Superset's GHAs is not fully tested. Running `act` locally may or may not
work for different actions, and may require fine tunning and local secret-handling.
For those more intricate GHAs that are tricky to run locally, we recommend iterating
directly on GHA's infrastructure, by pushing directly on a branch and monitoring GHA logs.
For more targetted iteration, see the `gh workflow run --ref {BRANCH}` subcommand of the GitHub CLI.
:::
For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
can find all of the workflows and other assets under the `.github/` folder. This includes:
- running the backend unit test suites (`tests/`)
- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
- linting the codebase, including all Python, Typescript and Javascript, yaml and beyond
- checking for all sorts of other rules conventions
When you open a pull request (PR), the appropriate GitHub Actions (GHA) workflows will
automatically run depending on the changes in your branch. It's perfectly reasonable
(and required!) to rely on this automation. However, the downside is that it's mostly an
all-or-nothing approach and doesn't provide much control to target specific tests or
iterate quickly.
At times, it may be more convenient to run GHA workflows locally. For that purpose
we use [act](https://github.com/nektos/act), a tool that allows you to run GitHub Actions (GHA)
workflows locally. It simulates the GitHub Actions environment, enabling developers to
test and debug workflows on their local machines before pushing changes to the repository. More
on how to use it in the next section.
:::note
In both GHA and `act`, we can run a more complex matrix for our tests, executing against different
database engines (PostgreSQL, MySQL, SQLite) and different versions of Python.
This enables us to ensure compatibility and stability across various environments.
:::
### Using `act`
First, install `act` -> https://nektosact.com/
To list the workflows, simply:
```bash
act --list
```
To run a specific workflow:
```bash
act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
```
In the example above, notice that:
- we target a specific workflow, using `--job`
- we pass a secret using `--secret`, as many jobs require read access (public) to the repo
- we simulate a `pull_request` event by specifying it as the first arg,
similarly, we could simulate a `push` event or something else
- we specify `--container-architecture`, which tends to emulate GHA more reliably
:::note
`act` is a rich tool that offers all sorts of features, allowing you to simulate different
events (pull_request, push, ...), semantics around passing secrets where required and much
more. For more information, refer to [act's documentation](https://nektosact.com/)
:::
:::note
Some jobs require secrets to interact with external systems and accounts that you may
not have in your possession. In those cases you may have to rely on remote CI or parameterize the
job further to target a different environment/sandbox or your own alongside the related
secrets.
:::
---
## Testing ## Testing
### Python Testing ### Python Testing
All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html) #### Unit Tests
a standardized testing framework.
All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via, For unit tests located in `tests/unit_tests/`, it's usually easy to simply run the script locally using:
```bash ```bash
tox -e <environment> pytest tests/unit_tests/*
``` ```
For example, #### Integration Tests
For more complex pytest-defined integration tests (not to be confused with our end-to-end Cypress tests), many tests will require having a working test environment. Some tests require a database, Celery, and potentially other services or libraries installed.
### Running Tests with `act`
To run integration tests locally using `act`, ensure you have followed the setup instructions from the [GitHub Actions and `act`](#github-actions-and-act) section. You can run specific workflows or jobs that include integration tests. For example:
```bash ```bash
tox -e py38 act --job test-python-38 --secret GITHUB_TOKEN=$GITHUB_TOKEN --event pull_request --container-architecture linux/amd64
``` ```
Alternatively, you can run all tests in a single file via, #### Running locally using a test script
```bash There is also a utility script included in the Superset codebase to run Python integration tests. The [readme can be found here](https://github.com/apache/superset/tree/master/scripts/tests).
tox -e <environment> -- tests/test_file.py
```
or for a specific test via,
```bash
tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
```
Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
commands are invoked.
There is also a utility script included in the Superset codebase to run python integration tests. The [readme can be There is also a utility script included in the Superset codebase to run python integration tests. The [readme can be
found here](https://github.com/apache/superset/tree/master/scripts/tests) found here](https://github.com/apache/superset/tree/master/scripts/tests)
@@ -545,7 +602,7 @@ To run all integration tests, for example, run this script from the root directo
scripts/tests/run.sh scripts/tests/run.sh
``` ```
You can run unit tests found in './tests/unit_tests' for example with pytest. It is a simple way to run an isolated test that doesn't need any database setup You can run unit tests found in `./tests/unit_tests` with pytest. It is a simple way to run an isolated test that doesn't need any database setup:
```bash ```bash
pytest ./link_to_test.py pytest ./link_to_test.py
@@ -568,7 +625,7 @@ npm run test -- path/to/file.js
### Integration Testing ### Integration Testing
We use [Cypress](https://www.cypress.io/) for integration tests. Tests can be run by `tox -e cypress`. To open Cypress and explore tests first setup and run test server: We use [Cypress](https://www.cypress.io/) for integration tests. To open Cypress and explore tests first setup and run test server:
```bash ```bash
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config export SUPERSET_CONFIG=tests.integration_tests.superset_test_config

View File

@@ -170,31 +170,10 @@ npm run dev-server
### Python Testing ### Python Testing
All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html) `pytest`, backend by docker-compose is how we recommend running tests locally.
a standardized testing framework.
All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
```bash For a more complex test matrix (against different database backends, python versions, ...) you
tox -e <environment> can rely on our GitHub Actions by simply opening a draft pull request.
```
For example,
```bash
tox -e py38
```
Alternatively, you can run all tests in a single file via,
```bash
tox -e <environment> -- tests/test_file.py
```
or for a specific test via,
```bash
tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
```
Note that the test environment uses a temporary directory for defining the Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test SQLite databases which will be cleared each time before the group of test
@@ -246,13 +225,7 @@ npm run test -- path/to/file.js
### e2e Integration Testing ### e2e Integration Testing
We use [Cypress](https://www.cypress.io/) for end-to-end integration For e2e testing, we recommend that you use a `docker-compose` backed-setup
tests. One easy option to get started quickly is to leverage `tox` to
run the whole suite in an isolated environment.
```bash
tox -e cypress
```
Alternatively, you can go lower level and set things up in your Alternatively, you can go lower level and set things up in your
development environment by following these steps: development environment by following these steps:
@@ -598,17 +571,31 @@ pybabel compile -d superset/translations
### Python ### Python
We use [Pylint](https://pylint.org/) for linting which can be invoked via: We use [Pylint](https://pylint.org/) and [ruff](https://github.com/astral-sh/ruff)
for linting which can be invoked via:
```bash ```
# for python # Run pylint
tox -e pylint pylint superset/
# auto-reformat using ruff
ruff format
# lint check with ruff
ruff check
# lint fix with ruff
ruff check --fix
``` ```
In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which In terms of best practices please avoid blanket disabling of Pylint messages globally
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html) (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions.
Disabling should occur inline as it prevents masking issues and provides context as to why
said message is disabled.
All this is configured to run in pre-commit hooks, which we encourage you to setup
with `pre-commit install`
### TypeScript ### TypeScript

View File

@@ -29,7 +29,7 @@ We have a set of build "presets" that each represent a combination of
parameters for the build, mostly pointing to either different target layer parameters for the build, mostly pointing to either different target layer
for the build, and/or base image. for the build, and/or base image.
Here are the build presets that are exposed through the `build_docker.py` script: Here are the build presets that are exposed through the `supersetbot docker` utility:
- `lean`: The default Docker image, including both frontend and backend. Tags - `lean`: The default Docker image, including both frontend and backend. Tags
without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean` without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean`
@@ -62,8 +62,8 @@ Here are the build presets that are exposed through the `build_docker.py` script
For insights or modifications to the build matrix and tagging conventions, For insights or modifications to the build matrix and tagging conventions,
check the [build_docker.py](https://github.com/apache/superset/blob/master/scripts/build_docker.py) check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
script and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml) subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action. GitHub action.
## Key ARGs in Dockerfile ## Key ARGs in Dockerfile

View File

@@ -95,6 +95,14 @@ perform those operations. In this case, we recommend you set the env var
Simply trigger `npm i && npm run dev`, this should be MUCH faster. Simply trigger `npm i && npm run dev`, this should be MUCH faster.
::: :::
:::tip
Sometimes, your npm-related state can get out-of-wack, running `npm run prune` from
the `superset-frontend/` folder will nuke the various' packages `node_module/` folders
and help you start fresh. In the context of `docker compose` setting
`export NPM_RUN_PRUNE=true` prior to running `docker compose up` will trigger that
from within docker. This will slow down the startup, but will fix various npm-related issues.
:::
### Option #2 - build a set of immutable images from the local branch ### Option #2 - build a set of immutable images from the local branch
```bash ```bash

View File

@@ -77,10 +77,6 @@ versions officially supported by Superset. We'd recommend using a Python version
like [pyenv](https://github.com/pyenv/pyenv) like [pyenv](https://github.com/pyenv/pyenv)
(and also [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)). (and also [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)).
:::tip
To identify the Python version used by the official docker image, see the [Dockerfile](https://github.com/apache/superset/blob/master/Dockerfile). Additional docker images published for newer versions of Python can be found in [this file](https://github.com/apache/superset/blob/master/scripts/build_docker.py).
:::
Let's also make sure we have the latest version of `pip` and `setuptools`: Let's also make sure we have the latest version of `pip` and `setuptools`:
```bash ```bash
@@ -134,21 +130,22 @@ First, start by installing `apache-superset`:
pip install apache-superset pip install apache-superset
``` ```
Then, define mandatory configurations, SECRET_KEY and FLASK_APP:
```bash
export SUPERSET_SECRET_KEY=YOUR-SECRET-KEY
export FLASK_APP=superset
```
Then, you need to initialize the database: Then, you need to initialize the database:
```bash ```bash
superset db upgrade superset db upgrade
``` ```
:::tip
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of SECRET_KEY. Please see [Configuring Superset](/docs/configuration/configuring-superset).
:::
Finish installing by running through the following commands: Finish installing by running through the following commands:
```bash ```bash
# Create an admin user in your metadata database (use `admin` as username to be able to load the examples) # Create an admin user in your metadata database (use `admin` as username to be able to load the examples)
export FLASK_APP=superset
superset fab create-admin superset fab create-admin
# Load some data to play with # Load some data to play with

View File

@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda - name: craig-rueda
email: craig@craigrueda.com email: craig@craigrueda.com
url: https://github.com/craig-rueda url: https://github.com/craig-rueda
version: 0.13.3 version: 0.13.4
dependencies: dependencies:
- name: postgresql - name: postgresql
version: 12.1.6 version: 12.1.6

View File

@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset # superset
![Version: 0.13.3](https://img.shields.io/badge/Version-0.13.3-informational?style=flat-square) ![Version: 0.13.4](https://img.shields.io/badge/Version-0.13.4-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application Apache Superset is a modern, enterprise-ready business intelligence web application

View File

@@ -19,7 +19,8 @@
# This is a YAML-formatted file. # This is a YAML-formatted file.
# Declare variables to be passed into your templates. # Declare variables to be passed into your templates.
# A README is automatically generated from this file to document it, using helm-docs (see https://github.com/norwoodj/helm-docs) # A README is automatically generated from this file to document it,
# using helm-docs (see https://github.com/norwoodj/helm-docs)
# To update it, install helm-docs and run helm-docs from the root of this chart # To update it, install helm-docs and run helm-docs from the root of this chart
# -- Provide a name to override the name of the chart # -- Provide a name to override the name of the chart

View File

@@ -69,7 +69,7 @@ dependencies = [
"nh3>=0.2.11, <0.3", "nh3>=0.2.11, <0.3",
"numpy==1.23.5", "numpy==1.23.5",
"packaging", "packaging",
"pandas[performance]>=2.0.3, <2.1", "pandas[excel,performance]>=2.0.3, <2.1",
"parsedatetime", "parsedatetime",
"paramiko>=3.4.0", "paramiko>=3.4.0",
"pgsanity", "pgsanity",
@@ -90,7 +90,9 @@ dependencies = [
"slack_sdk>=3.19.0, <4", "slack_sdk>=3.19.0, <4",
"sqlalchemy>=1.4, <2", "sqlalchemy>=1.4, <2",
"sqlalchemy-utils>=0.38.3, <0.39", "sqlalchemy-utils>=0.38.3, <0.39",
"sqlglot>=25.24.0,<26", # known breaking changes in sqlglot 25.25.0
#https://github.com/tobymao/sqlglot/blob/main/CHANGELOG.md#v25250---2024-10-14
"sqlglot>=25.24.0,<25.25.0",
"sqlparse>=0.5.0", "sqlparse>=0.5.0",
"tabulate>=0.8.9, <0.9", "tabulate>=0.8.9, <0.9",
"typing-extensions>=4, <5", "typing-extensions>=4, <5",
@@ -135,7 +137,6 @@ gevent = ["gevent>=23.9.1"]
gsheets = ["shillelagh[gsheetsapi]>=1.2.18, <2"] gsheets = ["shillelagh[gsheetsapi]>=1.2.18, <2"]
hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"] hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"]
hive = [ hive = [
"boto3",
"pyhive[hive]>=0.6.5;python_version<'3.11'", "pyhive[hive]>=0.6.5;python_version<'3.11'",
"pyhive[hive_pure_sasl]>=0.7.0", "pyhive[hive_pure_sasl]>=0.7.0",
"tableschema", "tableschema",
@@ -158,7 +159,7 @@ pinot = ["pinotdb>=5.0.0, <6.0.0"]
playwright = ["playwright>=1.37.0, <2"] playwright = ["playwright>=1.37.0, <2"]
postgres = ["psycopg2-binary==2.9.6"] postgres = ["psycopg2-binary==2.9.6"]
presto = ["pyhive[presto]>=0.6.5"] presto = ["pyhive[presto]>=0.6.5"]
trino = ["boto3", "trino>=0.328.0"] trino = ["trino>=0.328.0"]
prophet = ["prophet>=1.1.5, <2"] prophet = ["prophet>=1.1.5, <2"]
redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"] redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"]
rockset = ["rockset-sqlalchemy>=0.0.1, <1"] rockset = ["rockset-sqlalchemy>=0.0.1, <1"]
@@ -181,7 +182,10 @@ development = [
"docker", "docker",
"flask-testing", "flask-testing",
"freezegun", "freezegun",
"greenlet>=2.0.2", # playwright requires greenlet==3.0.3
# submitted a PR to relax deps in 11/2024
# https://github.com/microsoft/playwright-python/pull/2669
"greenlet==3.0.3",
"grpcio>=1.55.3", "grpcio>=1.55.3",
"openapi-spec-validator", "openapi-spec-validator",
"parameterized", "parameterized",
@@ -199,7 +203,6 @@ development = [
"ruff", "ruff",
"sqloxide", "sqloxide",
"statsd", "statsd",
"tox",
] ]
[project.urls] [project.urls]
@@ -236,172 +239,10 @@ disallow_untyped_calls = false
disallow_untyped_defs = false disallow_untyped_defs = false
disable_error_code = "annotation-unchecked" disable_error_code = "annotation-unchecked"
[tool.tox]
legacy_tox_ini = """
# Remember to start celery workers to run celery tests, e.g.
# celery --app=superset.tasks.celery_app:app worker -Ofair -c 2
[testenv]
basepython = python3.10
ignore_basepython_conflict = true
commands =
superset db upgrade
superset init
superset load-test-users
# use -s to be able to use break pointers.
# no args or tests/* can be passed as an argument to run all tests
pytest -s {posargs}
deps =
-rrequirements/development.txt
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
mysql: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
postgres: SUPERSET__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://superset:superset@localhost/test
sqlite: SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db
sqlite: SUPERSET__SQLALCHEMY_EXAMPLES_URI = sqlite:////{envtmpdir}/examples.db
mysql-presto: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
# docker run -p 8080:8080 --name presto starburstdata/presto
mysql-presto: SUPERSET__SQLALCHEMY_EXAMPLES_URI = presto://localhost:8080/memory/default
# based on https://github.com/big-data-europe/docker-hadoop
# clone the repo & run docker compose up -d to test locally
mysql-hive: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
mysql-hive: SUPERSET__SQLALCHEMY_EXAMPLES_URI = hive://localhost:10000/default
# make sure that directory is accessible by docker
hive: UPLOAD_FOLDER = /tmp/.superset/app/static/uploads/
usedevelop = true
allowlist_externals =
npm
pkill
[testenv:cypress]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-dashboard]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh dashboard
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-explore]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh explore
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-sqllab]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh sqllab
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-sqllab-backend-persist]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh sqllab
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:eslint]
changedir = {toxinidir}/superset-frontend
commands =
npm run lint
deps =
[testenv:fossa]
commands =
{toxinidir}/scripts/fossa.sh
deps =
passenv = *
[testenv:javascript]
commands =
npm install -g npm@'>=6.5.0'
{toxinidir}/superset-frontend/js_build.sh
deps =
[testenv:license-check]
commands =
{toxinidir}/scripts/check_license.sh
passenv = *
whitelist_externals =
{toxinidir}/scripts/check_license.sh
deps =
[testenv:pre-commit]
commands =
pre-commit run --all-files
deps =
-rrequirements/development.txt
skip_install = true
[testenv:pylint]
commands =
pylint superset
deps =
-rrequirements/development.txt
[testenv:thumbnails]
setenv =
SUPERSET_CONFIG = tests.integration_tests.superset_test_config_thumbnails
deps =
-rrequirements/development.txt
[tox]
envlist =
cypress-dashboard
cypress-explore
cypress-sqllab
cypress-sqllab-backend-persist
eslint
fossa
javascript
license-check
pre-commit
pylint
skipsdist = true
"""
[tool.ruff] [tool.ruff]
# Exclude a variety of commonly ignored directories. # Exclude a variety of commonly ignored directories.
exclude = [ exclude = [
"**/*.ipynb",
".bzr", ".bzr",
".direnv", ".direnv",
".eggs", ".eggs",

View File

@@ -20,3 +20,7 @@
urllib3>=1.26.18 urllib3>=1.26.18
werkzeug>=3.0.1 werkzeug>=3.0.1
numexpr>=2.9.0 numexpr>=2.9.0
# 5.0.0 has a sensitive deprecation used in other libs
# -> https://github.com/aio-libs/async-timeout/blob/master/CHANGES.rst#500-2024-10-31
async_timeout>=4.0.0,<5.0.0

View File

@@ -1,4 +1,4 @@
# SHA1:85649679306ea016e401f37adfbad832028d2e5f # SHA1:cc62b2b6658afa9dbb6e81046e1084f15442858a
# #
# This file is autogenerated by pip-compile-multi # This file is autogenerated by pip-compile-multi
# To update, run: # To update, run:
@@ -9,14 +9,16 @@
# via -r requirements/base.in # via -r requirements/base.in
alembic==1.13.1 alembic==1.13.1
# via flask-migrate # via flask-migrate
amqp==5.2.0 amqp==5.3.1
# via kombu # via kombu
apispec[yaml]==6.3.0 apispec[yaml]==6.3.0
# via flask-appbuilder # via flask-appbuilder
apsw==3.46.0.0 apsw==3.46.0.0
# via shillelagh # via shillelagh
async-timeout==4.0.3 async-timeout==4.0.3
# via redis # via
# -r requirements/base.in
# redis
attrs==24.2.0 attrs==24.2.0
# via # via
# cattrs # cattrs
@@ -26,13 +28,13 @@ babel==2.16.0
# via flask-babel # via flask-babel
backoff==2.2.1 backoff==2.2.1
# via apache-superset # via apache-superset
bcrypt==4.1.3 bcrypt==4.2.1
# via paramiko # via paramiko
billiard==4.2.0 billiard==4.2.1
# via celery # via celery
blinker==1.9.0 blinker==1.9.0
# via flask # via flask
bottleneck==1.3.8 bottleneck==1.4.2
# via pandas # via pandas
brotli==1.1.0 brotli==1.1.0
# via flask-compress # via flask-compress
@@ -40,19 +42,19 @@ cachelib==0.9.0
# via # via
# flask-caching # flask-caching
# flask-session # flask-session
cachetools==5.3.3 cachetools==5.5.0
# via google-auth # via google-auth
cattrs==24.1.2 cattrs==24.1.2
# via requests-cache # via requests-cache
celery==5.4.0 celery==5.4.0
# via apache-superset # via apache-superset
certifi==2024.2.2 certifi==2024.8.30
# via requests # via requests
cffi==1.17.1 cffi==1.17.1
# via # via
# cryptography # cryptography
# pynacl # pynacl
charset-normalizer==3.3.2 charset-normalizer==3.4.0
# via requests # via requests
click==8.1.7 click==8.1.7
# via # via
@@ -76,23 +78,27 @@ colorama==0.4.6
# via # via
# apache-superset # apache-superset
# flask-appbuilder # flask-appbuilder
cron-descriptor==1.4.3 cron-descriptor==1.4.5
# via apache-superset # via apache-superset
croniter==2.0.5 croniter==5.0.1
# via apache-superset # via apache-superset
cryptography==42.0.8 cryptography==43.0.3
# via # via
# apache-superset # apache-superset
# paramiko # paramiko
# pyopenssl # pyopenssl
deprecated==1.2.14 defusedxml==0.7.1
# via odfpy
deprecated==1.2.15
# via limits # via limits
deprecation==2.1.0 deprecation==2.1.0
# via apache-superset # via apache-superset
dnspython==2.6.1 dnspython==2.7.0
# via email-validator # via email-validator
email-validator==2.1.1 email-validator==2.2.0
# via flask-appbuilder # via flask-appbuilder
et-xmlfile==2.0.0
# via openpyxl
exceptiongroup==1.2.2 exceptiongroup==1.2.2
# via cattrs # via cattrs
flask==2.3.3 flask==2.3.3
@@ -115,11 +121,11 @@ flask-babel==2.0.0
# via flask-appbuilder # via flask-appbuilder
flask-caching==2.3.0 flask-caching==2.3.0
# via apache-superset # via apache-superset
flask-compress==1.15 flask-compress==1.17
# via apache-superset # via apache-superset
flask-jwt-extended==4.6.0 flask-jwt-extended==4.7.1
# via flask-appbuilder # via flask-appbuilder
flask-limiter==3.7.0 flask-limiter==3.8.0
# via flask-appbuilder # via flask-appbuilder
flask-login==0.6.3 flask-login==0.6.3
# via # via
@@ -135,7 +141,7 @@ flask-sqlalchemy==2.5.1
# flask-migrate # flask-migrate
flask-talisman==1.1.0 flask-talisman==1.1.0
# via apache-superset # via apache-superset
flask-wtf==1.2.1 flask-wtf==1.2.2
# via # via
# apache-superset # apache-superset
# flask-appbuilder # flask-appbuilder
@@ -145,29 +151,29 @@ geographiclib==2.0
# via geopy # via geopy
geopy==2.4.1 geopy==2.4.1
# via apache-superset # via apache-superset
google-auth==2.29.0 google-auth==2.36.0
# via shillelagh # via shillelagh
greenlet==3.0.3 greenlet==3.1.1
# via # via
# shillelagh # shillelagh
# sqlalchemy # sqlalchemy
gunicorn==22.0.0 gunicorn==23.0.0
# via apache-superset # via apache-superset
hashids==1.3.1 hashids==1.3.1
# via apache-superset # via apache-superset
holidays==0.25 holidays==0.25
# via apache-superset # via apache-superset
humanize==4.9.0 humanize==4.11.0
# via apache-superset # via apache-superset
idna==3.7 idna==3.10
# via # via
# email-validator # email-validator
# requests # requests
importlib-metadata==7.1.0 importlib-metadata==8.5.0
# via apache-superset # via apache-superset
importlib-resources==6.4.0 importlib-resources==6.4.5
# via limits # via limits
isodate==0.6.1 isodate==0.7.2
# via apache-superset # via apache-superset
itsdangerous==2.2.0 itsdangerous==2.2.0
# via # via
@@ -177,23 +183,23 @@ jinja2==3.1.4
# via # via
# flask # flask
# flask-babel # flask-babel
jsonpath-ng==1.6.1 jsonpath-ng==1.7.0
# via apache-superset # via apache-superset
jsonschema==4.17.3 jsonschema==4.17.3
# via flask-appbuilder # via flask-appbuilder
kombu==5.3.7 kombu==5.4.2
# via celery # via celery
korean-lunar-calendar==0.3.1 korean-lunar-calendar==0.3.1
# via holidays # via holidays
limits==3.12.0 limits==3.13.0
# via flask-limiter # via flask-limiter
llvmlite==0.42.0 llvmlite==0.43.0
# via numba # via numba
mako==1.3.5 mako==1.3.6
# via # via
# alembic # alembic
# apache-superset # apache-superset
markdown==3.6 markdown==3.7
# via apache-superset # via apache-superset
markdown-it-py==3.0.0 markdown-it-py==3.0.0
# via rich # via rich
@@ -203,7 +209,7 @@ markupsafe==3.0.2
# mako # mako
# werkzeug # werkzeug
# wtforms # wtforms
marshmallow==3.21.2 marshmallow==3.23.1
# via # via
# flask-appbuilder # flask-appbuilder
# marshmallow-sqlalchemy # marshmallow-sqlalchemy
@@ -215,11 +221,11 @@ msgpack==1.0.8
# via apache-superset # via apache-superset
msgspec==0.18.6 msgspec==0.18.6
# via flask-session # via flask-session
nh3==0.2.17 nh3==0.2.18
# via apache-superset # via apache-superset
numba==0.59.1 numba==0.60.0
# via pandas # via pandas
numexpr==2.10.1 numexpr==2.10.2
# via # via
# -r requirements/base.in # -r requirements/base.in
# pandas # pandas
@@ -231,9 +237,13 @@ numpy==1.23.5
# numexpr # numexpr
# pandas # pandas
# pyarrow # pyarrow
odfpy==1.4.1
# via pandas
openpyxl==3.1.5
# via pandas
ordered-set==4.1.0 ordered-set==4.1.0
# via flask-limiter # via flask-limiter
packaging==23.2 packaging==24.2
# via # via
# apache-superset # apache-superset
# apispec # apispec
@@ -243,9 +253,9 @@ packaging==23.2
# marshmallow # marshmallow
# marshmallow-sqlalchemy # marshmallow-sqlalchemy
# shillelagh # shillelagh
pandas[performance]==2.0.3 pandas[excel,performance]==2.0.3
# via apache-superset # via apache-superset
paramiko==3.4.0 paramiko==3.5.0
# via # via
# apache-superset # apache-superset
# sshtunnel # sshtunnel
@@ -261,30 +271,30 @@ polyline==2.0.2
# via apache-superset # via apache-superset
prison==0.2.1 prison==0.2.1
# via flask-appbuilder # via flask-appbuilder
prompt-toolkit==3.0.44 prompt-toolkit==3.0.48
# via click-repl # via click-repl
pyarrow==14.0.2 pyarrow==14.0.2
# via apache-superset # via apache-superset
pyasn1==0.6.0 pyasn1==0.6.1
# via # via
# pyasn1-modules # pyasn1-modules
# rsa # rsa
pyasn1-modules==0.4.0 pyasn1-modules==0.4.1
# via google-auth # via google-auth
pycparser==2.22 pycparser==2.22
# via cffi # via cffi
pygments==2.18.0 pygments==2.18.0
# via rich # via rich
pyjwt==2.8.0 pyjwt==2.10.0
# via # via
# apache-superset # apache-superset
# flask-appbuilder # flask-appbuilder
# flask-jwt-extended # flask-jwt-extended
pynacl==1.5.0 pynacl==1.5.0
# via paramiko # via paramiko
pyopenssl==24.1.0 pyopenssl==24.2.1
# via shillelagh # via shillelagh
pyparsing==3.1.2 pyparsing==3.2.0
# via apache-superset # via apache-superset
pyrsistent==0.20.0 pyrsistent==0.20.0
# via jsonschema # via jsonschema
@@ -306,7 +316,9 @@ pytz==2024.2
# croniter # croniter
# flask-babel # flask-babel
# pandas # pandas
pyyaml==6.0.1 pyxlsb==1.0.10
# via pandas
pyyaml==6.0.2
# via # via
# apache-superset # apache-superset
# apispec # apispec
@@ -318,7 +330,7 @@ requests==2.32.2
# shillelagh # shillelagh
requests-cache==1.2.0 requests-cache==1.2.0
# via shillelagh # via shillelagh
rich==13.7.1 rich==13.9.4
# via flask-limiter # via flask-limiter
rsa==4.9 rsa==4.9
# via google-auth # via google-auth
@@ -328,18 +340,17 @@ shillelagh[gsheetsapi]==1.2.18
# via apache-superset # via apache-superset
shortid==0.1.2 shortid==0.1.2
# via apache-superset # via apache-superset
simplejson==3.19.2 simplejson==3.19.3
# via apache-superset # via apache-superset
six==1.16.0 six==1.16.0
# via # via
# isodate
# prison # prison
# python-dateutil # python-dateutil
# url-normalize # url-normalize
# wtforms-json # wtforms-json
slack-sdk==3.27.2 slack-sdk==3.33.4
# via apache-superset # via apache-superset
sqlalchemy==1.4.52 sqlalchemy==1.4.54
# via # via
# alembic # alembic
# apache-superset # apache-superset
@@ -352,9 +363,9 @@ sqlalchemy-utils==0.38.3
# via # via
# apache-superset # apache-superset
# flask-appbuilder # flask-appbuilder
sqlglot==25.24.0 sqlglot==25.24.5
# via apache-superset # via apache-superset
sqlparse==0.5.0 sqlparse==0.5.2
# via apache-superset # via apache-superset
sshtunnel==0.4.0 sshtunnel==0.4.0
# via apache-superset # via apache-superset
@@ -367,10 +378,12 @@ typing-extensions==4.12.2
# cattrs # cattrs
# flask-limiter # flask-limiter
# limits # limits
# rich
# shillelagh # shillelagh
tzdata==2024.1 tzdata==2024.2
# via # via
# celery # celery
# kombu
# pandas # pandas
url-normalize==1.4.3 url-normalize==1.4.3
# via requests-cache # via requests-cache
@@ -394,7 +407,7 @@ werkzeug==3.1.3
# flask-appbuilder # flask-appbuilder
# flask-jwt-extended # flask-jwt-extended
# flask-login # flask-login
wrapt==1.16.0 wrapt==1.17.0
# via deprecated # via deprecated
wtforms==3.2.1 wtforms==3.2.1
# via # via
@@ -404,9 +417,13 @@ wtforms==3.2.1
# wtforms-json # wtforms-json
wtforms-json==0.3.5 wtforms-json==0.3.5
# via apache-superset # via apache-superset
xlrd==2.0.1
# via pandas
xlsxwriter==3.0.9 xlsxwriter==3.0.9
# via apache-superset # via
zipp==3.19.0 # apache-superset
# pandas
zipp==3.21.0
# via importlib-metadata # via importlib-metadata
zstandard==0.22.0 zstandard==0.23.0
# via flask-compress # via flask-compress

View File

@@ -17,4 +17,4 @@
# under the License. # under the License.
# #
-r base.in -r base.in
-e .[development,bigquery,cors,druid,gevent,gsheets,hive,mysql,playwright,postgres,presto,prophet,trino,thumbnails] -e .[development,bigquery,cors,druid,gevent,gsheets,mysql,playwright,postgres,presto,prophet,trino,thumbnails]

View File

@@ -1,4 +1,4 @@
# SHA1:c186006a3f82c8775e1039f37c52309f6c858197 # SHA1:dc767a7288b56c785b0cd3c38e95e7b5e66be1ac
# #
# This file is autogenerated by pip-compile-multi # This file is autogenerated by pip-compile-multi
# To update, run: # To update, run:
@@ -12,89 +12,69 @@
# -r requirements/development.in # -r requirements/development.in
astroid==3.1.0 astroid==3.1.0
# via pylint # via pylint
boto3==1.34.112
# via
# apache-superset
# dataflows-tabulator
botocore==1.34.112
# via
# boto3
# s3transfer
build==1.2.1 build==1.2.1
# via pip-tools # via pip-tools
cached-property==1.5.2 cfgv==3.4.0
# via tableschema
cfgv==3.3.1
# via pre-commit # via pre-commit
chardet==5.1.0
# via
# dataflows-tabulator
# tox
cmdstanpy==1.1.0 cmdstanpy==1.1.0
# via prophet # via prophet
contourpy==1.0.7 contourpy==1.0.7
# via matplotlib # via matplotlib
coverage[toml]==7.2.5 coverage[toml]==7.6.8
# via pytest-cov # via pytest-cov
cycler==0.11.0 cycler==0.12.1
# via matplotlib # via matplotlib
dataflows-tabulator==1.54.3 db-dtypes==1.3.1
# via tableschema
db-dtypes==1.2.0
# via pandas-gbq # via pandas-gbq
dill==0.3.8 dill==0.3.9
# via pylint # via pylint
distlib==0.3.8 distlib==0.3.8
# via virtualenv # via virtualenv
docker==7.0.0 docker==7.0.0
# via apache-superset # via apache-superset
et-xmlfile==1.1.0
# via openpyxl
filelock==3.12.2 filelock==3.12.2
# via # via virtualenv
# tox
# virtualenv
flask-cors==4.0.0 flask-cors==4.0.0
# via apache-superset # via apache-superset
flask-testing==0.8.1 flask-testing==0.8.1
# via apache-superset # via apache-superset
fonttools==4.51.0 fonttools==4.55.0
# via matplotlib # via matplotlib
freezegun==1.5.1 freezegun==1.5.1
# via apache-superset # via apache-superset
future==0.18.3 future==1.0.0
# via pyhive # via pyhive
gevent==24.2.1 gevent==24.2.1
# via apache-superset # via apache-superset
google-api-core[grpc]==2.11.0 google-api-core[grpc]==2.23.0
# via # via
# google-cloud-bigquery # google-cloud-bigquery
# google-cloud-bigquery-storage # google-cloud-bigquery-storage
# google-cloud-core # google-cloud-core
# pandas-gbq # pandas-gbq
# sqlalchemy-bigquery # sqlalchemy-bigquery
google-auth-oauthlib==1.0.0 google-auth-oauthlib==1.2.1
# via # via
# pandas-gbq # pandas-gbq
# pydata-google-auth # pydata-google-auth
google-cloud-bigquery==3.20.1 google-cloud-bigquery==3.27.0
# via # via
# apache-superset # apache-superset
# pandas-gbq # pandas-gbq
# sqlalchemy-bigquery # sqlalchemy-bigquery
google-cloud-bigquery-storage==2.19.1 google-cloud-bigquery-storage==2.19.1
# via pandas-gbq # via pandas-gbq
google-cloud-core==2.3.2 google-cloud-core==2.4.1
# via google-cloud-bigquery # via google-cloud-bigquery
google-crc32c==1.5.0 google-crc32c==1.6.0
# via google-resumable-media # via google-resumable-media
google-resumable-media==2.7.0 google-resumable-media==2.7.2
# via google-cloud-bigquery # via google-cloud-bigquery
googleapis-common-protos==1.63.0 googleapis-common-protos==1.66.0
# via # via
# google-api-core # google-api-core
# grpcio-status # grpcio-status
grpcio==1.62.1 grpcio==1.68.0
# via # via
# apache-superset # apache-superset
# google-api-core # google-api-core
@@ -103,31 +83,21 @@ grpcio-status==1.60.1
# via google-api-core # via google-api-core
identify==2.5.36 identify==2.5.36
# via pre-commit # via pre-commit
ijson==3.2.3
# via dataflows-tabulator
iniconfig==2.0.0 iniconfig==2.0.0
# via pytest # via pytest
isort==5.12.0 isort==5.12.0
# via pylint # via pylint
jmespath==1.0.1
# via
# boto3
# botocore
jsonlines==4.0.0
# via dataflows-tabulator
jsonschema-spec==0.1.6 jsonschema-spec==0.1.6
# via openapi-spec-validator # via openapi-spec-validator
kiwisolver==1.4.5 kiwisolver==1.4.7
# via matplotlib # via matplotlib
lazy-object-proxy==1.10.0 lazy-object-proxy==1.10.0
# via openapi-spec-validator # via openapi-spec-validator
linear-tsv==1.1.0
# via dataflows-tabulator
matplotlib==3.9.0 matplotlib==3.9.0
# via prophet # via prophet
mccabe==0.7.0 mccabe==0.7.0
# via pylint # via pylint
mysqlclient==2.2.4 mysqlclient==2.2.6
# via apache-superset # via apache-superset
nodeenv==1.8.0 nodeenv==1.8.0
# via pre-commit # via pre-commit
@@ -137,8 +107,6 @@ openapi-schema-validator==0.4.4
# via openapi-spec-validator # via openapi-spec-validator
openapi-spec-validator==0.5.6 openapi-spec-validator==0.5.6
# via apache-superset # via apache-superset
openpyxl==3.1.2
# via dataflows-tabulator
pandas-gbq==0.19.1 pandas-gbq==0.19.1
# via apache-superset # via apache-superset
parameterized==0.9.0 parameterized==0.9.0
@@ -155,32 +123,30 @@ pip-tools==7.4.1
# via pip-compile-multi # via pip-compile-multi
playwright==1.42.0 playwright==1.42.0
# via apache-superset # via apache-superset
pluggy==1.4.0 pluggy==1.5.0
# via # via pytest
# pytest pre-commit==4.0.1
# tox
pre-commit==3.7.1
# via apache-superset # via apache-superset
progress==1.6 progress==1.6
# via apache-superset # via apache-superset
prophet==1.1.5 prophet==1.1.5
# via apache-superset # via apache-superset
proto-plus==1.22.2 proto-plus==1.25.0
# via google-cloud-bigquery-storage # via
protobuf==4.23.0 # google-api-core
# google-cloud-bigquery-storage
protobuf==4.25.5
# via # via
# google-api-core # google-api-core
# google-cloud-bigquery-storage # google-cloud-bigquery-storage
# googleapis-common-protos # googleapis-common-protos
# grpcio-status # grpcio-status
# proto-plus # proto-plus
psutil==6.0.0 psutil==6.1.0
# via apache-superset # via apache-superset
psycopg2-binary==2.9.6 psycopg2-binary==2.9.6
# via apache-superset # via apache-superset
pure-sasl==0.6.2 pydata-google-auth==1.9.0
# via thrift-sasl
pydata-google-auth==1.7.0
# via pandas-gbq # via pandas-gbq
pydruid==0.6.9 pydruid==0.6.9
# via apache-superset # via apache-superset
@@ -194,9 +160,7 @@ pyinstrument==4.4.0
# via apache-superset # via apache-superset
pylint==3.1.0 pylint==3.1.0
# via apache-superset # via apache-superset
pyproject-api==1.6.1 pyproject-hooks==1.2.0
# via tox
pyproject-hooks==1.0.0
# via # via
# build # build
# pip-tools # pip-tools
@@ -205,7 +169,7 @@ pytest==7.4.4
# apache-superset # apache-superset
# pytest-cov # pytest-cov
# pytest-mock # pytest-mock
pytest-cov==5.0.0 pytest-cov==6.0.0
# via apache-superset # via apache-superset
pytest-mock==3.10.0 pytest-mock==3.10.0
# via apache-superset # via apache-superset
@@ -215,62 +179,37 @@ requests-oauthlib==2.0.0
# via google-auth-oauthlib # via google-auth-oauthlib
rfc3339-validator==0.1.4 rfc3339-validator==0.1.4
# via openapi-schema-validator # via openapi-schema-validator
rfc3986==2.0.0 ruff==0.8.0
# via tableschema
ruff==0.4.5
# via apache-superset # via apache-superset
s3transfer==0.10.1 sqlalchemy-bigquery==1.12.0
# via boto3
sqlalchemy-bigquery==1.11.0
# via apache-superset # via apache-superset
sqloxide==0.1.43 sqloxide==0.1.51
# via apache-superset # via apache-superset
statsd==4.0.1 statsd==4.0.1
# via apache-superset # via apache-superset
tableschema==1.20.10 tomli==2.1.0
# via apache-superset
thrift==0.16.0
# via
# apache-superset
# thrift-sasl
thrift-sasl==0.4.3
# via apache-superset
tomli==2.0.1
# via # via
# build # build
# coverage # coverage
# pip-tools # pip-tools
# pylint # pylint
# pyproject-api
# pyproject-hooks
# pytest # pytest
# tox tomlkit==0.13.2
tomlkit==0.12.5
# via pylint # via pylint
toposort==1.10 toposort==1.10
# via pip-compile-multi # via pip-compile-multi
tox==4.6.4 tqdm==4.67.1
# via apache-superset
tqdm==4.66.4
# via # via
# cmdstanpy # cmdstanpy
# prophet # prophet
trino==0.328.0 trino==0.330.0
# via apache-superset # via apache-superset
tzlocal==5.2 tzlocal==5.2
# via trino # via trino
unicodecsv==0.14.1
# via
# dataflows-tabulator
# tableschema
virtualenv==20.23.1 virtualenv==20.23.1
# via # via pre-commit
# pre-commit wheel==0.45.1
# tox
wheel==0.43.0
# via pip-tools # via pip-tools
xlrd==2.0.1
# via dataflows-tabulator
zope-event==5.0 zope-event==5.0
# via gevent # via gevent
zope-interface==5.4.0 zope-interface==5.4.0

View File

@@ -1,294 +0,0 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
import subprocess
from textwrap import dedent
import click
# Docker Hub repository the built images are tagged for.
REPO = "apache/superset"
# Companion repository used as a remote buildx registry cache.
CACHE_REPO = f"{REPO}-cache"
# Default base Python image tag (the "py311" preset overrides it).
BASE_PY_IMAGE = "3.10-slim-bookworm"
def run_cmd(command: str, raise_on_failure: bool = True) -> str:
    """Run ``command`` through the shell, echoing output as it arrives.

    stderr is folded into stdout. Returns the full captured text. If the
    command exits non-zero and ``raise_on_failure`` is true, raises
    ``subprocess.CalledProcessError`` carrying the captured output.
    """
    proc = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
    )
    captured: list[str] = []
    stream = proc.stdout
    if stream is not None:
        for line in iter(stream.readline, ""):
            print(line.strip())  # echo progress in real time
            captured.append(line)
    proc.wait()  # let the subprocess finish before checking its status
    output = "".join(captured)
    if proc.returncode != 0 and raise_on_failure:
        raise subprocess.CalledProcessError(proc.returncode, command, output)
    return output
def get_git_sha() -> str:
    """Return the full SHA of the current git HEAD (whitespace-trimmed)."""
    sha = run_cmd("git rev-parse HEAD")
    return sha.strip()
def get_build_context_ref(build_context: str) -> str:
    """
    Derive a short ref string from the GitHub Actions environment:
    - pull_request: the sanitized PR head ref
    - release: the tag name (GITHUB_REF minus its refs/tags/ prefix)
    - push: the sanitized branch name (GITHUB_REF minus refs/heads/)
    Any other event yields an empty string. Results are capped at 40 chars;
    sanitizing replaces every non-alphanumeric character with '-'.
    """
    def sanitize(ref: str) -> str:
        return re.sub("[^a-zA-Z0-9]", "-", ref)

    event = os.getenv("GITHUB_EVENT_NAME")
    github_ref = os.getenv("GITHUB_REF", "")
    if event == "pull_request":
        return sanitize(os.getenv("GITHUB_HEAD_REF", ""))[:40]
    if event == "release":
        return re.sub("refs/tags/", "", github_ref)[:40]
    if event == "push":
        return sanitize(re.sub("refs/heads/", "", github_ref))[:40]
    return ""
def is_latest_release(release: str) -> bool:
    """Ask tag_latest_release.sh (in --dry-run mode) whether ``release``
    would be tagged as the latest release; failures count as "no"."""
    script = f"./scripts/tag_latest_release.sh {release} --dry-run"
    output = run_cmd(script, raise_on_failure=False) or ""
    return "SKIP_TAG::false" in output
def make_docker_tag(l: list[str]) -> str:  # noqa: E741
    """Join the non-empty chunks with '-' and prefix with the repo name."""
    suffix = "-".join(chunk for chunk in l if chunk)
    return f"{REPO}:{suffix}"
def get_docker_tags(
    build_preset: str,
    build_platforms: list[str],
    sha: str,
    build_context: str,
    build_context_ref: str,
    force_latest: bool = False,
) -> set[str]:
    """
    Return the set of docker tags to apply for a given build context.

    Tags always include the full and short (7-char) SHA, suffixed with the
    build preset (when not the default "lean") and a short platform
    indicator (when exactly one non-amd platform is targeted). Release
    builds add the release ref and possibly "latest"; pushes to master add
    "master"; pull requests add "pr-<ref>".
    """
    tags: set[str] = set()
    tag_chunks: list[str] = []
    if build_preset != "lean":
        # Always add the preset_build name if different from default (lean)
        tag_chunks += [build_preset]
    if len(build_platforms) == 1:
        build_platform = build_platforms[0]
        short_build_platform = build_platform.replace("linux/", "").replace("64", "")
        if short_build_platform != "amd":
            # Always a platform indicator if different from default (amd)
            tag_chunks += [short_build_platform]
    # Always craft a tag for the SHA
    tags.add(make_docker_tag([sha] + tag_chunks))
    # also a short SHA, cause it's nice
    tags.add(make_docker_tag([sha[:7]] + tag_chunks))
    if build_context == "release":
        # add a release tag
        tags.add(make_docker_tag([build_context_ref] + tag_chunks))
        # Only shell out to the dry-run tagging script when the answer can
        # matter (release context, not already forced) instead of on every
        # call as before.
        if force_latest or is_latest_release(build_context_ref):
            # add a latest tag
            tags.add(make_docker_tag(["latest"] + tag_chunks))
    elif build_context == "push" and build_context_ref == "master":
        tags.add(make_docker_tag(["master"] + tag_chunks))
    elif build_context == "pull_request":
        tags.add(make_docker_tag([f"pr-{build_context_ref}"] + tag_chunks))
    return tags
def get_docker_command(
    build_preset: str,
    build_platforms: list[str],
    is_authenticated: bool,
    sha: str,
    build_context: str,
    build_context_ref: str,
    force_latest: bool = False,
) -> str:
    """
    Assemble the complete `docker buildx build` command line.

    The preset picks the Dockerfile target, base Python image and docker
    context; the platform list drives --platform and the cache ref;
    context/ref determine which tags get applied. Authenticated runs push
    and write the registry cache; unauthenticated runs only --load.
    Exits the process with status 1 on an unknown preset.
    """
    build_target = ""
    py_ver = BASE_PY_IMAGE
    docker_context = "."
    if build_preset == "dev":
        build_target = "dev"
    elif build_preset == "lean":
        build_target = "lean"
    elif build_preset == "py311":
        build_target = "lean"
        py_ver = "3.11-slim-bookworm"
    elif build_preset == "websocket":
        build_target = ""
        docker_context = "superset-websocket"
    elif build_preset == "ci":
        build_target = "ci"
    elif build_preset == "dockerize":
        build_target = ""
        docker_context = "-f dockerize.Dockerfile ."
    else:
        print(f"Invalid build preset: {build_preset}")
        exit(1)
    # Try to get context reference if missing
    if not build_context_ref:
        build_context_ref = get_build_context_ref(build_context)
    tags = get_docker_tags(
        build_preset,
        build_platforms,
        sha,
        build_context,
        build_context_ref,
        force_latest,
    )
    # sort the (unordered) tag set so the generated command is deterministic
    docker_tags = ("\\\n" + 8 * " ").join([f"-t {s} " for s in sorted(tags)])
    docker_args = "--load" if not is_authenticated else "--push"
    target_argument = f"--target {build_target}" if build_target else ""
    cache_ref = f"{CACHE_REPO}:{py_ver}"
    if len(build_platforms) == 1:
        # single-platform builds get a platform-suffixed cache ref
        build_platform = build_platforms[0]
        short_build_platform = build_platform.replace("linux/", "").replace("64", "")
        cache_ref = f"{CACHE_REPO}:{py_ver}-{short_build_platform}"
    platform_arg = "--platform " + ",".join(build_platforms)
    cache_from_arg = f"--cache-from=type=registry,ref={cache_ref}"
    cache_to_arg = (
        f"--cache-to=type=registry,mode=max,ref={cache_ref}" if is_authenticated else ""
    )
    build_arg = f"--build-arg PY_VER={py_ver}" if py_ver else ""
    actor = os.getenv("GITHUB_ACTOR")
    return dedent(
        f"""\
        docker buildx build \\
        {docker_args} \\
        {docker_tags} \\
        {cache_from_arg} \\
        {cache_to_arg} \\
        {build_arg} \\
        {platform_arg} \\
        {target_argument} \\
        --label sha={sha} \\
        --label target={build_target} \\
        --label build_trigger={build_context} \\
        --label base={py_ver} \\
        --label build_actor={actor} \\
        {docker_context}"""
    )
@click.command()
@click.argument(
    "build_preset",
    type=click.Choice(["lean", "dev", "dockerize", "websocket", "py311", "ci"]),
)
@click.argument("build_context", type=click.Choice(["push", "pull_request", "release"]))
@click.option(
    "--platform",
    type=click.Choice(["linux/arm64", "linux/amd64"]),
    default=["linux/amd64"],
    multiple=True,
)
@click.option("--build_context_ref", help="a reference to the pr, release or branch")
@click.option("--dry-run", is_flag=True, help="Run the command in dry-run mode.")
@click.option("--verbose", is_flag=True, help="Print more info")
@click.option(
    "--force-latest", is_flag=True, help="Force the 'latest' tag on the release"
)
def main(
    build_preset: str,
    build_context: str,
    build_context_ref: str,
    platform: list[str],
    dry_run: bool,
    force_latest: bool,
    verbose: bool,
) -> None:
    """
    This script executes docker build and push commands based on given arguments.

    Builds the buildx command via get_docker_command(); unless --dry-run is
    set, it logs into Docker Hub (when credentials are present in the
    environment) and executes the resulting shell script.
    """
    # Having both Docker Hub credentials enables --push and registry caching;
    # without them the image is only built/loaded locally.
    is_authenticated = bool(
        os.getenv("DOCKERHUB_TOKEN") and os.getenv("DOCKERHUB_USER")
    )
    if force_latest and build_context != "release":
        print(
            "--force-latest can only be applied if the build context is set to 'release'"
        )
        exit(1)
    # --build_context_ref has no default, so it is None when omitted; guard
    # before calling .strip() to avoid an AttributeError.
    if build_context == "release" and not (build_context_ref or "").strip():
        print("Release number has to be provided")
        exit(1)
    docker_build_command = get_docker_command(
        build_preset,
        platform,
        is_authenticated,
        get_git_sha(),
        build_context,
        build_context_ref,
        force_latest,
    )
    if not dry_run:
        print("Executing Docker Build Command:")
        print(docker_build_command)
        script = ""
        dockerhub_user = os.getenv("DOCKERHUB_USER")
        if dockerhub_user:
            # Extracted to locals: reusing double quotes inside an f-string
            # expression is a syntax error on Python < 3.12 (see PEP 701).
            dockerhub_token = os.getenv("DOCKERHUB_TOKEN")
            script = dedent(
                f"""\
                docker logout
                docker login --username "{dockerhub_user}" --password "{dockerhub_token}"
                DOCKER_ARGS="--push"
                """
            )
        script = script + docker_build_command
        if verbose:
            # Dump the Dockerfile being built for easier CI debugging.
            run_cmd("cat Dockerfile")
        stdout = run_cmd(script)  # noqa: F841
    else:
        print("Dry Run - Docker Build Command:")
        print(docker_build_command)


if __name__ == "__main__":
    main()

View File

@@ -95,15 +95,21 @@ def print_files(files: List[str]) -> None:
print("\n".join([f"- {s}" for s in files])) print("\n".join([f"- {s}" for s in files]))
def is_int(s: str) -> bool:
return bool(re.match(r"^-?\d+$", s))
def main(event_type: str, sha: str, repo: str) -> None: def main(event_type: str, sha: str, repo: str) -> None:
"""Main function to check for file changes based on event context.""" """Main function to check for file changes based on event context."""
print("SHA:", sha) print("SHA:", sha)
print("EVENT_TYPE", event_type) print("EVENT_TYPE", event_type)
files = None
if event_type == "pull_request": if event_type == "pull_request":
pr_number = os.getenv("GITHUB_REF", "").split("/")[-2] pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
files = fetch_changed_files_pr(repo, pr_number) if is_int(pr_number):
print("PR files:") files = fetch_changed_files_pr(repo, pr_number)
print_files(files) print("PR files:")
print_files(files)
elif event_type == "push": elif event_type == "push":
files = fetch_changed_files_push(repo, sha) files = fetch_changed_files_push(repo, sha)
@@ -119,7 +125,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
changes_detected = {} changes_detected = {}
for group, regex_patterns in PATTERNS.items(): for group, regex_patterns in PATTERNS.items():
patterns_compiled = [re.compile(p) for p in regex_patterns] patterns_compiled = [re.compile(p) for p in regex_patterns]
changes_detected[group] = event_type == "workflow_dispatch" or detect_changes( changes_detected[group] = files is None or detect_changes(
files, patterns_compiled files, patterns_compiled
) )

View File

@@ -87,7 +87,7 @@ describe('Charts list', () => {
visitChartList(); visitChartList();
cy.getBySel('count-crosslinks').should('be.visible'); cy.getBySel('count-crosslinks').should('be.visible');
cy.getBySel('crosslinks').first().trigger('mouseover'); cy.getBySel('crosslinks').first().trigger('mouseover');
cy.get('.ant-tooltip') cy.get('.antd5-tooltip')
.contains('3 - Sample dashboard') .contains('3 - Sample dashboard')
.invoke('removeAttr', 'target') .invoke('removeAttr', 'target')
.click(); .click();

View File

@@ -99,16 +99,13 @@ describe('Color scheme control', () => {
cy.get('.ant-select-selection-item .color-scheme-label').trigger( cy.get('.ant-select-selection-item .color-scheme-label').trigger(
'mouseover', 'mouseover',
); );
cy.get('.color-scheme-tooltip').should('be.visible');
cy.get('.color-scheme-tooltip').contains('Superset Colors'); cy.get('.color-scheme-tooltip').contains('Superset Colors');
cy.get('.Control[data-test="color_scheme"]').scrollIntoView(); cy.get('.Control[data-test="color_scheme"]').scrollIntoView();
cy.get('.Control[data-test="color_scheme"] input[type="search"]').focus(); cy.get('.Control[data-test="color_scheme"] input[type="search"]').focus();
cy.focused().type('lyftColors{enter}'); cy.focused().type('lyftColors');
cy.get( cy.getBySel('lyftColors').should('exist');
'.Control[data-test="color_scheme"] .ant-select-selection-item [data-test="lyftColors"]', cy.getBySel('lyftColors').trigger('mouseover');
).should('exist');
cy.get('.ant-select-selection-item .color-scheme-label').trigger(
'mouseover',
);
cy.get('.color-scheme-tooltip').should('not.exist'); cy.get('.color-scheme-tooltip').should('not.exist');
}); });
}); });

View File

@@ -140,7 +140,7 @@ export const sqlLabView = {
tabsNavList: "[class='ant-tabs-nav-list']", tabsNavList: "[class='ant-tabs-nav-list']",
tab: "[class='ant-tabs-tab-btn']", tab: "[class='ant-tabs-tab-btn']",
addTabButton: dataTestLocator('add-tab-icon'), addTabButton: dataTestLocator('add-tab-icon'),
tooltip: '.ant-tooltip-content', tooltip: '.antd5-tooltip-content',
tabName: '.css-1suejie', tabName: '.css-1suejie',
schemaInput: '[data-test=DatabaseSelector] > :nth-child(2)', schemaInput: '[data-test=DatabaseSelector] > :nth-child(2)',
loadingIndicator: '.Select__loading-indicator', loadingIndicator: '.Select__loading-indicator',

View File

@@ -53067,14 +53067,6 @@
"node": ">=4" "node": ">=4"
} }
}, },
"node_modules/viewport-mercator-project": {
"version": "6.2.3",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.0.0",
"gl-matrix": "^3.0.0"
}
},
"node_modules/vlq": { "node_modules/vlq": {
"version": "0.2.3", "version": "0.2.3",
"license": "MIT" "license": "MIT"
@@ -57951,10 +57943,10 @@
"version": "0.18.25", "version": "0.18.25",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@math.gl/web-mercator": "^4.1.0",
"prop-types": "^15.8.1", "prop-types": "^15.8.1",
"react-map-gl": "^6.1.19", "react-map-gl": "^6.1.19",
"supercluster": "^8.0.1", "supercluster": "^8.0.1"
"viewport-mercator-project": "^6.1.1"
}, },
"peerDependencies": { "peerDependencies": {
"@superset-ui/chart-controls": "*", "@superset-ui/chart-controls": "*",
@@ -57963,6 +57955,30 @@
"react": "^15 || ^16" "react": "^15 || ^16"
} }
}, },
"plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/core": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/core/-/core-4.1.0.tgz",
"integrity": "sha512-FrdHBCVG3QdrworwrUSzXIaK+/9OCRLscxI2OUy6sLOHyHgBMyfnEGs99/m3KNvs+95BsnQLWklVfpKfQzfwKA==",
"license": "MIT",
"dependencies": {
"@math.gl/types": "4.1.0"
}
},
"plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/types": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/types/-/types-4.1.0.tgz",
"integrity": "sha512-clYZdHcmRvMzVK5fjeDkQlHUzXQSNdZ7s4xOqC3nJPgz4C/TZkUecTo9YS4PruZqtDda/ag4erndP0MIn40dGA==",
"license": "MIT"
},
"plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/web-mercator": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-4.1.0.tgz",
"integrity": "sha512-HZo3vO5GCMkXJThxRJ5/QYUYRr3XumfT8CzNNCwoJfinxy5NtKUd7dusNTXn7yJ40UoB8FMIwkVwNlqaiRZZAw==",
"license": "MIT",
"dependencies": {
"@math.gl/core": "4.1.0"
}
},
"plugins/legacy-plugin-chart-map-box/node_modules/kdbush": { "plugins/legacy-plugin-chart-map-box/node_modules/kdbush": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz", "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz",
@@ -68666,12 +68682,33 @@
"@superset-ui/legacy-plugin-chart-map-box": { "@superset-ui/legacy-plugin-chart-map-box": {
"version": "file:plugins/legacy-plugin-chart-map-box", "version": "file:plugins/legacy-plugin-chart-map-box",
"requires": { "requires": {
"@math.gl/web-mercator": "^4.1.0",
"prop-types": "^15.8.1", "prop-types": "^15.8.1",
"react-map-gl": "^6.1.19", "react-map-gl": "^6.1.19",
"supercluster": "^8.0.1", "supercluster": "^8.0.1"
"viewport-mercator-project": "^6.1.1"
}, },
"dependencies": { "dependencies": {
"@math.gl/core": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/core/-/core-4.1.0.tgz",
"integrity": "sha512-FrdHBCVG3QdrworwrUSzXIaK+/9OCRLscxI2OUy6sLOHyHgBMyfnEGs99/m3KNvs+95BsnQLWklVfpKfQzfwKA==",
"requires": {
"@math.gl/types": "4.1.0"
}
},
"@math.gl/types": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/types/-/types-4.1.0.tgz",
"integrity": "sha512-clYZdHcmRvMzVK5fjeDkQlHUzXQSNdZ7s4xOqC3nJPgz4C/TZkUecTo9YS4PruZqtDda/ag4erndP0MIn40dGA=="
},
"@math.gl/web-mercator": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-4.1.0.tgz",
"integrity": "sha512-HZo3vO5GCMkXJThxRJ5/QYUYRr3XumfT8CzNNCwoJfinxy5NtKUd7dusNTXn7yJ40UoB8FMIwkVwNlqaiRZZAw==",
"requires": {
"@math.gl/core": "4.1.0"
}
},
"kdbush": { "kdbush": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz", "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz",
@@ -95387,13 +95424,6 @@
"unist-util-stringify-position": "^3.0.0" "unist-util-stringify-position": "^3.0.0"
} }
}, },
"viewport-mercator-project": {
"version": "6.2.3",
"requires": {
"@babel/runtime": "^7.0.0",
"gl-matrix": "^3.0.0"
}
},
"vlq": { "vlq": {
"version": "0.2.3" "version": "0.2.3"
}, },

View File

@@ -18,9 +18,8 @@
*/ */
import { CSSProperties } from 'react'; import { CSSProperties } from 'react';
import { kebabCase } from 'lodash'; import { kebabCase } from 'lodash';
import { TooltipPlacement } from 'antd/lib/tooltip';
import { t } from '@superset-ui/core'; import { t } from '@superset-ui/core';
import { Tooltip, TooltipProps } from './Tooltip'; import { Tooltip, TooltipProps, TooltipPlacement } from './Tooltip';
export interface InfoTooltipWithTriggerProps { export interface InfoTooltipWithTriggerProps {
label?: string; label?: string;

View File

@@ -17,48 +17,41 @@
* under the License. * under the License.
*/ */
import { useTheme, css } from '@superset-ui/core'; import { useTheme } from '@superset-ui/core';
import { Tooltip as BaseTooltip } from 'antd'; import { Tooltip as BaseTooltip } from 'antd-v5';
import type { TooltipProps } from 'antd/lib/tooltip'; import {
import { Global } from '@emotion/react'; TooltipProps as BaseTooltipProps,
TooltipPlacement as BaseTooltipPlacement,
} from 'antd-v5/lib/tooltip';
export type { TooltipProps } from 'antd/lib/tooltip'; export type TooltipProps = BaseTooltipProps;
export type TooltipPlacement = BaseTooltipPlacement;
export const Tooltip = ({ overlayStyle, color, ...props }: TooltipProps) => { export const Tooltip = ({
overlayStyle,
color,
...props
}: BaseTooltipProps) => {
const theme = useTheme(); const theme = useTheme();
const defaultColor = `${theme.colors.grayscale.dark2}e6`; const defaultColor = `${theme.colors.grayscale.dark2}e6`;
return ( return (
<> <BaseTooltip
{/* Safari hack to hide browser default tooltips */} overlayStyle={{
<Global fontSize: theme.typography.sizes.s,
styles={css` lineHeight: '1.6',
.ant-tooltip-open { maxWidth: theme.gridUnit * 62,
display: inline-block; minWidth: theme.gridUnit * 30,
&::after { ...overlayStyle,
content: ''; }}
display: block; // make the tooltip display closer to the label
} align={{ offset: [0, 1] }}
} color={defaultColor || color}
`} trigger="hover"
/> placement="bottom"
<BaseTooltip // don't allow hovering over the tooltip
overlayStyle={{ mouseLeaveDelay={0}
fontSize: theme.typography.sizes.s, {...props}
lineHeight: '1.6', />
maxWidth: theme.gridUnit * 62,
minWidth: theme.gridUnit * 30,
...overlayStyle,
}}
// make the tooltip display closer to the label
align={{ offset: [0, 1] }}
color={defaultColor || color}
trigger="hover"
placement="bottom"
// don't allow hovering over the tooltip
mouseLeaveDelay={0}
{...props}
/>
</>
); );
}; };

View File

@@ -262,6 +262,7 @@ export interface BaseControlConfig<
props: ControlPanelsContainerProps, props: ControlPanelsContainerProps,
controlData: AnyDict, controlData: AnyDict,
) => boolean; ) => boolean;
disableStash?: boolean;
hidden?: hidden?:
| boolean | boolean
| ((props: ControlPanelsContainerProps, controlData: AnyDict) => boolean); | ((props: ControlPanelsContainerProps, controlData: AnyDict) => boolean);

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { boxplotOperator } from '@superset-ui/chart-controls'; import { boxplotOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { contributionOperator } from '@superset-ui/chart-controls'; import { contributionOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { flattenOperator } from '@superset-ui/chart-controls'; import { flattenOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { histogramOperator } from '@superset-ui/chart-controls'; import { histogramOperator } from '@superset-ui/chart-controls';
import { SqlaFormData } from '@superset-ui/core'; import { SqlaFormData, VizType } from '@superset-ui/core';
import { omit } from 'lodash'; import { omit } from 'lodash';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -26,7 +26,7 @@ const formData: SqlaFormData = {
cumulative: true, cumulative: true,
normalize: true, normalize: true,
groupby: ['country', 'region'], groupby: ['country', 'region'],
viz_type: 'histogram', viz_type: VizType.LegacyHistogram,
datasource: 'foo', datasource: 'foo',
}; };

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { pivotOperator } from '@superset-ui/chart-controls'; import { pivotOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
show_empty_columns: true, show_empty_columns: true,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {

View File

@@ -16,7 +16,12 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { DTTM_ALIAS, QueryObject, SqlaFormData } from '@superset-ui/core'; import {
DTTM_ALIAS,
QueryObject,
SqlaFormData,
VizType,
} from '@superset-ui/core';
import { prophetOperator } from '@superset-ui/chart-controls'; import { prophetOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +32,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { rankOperator } from '@superset-ui/chart-controls'; import { rankOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -26,7 +26,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
truncate_metric: true, truncate_metric: true,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {

View File

@@ -16,7 +16,12 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { ComparisonType, QueryObject, SqlaFormData } from '@superset-ui/core'; import {
ComparisonType,
QueryObject,
SqlaFormData,
VizType,
} from '@superset-ui/core';
import { renameOperator } from '@superset-ui/chart-controls'; import { renameOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -26,7 +31,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
truncate_metric: true, truncate_metric: true,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { resampleOperator } from '@superset-ui/chart-controls'; import { resampleOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { rollingWindowOperator } from '@superset-ui/chart-controls'; import { rollingWindowOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { sortOperator } from '@superset-ui/chart-controls'; import { sortOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { timeCompareOperator } from '@superset-ui/chart-controls'; import { timeCompareOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = { const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {
metrics: [ metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryObject, SqlaFormData } from '@superset-ui/core'; import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { import {
timeCompareOperator, timeCompareOperator,
timeComparePivotOperator, timeComparePivotOperator,
@@ -30,7 +30,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016', time_range: '2015 : 2016',
granularity: 'month', granularity: 'month',
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
show_empty_columns: true, show_empty_columns: true,
}; };
const queryObject: QueryObject = { const queryObject: QueryObject = {

View File

@@ -16,12 +16,12 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryFormData, QueryFormMetric } from '@superset-ui/core'; import { QueryFormData, QueryFormMetric, VizType } from '@superset-ui/core';
import { extractExtraMetrics } from '@superset-ui/chart-controls'; import { extractExtraMetrics } from '@superset-ui/chart-controls';
const baseFormData: QueryFormData = { const baseFormData: QueryFormData = {
datasource: 'dummy', datasource: 'dummy',
viz_type: 'table', viz_type: VizType.Table,
metrics: ['a', 'b'], metrics: ['a', 'b'],
columns: ['foo', 'bar'], columns: ['foo', 'bar'],
limit: 100, limit: 100,

View File

@@ -17,11 +17,11 @@
* under the License. * under the License.
*/ */
import { isDerivedSeries } from '@superset-ui/chart-controls'; import { isDerivedSeries } from '@superset-ui/chart-controls';
import { SqlaFormData, ComparisonType } from '@superset-ui/core'; import { SqlaFormData, ComparisonType, VizType } from '@superset-ui/core';
const formData: SqlaFormData = { const formData: SqlaFormData = {
datasource: 'foo', datasource: 'foo',
viz_type: 'table', viz_type: VizType.Table,
}; };
const series = { const series = {
id: 'metric__1 month ago', id: 'metric__1 month ago',

View File

@@ -16,6 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { displayTimeRelatedControls } from '../../src'; import { displayTimeRelatedControls } from '../../src';
const mockData = { const mockData = {
@@ -35,7 +36,7 @@ const mockData = {
exportState: {}, exportState: {},
form_data: { form_data: {
datasource: '22__table', datasource: '22__table',
viz_type: 'table', viz_type: VizType.Table,
}, },
}; };

View File

@@ -16,12 +16,12 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { QueryFormData } from '@superset-ui/core'; import { QueryFormData, VizType } from '@superset-ui/core';
import { getStandardizedControls } from '../../src'; import { getStandardizedControls } from '../../src';
const formData: QueryFormData = { const formData: QueryFormData = {
datasource: '30__table', datasource: '30__table',
viz_type: 'table', viz_type: VizType.Table,
standardizedFormData: { standardizedFormData: {
controls: { controls: {
metrics: ['count(*)', 'sum(sales)'], metrics: ['count(*)', 'sum(sales)'],
@@ -34,7 +34,7 @@ const formData: QueryFormData = {
test('without standardizedFormData', () => { test('without standardizedFormData', () => {
getStandardizedControls().setStandardizedControls({ getStandardizedControls().setStandardizedControls({
datasource: '30__table', datasource: '30__table',
viz_type: 'table', viz_type: VizType.Table,
}); });
expect(getStandardizedControls().controls).toEqual({ expect(getStandardizedControls().controls).toEqual({
metrics: [], metrics: [],

View File

@@ -41,6 +41,7 @@ export { default as ChartDataProvider } from './components/ChartDataProvider';
export * from './types/Base'; export * from './types/Base';
export * from './types/TransformFunction'; export * from './types/TransformFunction';
export * from './types/QueryResponse'; export * from './types/QueryResponse';
export * from './types/VizType';
export { default as __hack_reexport_chart_Base } from './types/Base'; export { default as __hack_reexport_chart_Base } from './types/Base';
export { default as __hack_reexport_chart_TransformFunction } from './types/TransformFunction'; export { default as __hack_reexport_chart_TransformFunction } from './types/TransformFunction';

View File

@@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export enum VizType {
Area = 'echarts_area',
Bar = 'echarts_timeseries_bar',
BigNumber = 'big_number',
BigNumberTotal = 'big_number_total',
BigNumberPeriodOverPeriod = 'pop_kpi',
BoxPlot = 'box_plot',
Bubble = 'bubble_v2',
Bullet = 'bullet',
Calendar = 'cal_heatmap',
Chord = 'chord',
Compare = 'compare',
CountryMap = 'country_map',
DistBar = 'dist_bar',
EventFlow = 'event_flow',
Funnel = 'funnel',
Gauge = 'gauge_chart',
Graph = 'graph_chart',
Handlebars = 'handlebars',
Heatmap = 'heatmap_v2',
Histogram = 'histogram_v2',
Horizon = 'horizon',
LegacyArea = 'area',
LegacyBar = 'bar',
LegacyBubble = 'bubble',
LegacyHeatmap = 'heatmap',
LegacyHistogram = 'histogram',
LegacyLine = 'line',
LegacySankey = 'sankey',
Line = 'echarts_timeseries_line',
MapBox = 'mapbox',
MixedTimeseries = 'mixed_timeseries',
PairedTTest = 'paired_ttest',
ParallelCoordinates = 'para',
Partition = 'partition',
Pie = 'pie',
PivotTable = 'pivot_table_v2',
Radar = 'radar',
Rose = 'rose',
Sankey = 'sankey_v2',
Scatter = 'echarts_timeseries_scatter',
SmoothLine = 'echarts_timeseries_smooth',
Step = 'echarts_timeseries_step',
Sunburst = 'sunburst_v2',
Table = 'table',
TimePivot = 'time_pivot',
TimeTable = 'time_table',
Timeseries = 'echarts_timeseries',
Tree = 'tree_chart',
Treemap = 'treemap_v2',
Waterfall = 'waterfall',
WordCloud = 'word_cloud',
WorldMap = 'world_map',
}

View File

@@ -28,6 +28,7 @@ import {
getChartBuildQueryRegistry, getChartBuildQueryRegistry,
getChartMetadataRegistry, getChartMetadataRegistry,
ChartMetadata, ChartMetadata,
VizType,
} from '@superset-ui/core'; } from '@superset-ui/core';
import { LOGIN_GLOB } from '../fixtures/constants'; import { LOGIN_GLOB } from '../fixtures/constants';
@@ -86,13 +87,13 @@ describe('ChartClient', () => {
sliceId, sliceId,
formData: { formData: {
granularity: 'second', granularity: 'second',
viz_type: 'bar', viz_type: VizType.LegacyBar,
}, },
}), }),
).resolves.toEqual({ ).resolves.toEqual({
...sankeyFormData, ...sankeyFormData,
granularity: 'second', granularity: 'second',
viz_type: 'bar', viz_type: VizType.LegacyBar,
}); });
}); });
it('returns promise of formData if only formData was given', () => it('returns promise of formData if only formData was given', () =>
@@ -101,13 +102,13 @@ describe('ChartClient', () => {
formData: { formData: {
datasource: '1__table', datasource: '1__table',
granularity: 'minute', granularity: 'minute',
viz_type: 'line', viz_type: VizType.LegacyLine,
}, },
}), }),
).resolves.toEqual({ ).resolves.toEqual({
datasource: '1__table', datasource: '1__table',
granularity: 'minute', granularity: 'minute',
viz_type: 'line', viz_type: VizType.LegacyLine,
})); }));
it('rejects if none of sliceId or formData is specified', () => it('rejects if none of sliceId or formData is specified', () =>
expect( expect(
@@ -120,12 +121,12 @@ describe('ChartClient', () => {
describe('.loadQueryData(formData, options)', () => { describe('.loadQueryData(formData, options)', () => {
it('returns a promise of query data for known chart type', () => { it('returns a promise of query data for known chart type', () => {
getChartMetadataRegistry().registerValue( getChartMetadataRegistry().registerValue(
'word_cloud', VizType.WordCloud,
new ChartMetadata({ name: 'Word Cloud', thumbnail: '' }), new ChartMetadata({ name: 'Word Cloud', thumbnail: '' }),
); );
getChartBuildQueryRegistry().registerValue( getChartBuildQueryRegistry().registerValue(
'word_cloud', VizType.WordCloud,
(formData: QueryFormData) => buildQueryContext(formData), (formData: QueryFormData) => buildQueryContext(formData),
); );
fetchMock.post('glob:*/api/v1/chart/data', [ fetchMock.post('glob:*/api/v1/chart/data', [
@@ -138,7 +139,7 @@ describe('ChartClient', () => {
return expect( return expect(
chartClient.loadQueryData({ chartClient.loadQueryData({
granularity: 'minute', granularity: 'minute',
viz_type: 'word_cloud', viz_type: VizType.WordCloud,
datasource: '1__table', datasource: '1__table',
}), }),
).resolves.toEqual([ ).resolves.toEqual([
@@ -255,7 +256,7 @@ describe('ChartClient', () => {
it('loadAllDataNecessaryForAChart', () => { it('loadAllDataNecessaryForAChart', () => {
fetchMock.get(`glob:*/api/v1/form_data/?slice_id=${sliceId}`, { fetchMock.get(`glob:*/api/v1/form_data/?slice_id=${sliceId}`, {
granularity: 'minute', granularity: 'minute',
viz_type: 'line', viz_type: VizType.LegacyLine,
datasource: '1__table', datasource: '1__table',
color: 'living-coral', color: 'living-coral',
}); });
@@ -275,12 +276,12 @@ describe('ChartClient', () => {
}); });
getChartMetadataRegistry().registerValue( getChartMetadataRegistry().registerValue(
'line', VizType.LegacyLine,
new ChartMetadata({ name: 'Line', thumbnail: '.gif' }), new ChartMetadata({ name: 'Line', thumbnail: '.gif' }),
); );
getChartBuildQueryRegistry().registerValue( getChartBuildQueryRegistry().registerValue(
'line', VizType.LegacyLine,
(formData: QueryFormData) => buildQueryContext(formData), (formData: QueryFormData) => buildQueryContext(formData),
); );
@@ -296,7 +297,7 @@ describe('ChartClient', () => {
}, },
formData: { formData: {
granularity: 'minute', granularity: 'minute',
viz_type: 'line', viz_type: VizType.LegacyLine,
datasource: '1__table', datasource: '1__table',
color: 'living-coral', color: 'living-coral',
}, },

View File

@@ -19,11 +19,11 @@
/* eslint sort-keys: 'off' */ /* eslint sort-keys: 'off' */
/** The form data defined here is based on default visualizations packaged with Apache Superset */ /** The form data defined here is based on default visualizations packaged with Apache Superset */
import { TimeGranularity } from '@superset-ui/core'; import { TimeGranularity, VizType } from '@superset-ui/core';
export const bigNumberFormData = { export const bigNumberFormData = {
datasource: '3__table', datasource: '3__table',
viz_type: 'big_number', viz_type: VizType.BigNumber,
slice_id: 54, slice_id: 54,
granularity_sqla: 'ds', granularity_sqla: 'ds',
time_grain_sqla: TimeGranularity.DAY, time_grain_sqla: TimeGranularity.DAY,
@@ -39,7 +39,7 @@ export const bigNumberFormData = {
export const wordCloudFormData = { export const wordCloudFormData = {
datasource: '3__table', datasource: '3__table',
viz_type: 'word_cloud', viz_type: VizType.WordCloud,
slice_id: 60, slice_id: 60,
url_params: {}, url_params: {},
granularity_sqla: 'ds', granularity_sqla: 'ds',
@@ -56,7 +56,7 @@ export const wordCloudFormData = {
export const sunburstFormData = { export const sunburstFormData = {
datasource: '2__table', datasource: '2__table',
viz_type: 'sunburst_v2', viz_type: VizType.Sunburst,
slice_id: 47, slice_id: 47,
url_params: {}, url_params: {},
granularity_sqla: 'year', granularity_sqla: 'year',
@@ -71,7 +71,7 @@ export const sunburstFormData = {
export const sankeyFormData = { export const sankeyFormData = {
datasource: '1__table', datasource: '1__table',
viz_type: 'sankey', viz_type: VizType.LegacySankey,
slice_id: 1, slice_id: 1,
url_params: {}, url_params: {},
granularity_sqla: null, granularity_sqla: null,

View File

@@ -31,6 +31,7 @@ import {
QueryFormData, QueryFormData,
DatasourceType, DatasourceType,
supersetTheme, supersetTheme,
VizType,
} from '@superset-ui/core'; } from '@superset-ui/core';
describe('ChartPlugin', () => { describe('ChartPlugin', () => {
@@ -59,7 +60,7 @@ describe('ChartPlugin', () => {
const FORM_DATA = { const FORM_DATA = {
datasource: '1__table', datasource: '1__table',
granularity: 'day', granularity: 'day',
viz_type: 'table', viz_type: VizType.Table,
}; };
it('creates a new plugin', () => { it('creates a new plugin', () => {

View File

@@ -17,6 +17,7 @@
* under the License. * under the License.
*/ */
import fetchMock from 'fetch-mock'; import fetchMock from 'fetch-mock';
import { VizType } from '@superset-ui/core';
import { getFormData } from '../../../../src/query/api/legacy'; import { getFormData } from '../../../../src/query/api/legacy';
import setupClientForTest from '../setupClientForTest'; import setupClientForTest from '../setupClientForTest';
@@ -28,7 +29,7 @@ describe('getFormData()', () => {
const mockData = { const mockData = {
datasource: '1__table', datasource: '1__table',
viz_type: 'sankey', viz_type: VizType.LegacySankey,
slice_id: 1, slice_id: 1,
url_params: {}, url_params: {},
granularity_sqla: null, granularity_sqla: null,

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import fetchMock from 'fetch-mock'; import fetchMock from 'fetch-mock';
import { buildQueryContext, ApiV1 } from '@superset-ui/core'; import { buildQueryContext, ApiV1, VizType } from '@superset-ui/core';
import setupClientForTest from '../setupClientForTest'; import setupClientForTest from '../setupClientForTest';
describe('API v1 > getChartData()', () => { describe('API v1 > getChartData()', () => {
@@ -39,7 +39,7 @@ describe('API v1 > getChartData()', () => {
const result = await ApiV1.getChartData( const result = await ApiV1.getChartData(
buildQueryContext({ buildQueryContext({
granularity: 'minute', granularity: 'minute',
viz_type: 'word_cloud', viz_type: VizType.WordCloud,
datasource: '1__table', datasource: '1__table',
}), }),
); );

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { buildQueryContext } from '@superset-ui/core'; import { buildQueryContext, VizType } from '@superset-ui/core';
import * as queryModule from '../../src/query/normalizeTimeColumn'; import * as queryModule from '../../src/query/normalizeTimeColumn';
describe('buildQueryContext', () => { describe('buildQueryContext', () => {
@@ -24,7 +24,7 @@ describe('buildQueryContext', () => {
const queryContext = buildQueryContext({ const queryContext = buildQueryContext({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
}); });
expect(queryContext.datasource.id).toBe(5); expect(queryContext.datasource.id).toBe(5);
expect(queryContext.datasource.type).toBe('table'); expect(queryContext.datasource.type).toBe('table');
@@ -37,7 +37,7 @@ describe('buildQueryContext', () => {
{ {
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
source: 'source_column', source: 'source_column',
source_category: 'source_category_column', source_category: 'source_category_column',
target: 'target_column', target: 'target_column',
@@ -75,7 +75,7 @@ describe('buildQueryContext', () => {
{ {
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
source: 'source_column', source: 'source_column',
source_category: 'source_category_column', source_category: 'source_category_column',
target: 'target_column', target: 'target_column',
@@ -103,7 +103,7 @@ describe('buildQueryContext', () => {
const queryContext = buildQueryContext( const queryContext = buildQueryContext(
{ {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
}, },
() => [ () => [
{ {
@@ -133,7 +133,7 @@ describe('buildQueryContext', () => {
buildQueryContext( buildQueryContext(
{ {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
x_axis: 'axis', x_axis: 'axis',
}, },
() => [{}], () => [{}],

View File

@@ -25,6 +25,7 @@ import {
AnnotationType, AnnotationType,
buildQueryObject, buildQueryObject,
QueryObject, QueryObject,
VizType,
} from '@superset-ui/core'; } from '@superset-ui/core';
describe('buildQueryObject', () => { describe('buildQueryObject', () => {
@@ -34,7 +35,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
}); });
expect(query.granularity).toEqual('ds'); expect(query.granularity).toEqual('ds');
}); });
@@ -43,7 +44,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
metric: 'sum__num', metric: 'sum__num',
secondary_metric: 'avg__num', secondary_metric: 'avg__num',
}); });
@@ -54,7 +55,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
extra_filters: [{ col: 'abc', op: '==', val: 'qwerty' }], extra_filters: [{ col: 'abc', op: '==', val: 'qwerty' }],
adhoc_filters: [ adhoc_filters: [
{ {
@@ -88,7 +89,7 @@ describe('buildQueryObject', () => {
{ {
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
my_custom_metric_control: 'sum__num', my_custom_metric_control: 'sum__num',
}, },
{ my_custom_metric_control: 'metrics' }, { my_custom_metric_control: 'metrics' },
@@ -101,7 +102,7 @@ describe('buildQueryObject', () => {
{ {
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
metrics: ['sum__num'], metrics: ['sum__num'],
my_custom_metric_control: 'avg__num', my_custom_metric_control: 'avg__num',
}, },
@@ -115,7 +116,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
limit: series_limit, limit: series_limit,
}); });
expect(query.series_limit).toEqual(series_limit); expect(query.series_limit).toEqual(series_limit);
@@ -126,7 +127,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
series_limit, series_limit,
}); });
expect(query.series_limit).toEqual(series_limit); expect(query.series_limit).toEqual(series_limit);
@@ -137,7 +138,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
order_desc: orderDesc, order_desc: orderDesc,
}); });
expect(query.order_desc).toEqual(orderDesc); expect(query.order_desc).toEqual(orderDesc);
@@ -148,7 +149,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
timeseries_limit_metric: metric, timeseries_limit_metric: metric,
}); });
expect(query.series_limit_metric).toEqual(metric); expect(query.series_limit_metric).toEqual(metric);
@@ -159,7 +160,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'pivot_table_v2', viz_type: VizType.PivotTable,
series_limit_metric: metric, series_limit_metric: metric,
}); });
expect(query.series_limit_metric).toEqual(metric); expect(query.series_limit_metric).toEqual(metric);
@@ -170,7 +171,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'pivot_table_v2', viz_type: VizType.PivotTable,
series_limit_metric: metric, series_limit_metric: metric,
}); });
expect(query.series_limit_metric).toEqual(undefined); expect(query.series_limit_metric).toEqual(undefined);
@@ -180,7 +181,7 @@ describe('buildQueryObject', () => {
const baseQuery = { const baseQuery = {
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
row_limit: null, row_limit: null,
}; };
@@ -267,7 +268,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
annotation_layers: annotationLayers, annotation_layers: annotationLayers,
}); });
expect(query.annotation_layers).toEqual(annotationLayers); expect(query.annotation_layers).toEqual(annotationLayers);
@@ -278,7 +279,7 @@ describe('buildQueryObject', () => {
buildQueryObject({ buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
url_params: { abc: '123' }, url_params: { abc: '123' },
}).url_params, }).url_params,
).toEqual({ abc: '123' }); ).toEqual({ abc: '123' });
@@ -286,7 +287,7 @@ describe('buildQueryObject', () => {
buildQueryObject({ buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
// @ts-expect-error // @ts-expect-error
url_params: null, url_params: null,
}).url_params, }).url_params,
@@ -298,7 +299,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity, granularity,
viz_type: 'table', viz_type: VizType.Table,
}); });
expect(query.granularity).toEqual(granularity); expect(query.granularity).toEqual(granularity);
}); });
@@ -308,7 +309,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: granularity, granularity_sqla: granularity,
viz_type: 'table', viz_type: VizType.Table,
}); });
expect(query.granularity).toEqual(granularity); expect(query.granularity).toEqual(granularity);
}); });
@@ -320,7 +321,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({ query = buildQueryObject({
datasource: '5__table', datasource: '5__table',
granularity_sqla: 'ds', granularity_sqla: 'ds',
viz_type: 'table', viz_type: VizType.Table,
custom_params: customParams, custom_params: customParams,
}); });
expect(query.custom_params).toEqual(customParams); expect(query.custom_params).toEqual(customParams);

View File

@@ -16,11 +16,13 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { isXAxisSet } from '@superset-ui/core'; import { isXAxisSet, VizType } from '@superset-ui/core';
test('isXAxisSet', () => { test('isXAxisSet', () => {
expect(isXAxisSet({ datasource: '123', viz_type: 'table' })).not.toBeTruthy();
expect( expect(
isXAxisSet({ datasource: '123', viz_type: 'table', x_axis: 'axis' }), isXAxisSet({ datasource: '123', viz_type: VizType.Table }),
).not.toBeTruthy();
expect(
isXAxisSet({ datasource: '123', viz_type: VizType.Table, x_axis: 'axis' }),
).toBeTruthy(); ).toBeTruthy();
}); });

View File

@@ -16,13 +16,13 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { normalizeOrderBy, QueryObject } from '@superset-ui/core'; import { normalizeOrderBy, QueryObject, VizType } from '@superset-ui/core';
describe('normalizeOrderBy', () => { describe('normalizeOrderBy', () => {
it('should not change original queryObject when orderby populated', () => { it('should not change original queryObject when orderby populated', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
orderby: [['count(*)', true]], orderby: [['count(*)', true]],
}; };
@@ -32,7 +32,7 @@ describe('normalizeOrderBy', () => {
it('has series_limit_metric in queryObject', () => { it('has series_limit_metric in queryObject', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
metrics: ['count(*)'], metrics: ['count(*)'],
series_limit_metric: { series_limit_metric: {
@@ -50,7 +50,7 @@ describe('normalizeOrderBy', () => {
expect(expectedQueryObject).not.toHaveProperty('order_desc'); expect(expectedQueryObject).not.toHaveProperty('order_desc');
expect(expectedQueryObject).toEqual({ expect(expectedQueryObject).toEqual({
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
metrics: ['count(*)'], metrics: ['count(*)'],
orderby: [ orderby: [
@@ -72,7 +72,7 @@ describe('normalizeOrderBy', () => {
it('should transform legacy_order_by in queryObject', () => { it('should transform legacy_order_by in queryObject', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
metrics: ['count(*)'], metrics: ['count(*)'],
legacy_order_by: { legacy_order_by: {
@@ -90,7 +90,7 @@ describe('normalizeOrderBy', () => {
expect(expectedQueryObject).not.toHaveProperty('order_desc'); expect(expectedQueryObject).not.toHaveProperty('order_desc');
expect(expectedQueryObject).toEqual({ expect(expectedQueryObject).toEqual({
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
metrics: ['count(*)'], metrics: ['count(*)'],
orderby: [ orderby: [
@@ -112,7 +112,7 @@ describe('normalizeOrderBy', () => {
it('has metrics in queryObject', () => { it('has metrics in queryObject', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
metrics: ['count(*)'], metrics: ['count(*)'],
order_desc: true, order_desc: true,
@@ -122,7 +122,7 @@ describe('normalizeOrderBy', () => {
expect(expectedQueryObject).not.toHaveProperty('order_desc'); expect(expectedQueryObject).not.toHaveProperty('order_desc');
expect(expectedQueryObject).toEqual({ expect(expectedQueryObject).toEqual({
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
metrics: ['count(*)'], metrics: ['count(*)'],
orderby: [['count(*)', false]], orderby: [['count(*)', false]],
@@ -132,7 +132,7 @@ describe('normalizeOrderBy', () => {
it('should not change', () => { it('should not change', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
}; };
expect(normalizeOrderBy(query)).toEqual(query); expect(normalizeOrderBy(query)).toEqual(query);
@@ -141,7 +141,7 @@ describe('normalizeOrderBy', () => {
it('remove empty orderby', () => { it('remove empty orderby', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
orderby: [], orderby: [],
}; };
@@ -151,7 +151,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an empty array', () => { it('remove orderby with an empty array', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
orderby: [[]], orderby: [[]],
}; };
@@ -161,7 +161,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an empty metric', () => { it('remove orderby with an empty metric', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
orderby: [['', true]], orderby: [['', true]],
}; };
@@ -171,7 +171,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an empty adhoc metric', () => { it('remove orderby with an empty adhoc metric', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
orderby: [[{}, true]], orderby: [[{}, true]],
}; };
@@ -181,7 +181,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an non-boolean type', () => { it('remove orderby with an non-boolean type', () => {
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
// @ts-ignore // @ts-ignore
orderby: [['count(*)', 'true']], orderby: [['count(*)', 'true']],

View File

@@ -20,12 +20,13 @@ import {
normalizeTimeColumn, normalizeTimeColumn,
QueryObject, QueryObject,
SqlaFormData, SqlaFormData,
VizType,
} from '@superset-ui/core'; } from '@superset-ui/core';
test('should return original QueryObject if x_axis is empty', () => { test('should return original QueryObject if x_axis is empty', () => {
const formData: SqlaFormData = { const formData: SqlaFormData = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
@@ -34,7 +35,7 @@ test('should return original QueryObject if x_axis is empty', () => {
}; };
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
extras: { extras: {
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
@@ -51,7 +52,7 @@ test('should return original QueryObject if x_axis is empty', () => {
test('should support different columns for x-axis and granularity', () => { test('should support different columns for x-axis and granularity', () => {
const formData: SqlaFormData = { const formData: SqlaFormData = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
@@ -61,7 +62,7 @@ test('should support different columns for x-axis and granularity', () => {
}; };
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
extras: { extras: {
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
@@ -76,7 +77,7 @@ test('should support different columns for x-axis and granularity', () => {
}; };
expect(normalizeTimeColumn(formData, query)).toEqual({ expect(normalizeTimeColumn(formData, query)).toEqual({
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
extras: { where: '', having: '', time_grain_sqla: 'P1Y' }, extras: { where: '', having: '', time_grain_sqla: 'P1Y' },
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
@@ -98,7 +99,7 @@ test('should support different columns for x-axis and granularity', () => {
test('should support custom SQL in x-axis', () => { test('should support custom SQL in x-axis', () => {
const formData: SqlaFormData = { const formData: SqlaFormData = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
@@ -112,7 +113,7 @@ test('should support custom SQL in x-axis', () => {
}; };
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
extras: { extras: {
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
@@ -134,7 +135,7 @@ test('should support custom SQL in x-axis', () => {
}; };
expect(normalizeTimeColumn(formData, query)).toEqual({ expect(normalizeTimeColumn(formData, query)).toEqual({
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
extras: { where: '', having: '', time_grain_sqla: 'P1Y' }, extras: { where: '', having: '', time_grain_sqla: 'P1Y' },
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
@@ -156,7 +157,7 @@ test('should support custom SQL in x-axis', () => {
test('fallback and invalid columns value', () => { test('fallback and invalid columns value', () => {
const formData: SqlaFormData = { const formData: SqlaFormData = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013', time_range: '1 year ago : 2013',
@@ -170,7 +171,7 @@ test('fallback and invalid columns value', () => {
}; };
const query: QueryObject = { const query: QueryObject = {
datasource: '5__table', datasource: '5__table',
viz_type: 'table', viz_type: VizType.Table,
granularity: 'time_column', granularity: 'time_column',
extras: { extras: {
time_grain_sqla: 'P1Y', time_grain_sqla: 'P1Y',

View File

@@ -17,11 +17,11 @@
* under the License. * under the License.
*/ */
import { getComparisonFilters } from '@superset-ui/core'; import { getComparisonFilters, VizType } from '@superset-ui/core';
const form_data = { const form_data = {
datasource: '22__table', datasource: '22__table',
viz_type: 'pop_kpi', viz_type: VizType.BigNumberPeriodOverPeriod,
slice_id: 97, slice_id: 97,
url_params: { url_params: {
form_data_key: form_data_key:

View File

@@ -17,11 +17,15 @@
* under the License. * under the License.
*/ */
import { getComparisonInfo, ComparisonTimeRangeType } from '@superset-ui/core'; import {
getComparisonInfo,
ComparisonTimeRangeType,
VizType,
} from '@superset-ui/core';
const form_data = { const form_data = {
datasource: '22__table', datasource: '22__table',
viz_type: 'pop_kpi', viz_type: VizType.BigNumberPeriodOverPeriod,
slice_id: 97, slice_id: 97,
url_params: { url_params: {
form_data_key: form_data_key:

View File

@@ -17,12 +17,12 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import ChordChartPlugin from '@superset-ui/legacy-plugin-chart-chord'; import ChordChartPlugin from '@superset-ui/legacy-plugin-chart-chord';
import data from './data'; import data from './data';
import { withResizableChartDemo } from '../../../shared/components/ResizableChartDemo'; import { withResizableChartDemo } from '../../../shared/components/ResizableChartDemo';
new ChordChartPlugin().configure({ key: 'chord' }).register(); new ChordChartPlugin().configure({ key: VizType.Chord }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-chord', title: 'Legacy Chart Plugins/legacy-plugin-chart-chord',
@@ -31,7 +31,7 @@ export default {
export const basic = ({ width, height }) => ( export const basic = ({ width, height }) => (
<SuperChart <SuperChart
chartType="chord" chartType={VizType.Chord}
width={width} width={width}
height={height} height={height}
queriesData={[{ data }]} queriesData={[{ data }]}

View File

@@ -17,12 +17,12 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import HeatmapChartPlugin from '@superset-ui/legacy-plugin-chart-heatmap'; import HeatmapChartPlugin from '@superset-ui/legacy-plugin-chart-heatmap';
import ResizableChartDemo from '../../../shared/components/ResizableChartDemo'; import ResizableChartDemo from '../../../shared/components/ResizableChartDemo';
import data from './data'; import data from './data';
new HeatmapChartPlugin().configure({ key: 'heatmap' }).register(); new HeatmapChartPlugin().configure({ key: VizType.LegacyHeatmap }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-heatmap', title: 'Legacy Chart Plugins/legacy-plugin-chart-heatmap',
@@ -30,7 +30,7 @@ export default {
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="heatmap" chartType={VizType.LegacyHeatmap}
width={500} width={500}
height={500} height={500}
formData={{ formData={{
@@ -67,7 +67,7 @@ export const resizable = () => (
<ResizableChartDemo> <ResizableChartDemo>
{({ width, height }) => ( {({ width, height }) => (
<SuperChart <SuperChart
chartType="heatmap" chartType={VizType.LegacyHeatmap}
width={width} width={width}
height={height} height={height}
formData={{ formData={{
@@ -104,7 +104,7 @@ export const resizable = () => (
export const withNullData = () => ( export const withNullData = () => (
<SuperChart <SuperChart
chartType="heatmap" chartType={VizType.LegacyHeatmap}
width={500} width={500}
height={500} height={500}
formData={{ formData={{

View File

@@ -18,11 +18,13 @@
*/ */
/* eslint-disable no-magic-numbers */ /* eslint-disable no-magic-numbers */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import HistogramChartPlugin from '@superset-ui/legacy-plugin-chart-histogram'; import HistogramChartPlugin from '@superset-ui/legacy-plugin-chart-histogram';
import data from './data'; import data from './data';
new HistogramChartPlugin().configure({ key: 'histogram' }).register(); new HistogramChartPlugin()
.configure({ key: VizType.LegacyHistogram })
.register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-histogram', title: 'Legacy Chart Plugins/legacy-plugin-chart-histogram',
@@ -30,7 +32,7 @@ export default {
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="histogram" chartType={VizType.LegacyHistogram}
width={400} width={400}
height={400} height={400}
queriesData={[{ data }]} queriesData={[{ data }]}

View File

@@ -17,11 +17,11 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import HorizonChartPlugin from '@superset-ui/legacy-plugin-chart-horizon'; import HorizonChartPlugin from '@superset-ui/legacy-plugin-chart-horizon';
import data from './data'; import data from './data';
new HorizonChartPlugin().configure({ key: 'horizon' }).register(); new HorizonChartPlugin().configure({ key: VizType.Horizon }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-horizon', title: 'Legacy Chart Plugins/legacy-plugin-chart-horizon',
@@ -29,7 +29,7 @@ export default {
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="horizon" chartType={VizType.Horizon}
width={400} width={400}
height={400} height={400}
queriesData={[{ data }]} queriesData={[{ data }]}

View File

@@ -17,12 +17,12 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import PartitionChartPlugin from '@superset-ui/legacy-plugin-chart-partition'; import PartitionChartPlugin from '@superset-ui/legacy-plugin-chart-partition';
import data from './data'; import data from './data';
import dummyDatasource from '../../../shared/dummyDatasource'; import dummyDatasource from '../../../shared/dummyDatasource';
new PartitionChartPlugin().configure({ key: 'partition' }).register(); new PartitionChartPlugin().configure({ key: VizType.Partition }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-partition', title: 'Legacy Chart Plugins/legacy-plugin-chart-partition',
@@ -30,7 +30,7 @@ export default {
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="partition" chartType={VizType.Partition}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}

View File

@@ -18,11 +18,11 @@
*/ */
/* eslint-disable no-magic-numbers, sort-keys */ /* eslint-disable no-magic-numbers, sort-keys */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import RoseChartPlugin from '@superset-ui/legacy-plugin-chart-rose'; import RoseChartPlugin from '@superset-ui/legacy-plugin-chart-rose';
import data from './data'; import data from './data';
new RoseChartPlugin().configure({ key: 'rose' }).register(); new RoseChartPlugin().configure({ key: VizType.Rose }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-rose', title: 'Legacy Chart Plugins/legacy-plugin-chart-rose',
@@ -30,7 +30,7 @@ export default {
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="rose" chartType={VizType.Rose}
width={400} width={400}
height={400} height={400}
queriesData={[{ data }]} queriesData={[{ data }]}

View File

@@ -18,12 +18,12 @@
*/ */
/* eslint-disable no-magic-numbers */ /* eslint-disable no-magic-numbers */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import SankeyChartPlugin from '@superset-ui/legacy-plugin-chart-sankey'; import SankeyChartPlugin from '@superset-ui/legacy-plugin-chart-sankey';
import ResizableChartDemo from '../../../shared/components/ResizableChartDemo'; import ResizableChartDemo from '../../../shared/components/ResizableChartDemo';
import data from './data'; import data from './data';
new SankeyChartPlugin().configure({ key: 'sankey' }).register(); new SankeyChartPlugin().configure({ key: VizType.LegacySankey }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-sankey', title: 'Legacy Chart Plugins/legacy-plugin-chart-sankey',
@@ -31,7 +31,7 @@ export default {
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="sankey" chartType={VizType.LegacySankey}
width={400} width={400}
height={400} height={400}
queriesData={[{ data }]} queriesData={[{ data }]}
@@ -45,7 +45,7 @@ export const resizable = () => (
<ResizableChartDemo> <ResizableChartDemo>
{({ width, height }) => ( {({ width, height }) => (
<SuperChart <SuperChart
chartType="sankey" chartType={VizType.LegacySankey}
width={width} width={width}
height={height} height={height}
queriesData={[{ data }]} queriesData={[{ data }]}

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import { BigNumberChartPlugin } from '@superset-ui/plugin-chart-echarts'; import { BigNumberChartPlugin } from '@superset-ui/plugin-chart-echarts';
import testData from './data'; import testData from './data';
@@ -37,7 +37,7 @@ const formData = {
showTrendLine: true, showTrendLine: true,
startYAxisAtZero: true, startYAxisAtZero: true,
timeGrainSqla: 'P1Y', timeGrainSqla: 'P1Y',
vizType: 'big_number', vizType: VizType.BigNumber,
yAxisFormat: '.3s', yAxisFormat: '.3s',
}; };

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import { BigNumberTotalChartPlugin } from '@superset-ui/plugin-chart-echarts'; import { BigNumberTotalChartPlugin } from '@superset-ui/plugin-chart-echarts';
import data from './data'; import data from './data';
@@ -37,7 +37,7 @@ export const totalBasic = () => (
formData={{ formData={{
metric: 'sum__num', metric: 'sum__num',
subheader: 'total female participants', subheader: 'total female participants',
vizType: 'big_number_total', vizType: VizType.BigNumberTotal,
yAxisFormat: '.3s', yAxisFormat: '.3s',
}} }}
/> />
@@ -52,7 +52,7 @@ export const totalNoData = () => (
formData={{ formData={{
metric: 'sum__num', metric: 'sum__num',
subheader: 'total female participants', subheader: 'total female participants',
vizType: 'big_number_total', vizType: VizType.BigNumberTotal,
yAxisFormat: '.3s', yAxisFormat: '.3s',
}} }}
/> />

View File

@@ -17,9 +17,10 @@
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { AreaChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3'; import { AreaChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new AreaChartPlugin().configure({ key: 'area' }).register(); new AreaChartPlugin().configure({ key: VizType.LegacyArea }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Area', title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Area',

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const controlsShown = () => ( export const controlsShown = () => (
<SuperChart <SuperChart
chartType="area" chartType={VizType.LegacyArea}
datasource={dummyDatasource} datasource={dummyDatasource}
width={400} width={400}
height={400} height={400}
@@ -40,7 +40,7 @@ export const controlsShown = () => (
showControls: true, showControls: true,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'area', vizType: VizType.LegacyArea,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const expanded = () => ( export const expanded = () => (
<SuperChart <SuperChart
chartType="area" chartType={VizType.LegacyArea}
datasource={dummyDatasource} datasource={dummyDatasource}
width={400} width={400}
height={400} height={400}
@@ -40,7 +40,7 @@ export const expanded = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'expand', stackedStyle: 'expand',
vizType: 'area', vizType: VizType.LegacyArea,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,14 +17,14 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const stacked = () => ( export const stacked = () => (
<SuperChart <SuperChart
id="stacked-area-chart" id="stacked-area-chart"
chartType="area" chartType={VizType.LegacyArea}
datasource={dummyDatasource} datasource={dummyDatasource}
width={400} width={400}
height={400} height={400}
@@ -41,7 +41,7 @@ export const stacked = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'area', vizType: VizType.LegacyArea,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const stackedWithYAxisBounds = () => ( export const stackedWithYAxisBounds = () => (
<SuperChart <SuperChart
chartType="area" chartType={VizType.LegacyArea}
datasource={dummyDatasource} datasource={dummyDatasource}
width={400} width={400}
height={400} height={400}
@@ -40,7 +40,7 @@ export const stackedWithYAxisBounds = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'area', vizType: VizType.LegacyArea,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,
@@ -56,7 +56,7 @@ stackedWithYAxisBounds.storyName = 'Stacked with yAxisBounds';
export const stackedWithYAxisBoundsMinOnly = () => ( export const stackedWithYAxisBoundsMinOnly = () => (
<SuperChart <SuperChart
chartType="area" chartType={VizType.LegacyArea}
datasource={dummyDatasource} datasource={dummyDatasource}
width={400} width={400}
height={400} height={400}
@@ -73,7 +73,7 @@ export const stackedWithYAxisBoundsMinOnly = () => (
showControls: true, showControls: true,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'area', vizType: VizType.LegacyArea,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,9 +17,10 @@
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { BarChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3'; import { BarChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new BarChartPlugin().configure({ key: 'bar' }).register(); new BarChartPlugin().configure({ key: VizType.LegacyBar }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bar', title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bar',

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const barWithPositiveAndNegativeValues = () => ( export const barWithPositiveAndNegativeValues = () => (
<SuperChart <SuperChart
chartType="bar" chartType={VizType.LegacyBar}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -51,7 +51,7 @@ export const barWithPositiveAndNegativeValues = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'bar', vizType: VizType.LegacyBar,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const barWithValues = () => ( export const barWithValues = () => (
<SuperChart <SuperChart
chartType="bar" chartType={VizType.LegacyBar}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -41,7 +41,7 @@ export const barWithValues = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'bar', vizType: VizType.LegacyBar,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="bar" chartType={VizType.LegacyBar}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -41,7 +41,7 @@ export const basic = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'bar', vizType: VizType.LegacyBar,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const stackedBarWithValues = () => ( export const stackedBarWithValues = () => (
<SuperChart <SuperChart
chartType="bar" chartType={VizType.LegacyBar}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -42,7 +42,7 @@ export const stackedBarWithValues = () => (
showControls: false, showControls: false,
showLegend: true, showLegend: true,
stackedStyle: 'stack', stackedStyle: 'stack',
vizType: 'bar', vizType: VizType.LegacyBar,
xAxisFormat: '%Y', xAxisFormat: '%Y',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
@@ -30,7 +30,7 @@ export const basic = () => (
queriesData={[{ data }]} queriesData={[{ data }]}
formData={{ formData={{
colorScheme: 'd3Category10', colorScheme: 'd3Category10',
vizType: 'box_plot', vizType: VizType.BoxPlot,
whiskerOptions: 'Min/max (no outliers)', whiskerOptions: 'Min/max (no outliers)',
}} }}
/> />

View File

@@ -17,9 +17,10 @@
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { BubbleChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3'; import { BubbleChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new BubbleChartPlugin().configure({ key: 'bubble' }).register(); new BubbleChartPlugin().configure({ key: VizType.LegacyBubble }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bubble', title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bubble',

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="bubble" chartType={VizType.LegacyBubble}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -38,7 +38,7 @@ export const basic = () => (
series: 'region', series: 'region',
showLegend: true, showLegend: true,
size: 'sum__SP_POP_TOTL', size: 'sum__SP_POP_TOTL',
vizType: 'bubble', vizType: VizType.LegacyBubble,
x: 'sum__SP_RUR_TOTL_ZS', x: 'sum__SP_RUR_TOTL_ZS',
xAxisFormat: '.3s', xAxisFormat: '.3s',
xAxisLabel: 'x-axis label', xAxisLabel: 'x-axis label',

View File

@@ -17,9 +17,10 @@
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { BulletChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3'; import { BulletChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new BulletChartPlugin().configure({ key: 'bullet' }).register(); new BulletChartPlugin().configure({ key: VizType.Bullet }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bullet', title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bullet',

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="bullet" chartType={VizType.Bullet}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -35,7 +35,7 @@ export const basic = () => (
markers: '', markers: '',
rangeLabels: '', rangeLabels: '',
ranges: '', ranges: '',
vizType: 'bullet', vizType: VizType.Bullet,
}} }}
/> />
); );

View File

@@ -17,9 +17,10 @@
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { CompareChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3'; import { CompareChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new CompareChartPlugin().configure({ key: 'compare' }).register(); new CompareChartPlugin().configure({ key: VizType.Compare }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Compare', title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Compare',

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
@@ -33,7 +33,7 @@ export const basic = () => (
colorScheme: 'd3Category10', colorScheme: 'd3Category10',
contribution: false, contribution: false,
leftMargin: 'auto', leftMargin: 'auto',
vizType: 'compare', vizType: VizType.Compare,
xAxisFormat: 'smart_date', xAxisFormat: 'smart_date',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
export const timeFormat = () => ( export const timeFormat = () => (
@@ -208,7 +208,7 @@ export const timeFormat = () => (
]} ]}
formData={{ formData={{
datasource: '24771__table', datasource: '24771__table',
vizType: 'compare', vizType: VizType.Compare,
urlParams: {}, urlParams: {},
timeRangeEndpoints: ['inclusive', 'exclusive'], timeRangeEndpoints: ['inclusive', 'exclusive'],
granularitySqla: '__time', granularitySqla: '__time',

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
@@ -38,7 +38,7 @@ export const basic = () => (
showBarValue: false, showBarValue: false,
showControls: false, showControls: false,
showLegend: true, showLegend: true,
vizType: 'dist_bar', vizType: VizType.DistBar,
xAxisLabel: 'ddd', xAxisLabel: 'ddd',
xTicksLayout: 'auto', xTicksLayout: 'auto',
yAxisFormat: '.3s', yAxisFormat: '.3s',

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { SuperChart, seedRandom } from '@superset-ui/core'; import { SuperChart, VizType, seedRandom } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
const data: { const data: {
@@ -51,7 +51,7 @@ export const manyBars = () => (
colorScheme: 'd3Category10', colorScheme: 'd3Category10',
showBarValue: false, showBarValue: false,
showLegend: true, showLegend: true,
vizType: 'dist_bar', vizType: VizType.DistBar,
xTicksLayout: 'auto', xTicksLayout: 'auto',
}} }}
/> />

View File

@@ -17,9 +17,10 @@
* under the License. * under the License.
*/ */
import { VizType } from '@superset-ui/core';
import { LineChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3'; import { LineChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new LineChartPlugin().configure({ key: 'line' }).register(); new LineChartPlugin().configure({ key: VizType.LegacyLine }).register();
export default { export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Line', title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Line',

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const basic = () => ( export const basic = () => (
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -37,7 +37,7 @@ export const basic = () => (
showBrush: 'auto', showBrush: 'auto',
showLegend: true, showLegend: true,
showMarkers: false, showMarkers: false,
vizType: 'line', vizType: VizType.LegacyLine,
xAxisFormat: 'smart_date', xAxisFormat: 'smart_date',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,20 +17,20 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const logScale = () => ( export const logScale = () => (
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
queriesData={[{ data }]} queriesData={[{ data }]}
formData={{ formData={{
richTooltip: true, richTooltip: true,
vizType: 'line', vizType: VizType.LegacyLine,
yAxisBounds: [1, 60000], yAxisBounds: [1, 60000],
yAxisFormat: ',d', yAxisFormat: ',d',
yLogScale: true, yLogScale: true,

View File

@@ -17,13 +17,13 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
export const markers = () => ( export const markers = () => (
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={400} height={400}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -37,7 +37,7 @@ export const markers = () => (
showBrush: 'auto', showBrush: 'auto',
showLegend: true, showLegend: true,
showMarkers: true, showMarkers: true,
vizType: 'line', vizType: VizType.LegacyLine,
xAxisFormat: 'smart_date', xAxisFormat: 'smart_date',
xAxisLabel: '', xAxisLabel: '',
xAxisShowminmax: false, xAxisShowminmax: false,

View File

@@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { SuperChart } from '@superset-ui/core'; import { SuperChart, VizType } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource'; import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data'; import data from '../data';
@@ -26,7 +26,7 @@ export const yAxisBounds = () => (
<h2>yAxisBounds</h2> <h2>yAxisBounds</h2>
<pre>yAxisBounds=undefined</pre> <pre>yAxisBounds=undefined</pre>
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={200} height={200}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -34,12 +34,12 @@ export const yAxisBounds = () => (
formData={{ formData={{
richTooltip: true, richTooltip: true,
showLegend: false, showLegend: false,
vizType: 'line', vizType: VizType.LegacyLine,
}} }}
/> />
<pre>yAxisBounds=[0, 60000]</pre> <pre>yAxisBounds=[0, 60000]</pre>
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={200} height={200}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -47,13 +47,13 @@ export const yAxisBounds = () => (
formData={{ formData={{
richTooltip: true, richTooltip: true,
showLegend: false, showLegend: false,
vizType: 'line', vizType: VizType.LegacyLine,
yAxisBounds: [0, 60000], yAxisBounds: [0, 60000],
}} }}
/> />
<pre>yAxisBounds=[null, 60000]</pre> <pre>yAxisBounds=[null, 60000]</pre>
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={200} height={200}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -61,13 +61,13 @@ export const yAxisBounds = () => (
formData={{ formData={{
richTooltip: true, richTooltip: true,
showLegend: false, showLegend: false,
vizType: 'line', vizType: VizType.LegacyLine,
yAxisBounds: [null, 60000], yAxisBounds: [null, 60000],
}} }}
/> />
<pre>yAxisBounds=[40000, null]</pre> <pre>yAxisBounds=[40000, null]</pre>
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={200} height={200}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -75,13 +75,13 @@ export const yAxisBounds = () => (
formData={{ formData={{
richTooltip: true, richTooltip: true,
showLegend: false, showLegend: false,
vizType: 'line', vizType: VizType.LegacyLine,
yAxisBounds: [40000, null], yAxisBounds: [40000, null],
}} }}
/> />
<pre>yAxisBounds=[40000, null] with Legend</pre> <pre>yAxisBounds=[40000, null] with Legend</pre>
<SuperChart <SuperChart
chartType="line" chartType={VizType.LegacyLine}
width={400} width={400}
height={200} height={200}
datasource={dummyDatasource} datasource={dummyDatasource}
@@ -89,7 +89,7 @@ export const yAxisBounds = () => (
formData={{ formData={{
richTooltip: true, richTooltip: true,
showLegend: true, showLegend: true,
vizType: 'line', vizType: VizType.LegacyLine,
yAxisBounds: [40000, null], yAxisBounds: [40000, null],
}} }}
/> />

Some files were not shown because too many files have changed in this diff Show More