Compare commits

...

423 Commits

Author SHA1 Message Date
Beto Dealmeida
503933756e fix: show only filterable columns on filter dropdown 2025-05-01 17:20:02 -04:00
Beto Dealmeida
60261a5dc6 Fix docstring 2025-05-01 12:33:44 -04:00
Beto Dealmeida
456512c508 Fix test 2025-05-01 11:56:13 -04:00
Beto Dealmeida
c1d9b06649 Remove old method 2025-05-01 09:49:50 -04:00
Beto Dealmeida
7a64a82cd9 fix: improve function detection 2025-04-30 16:58:11 -04:00
JUST.in DO IT
ef14b529b8 fix(echarts): rename time series shifted colnames (#33269) 2025-04-30 14:18:18 -03:00
github-actions[bot]
2a97a6ec1f chore(🦾): bump python importlib-metadata 8.6.1 -> 8.7.0 (#33277)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 10:03:18 -07:00
github-actions[bot]
fa6548939e chore(🦾): bump python mako 1.3.9 -> 1.3.10 (#33280)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 10:02:45 -07:00
github-actions[bot]
418c673699 chore(🦾): bump python pyparsing 3.2.2 -> 3.2.3 (#33281)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 10:01:42 -07:00
github-actions[bot]
13f77a7416 chore(🦾): bump python celery 5.4.0 -> 5.5.2 (#33257)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:28:39 -07:00
github-actions[bot]
303a80a316 chore(🦾): bump python packaging 24.2 -> 25.0 (#33259)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:27:48 -07:00
github-actions[bot]
2392ac6827 chore(🦾): bump python deprecation subpackage(s) (#33260)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:27:25 -07:00
github-actions[bot]
01ce4b987e chore(🦾): bump python python-dotenv 1.0.1 -> 1.1.0 (#33262)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:26:59 -07:00
github-actions[bot]
2f308a85d8 chore(🦾): bump python pandas subpackage(s) (#33263)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:26:40 -07:00
github-actions[bot]
e8d60509a0 chore(🦾): bump python sqlglot 26.11.1 -> 26.16.2 (#33266)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:26:01 -07:00
github-actions[bot]
d6f80eaae7 chore(🦾): bump python gunicorn subpackage(s) (#33265)
Co-authored-by: GitHub Action <action@github.com>
2025-04-29 08:25:23 -07:00
Emad Rad
a5f986fec5 feat: Persian translations (#29580) 2025-04-29 09:01:34 -06:00
Beto Dealmeida
141d0252f2 fix: mask password on DB import (#33267) 2025-04-29 10:27:03 -04:00
Daniel Vaz Gaspar
c029b532d4 fix: LocalProxy is not mapped warning (#33025) 2025-04-28 23:01:26 -06:00
github-actions[bot]
13816443ba chore(🦾): bump python croniter subpackage(s) (#33258)
Co-authored-by: GitHub Action <action@github.com>
2025-04-28 16:52:09 -07:00
Elizabeth Thompson
2c4e22e598 chore: add some utils tests (#33236) 2025-04-28 15:00:32 -07:00
Hamir Mahal
aea776a131 fix: Unexpected input(s) 'depth' CI warnings (#33254) 2025-04-28 11:07:13 -06:00
Evan Rusackas
d2360b533b fix(histogram): remove extra single quotes (#33248) 2025-04-25 16:45:05 -06:00
Vitor Avila
de84a534ac fix(DB update): Gracefully handle query error during DB update (#33250) 2025-04-25 15:38:59 -03:00
Sam Firke
ac636c73ae fix(heatmap): correctly render int and boolean falsy values on axes (#33238) 2025-04-25 11:25:50 -04:00
Levis Mbote
6a586fe4fd fix(chart): Restore subheader used in bignumber with trendline (#33196) 2025-04-25 09:39:07 -03:00
Vitor Avila
fbd8ae2888 fix(sqllab permalink): Commit SQL Lab permalinks (#33237) 2025-04-24 22:41:15 -03:00
Vitor Avila
7e4fde7a14 fix(standalone): Ensure correct URL param value for standalone mode (#33234) 2025-04-24 16:41:42 -03:00
Evan Rusackas
150b9a0168 feat(maps): Adding Republic of Serbia to country maps (#33208)
Co-authored-by: dykoffi <dykoffi@users.noreply.github.com>
2025-04-23 11:29:35 -06:00
Vitor Avila
f7b7aace38 fix(export): Full CSV/Excel exports respecting SQL_MAX_ROW config (#33214) 2025-04-23 13:13:07 -03:00
Sam Firke
f78c94c988 docs(installation): compare installation methods (#33137) 2025-04-23 11:57:33 -04:00
sha174n
74ff8dc724 docs: Add note on SQL execution security considerations (#33210) 2025-04-23 13:58:33 +01:00
Shao Yu-Lung (Allen)
8aa127eac2 feat(i18n): Frontend add zh_TW Option (#33192)
Co-authored-by: Shao Yu-Lung (Allen) <mis@cendai.com.tw>
2025-04-22 15:36:09 -06:00
Kalai
3729016a0d docs: improve documentation(docs): clarify URL encoding requirement for connection strings (#30047)
Co-authored-by: Evan Rusackas <evan@preset.io>
2025-04-22 15:30:19 -06:00
Elizabeth Thompson
b6628cdfd2 chore: migrate to more db migration utils (#33155) 2025-04-22 11:26:54 -07:00
Evan Rusackas
ae48dba3e1 feat(maps): Adding Ivory Coast / Côte d'Ivoire (#33198)
Co-authored-by: dykoffi <dykoffi@users.noreply.github.com>
2025-04-22 10:04:19 -06:00
dependabot[bot]
09364d182c chore(deps-dev): bump http-proxy-middleware from 2.0.7 to 2.0.9 in /superset-frontend (#33197)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-22 09:31:10 -06:00
Geido
99ed968289 fix(Native Filters): Keep default filter values when configuring creatable behavior (#33205) 2025-04-22 16:32:30 +02:00
Geido
8fa3b8d7e3 fix(Native Filters): Keep default filter values when configuring creatable behavior (#33205) 2025-04-22 16:30:36 +02:00
Maxime Alay-Eddine
7530487760 feat(country-map): fix France Regions IDF region code - Fixes #32627 (#32695)
Co-authored-by: Maxime ALAY-EDDINE <maxime@galeax.com>
2025-04-21 20:15:27 -06:00
Maxime Beauchemin
79afc2b545 docs: add a high-level architecture diagram to the docs (#33173) 2025-04-21 11:15:29 -07:00
JUST.in DO IT
8c94f9c435 fix(sqllab): Invalid SQL Error breaks SQL Lab (#33164) 2025-04-18 13:31:54 -07:00
Evan Rusackas
b589d44dfb fix(deckgl): Update Arc to properly adjust line width (#33154) 2025-04-18 10:07:40 -06:00
Elizabeth Thompson
4140261797 fix: subheader should show as subtitle (#33172) 2025-04-18 13:03:20 +08:00
Jacob Amrany
00f1fdb3c4 fix: os.makedirs race condition (#33161) 2025-04-17 15:09:44 -03:00
JUST.in DO IT
172e5dd095 fix(echart): Thrown errors shown after resized (#33143) 2025-04-17 09:49:49 -07:00
Mehmet Salih Yavuz
a53907a646 feat(Select): Select all and Deselect all that works on visible items while searching (#33043) 2025-04-17 18:04:08 +03:00
amaannawab923
be1b8d6751 feat(Native Filters): Exclude Filter Values (#33054)
Co-authored-by: Amaan Nawab <nelsondrew07@gmail.com>
2025-04-17 17:56:26 +03:00
Elizabeth Thompson
26ff734ef9 fix: add folders to import schema (#33142) 2025-04-15 19:49:44 -07:00
dependabot[bot]
0e18246999 chore(deps): bump @babel/runtime from 7.17.2 to 7.27.0 in /superset-frontend/cypress-base (#33102)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-15 20:38:56 -06:00
JUST.in DO IT
7333ffd41e fix(echart): Tooltip date format doesn't follow time grain (#33138) 2025-04-15 18:51:53 -07:00
Elizabeth Thompson
7dc5019b9d fix: app icon should not use subdirectory (#33141) 2025-04-15 18:09:06 -07:00
Jillian
93fa39a14f fix(lang): patch FAB's LocaleView to redirect to previous page (#31692) 2025-04-15 09:46:06 -07:00
JUST.in DO IT
342e6f3ab0 fix(dashboard): invalid active tab state (#33106) 2025-04-15 09:14:20 -07:00
Enzo Martellucci
013379eb86 feat(List Users): Migrate List Users FAB to React (#32882) 2025-04-15 17:04:28 +03:00
Michael S. Molina
bc0ffe0d10 fix: Viz migration error handling (#33037) 2025-04-15 08:25:09 -03:00
Elizabeth Thompson
5f62deaa36 chore: use create table util (#33072) 2025-04-14 19:01:11 -07:00
WLCFaro
ff8605b723 feat(lang): update Italian language (#29827) 2025-04-14 16:16:08 -06:00
Felipe Granado
45c77a1976 chore(translations): Update PT-BR language (partial) (#29828)
Co-authored-by: Evan Rusackas <evan@preset.io>
2025-04-14 16:06:54 -06:00
Kamil Gabryjelski
8cb71b8d3b fix(plugin-chart-table): Don't render redundant items in column config when time comparison is enabled (#33126) 2025-04-14 23:08:15 +02:00
Daniel Höxtermann
2233c02720 fix(playwright): allow screenshotting empty dashboards (#33107) 2025-04-14 12:20:39 -07:00
Kamil Gabryjelski
839215148a feat(explore): X-axis sort by specific metric when more than 1 metric is set (#33116) 2025-04-14 20:39:09 +02:00
Maxime Beauchemin
c1eeb63d89 fix: master builds are failing while trying to push report to cypress (#33124) 2025-04-14 10:53:02 -07:00
Elizabeth Thompson
7b9ebbe735 feat(explore): Integrate dataset panel with Folders feature (#33104)
Co-authored-by: Kamil Gabryjelski <kamil.gabryjelski@gmail.com>
2025-04-14 18:40:31 +02:00
Vitor Avila
a5a91d5e48 fix(OAuth2): Update connection should not fail if connection is missing OAuth2 token (#33100) 2025-04-14 11:19:55 -03:00
Michael S. Molina
e1f5c49df7 fix: Allows configuration of Selenium Webdriver binary (#33103) 2025-04-14 08:11:02 -03:00
Kamil Gabryjelski
3c1fc0b722 fix: Broken menu links to datasets and sql lab (#33114) 2025-04-13 21:31:21 +02:00
Maxime Beauchemin
05faf2f352 fix: resolve recent merge collision (#33110) 2025-04-12 16:33:00 -07:00
Daniel Höxtermann
347c174099 fix(thumbnails): ensure consistent cache_key (#33109) 2025-04-12 12:15:08 -07:00
Erkka Tahvanainen
5656d69c04 fix(dashboard): Generate screenshot via celery (#32193)
Co-authored-by: Erkka Tahvanainen <erkka.tahvanainen@confidently.fi>
2025-04-12 12:14:16 -07:00
Maxime Beauchemin
ac4df8d06b fix: CI file change detector to handle large PRs (#33092) 2025-04-12 12:08:41 -07:00
Beto Dealmeida
bcd136cee1 feat: catalogs for DuckDB (#28751)
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-04-11 12:58:59 -07:00
Beto Dealmeida
7ab8534ef6 feat: dataset folders (backend) (#32520)
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-04-11 11:38:08 -07:00
Geido
014b39290b feat(Native Filters): Configure creatable filter behavior (#33096) 2025-04-11 20:38:02 +03:00
Martyn Gigg
4f97b739b1 fix: Broken Python tests on master after merging prefix branch (#33095) 2025-04-11 08:52:35 -07:00
Beto Dealmeida
d88cba92c0 feat: optimize catalog permission sync (#33000) 2025-04-10 17:38:34 -07:00
Pedro-Gato
5304bed4ed chore: Update INTHEWILD.md (#33079) 2025-04-10 11:52:04 -06:00
Johannes
37194a41ec chore: Added Formbricks to INTHEWILD.md (#33074) 2025-04-10 11:51:39 -06:00
Levis Mbote
d75ff9e784 feat(charts): add subtitle option and metric customization controls (#32975) 2025-04-10 17:24:24 +02:00
Hossein Khalilian
164a07e2be fix(docker): fallback to pip if uv is not available (#33087) 2025-04-10 11:10:26 -04:00
Clay Heaton
44bd200885 fix(docs): Update quickstart.mdx to reflect latest version tag (#33063) 2025-04-10 09:58:00 -04:00
dependabot[bot]
8242692541 chore(deps-dev): bump lerna from 8.1.9 to 8.2.1 in /superset-frontend (#32941)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-09 16:11:41 -06:00
Martyn Gigg
09b92e7d08 feat: Allow superset to be deployed under a prefixed URL (#30134)
Co-authored-by: Kamil Gabryjelski <kamil.gabryjelski@gmail.com>
2025-04-09 13:43:44 -07:00
Landry Breuil
31ac3898ad fix(list roles): dont send invalid querystrings (#33060) 2025-04-09 23:25:20 +03:00
Michael S. Molina
c1159c53e3 fix: Adds missing __init__ file to commands/logs (#33059) 2025-04-09 16:39:32 -03:00
Maxime Beauchemin
deb6aedddb feat: add a title prop to the dashboard link in CRUD LIST view (#33046) 2025-04-09 12:02:37 -07:00
JUST.in DO IT
ed0cd5e7b0 fix: improve error type on parse error (#33048) 2025-04-09 09:52:15 -07:00
Maxime Beauchemin
9280b4d2a9 docs: clarify docker-compose-image-tag instructions (#33045) 2025-04-09 08:59:07 -07:00
Ville Brofeldt
3a57857707 chore(helm): bump appVersion to 4.1.2 (#33061) 2025-04-09 08:45:48 -07:00
EmmanuelCbd
6b7394e789 fix(export): charts csv export in dashboards (#31720) 2025-04-08 14:02:13 -07:00
dependabot[bot]
5a8eab3b25 chore(deps): bump estree-util-value-to-estree from 3.1.1 to 3.3.3 in /docs (#33028)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-08 11:21:04 -06:00
Ookong
15969fdf94 docs: add WinWin Network(马上赢) to users list (#33018) 2025-04-08 11:18:47 -06:00
JUST.in DO IT
9b15e04bc4 fix(log): Missing failed query log on async queries (#33024) 2025-04-08 07:12:03 -07:00
Asher Manangan
fd947a097d feat(tags): Export and Import Functionality for Superset Dashboards and Charts (#30833)
Co-authored-by: Asher Manangan <amanangan@powercosts.com>
2025-04-07 15:12:22 -04:00
Sameer ali
e1383d3821 refactor(IconButton): Refactor IconButton to use Ant Design 5 Card (#32890)
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
Co-authored-by: Geido <60598000+geido@users.noreply.github.com>
2025-04-07 20:57:32 +03:00
Hugues Verlin
c131205ff1 docs: Update documentation about publishing a dashboard (#32999) 2025-04-07 10:19:39 -07:00
Levis Mbote
b6df88a134 fix: fix bug where dashboard did not enter fullscreen mode. (#32839) 2025-04-07 18:20:49 +03:00
Hugo Lavernhe
629b137bb0 fix(dashboard): chart fullscreen issue when filter pane is collapsed (#28428) 2025-04-04 17:12:14 -06:00
Vitor Avila
db959a6463 chore(Databricks): Display older Databricks driver as legacy (#33001) 2025-04-04 15:09:15 -03:00
Kamil Gabryjelski
4041150660 feat: Add getDataMask function to embedded SDK (#32997) 2025-04-03 21:10:01 +02:00
Hex Café
bcb43327b1 fix: show_filters URL parameter is not working (#29422)
Co-authored-by: Evan Rusackas <evan@preset.io>
Co-authored-by: Vitor Avila <vitor.avila@preset.io>
2025-04-03 15:59:11 -03:00
Trent Lavoie
63c8bbf3eb fix(frontend): add missing antd-5 icon to import (#32990)
Co-authored-by: Trent Lavoie <lavtrent@amazon.com>
2025-04-03 11:18:39 -06:00
Michael S. Molina
24b1666273 fix: Bar Chart (legacy) migration to keep labels layout (#32965) 2025-04-03 08:16:55 -03:00
Mohamed Halat
86b795cd36 feat(embedding-sdk): emit data-mask events through embedded sdk to iframe parent (#31331) 2025-04-03 12:37:52 +02:00
Maxime Beauchemin
bc0bf94680 chore: bump marshmallow-sqlalchemy to 1.4.0 (#32922) 2025-04-02 09:09:08 -07:00
SBIN2010
f5d64176f6 fix: fixed Add Metrics to Tree Chart (#29158) (#30679) 2025-04-02 10:04:36 -06:00
Enzo Martellucci
4f0020d0df feat(List Roles): Migrate FAB view to React (#32432)
Co-authored-by: Diego Pucci <diegopucci.me@gmail.com>
2025-04-02 14:06:17 +03:00
Maxime Beauchemin
c83eda9551 feat: add latest partition support for BigQuery (#30760) 2025-04-01 17:13:09 -07:00
JUST.in DO IT
a36e636a58 fix(pivot-table): Revert "fix(Pivot Table): Fix column width to respect currency config (#31414)" (#32968) 2025-04-01 19:05:36 -03:00
dependabot[bot]
f5d3627468 chore(deps-dev): bump eslint-config-prettier from 10.0.2 to 10.1.1 in /docs (#32952)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:28:50 -07:00
dependabot[bot]
8eeed49547 chore(deps): bump antd from 5.24.2 to 5.24.5 in /docs (#32951)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:27:27 -07:00
dependabot[bot]
00933a27af chore(deps): bump swagger-ui-react from 5.20.0 to 5.20.2 in /docs (#32950)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:26:55 -07:00
dependabot[bot]
2bc33beec4 chore(deps-dev): bump @babel/compat-data from 7.26.5 to 7.26.8 in /superset-frontend (#32939)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:24:51 -07:00
dependabot[bot]
e1c1de1b94 chore(deps-dev): bump css-minimizer-webpack-plugin from 7.0.0 to 7.0.2 in /superset-frontend (#32937)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:24:13 -07:00
notHuman9504
26743dfcee fix: Clicking in the body of a Markdown component does not put it into edit mode (#32384) 2025-04-01 11:23:48 -06:00
dependabot[bot]
8b0bda3bad chore(deps): update @types/react-redux requirement from ^7.1.10 to ^7.1.34 in /superset-frontend/plugins/plugin-chart-echarts (#32927)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:22:12 -07:00
dependabot[bot]
a8a6254ea2 chore(deps-dev): bump @typescript-eslint/parser from 8.19.0 to 8.29.0 in /superset-websocket (#32925)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:21:01 -07:00
dependabot[bot]
be4bc3dec5 chore(deps-dev): bump ts-jest from 29.2.5 to 29.3.1 in /superset-websocket (#32924)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-04-01 10:19:54 -07:00
Joe Li
6e02d19b0d fix: make packages PEP 625 compliant (#32866)
Co-authored-by: Michael S. Molina <michael.s.molina@gmail.com>
2025-03-31 21:34:24 -06:00
Usiel Riedl
662f0fa8f4 chore(reports): add task for slack channels warm-up (#32585) 2025-03-31 14:30:21 -03:00
JUST.in DO IT
56bf17f879 fix(sqllab): Invalid display of table column keys (#32763) 2025-03-31 14:26:31 -03:00
Kamil Gabryjelski
b92909d621 feat: Enable passing a permalink to cache_dashboard_screenshot endpoint (#32900) 2025-03-31 10:40:36 +02:00
mkramer5454
8f35a3ec8c feat(plugins): Make comparison values on BigNumberPeriodOverPeriod toggleable (#28605) 2025-03-30 22:05:53 -06:00
Vladislav Korenkov
a4a092794a feat(chart controls): Add "%d.%m.%Y" time format option (#32814) 2025-03-30 22:02:58 -06:00
bmaquet
174750c9dd refactor(jinja macro): Update current_user_roles() macro to fetch roles from existing get_user_roles() method (#32888) 2025-03-28 20:50:53 -07:00
Levis Mbote
f2c0686346 feat: Add Aggregation Method for Big Number with Trendline (#32767) 2025-03-29 05:34:23 +02:00
github-actions[bot]
c2afae51cb chore(🦾): bump python grpcio 1.68.0 -> 1.71.0 (#32901)
Co-authored-by: GitHub Action <action@github.com>
2025-03-28 17:36:41 -07:00
Geido
6e1d1ad18b refactor(Icons): Add typing support and improve structure (#32880) 2025-03-28 17:16:31 -07:00
Vitor Avila
ab22bb1878 fix(Jinja): Emit time grain to table charts even if they don't have a temporal column (#32871) 2025-03-28 13:48:49 -03:00
Đỗ Trọng Hải
e0ed652ed8 fix(backend/async_events): allow user to configure username for Redis authentication in GLOBAL_ASYNC_QUERIES_CACHE_BACKEND (#32372)
Signed-off-by: hainenber <dotronghai96@gmail.com>
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
2025-03-27 19:39:05 -07:00
Luke Hart
103fedaf92 fix: use role_model from security manager (#32873) 2025-03-27 10:01:14 -07:00
Joe Li
50fe7483ae chore: update migrations to use utils (#32852) 2025-03-26 10:29:04 -07:00
SBIN2010
37f626f5e2 fix(ColorPickerControl): change color picker control width (#32851) 2025-03-26 10:28:07 -07:00
Michael S. Molina
b1693f625a chore: Removes unused file (#32860) 2025-03-26 13:35:14 -03:00
Vitor Avila
f0dc1e7527 fix(table-chart): Do not show comparison columns config if time_compare is set to [] (#32863) 2025-03-26 13:28:22 -03:00
Christiaan Baartse
6c7f089ebb fix(translation): Dutch translations for Current datetime filter (#31869) 2025-03-26 22:13:50 +07:00
Beto Dealmeida
68a81c3989 fix: update dataset/query catalog on DB changes (#32829) 2025-03-26 08:56:02 -04:00
Vitor Avila
5222f940cc fix(echarts): Sort series by name using naturalCompare (#32850) 2025-03-26 08:17:43 -03:00
Radovenchyk
45ea11c1b6 docs: added a link to badge releases (#32822)
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-25 12:49:48 -07:00
Michael S. Molina
b624919d2f fix: Bump FAB to 4.6.1 (#32848) 2025-03-25 15:29:19 -03:00
Joe Li
b5cb5f4525 chore: updating files for release 4.1.2 (#32831) 2025-03-25 10:26:37 -07:00
JUST.in DO IT
4a70065e5f fix(log): store navigation path to get correct logging path (#32795) 2025-03-25 10:18:55 -07:00
Fardin Mustaque
7d77dc4fd2 fix: Time Comparison Feature Reverts Metric Labels to Metric Keys in Table Charts (#32665)
Co-authored-by: Fardin Mustaque <fardinmustaque@Fardins-Mac-mini.local>
2025-03-25 14:22:15 +02:00
Chris
6f69c84d10 fix: key error in frontend on disallowed GSheets (#32792) 2025-03-24 15:19:59 -07:00
bmaquet
6b96b37c38 feat: Add current_user_roles() Jinja macro (#32770) 2025-03-24 18:39:07 -03:00
github-actions[bot]
b7435f84f0 chore(🦾): bump python humanize 4.12.1 -> 4.12.2 (#32826)
Co-authored-by: GitHub Action <action@github.com>
2025-03-24 13:44:15 -07:00
github-actions[bot]
7bc349c3c3 chore(🦾): bump python pyparsing 3.2.1 -> 3.2.2 (#32827)
Co-authored-by: GitHub Action <action@github.com>
2025-03-24 13:43:52 -07:00
github-actions[bot]
fd4e45aafc chore(🦾): bump python shillelagh subpackage(s) (#32828)
Co-authored-by: GitHub Action <action@github.com>
2025-03-24 13:43:23 -07:00
github-actions[bot]
b339d7ad20 chore(🦾): bump python click-option-group 0.5.6 -> 0.5.7 (#32825)
Co-authored-by: GitHub Action <action@github.com>
2025-03-24 13:43:01 -07:00
Vitor Avila
cedd186c21 feat(Jinja): to_datetime filter (#32781) 2025-03-24 16:55:37 -03:00
SBIN2010
c6c9114b40 fix: CSV/Excel upload form change column dates description (#32797) 2025-03-24 09:48:39 -07:00
Đỗ Trọng Hải
f4a05a5ffd fix(docs): scrollable table of content right bar in Superset docs (#32801)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-22 10:52:10 -06:00
Đỗ Trọng Hải
a82f916a71 fix(sec): resolve CVE-2025-29907 and CVE-2025-25977 by pinning jspdf to v3 (#32802)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-22 10:50:13 -06:00
Đỗ Trọng Hải
ff0529c932 fix(model/helper): represent RLS filter clause in proper textual SQL string (#32406)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-21 14:36:03 -06:00
CharlesNkdl
c0f83a7467 fix(excel export): big number truncation handling (#32739) 2025-03-21 09:39:59 -07:00
V9 Developer
9bb3a5782d fix(config): correct slack image url in talisman (#32778) 2025-03-21 09:32:51 -07:00
Ruslan
5ec710efc6 fix(css): typos in styles (#28350) 2025-03-20 16:32:02 -06:00
Vladislav Korenkov
5866f3ec83 fix(import): Missing catalog field in saved query schema (#32775)
Co-authored-by: Vladislav Koren'kov <korenkov.vv@dns-shop.ru>
2025-03-20 16:33:51 -04:00
Antonio Rivero
01801e3c36 fix(sqllab): Pass query_id as kwarg so backoff can see it (#32774) 2025-03-20 18:46:31 +01:00
Vladislav Korenkov
d319543377 fix(chart control): Change default of "Y Axis Title Margin" (#32720) 2025-03-20 10:14:09 -03:00
Alexandru Soare
5392bafe28 feat(FormModal): Specialized Modal component for forms (#32721) 2025-03-20 14:28:25 +02:00
Elizabeth Thompson
89ce7ba0b0 fix: do not add calculated columns when syncing (#32761) 2025-03-19 17:33:28 -07:00
Antonio Rivero
376a1f49d3 fix(migrations): fix foreign keys to match FAB 4.6.0 tables (#32759) 2025-03-19 22:47:26 +01:00
Vitor Avila
6042ea8f28 feat(embedded): Force a specific referrerPolicy for the iframe request (#32735) 2025-03-19 15:44:07 -03:00
Giampaolo Capelli
78efb62781 fix: Changing language doesn't affect echarts charts (#31751)
Co-authored-by: Giampaolo Capelli <giampaolo.capelli@docaposte.fr>
2025-03-19 11:38:53 -07:00
github-actions[bot]
e9d5079986 chore(🦾): bump python flask-appbuilder subpackage(s) (#32744)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-19 11:24:24 -07:00
Radovenchyk
c6e0abbe13 chore: replaced the workflow badge link (#32749) 2025-03-19 11:42:47 -06:00
github-actions[bot]
4f166a03f5 chore(🦾): bump python slack-sdk 3.34.0 -> 3.35.0 (#32742)
Co-authored-by: GitHub Action <action@github.com>
2025-03-18 20:46:48 -06:00
sowo
29b62f7c0a fix(contextmenu): uncaught TypeError (#28203) 2025-03-18 20:42:38 -06:00
github-actions[bot]
09ee3e2a1d chore(🦾): bump python shillelagh subpackage(s) (#31255)
Co-authored-by: GitHub Action <action@github.com>
2025-03-18 18:52:34 -07:00
github-actions[bot]
121e424a7f chore(🦾): bump python celery subpackage(s) (#32743)
Co-authored-by: GitHub Action <action@github.com>
2025-03-18 18:52:19 -07:00
github-actions[bot]
66c1a6a875 chore(🦾): bump python sqlglot 26.1.3 -> 26.11.1 (#32745)
Co-authored-by: GitHub Action <action@github.com>
2025-03-18 18:52:00 -07:00
CharlesNkdl
b26c373f4d chore(lang): update and fix french translations (#32711) 2025-03-18 17:31:23 -07:00
github-actions[bot]
4dd318ca68 chore(🦾): bump python flask-appbuilder subpackage(s) (#31251)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-18 16:25:15 -07:00
Enzo Martellucci
ce6d5f5551 refactor(Icons): Replaces custom icons with Ant Design 5 icons (#32112)
Replace custom icons with Ant Design 5 icons to standardize the icon
2025-03-18 22:22:41 +01:00
Daniel Höxtermann
9e3052968b fix: ensure datasource permission in explore (#32679) 2025-03-18 17:15:10 -04:00
github-actions[bot]
3f1ef2a283 chore(🦾): bump python greenlet (#31247)
Co-authored-by: GitHub Action <action@github.com>
2025-03-18 13:01:31 -07:00
Paul Rhodes
bc3e19d0a2 fix(import): Ensure import exceptions are logged (#32410) 2025-03-18 12:35:57 -07:00
Vitor Avila
850801f510 feat(where_in): Support returning None if filter_values return None (#32731) 2025-03-18 13:18:51 -06:00
Evan Rusackas
710af87faf Revert "Revert "fix(asf): moving notifications to the top of .asf.yaml"" (#32732) 2025-03-18 13:18:36 -06:00
Evan Rusackas
6612343f33 Revert "fix(asf): moving notifications to the top of .asf.yaml" (#32730) 2025-03-18 12:22:53 -06:00
Evan Rusackas
c399295a4e fix(docs): Another CSP hole for run.app to allow Kapa AI (#32728) 2025-03-18 13:42:07 -04:00
Evan Rusackas
e34644d983 fix(docs): poking ANOTHER hole in the CSP for the AI bot. (#32727) 2025-03-18 13:33:00 -04:00
Evan Rusackas
cc0097c87a fix(asf): moving notifications to the top of .asf.yaml (#32726) 2025-03-18 13:19:47 -04:00
Evan Rusackas
d71e655a4b fix(docs): allow recaptcha in CSP (#32724) 2025-03-18 13:13:51 -04:00
Beto Dealmeida
99e69c32ee fix: coerce datetime conversion errors (#32683) 2025-03-18 13:09:23 -04:00
PyKen
a2c164a77d chore(helm): bump postgresql image tag in helm values (#32686) 2025-03-18 09:49:32 -07:00
Evan Rusackas
78d2a584b7 chore(asf): Another .asf.yaml touch-up. (#32714) 2025-03-18 09:27:27 -06:00
Evan Rusackas
f0c8c12c1a chore(docs): touching up AI styling/text (#32689) 2025-03-17 21:25:48 -06:00
Evan Rusackas
34cd741e9b fix(docs): Fixes scrolling issue with AI widget on docs site (#32713) 2025-03-17 21:25:35 -06:00
Evan Rusackas
1684ddc7e6 chore(asf): trying to fix .asf.yaml again to re-enable Discussions (#32712) 2025-03-17 21:01:54 -06:00
Vitor Avila
e35145c816 feat(file uploads): List only allowed schemas in the file uploads dialog (#32702) 2025-03-17 23:36:16 -03:00
Evan Rusackas
4adf44a43c chore(asf): Removing notifications from .asf.yaml - they still don't work :( (#32710) 2025-03-17 17:12:48 -06:00
JUST.in DO IT
cd5a94305c fix(logging): missing path in event data (#32708) 2025-03-17 14:46:34 -07:00
Evan Rusackas
b4602aaf28 chore(asf): fixing(?) .asf.yaml (#32709) 2025-03-17 15:43:45 -06:00
Evan Rusackas
3e69ba1384 fix(repo): re-enable GitHub Discussions (#32703) 2025-03-17 14:34:50 -06:00
Beto Dealmeida
41bf215367 fix: boolean filters in Explore (#32701) 2025-03-17 14:38:00 -04:00
Evan Rusackas
06deaebe19 fix(docs): poking a CSP hole for Kapa AI widget (#32704) 2025-03-17 11:33:15 -06:00
Sam Firke
6a13ab8920 fix(spreadsheet uploads): make file extension comparisons case-insensitive (#32696) 2025-03-17 11:31:11 -06:00
Đỗ Trọng Hải
f1a222d356 fix(cosmetics): allow toast message to be toggled off when modal is opened (#32691)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-17 11:28:04 -06:00
Đỗ Trọng Hải
a87bedf31a docs(api): correct attribute name instead of table for GET table_metadata in openapi.json (#32690)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-17 11:26:47 -06:00
Đỗ Trọng Hải
890b6079b9 build(dev-deps): bump prettier to v3.5.3 and follow-up refactor (#32688)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-17 11:24:24 -06:00
Rytis Ulys
9c62456487 chore: add Oxylabs to INTHEWILD.md (#32697) 2025-03-17 10:18:52 -07:00
Geido
414cdbf83a fix(no-restricted-imports): Fix overrides and include no-fa-icons-usage (#32571) 2025-03-17 18:52:52 +02:00
Michael S. Molina
df06bdf33b fix: Signature of Celery pruner jobs (#32699) 2025-03-17 13:52:31 -03:00
JUST.in DO IT
449f51aed5 fix(log): Update recent_activity by event name (#32681) 2025-03-17 09:18:47 -07:00
Paul Rhodes
c9e2c7037e feat: Implement sparse import for ImportAssetsCommand (#32670) 2025-03-17 08:44:15 -06:00
Đỗ Trọng Hải
a49a15f990 chore(docs): remove customized "Edit this page on GitHub" button (#32407)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-15 21:27:10 +07:00
Evan Rusackas
eb39ddbfe3 feat(docs): Adding Kapa.ai integration (#32682) 2025-03-15 12:34:42 +07:00
dependabot[bot]
974d36d35e chore(deps): bump jinja2 from 3.1.5 to 3.1.6 in /superset/translations (#32580)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-15 12:17:18 +07:00
Beto Dealmeida
b64e3254fc feat: DB migration for dataset folders (#32680) 2025-03-14 17:16:02 -04:00
Maxime Beauchemin
9907db9e1a feat: add a note to install cors-related dependency when using ENABLE_CORS (#32662) 2025-03-14 10:33:31 -07:00
Michael S. Molina
b4dd64aa24 fix: Update RELEASING/README.md (#32678) 2025-03-14 10:28:18 -07:00
Pedro Martin-Steenstrup
6e049225f9 docs: add Hometogo to users list (#32668) 2025-03-14 10:27:28 -07:00
Beto Dealmeida
831369a44b fix(gsheets): update params from encrypted extra (#32661) 2025-03-14 12:00:53 -04:00
Evan Rusackas
7c9c30db1d chore(examples): Touching up Vehicle Sales a bit (#32623) 2025-03-14 09:31:02 -06:00
Vitor Avila
0c6d868483 fix(import): Import a DB connection with expanded rows enabled (#32657) 2025-03-14 12:02:39 -03:00
Andrey Yakir
777760b096 fix(dashboard): Ensure dashboardId is included in form_data for embedded mode (#32646) 2025-03-14 10:36:42 -04:00
Vitor Avila
e8ad096173 fix(sync perms): Avoid UnboundLocalError during perm sync for DBs that don't support catalogs (#32658) 2025-03-13 21:07:49 -03:00
Dolph Mathews
2f6f5c6778 fix: Upgrade node base image to Debian 12 bookworm (#32652) 2025-03-13 12:56:24 -07:00
JUST.in DO IT
832e028b39 fix(welcome): perf on distinct recent activities (#32608) 2025-03-13 09:44:48 -07:00
Beto Dealmeida
d92af9c95c chore: simplify user impersonation (#32485) 2025-03-13 12:43:05 -04:00
Ville Brofeldt
12435159db chore: add unique option to index migration utils (#32641) 2025-03-13 08:55:24 -07:00
Beto Dealmeida
8695239372 feat: OAuth2StoreTokenCommand (#32546) 2025-03-13 09:45:24 -04:00
SkinnyPigeon
29b4c40e43 feat(reports): removing index column (#32366) 2025-03-12 16:27:07 -07:00
github-actions[bot]
53471072f4 chore(🦾): bump python paramiko 3.5.0 -> 3.5.1 (#32575)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:52:24 -07:00
github-actions[bot]
bf902b2240 chore(🦾): bump python croniter 5.0.1 -> 6.0.0 (#32639)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:44:30 -07:00
github-actions[bot]
4b4912ba99 chore(🦾): bump python flask-session subpackage(s) (#32637)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:43:21 -07:00
github-actions[bot]
fa890ecb23 chore(🦾): bump python celery subpackage(s) (#32638)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:43:05 -07:00
github-actions[bot]
67af8bd730 chore(🦾): bump python importlib-metadata 8.5.0 -> 8.6.1 (#32636)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:42:17 -07:00
github-actions[bot]
f5eca4fe0b chore(🦾): bump python simplejson 3.19.3 -> 3.20.1 (#32635)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:41:59 -07:00
github-actions[bot]
057423ed92 chore(🦾): bump python flask-caching 2.3.0 -> 2.3.1 (#32634)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:41:36 -07:00
github-actions[bot]
7dbe608d27 chore(🦾): bump python sshtunnel subpackage(s) (#32629)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 14:40:32 -07:00
Maxime Beauchemin
d8d4b75a11 chore: fix precommit for eslint (#32596) 2025-03-12 11:26:56 -07:00
Maxime Beauchemin
664047f3fb chore: fix precommit for eslint (#32596) 2025-03-12 11:26:36 -07:00
github-actions[bot]
1e20b048d3 chore(🦾): bump python sqlparse 0.5.2 -> 0.5.3 (#32631)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 10:43:23 -07:00
github-actions[bot]
6c1806df74 chore(🦾): bump python greenlet 3.0.3 -> 3.1.1 (#32628)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 10:42:46 -07:00
github-actions[bot]
d97d991b5f chore(🦾): bump python humanize 4.11.0 -> 4.12.1 (#32632)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 10:40:37 -07:00
github-actions[bot]
90e18e37d0 chore(🦾): bump python nh3 0.2.19 -> 0.2.21 (#32630)
Co-authored-by: GitHub Action <action@github.com>
2025-03-12 10:40:03 -07:00
github-actions[bot]
c5a2bc5484 chore(🦾): bump python flask-migrate subpackage(s) (#32578)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-12 10:19:05 -07:00
github-actions[bot]
2ecc7e4f56 chore(🦾): bump python pyparsing 3.2.0 -> 3.2.1 (#32577)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-12 10:18:25 -07:00
JUST.in DO IT
9f79c5ab4d fix(sqllab): Grid header menu (#32381) 2025-03-12 09:48:33 -07:00
JUST.in DO IT
e7721a8c4d fix(dashboard): Support bigint value in native filters (#32549) 2025-03-12 09:47:41 -07:00
dependabot[bot]
c8f5089f7a chore(deps-dev): bump axios from 1.7.7 to 1.8.2 in /superset-embedded-sdk (#32581)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-12 21:34:04 +07:00
dependabot[bot]
a0ea905a7a chore(deps): bump axios from 1.7.8 to 1.8.2 in /docs (#32582)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-11 21:37:16 -06:00
dependabot[bot]
b8fd1a30ee chore(deps-dev): bump axios from 1.7.9 to 1.8.2 in /superset-frontend (#32583)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-11 21:36:52 -06:00
dependabot[bot]
ff9ae54ae9 chore(deps): bump @babel/runtime-corejs3 from 7.26.9 to 7.26.10 in /docs (#32603)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-11 21:27:43 -06:00
dependabot[bot]
a16de15015 chore(deps): bump @babel/helpers from 7.24.5 to 7.26.10 in /docs (#32598)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-11 21:26:45 -06:00
dependabot[bot]
079e40144e chore(deps): bump @babel/runtime from 7.26.9 to 7.26.10 in /docs (#32604)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-11 21:24:44 -06:00
Evan Rusackas
a3f3a35c20 docs(analytics): actually USING Matomo to track page views/changes (#32607) 2025-03-11 17:23:57 -06:00
Maxime Beauchemin
4fdeab8dad docs: fix typo in ephemeral envs docs (#32605) 2025-03-11 16:17:30 -07:00
Maxime Beauchemin
9ea58381f4 docs: add information about ephemeral environments (#32600) 2025-03-11 15:26:54 -07:00
RealGreenDragon
85d51f5c9a chore: bump postgresql from 15 to 16 (#32597) 2025-03-11 15:26:26 -07:00
dependabot[bot]
3b1d763421 chore(deps): bump @babel/helpers from 7.17.2 to 7.26.10 in /superset-frontend/cypress-base (#32602)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-11 15:25:33 -07:00
github-actions[bot]
91ab123860 chore(🦾): bump python slack-sdk 3.33.4 -> 3.34.0 (#32576)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-11 15:15:52 -07:00
Vitor Avila
8e021b0c82 fix(Slack V2): Specify the filename for the Slack upload method (#32599) 2025-03-11 18:42:17 -03:00
github-actions[bot]
7aa89db8d0 chore(🦾): bump python pandas subpackage(s) (#32579)
Co-authored-by: GitHub Action <action@github.com>
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
2025-03-11 12:36:26 -07:00
Sam Firke
d3ba2755e8 feat(charts): add two new boxplot parameter sets (#32170) 2025-03-11 14:58:02 -04:00
Maxime Beauchemin
0b0e0e9ce8 chore(🦾): bump python cryptography 43.0.3 -> 44.0.2 (#32573) 2025-03-11 09:08:38 -07:00
Đỗ Trọng Hải
979f890cd5 fix(comp/async-ace-editor): proper import of ace-builds (#32553) 2025-03-11 19:13:08 +07:00
Michael S. Molina
89b6d7fb68 fix: Log table retention policy (#32572) 2025-03-10 18:47:57 -03:00
EmmanuelCbd
644882faff chore(docs): Add Flowbird to users list (#32561) 2025-03-10 10:21:23 -06:00
Mehmet Salih Yavuz
edfcbed24f refactor(input): Remove leftover direct usage of Ant Design input (#32545) 2025-03-10 11:22:47 +01:00
Ville Brofeldt
f45ab70080 chore: bump node to v20.18.3 (#32550) 2025-03-09 20:48:06 +07:00
Elizabeth Thompson
33aa9030bf fix: add DateOffset to json serializer (#32532) 2025-03-07 16:15:06 -08:00
JUST.in DO IT
4c3aae7583 fix(sqllab): Allow clear on schema and catalog (#32515) 2025-03-07 13:28:05 -08:00
Jake Hoban
c5dd52bcc9 docs: add Canonical to INTHEWILD.md (#32547) 2025-03-07 11:20:34 -08:00
Geido
eae7cf81b0 chore(Ant Design): Remove unnecessary exports from version 4 (#32544) 2025-03-07 18:56:12 +01:00
Antonio Rivero
20e5df501e fix(migrations): Handle comparator None in old time comparison migration (#32538) 2025-03-07 16:30:24 +01:00
Ville Brofeldt
68e8d9858c fix: always extract query source from request (#32525) 2025-03-06 14:17:21 -08:00
Elizabeth Thompson
99238dccbb fix: keep calculated columns when datasource is updated (#32523) 2025-03-06 13:45:52 -08:00
Beto Dealmeida
626736bdd3 chore: add logging to index error (#31770) 2025-03-06 14:47:40 -05:00
Elizabeth Thompson
c2de749d0e fix: Show response message as default error (#32507) 2025-03-06 10:58:22 -08:00
Vitor Avila
9ad9ea67cf chore: Caching the Slack channels list (#32529)
Co-authored-by: Elizabeth Thompson <eschutho@gmail.com>
2025-03-06 14:59:12 -03:00
Ville Brofeldt
82595df6f9 chore(ci): use npm/yarn lock files where possible (#32527) 2025-03-06 06:47:48 -08:00
dependabot[bot]
281d1a8ec4 chore(deps-dev): bump eslint-config-prettier from 8.10.0 to 10.0.2 in /docs (#32448)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-06 20:44:22 +07:00
Vitor Avila
d2e0e2b79c fix(Slack): Fix Slack recipients migration to V2 (#32336) 2025-03-06 08:52:15 -03:00
Usiel Riedl
05409d51da feat(slack): adds rate limit error handler for Slack client (#32510) 2025-03-06 08:01:12 -03:00
Usiel Riedl
e98194cdd3 fix(beat): prune_query celery task args fix (#32511) 2025-03-05 19:25:53 -08:00
Ramachandran A G
317532752c feat(KustoKQL): Update KQL alchemy version and update timegrain expressions (#32509) 2025-03-05 17:53:47 -08:00
Ville Brofeldt
c90e45a373 feat: make user agent customizable (#32506) 2025-03-05 16:33:24 -08:00
Paul Rhodes
8decc9e45f feat(api): Added uuid to list api calls (#32414) 2025-03-05 16:28:29 -08:00
dependabot[bot]
8053833e1f chore(deps-dev): bump globals from 15.9.0 to 16.0.0 in /superset-websocket (#32437)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-05 14:37:28 -07:00
dependabot[bot]
07221d8859 chore(deps): bump markdown-to-jsx from 7.7.3 to 7.7.4 in /superset-frontend (#32456)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-05 14:36:42 -07:00
Ville Brofeldt
c1abe1ec44 chore(ci): show more failed pre-commit context (#32517) 2025-03-05 11:39:07 -08:00
Kamil Gabryjelski
b3dfd4930a fix(explore): Glitch in a tooltip with metric's name (#32499) 2025-03-05 16:55:00 +01:00
Daniel Vaz Gaspar
fc844d3dfd fix: dashboard, chart and dataset import validation (#32500) 2025-03-05 08:47:49 +00:00
dependabot[bot]
d8686c2d12 chore(deps-dev): update @babel/types requirement from ^7.26.3 to ^7.26.9 in /superset-frontend/plugins/plugin-chart-pivot-table (#32470)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Joe Li <joe@preset.io>
2025-03-04 13:52:35 -08:00
dependabot[bot]
90388885db chore(deps-dev): bump @typescript-eslint/eslint-plugin from 8.19.0 to 8.26.0 in /superset-websocket (#32503)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 13:51:42 -08:00
Maxime Beauchemin
33370eaa5c chore: enable dependabot using uv for auto-bumping python packages (#32501) 2025-03-04 12:23:12 -08:00
Emad Rad
2b53b1800e chore: various markdown warnings resolved (#30657)
Co-authored-by: Evan Rusackas <evan@preset.io>
2025-03-04 12:45:49 -07:00
dependabot[bot]
807dcddc28 chore(deps): bump @deck.gl/react from 9.1.0 to 9.1.4 in /superset-frontend (#32453)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 12:31:34 -07:00
dependabot[bot]
a45ce1e8d1 chore(deps-dev): bump @babel/types from 7.26.7 to 7.26.9 in /superset-frontend (#32460)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 12:29:50 -07:00
dependabot[bot]
3d5128735b chore(deps): bump @rjsf/utils from 5.24.1 to 5.24.3 in /superset-frontend (#32461)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 12:29:37 -07:00
dependabot[bot]
6173a6c329 chore(deps): bump chrono-node from 2.7.7 to 2.7.8 in /superset-frontend (#32462)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 12:29:21 -07:00
Beto Dealmeida
813e79fa9f fix: skip DB filter when doing OAuth2 (#32486) 2025-03-04 13:33:53 -05:00
Evan Rusackas
c0e92b1639 feat(flag flip): Setting Horizontal Filters to True by default. (#32317)
Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com>
2025-03-04 11:26:14 -07:00
dependabot[bot]
ef08ccbaa2 chore(deps-dev): bump @types/jsonwebtoken from 9.0.6 to 9.0.9 in /superset-websocket (#32440)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 11:25:59 -07:00
dependabot[bot]
93d759c689 chore(deps): bump swagger-ui-react from 5.19.0 to 5.20.0 in /docs (#32454)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 11:25:35 -07:00
dependabot[bot]
0d24ce0ef9 chore(deps-dev): bump @types/lodash from 4.17.14 to 4.17.16 in /superset-frontend (#32476)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 11:25:21 -07:00
dependabot[bot]
a4902a3685 chore(deps): bump antd from 5.24.1 to 5.24.2 in /docs (#32447)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 10:23:43 -08:00
dependabot[bot]
16b08e333d chore(deps-dev): bump webpack from 5.97.1 to 5.98.0 in /docs (#32449)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-04 10:05:11 -07:00
Daniel Vaz Gaspar
15cf06699a feat: security, user group support (#32121) 2025-03-04 09:54:12 +00:00
dependabot[bot]
fe33661821 chore(deps-dev): bump typescript from 5.1.6 to 5.8.2 in /docs (#32452)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-03 21:35:23 -07:00
Maxime Beauchemin
2b98f326e8 feat: cache the frontend's bootstrap data (#31996) 2025-03-03 17:02:38 -08:00
Giampaolo Capelli
d7e0ee6ceb fix(docker compose): replace port 8088 with 9000 (#32481) 2025-03-03 14:55:48 -08:00
dependabot[bot]
ce367d6427 chore(deps-dev): bump @docusaurus/tsconfig from 3.6.3 to 3.7.0 in /docs (#32087)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Evan Rusackas <evan@rusackas.com>
2025-03-03 14:50:58 -08:00
Evan Rusackas
6c3886aad0 fix(tooltip): displaying <a> tags correctly (#32488) 2025-03-03 15:04:05 -07:00
Beto Dealmeida
5af4e61aff feat: improve GSheets OAuth2 (#32048) 2025-03-03 12:55:54 -05:00
Ville Brofeldt
5766c36372 fix(plugin-chart-echarts): remove erroneous upper bound value (#32473) 2025-03-01 22:07:12 -08:00
Đỗ Trọng Hải
61b72f0c0b fix(com/grid-comp/markdown): pin remark-gfm to v3 to allow inline code block by backticks in Markdown (#32420)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-03-01 15:55:31 -07:00
Vitor Avila
d79f7b28c2 feat: Update database permissions in async mode (#32231) 2025-02-28 21:25:47 -03:00
Le Xich Long
84b52b2323 fix(clickhouse): get_parameters_from_uri failing when secure is true (#32423) 2025-02-28 16:24:14 -05:00
Tejaswa Jain
eacb234872 docs: add SingleStore to the users list (#32421) 2025-02-28 12:30:42 -07:00
Damian Pendrak
6317a91541 fix(viz): update nesting logic to handle multiple dimensions in PartitionViz (#32290) 2025-02-28 16:57:11 +01:00
Beto Dealmeida
128c45e2d3 fix: prevent nested transactions (#32401) 2025-02-28 09:59:03 -05:00
Alexandru Soare
4d6b4f8343 feat(filter): adding inputs to Numerical Range Filter (#31726)
Co-authored-by: Diego Pucci <diegopucci.me@gmail.com>
Co-authored-by: Mehmet Salih Yavuz <salih.yavuz@proton.me>
2025-02-28 15:42:24 +01:00
Đỗ Trọng Hải
789049d386 docs(config): fill in commonly connection string for Oracle, Presto and SQL Server databases (#32385) 2025-02-28 08:32:29 +07:00
Đỗ Trọng Hải
cf7ce31054 build(deps): bump major versions for math-expression-evaluator and fetch-mock + clean up obsolete dev/override packages (#32322) 2025-02-28 08:06:19 +07:00
EmmanuelCbd
2c851b7580 feat(i18n): Add polish to default language (#31506) 2025-02-27 19:02:32 +01:00
Beto Dealmeida
f4105e9ed2 feat: default ports for SSH tunnel (#32403) 2025-02-27 10:59:48 -05:00
Dev10-34
74733ae310 feat: Adding the option and feature to enable borders with color, opacity and width control on heatmaps along with white borders on emphasis (#32358)
Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com>
2025-02-27 11:30:25 -03:00
omahs
1d823a0be5 chore(docs): fix typos (#32393) 2025-02-27 13:57:36 +07:00
ekansh-shipmnts
00429558c2 docs: add shipmnts to users list (#32396)
Co-authored-by: Ekansh Gupta <ekanshgupta@Ekanshs-MacBook-Pro.local>
2025-02-26 10:32:39 -08:00
Joe Li
dae6acf028 chore(docs): update instructions for pypi distribution (#32380) 2025-02-25 12:35:03 -08:00
Yuri
822d72c57d fix(pinot): revert join and subquery flags (#32382) 2025-02-25 13:29:25 -07:00
Daniel Vaz Gaspar
c02a0a00f4 fix: bump FAB to 4.5.4 (#32325) 2025-02-25 19:06:41 +00:00
Sam Firke
a08c18febe docs(intro): broaden link to installation options (#32379) 2025-02-25 13:37:30 -05:00
Kamil Gabryjelski
479a5d2f72 chore: Upgrade AG Grid to use tree shaking (#32334) 2025-02-25 19:00:53 +01:00
Evan Rusackas
793fbac405 chore(cleanup): removing accidentally committed package/lock files. (#32365) 2025-02-25 11:33:42 -05:00
Daniel Vaz Gaspar
167dacc2e4 fix: ephemeral CI fetching task ENI (#32377) 2025-02-25 15:51:49 +00:00
Beto Dealmeida
00883c395c feat: allow importing encrypted_extra (#32339) 2025-02-24 19:29:04 -05:00
Beto Dealmeida
83071d0e5f fix: ensure metric_macro expands templates (#32344) 2025-02-24 18:08:50 -05:00
Enzo Martellucci
b0dac046e6 refactor(DrillDetailTableControls): Upgrade DrillDetailTableControls component to Ant Design 5 (#32313) 2025-02-24 23:19:40 +02:00
Dino
8dcae810d4 fix: clickhouse-connect engine SSH parameter (#32348) 2025-02-24 12:58:49 -08:00
Evan Rusackas
b43e2ac8f4 chore(tests): converting enzyme to RTL, part 3 (#32363) 2025-02-24 12:08:52 -07:00
Thomas Shallenberger
bc02f05613 feat(number-format): adds memory data transfer rates in binary and decimal format (#32264) 2025-02-24 11:03:56 -08:00
Evan Rusackas
90651dfe3e fix(dev/ci): pre-commit fixes galore (#32352) 2025-02-24 11:26:45 -07:00
alveifbklsiu259
c583eec4c7 fix(eslint-hook): ensure eslint hook receives arguments (#32333) 2025-02-24 08:57:48 -08:00
Vedant Prajapati
0f07d78e01 fix(docker): Configure nginx for consistent port mapping and hot reloading (#32362) 2025-02-24 08:53:51 -08:00
Beto Dealmeida
22fe985cfc fix(firebolt): allow backslach escape for single quotes (#32350) 2025-02-24 11:12:34 -05:00
Enzo Martellucci
ace8a3adb7 refactor(DatabaseSelector): Changes the imported types from antd-4 to antd-5 (#32314) 2025-02-23 19:56:36 +01:00
Enzo Martellucci
4c4b5e8c64 fix(SSHTunnelForm): make the password tooltip visible (#32356) 2025-02-23 17:27:31 +01:00
Levis Mbote
2c37ddb2f6 fix(roles): Add SqlLabPermalinkRestApi as default sqlab roles. (#32284) 2025-02-21 15:42:35 -07:00
amineBouilzmin
b06a9edfd6 chore(docs): Fix typo in security.mdx (#32349) 2025-02-21 14:29:00 -07:00
alveifbklsiu259
5140250421 ci(type-checking): run type-checking-frontend hook sequentially (#32323) 2025-02-21 12:26:52 -08:00
Đỗ Trọng Hải
88cf2d5c39 fix(fe/dashboard-list): display modifier info for Last modified data (#32035)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-21 12:14:08 -07:00
Elizabeth Thompson
422a07b382 fix: revert "fix: remove sort values on stacked totals (#31333)" (#32337) 2025-02-21 10:35:06 -08:00
Đỗ Trọng Hải
f820f9a976 chore(build): remove Lodash filter and noop usage in superset-frontend (#32341)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-21 19:29:07 +07:00
Guen Prawiroatmodjo
c27aee2b14 chore(duckdb): Bump duckdb-engine, duckdb versions (#32302) 2025-02-20 16:29:55 -07:00
Dmitry Kochnev
7ce1a3445c fix: oauth2 trino (#31993) 2025-02-20 16:04:11 -07:00
Kamil Gabryjelski
42a3c523ae fix: Download as PDF fails due to cache error (#32332) 2025-02-20 10:59:39 -08:00
dependabot[bot]
bb46dd93be chore(deps): bump swagger-ui-react from 5.18.2 to 5.19.0 in /docs (#32330)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-20 10:38:10 -08:00
dependabot[bot]
b207f0616d chore(deps): bump antd from 5.22.7 to 5.24.1 in /docs (#32329)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-20 10:36:52 -08:00
dependabot[bot]
9dcf788f47 chore(deps): bump @docsearch/react from 3.8.2 to 3.9.0 in /docs (#32327)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-20 10:35:38 -08:00
Evan Rusackas
6900bc1855 chore(readme): updating video on Readme page. (#32319) 2025-02-20 10:25:27 -08:00
Ghazi Triki
b09bfd7889 chore(docs): Add RIADVICE to companies using Superset (#32326) 2025-02-20 10:56:50 -05:00
Đỗ Trọng Hải
2d8892958e docs: various enhancements across /docs workspace (#31921)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-20 09:18:49 -05:00
dependabot[bot]
f9a43921c5 chore(deps): bump core-js from 3.39.0 to 3.40.0 in /superset-frontend/packages/superset-ui-demo (#32066)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-19 17:26:16 -07:00
dependabot[bot]
e74efd3072 chore(deps-dev): bump @docusaurus/module-type-aliases from 3.6.3 to 3.7.0 in /docs (#32088)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-19 17:12:59 -07:00
Steven Liu
d5a5bd46d2 fix: keep the tab order (#30888)
Co-authored-by: Steven Liu <steven.l@covergenius.com>
2025-02-19 16:26:36 -07:00
alveifbklsiu259
e422e3c620 feat(type-checking): Add type-checking pre-commit hooks (#32261) 2025-02-19 15:12:17 -08:00
Evan Rusackas
b269d920a9 chore(code owners): adding @mistercrunch to cypress/e2e code owners (#32316) 2025-02-19 13:47:18 -07:00
Evan Rusackas
de2bce6f47 chore(tests): Trying to kill enzyme, part 2 (more RTL!) (#32226)
Co-authored-by: JUST.in DO IT <justin.park@airbnb.com>
2025-02-19 11:33:38 -07:00
dependabot[bot]
e061116032 chore(deps-dev): bump typescript from 5.7.2 to 5.7.3 in /docs (#32090)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-19 20:17:08 +07:00
dependabot[bot]
878bcbd8c7 chore(deps-dev): bump @babel/preset-env from 7.26.0 to 7.26.7 in /superset-frontend (#32103)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-19 18:29:39 +07:00
Đỗ Trọng Hải
093135ff30 chore(be/deps): add comments for un-greppable Python dependencies (#32259) 2025-02-17 19:09:35 +07:00
Đỗ Trọng Hải
734f8ed4c3 fix(sec): resolve Dependabot security alerts (#32274) 2025-02-17 08:02:09 +07:00
Đỗ Trọng Hải
dcc9628f31 fix(viz/table): selected column not shown in Conditional Formatting popover (#32272)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-16 16:39:00 -07:00
dependabot[bot]
321d105c42 chore(deps): bump dompurify from 3.2.3 to 3.2.4 in /superset-frontend (#32270)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

Stability: no breaking change as described 3.2.4's GH release

Functionality: fixes Moderate GHSA-vhxf-7vqr-mrjg (CVE-2025-26791)
2025-02-16 14:42:22 +07:00
Đỗ Trọng Hải
460aec7bc9 build(fe/dev-deps): remove unused esbuild dev deps (#32243)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-16 12:07:06 +07:00
Michael S. Molina
ffe9244458 fix: Decimal values for Histogram bins (#32253) 2025-02-14 09:04:23 -03:00
dependabot[bot]
fa09d8187a chore(deps): bump cryptography from 43.0.3 to 44.0.1 (#32236)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-14 11:11:07 +00:00
Erkka Tahvanainen
9da30956c0 fix(Datasource): handle undefined datasource_type in fetchSyncedColumns (#32218)
Co-authored-by: Erkka Tahvanainen <erkka.tahvanainen@confidently.fi>
2025-02-13 17:31:17 -08:00
Mehmet Salih Yavuz
9c7835a244 docs(api): Improve api documentation for dashboard endpoints(filter_state, permalink, embedded) (#32142) 2025-02-13 19:02:45 +02:00
gpchandran
ad057324b7 fix: upgrade to 3.11.11-slim-bookworm to address critical vulnerabilities (#32240) 2025-02-13 08:32:48 -03:00
Beto Dealmeida
2c583d1584 feat: recursive metric definitions (#32228) 2025-02-12 22:00:44 -05:00
Elizabeth Thompson
15fbb195e9 fix: remove sort values on stacked totals (#31333) 2025-02-12 16:56:53 -08:00
Maxime Beauchemin
5867b87680 docs: adding notes about using uv instead of raw pip (#32239) 2025-02-12 15:37:54 -08:00
Fardin Mustaque
52563d3eea fix: Update 'Last modified' time when modifying RLS rules (#32227)
Co-authored-by: Fardin Mustaque <fardinmustaque@Fardins-Mac-mini.local>
2025-02-12 12:22:15 -08:00
Đỗ Trọng Hải
21348c418a chore(backend): replace insecure shortid usage for native filter migration with native uuid Python implementation (#32235)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-12 13:15:51 -03:00
Levis Mbote
af3589fe91 fix(Scope): Correct issue where filters appear out of scope when sort is unchecked. (#32115) 2025-02-12 14:32:20 +01:00
JUST.in DO IT
937d40cdde fix(sqllab): close the table tab (#32224) 2025-02-11 13:13:47 -08:00
Evan Rusackas
319a860f23 chore: Working toward killing enzyme and cleaning up test noise. (#32207) 2025-02-11 12:14:36 -07:00
Maxime Beauchemin
d3b854a833 fix: set Rich tooltip -> 'Show percentage' to false by default (#32212) 2025-02-11 10:58:49 -08:00
Enzo Martellucci
650fa5ccfb fix(SaveDatasetModal): repairs field alignment in the SaveDatasetModal component (#32222)
Co-authored-by: Geido <60598000+geido@users.noreply.github.com>
2025-02-11 19:16:54 +01:00
Đỗ Trọng Hải
db70c7912c chore(fe): migrate 4 Enzyme-based tests to RTL (#31634)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-11 09:01:59 -07:00
xavier-GitHub76
3160607aaf docs: Permissions 'can this form get on UserInfoEditView' and 'can this form get on UserInfoEditView' are not associated with Aplha and Gamma by default (#32180)
Co-authored-by: Xavier RICHARD <xavier.richard@developpement-durable.gouv.fr>
2025-02-11 14:52:13 +00:00
Beto Dealmeida
eec54affc3 fix: hidrate datasetsStatus (#32211) 2025-02-11 09:50:45 -05:00
Daniel Vaz Gaspar
31d6f5a639 chore(ci): fix ephemeral env null issue number (v2) (#32221) 2025-02-11 14:32:01 +00:00
Daniel Vaz Gaspar
60424c4ccd chore(ci): fix ephemeral env null issue number (#32220) 2025-02-11 14:06:43 +00:00
Mehmet Salih Yavuz
60bbd72028 feat(dropdown accessibility): Wrap dropdown triggers with buttons for accessibility (#32189) 2025-02-11 13:09:35 +02:00
Đỗ Trọng Hải
a78968c68e chore(ci): consolidate Node version reference in CI to associated .nvmrc (#32192)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-10 11:52:36 -08:00
Maxime Beauchemin
1c3ec21e0f chore: migrating easy-to-migrate AntD vanilla components (#32010) 2025-02-10 10:46:28 -08:00
Maxime Beauchemin
8d1fb9c82d fix: false negative on critical security related to eslint-plugin-translation-vars (#32018) 2025-02-10 10:45:54 -08:00
Jonathan Morales Vélez
f01493277f docs(docker-compose): remove extra backticks (#32206) 2025-02-10 15:26:34 -03:00
Damian Pendrak
0f6bd5ea83 fix: handlebars html and css templates reset on dataset update (#32195) 2025-02-10 16:40:55 +01:00
Alexandru Soare
0030f46d2d refactor(Popover): Upgrade Popover to Antd5 (#31973)
Co-authored-by: Geido <60598000+geido@users.noreply.github.com>
2025-02-10 16:38:17 +02:00
Alex Duan
06f8f8e608 fix: TDengine move tdengine.png to databases/ subfolder (#32176) 2025-02-07 10:39:54 -08:00
Michael S. Molina
a144464506 fix: Adds an entry to UPDATING.md about DISABLE_LEGACY_DATASOURCE_EDITOR (#32185) 2025-02-07 15:30:18 -03:00
Levis Mbote
2770bc0865 fix(sqllab): correct URL format for SQL Lab permalinks (#32154) 2025-02-07 19:57:07 +02:00
Mehmet Salih Yavuz
bcc61bd933 refactor(Dropdown): Migrate Dropdown to Ant Design 5 (#31972) 2025-02-07 18:38:04 +01:00
asritha
38c46fcafd docs(typo): PostgresQL corrected to PostgreSQL (#32188) 2025-02-07 12:29:22 -05:00
Jack
f3e7c64de6 fix(virtual dataset sync): Sync virtual dataset columns when changing the SQL query (#30903)
Co-authored-by: Kamil Gabryjelski <kamil.gabryjelski@gmail.com>
2025-02-07 18:16:44 +01:00
Levis Mbote
f9f8c5d07a fix(sqllab): correct URL format for SQL Lab permalinks (#32154) 2025-02-07 18:53:29 +02:00
Adrian Koszałka
c5f4a7f302 feat: Add parseJson Handlebars Helper to Support Processing Nested JSON Data (#31998)
Co-authored-by: AdrianKoszalka <adrian.koszalka@techminers.com>
2025-02-06 15:44:49 -07:00
Elizabeth Thompson
389aae270b chore: add query context data tests (#32157) 2025-02-06 14:33:38 -08:00
dependabot[bot]
e97eb71a52 chore(deps): bump less from 4.2.1 to 4.2.2 in /docs (#32085)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-06 14:22:27 -08:00
EmmanuelCbd
5a8488af36 fix(docker): Docker python-translation-build (#32163) 2025-02-06 12:49:36 -08:00
Adrian Koszałka
205cff3a94 feat: Add parseJson Handlebars Helper to Support Processing Nested JSON Data (#31998)
Co-authored-by: AdrianKoszalka <adrian.koszalka@techminers.com>
2025-02-06 12:48:28 -08:00
Alex Duan
649a0dec6c feat: add TDengine.py driver to db_engine (#32041)
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
2025-02-06 12:45:55 -08:00
Beto Dealmeida
e8990f4a36 fix: ScreenshotCachePayload serialization (#32156) 2025-02-06 15:13:40 -05:00
Chris Chinchilla
acf91e1f60 docs: fix typo in docker compose (#32171) 2025-02-06 13:01:23 -05:00
Antonio Rivero
6ed9dae2f7 fix(migrations): Handle no params in time comparison migration (#32155) 2025-02-05 23:00:22 +01:00
Đỗ Trọng Hải
ea5879bf2b fix(releasing): fix borked SVN-based image building process (#32151)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-02-05 13:06:21 -03:00
Beto Dealmeida
c7c3b1b0e9 fix: move oauth2 capture to get_sqla_engine (#32137) 2025-02-04 18:24:05 -05:00
Michael S. Molina
c64018d421 fix: Local tarball Docker container is missing zstd dependency (#32135) 2025-02-04 16:02:01 -03:00
Michael S. Molina
53d944d013 fix: No virtual environment when running Docker translation compiler (#32133) 2025-02-04 10:43:25 -03:00
Benjami
9aa8b09505 docs: incorrect psycopg2 package in k8s install instructions (#31999) 2025-02-04 02:33:43 -08:00
Elizabeth Thompson
8984f88a3e chore(timeseries charts): adjust legend width by padding (#32030) 2025-02-03 14:11:09 -08:00
Michael S. Molina
386aa93e24 fix: Histogram examples config (#32122) 2025-02-03 13:53:34 -03:00
Daniel Vaz Gaspar
0cd0fcdecb fix(ci): ephemeral env, handle different label, create comment (#32040) 2025-02-03 16:13:22 +00:00
Mehmet Salih Yavuz
cde2d49c95 fix(datepicker): Full width datepicker on filter value select (#32064) 2025-02-03 17:51:05 +02:00
1410 changed files with 95423 additions and 43109 deletions

View File

@@ -17,6 +17,12 @@
# https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
---
notifications:
commits: commits@superset.apache.org
issues: notifications@superset.apache.org
pullrequests: notifications@superset.apache.org
discussions: notifications@superset.apache.org
github:
del_branch_on_merge: true
description: "Apache Superset is a Data Visualization and Data Exploration Platform"
@@ -48,6 +54,8 @@ github:
projects: true
# Enable wiki for documentation
wiki: true
# Enable discussions
discussions: true
enabled_merge_buttons:
squash: true

2
.github/CODEOWNERS vendored
View File

@@ -16,7 +16,7 @@
# Notify E2E test maintainers of changes
/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida
/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida @mistercrunch
# Notify PMC members of changes to GitHub Actions

View File

@@ -41,7 +41,7 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.1.1"
- "4.1.2"
- "4.0.2"
validations:
required: true

View File

@@ -1,4 +1,5 @@
version: 2
enable-beta-ecosystems: true
updates:
- package-ecosystem: "github-actions"
@@ -21,9 +22,14 @@ updates:
versioning-strategy: increase
# - package-ecosystem: "pip"
# NOTE: as dependabot isn't compatible with our usage of `uv pip compile` we're using
# `supersetbot` instead
# NOTE: `uv` support is in beta, more details here:
# https://github.com/dependabot/dependabot-core/pull/10040#issuecomment-2696978430
- package-ecosystem: "uv"
directory: "requirements/"
open-pull-requests-limit: 10
labels:
- uv
- dependabot
- package-ecosystem: "npm"
directory: ".github/actions"
@@ -322,6 +328,10 @@ updates:
- package-ecosystem: "npm"
directory: "/superset-frontend/packages/superset-ui-core/"
ignore:
# not until React >= 18.0.0
- dependency-name: "react-markdown"
- dependency-name: "remark-gfm"
schedule:
interval: "monthly"
labels:

5
.github/labeler.yml vendored
View File

@@ -127,6 +127,11 @@
- any-glob-to-any-file:
- 'superset/translations/es/**'
"i18n:persian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/fa/**'
############################################
# Sub-projects and monorepo packages
############################################

View File

@@ -145,6 +145,7 @@ cypress-install() {
cypress-run-all() {
local USE_DASHBOARD=$1
local APP_ROOT=$2
cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
# Start Flask and run it in background
@@ -152,7 +153,12 @@ cypress-run-all() {
# so errors can print to stderr.
local flasklog="${HOME}/flask.log"
local port=8081
export CYPRESS_BASE_URL="http://localhost:${port}"
CYPRESS_BASE_URL="http://localhost:${port}"
if [ -n "$APP_ROOT" ]; then
export SUPERSET_APP_ROOT=$APP_ROOT
CYPRESS_BASE_URL=${CYPRESS_BASE_URL}${APP_ROOT}
fi
export CYPRESS_BASE_URL
nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
local flaskProcessId=$!

View File

@@ -17,13 +17,12 @@ jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
depth: 1
fetch-depth: 1
- name: Setup Python
if: steps.check.outputs.python

View File

@@ -31,7 +31,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: './superset-embedded-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm run ci:release

View File

@@ -21,7 +21,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: './superset-embedded-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm test

View File

@@ -50,17 +50,45 @@ jobs:
echo "result=up" >> $GITHUB_OUTPUT
else
echo "result=noop" >> $GITHUB_OUTPUT
exit 1
fi
- name: Get event SHA
id: get-sha
run: |
echo "sha=${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT
if: steps.eval-label.outputs.result == 'up'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let prSha;
// If event is workflow_dispatch, use the issue_number from inputs
if (context.eventName === "workflow_dispatch") {
const prNumber = "${{ github.event.inputs.issue_number }}";
if (!prNumber) {
console.log("No PR number found.");
return;
}
// Fetch PR details using the provided issue_number
const { data: pr } = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
prSha = pr.head.sha;
} else {
// If it's not workflow_dispatch, use the PR head sha from the event
prSha = context.payload.pull_request.head.sha;
}
console.log(`PR SHA: ${prSha}`);
core.setOutput("sha", prSha);
- name: Looking for feature flags in PR description
uses: actions/github-script@v7
id: eval-feature-flags
if: steps.eval-label.outputs.result == 'up'
with:
script: |
const description = context.payload.pull_request
@@ -81,6 +109,7 @@ jobs:
- name: Reply with confirmation comment
uses: actions/github-script@v7
if: steps.eval-label.outputs.result == 'up'
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -97,7 +126,11 @@ jobs:
throw new Error("Issue number is not available.");
}
const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}). Action: **${action}**.`;
const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).` +
` Action: **${action}**.` +
` More information on [how to use or configure ephemeral environments]` +
`(https://superset.apache.org/docs/contributing/howtos/#github-ephemeral-environments)`;
await github.rest.issues.createComment({
owner: context.repo.owner,
@@ -161,8 +194,9 @@ jobs:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ needs.ephemeral-env-label.outputs.sha }}-ci
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-$PR_NUMBER-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY
ephemeral-env-up:
@@ -193,11 +227,13 @@ jobs:
- name: Check target image exists in ECR
id: check-image
continue-on-error: true
env:
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
--image-ids imageTag=pr-$PR_NUMBER-ci
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
@@ -207,7 +243,7 @@ jobs:
script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.inputs.issue_number || github.event.pull_request.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
@@ -220,7 +256,7 @@ jobs:
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-ci
- name: Update env vars in the Amazon ECS task definition
run: |
@@ -229,29 +265,30 @@ jobs:
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
id: create-service
if: steps.describe-services.outputs.active != 'true'
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
--service-name pr-$PR_NUMBER-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10
@@ -259,11 +296,11 @@ jobs:
- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks[0].attachments[0].details | map(select(.name==\"networkInterfaceId\"))[0].value')" >> $GITHUB_OUTPUT
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value')" >> $GITHUB_OUTPUT
- name: Get public IP
id: get-ip
run: |
@@ -274,20 +311,22 @@ jobs:
with:
github-token: ${{github.token}}
script: |
const issue_number = context.payload.inputs?.issue_number || context.issue.number;
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: issue_number,
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.actor }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
})
body: `@${{ github.actor }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are 'admin'/'admin'. Please allow several minutes for bootstrapping and startup.`
});
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const issue_number = context.payload.inputs?.issue_number || context.issue.number;
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: issue_number,
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'

View File

@@ -38,14 +38,39 @@ jobs:
echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}"
echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
brew install norwoodj/tap/helm-docs
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install Frontend Dependencies
run: |
cd superset-frontend
npm ci
- name: Install Docs Dependencies
run: |
cd docs
yarn install --immutable
- name: pre-commit
run: |
set +e # Don't exit immediately on failure
# Skip eslint as it requires `npm ci` and is executed in another job
export SKIP=eslint
export SKIP=eslint-frontend,type-checking-frontend
pre-commit run --all-files
if [ $? -ne 0 ] || ! git diff --quiet --exit-code; then
echo "❌ Pre-commit check failed."
PRE_COMMIT_EXIT_CODE=$?
git diff --quiet --exit-code
GIT_DIFF_EXIT_CODE=$?
if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
else
echo "❌ Git working directory is dirty."
echo "📌 This likely means that pre-commit made changes that were not committed."
echo "🔍 Modified files:"
git diff --name-only
fi
echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
exit 1

View File

@@ -24,13 +24,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Bump version and publish package(s)
runs-on: ubuntu-24.04
strategy:
matrix:
node-version: [20]
steps:
- uses: actions/checkout@v4
with:
@@ -46,11 +40,11 @@ jobs:
git fetch --prune --unshallow
git tag -d `git tag | grep -E '^trigger-'`
- name: Use Node.js ${{ matrix.node-version }}
- name: Install Node.js
if: env.HAS_TAGS
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
node-version-file: './superset-frontend/.nvmrc'
- name: Cache npm
if: env.HAS_TAGS

View File

@@ -26,7 +26,6 @@ jobs:
fail-fast: false
matrix:
browser: ["chrome"]
node: [20]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -40,7 +39,7 @@ jobs:
APPLITOOLS_BATCH_NAME: Superset Cypress
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
@@ -66,7 +65,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
uses: ./.github/actions/cached-dependencies
with:

View File

@@ -28,9 +28,6 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
strategy:
matrix:
node: [20]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -41,7 +38,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
node-version-file: './superset-frontend/.nvmrc'
- name: Install eyes-storybook dependencies
uses: ./.github/actions/cached-dependencies
with:

View File

@@ -23,7 +23,7 @@ jobs:
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset

View File

@@ -35,10 +35,10 @@ jobs:
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js 20
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version-file: './docs/.nvmrc'
- name: Setup Python
uses: ./.github/actions/setup-backend/
- uses: actions/setup-java@v4

View File

@@ -60,10 +60,10 @@ jobs:
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js 20
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version-file: './docs/.nvmrc'
- name: yarn install
run: |
yarn install --check-cache

View File

@@ -42,6 +42,7 @@ jobs:
matrix:
parallel_id: [0, 1, 2, 3, 4, 5]
browser: ["chrome"]
app_root: ["", "/app/prefix"]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -49,11 +50,11 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
# use the dashboard feature when running manually OR merging to master
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true'|| (github.ref == 'refs/heads/master' && 'true') || 'false' }}
# Only use dashboard when explicitly requested via workflow_dispatch
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || 'false' }}
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
@@ -109,7 +110,7 @@ jobs:
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
@@ -135,7 +136,7 @@ jobs:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
NODE_OPTIONS: "--max-old-space-size=4096"
with:
run: cypress-run-all ${{ env.USE_DASHBOARD }}
run: cypress-run-all ${{ env.USE_DASHBOARD }} ${{ matrix.app_root }}
- name: Upload Artifacts
uses: actions/upload-artifact@v4
if: failure()

View File

@@ -85,7 +85,7 @@ jobs:
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset

View File

@@ -25,7 +25,7 @@ jobs:
SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
@@ -94,7 +94,7 @@ jobs:
UPLOAD_FOLDER: /tmp/.superset/uploads/
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset

View File

@@ -44,7 +44,7 @@ jobs:
SUPERSET_TESTENV: true
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear --maxfail=50
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:

View File

@@ -33,7 +33,7 @@ jobs:
if: steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: '18'
node-version-file: './superset-frontend/.nvmrc'
- name: Install dependencies
if: steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

View File

@@ -32,10 +32,10 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version-file: './superset-frontend/.nvmrc'
- name: Install Dependencies
run: npm install
run: npm ci
working-directory: ./superset-frontend
- name: Run Script

2
.gitignore vendored
View File

@@ -21,6 +21,7 @@
*.swp
__pycache__
.aider*
.local
.cache
.bento*
@@ -106,6 +107,7 @@ ghostdriver.log
testCSV.csv
.terser-plugin-cache/
apache-superset-*.tar.gz*
apache_superset-*.tar.gz*
release.json
# Translation-related files

View File

@@ -20,7 +20,7 @@ repos:
hooks:
- id: auto-walrus
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.13.0
rev: v1.15.0
hooks:
- id: mypy
args: [--check-untyped-defs]
@@ -57,17 +57,30 @@ repos:
hooks:
- id: prettier
additional_dependencies:
- prettier@3.3.3
- prettier@3.5.3
args: ["--ignore-path=./superset-frontend/.prettierignore"]
files: "superset-frontend"
- repo: local
hooks:
- id: eslint
name: eslint
entry: bash -c 'cd superset-frontend && npm run eslint -- $(echo "$@" | sed "s|superset-frontend/||g")'
language: system
pass_filenames: true
files: \.(js|jsx|ts|tsx)$
- id: eslint-frontend
name: eslint (frontend)
entry: ./scripts/eslint.sh
language: system
pass_filenames: true
files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
- id: eslint-docs
name: eslint (docs)
entry: bash -c 'cd docs && FILES=$(echo "$@" | sed "s|docs/||g") && yarn eslint --fix --ext .js,.jsx,.ts,.tsx --quiet $FILES'
language: system
pass_filenames: true
files: ^docs/.*\.(js|jsx|ts|tsx)$
- id: type-checking-frontend
name: Type-Checking (Frontend)
entry: bash -c './scripts/check-type.js package=superset-frontend excludeDeclarationDir=cypress-base'
language: system
files: ^superset-frontend\/.*\.(js|jsx|ts|tsx)$
exclude: ^superset-frontend/cypress-base\/
require_serial: true
# blacklist unsafe functions like make_url (see #19526)
- repo: https://github.com/skorokithakis/blacklist-pre-commit-hook
rev: e2f070289d8eddcaec0b580d3bde29437e7c8221
@@ -79,9 +92,11 @@ repos:
hooks:
- id: helm-docs
files: helm
verbose: false
args: ["--log-level", "error"]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.0
rev: v0.9.7
hooks:
- id: ruff
args: [ --fix ]
args: [--fix]
- id: ruff-format

50
CHANGELOG/4.1.1.md Normal file
View File

@@ -0,0 +1,50 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
### 4.1 (Fri Nov 15 22:13:57 2024 +0530)
**Database Migrations**
**Features**
**Fixes**
- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)
**Others**
- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)

83
CHANGELOG/4.1.2.md Normal file
View File

@@ -0,0 +1,83 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
### 4.1.2 (Fri Mar 7 13:28:05 2025 -0800)
**Database Migrations**
- [#32538](https://github.com/apache/superset/pull/32538) fix(migrations): Handle comparator None in old time comparison migration (@Antonio-RiveroMartnez)
- [#32155](https://github.com/apache/superset/pull/32155) fix(migrations): Handle no params in time comparison migration (@Antonio-RiveroMartnez)
- [#31185](https://github.com/apache/superset/pull/31185) fix: check for column before adding in migrations (@betodealmeida)
**Features**
- [#29974](https://github.com/apache/superset/pull/29974) feat(sqllab): Adds refresh button to table metadata in SQL Lab (@Usiel)
**Fixes**
- [#32515](https://github.com/apache/superset/pull/32515) fix(sqllab): Allow clear on schema and catalog (@justinpark)
- [#32500](https://github.com/apache/superset/pull/32500) fix: dashboard, chart and dataset import validation (@dpgaspar)
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
- [#31407](https://github.com/apache/superset/pull/31407) fix: Big Number side cut fixed (@fardin-developer)
- [#31480](https://github.com/apache/superset/pull/31480) fix(sunburst): Use metric label from verbose map (@gerbermichi)
- [#31427](https://github.com/apache/superset/pull/31427) fix(tags): clean up bulk create api and schema (@villebro)
- [#31334](https://github.com/apache/superset/pull/31334) fix(docs): add custom editUrl path for intro page (@dwgrossberg)
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
- [#31323](https://github.com/apache/superset/pull/31323) fix: Use clickhouse sqlglot dialect for YDB (@vgvoleg)
- [#31198](https://github.com/apache/superset/pull/31198) fix: add more clickhouse disallowed functions on config (@dpgaspar)
- [#31194](https://github.com/apache/superset/pull/31194) fix(embedded): Hide anchor links in embedded mode (@Vitor-Avila)
- [#31960](https://github.com/apache/superset/pull/31960) fix(sqllab): Missing allowHTML props in ResultTableExtension (@justinpark)
- [#31332](https://github.com/apache/superset/pull/31332) fix: prevent multiple pvm errors on migration (@eschutho)
- [#31437](https://github.com/apache/superset/pull/31437) fix(database import): Gracefully handle error to get catalog schemas (@Vitor-Avila)
- [#31173](https://github.com/apache/superset/pull/31173) fix: cache-warmup fails (@nsivarajan)
- [#30442](https://github.com/apache/superset/pull/30442) fix(fe/src/dashboard): optional chaining for possibly nullable parent attribute in LayoutItem type (@hainenber)
- [#31639](https://github.com/apache/superset/pull/31639) fix(sqllab): unable to update saved queries (@DamianPendrak)
- [#29898](https://github.com/apache/superset/pull/29898) fix: parse pandas pivot null values (@eschutho)
- [#31414](https://github.com/apache/superset/pull/31414) fix(Pivot Table): Fix column width to respect currency config (@Vitor-Avila)
- [#31335](https://github.com/apache/superset/pull/31335) fix(histogram): axis margin padding consistent with other graphs (@tatiana-cherne)
- [#31301](https://github.com/apache/superset/pull/31301) fix(AllEntitiesTable): show Tags (@alexandrusoare)
- [#31329](https://github.com/apache/superset/pull/31329) fix: pass string to `process_template` (@betodealmeida)
- [#31341](https://github.com/apache/superset/pull/31341) fix(pinot): remove query aliases from SELECT and ORDER BY clauses in Pinot (@yuribogomolov)
- [#31308](https://github.com/apache/superset/pull/31308) fix: annotations on horizontal bar chart (@DamianPendrak)
- [#31294](https://github.com/apache/superset/pull/31294) fix(sqllab): Remove update_saved_query_exec_info to reduce lag (@justinpark)
- [#30897](https://github.com/apache/superset/pull/30897) fix: Exception handling for SQL Lab views (@michael-s-molina)
- [#31199](https://github.com/apache/superset/pull/31199) fix(Databricks): Escape catalog and schema names in pre-queries (@Vitor-Avila)
- [#31265](https://github.com/apache/superset/pull/31265) fix(trino): db session error in handle cursor (@justinpark)
- [#31024](https://github.com/apache/superset/pull/31024) fix(dataset): use sqlglot for DML check (@betodealmeida)
- [#29885](https://github.com/apache/superset/pull/29885) fix: add mutator to get_columns_description (@eschutho)
- [#30821](https://github.com/apache/superset/pull/30821) fix: x axis title disappears when editing bar chart (@DamianPendrak)
- [#31181](https://github.com/apache/superset/pull/31181) fix: Time-series Line Chart Display unnecessary total (@michael-s-molina)
- [#31163](https://github.com/apache/superset/pull/31163) fix(Dashboard): Backward compatible shared_label_colors field (@geido)
- [#31156](https://github.com/apache/superset/pull/31156) fix: check orderby (@betodealmeida)
- [#31154](https://github.com/apache/superset/pull/31154) fix: Remove unwanted commit on Trino's handle_cursor (@michael-s-molina)
- [#31151](https://github.com/apache/superset/pull/31151) fix: Revert "feat(trino): Add functionality to upload data (#29164)" (@michael-s-molina)
- [#31031](https://github.com/apache/superset/pull/31031) fix(Dashboard): Ensure shared label colors are updated (@geido)
- [#30967](https://github.com/apache/superset/pull/30967) fix(release validation): scripts now support RSA and EDDSA keys. (@rusackas)
- [#30881](https://github.com/apache/superset/pull/30881) fix(Dashboard): Native & Cross-Filters Scoping Performance (@geido)
- [#30887](https://github.com/apache/superset/pull/30887) fix(imports): import query_context for imports with charts (@lindenh)
- [#31008](https://github.com/apache/superset/pull/31008) fix(explore): verified props is not updated (@justinpark)
- [#30646](https://github.com/apache/superset/pull/30646) fix(Dashboard): Retain colors when color scheme not set (@geido)
- [#30962](https://github.com/apache/superset/pull/30962) fix(Dashboard): Exclude edit param in async screenshot (@geido)
**Others**
- [#32043](https://github.com/apache/superset/pull/32043) chore: Skip the creation of secondary perms during catalog migrations (@Vitor-Avila)
- [#30865](https://github.com/apache/superset/pull/30865) docs: Updating 4.1 Release Notes (@yousoph)

View File

@@ -18,16 +18,19 @@
######################################################################
# Node stage to deal with static asset construction
######################################################################
ARG PY_VER=3.11-slim-bookworm
ARG PY_VER=3.11.11-slim-bookworm
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
# Include translations in the final build
ARG BUILD_TRANSLATIONS="false"
######################################################################
# superset-node-ci used as a base for building frontend assets and CI
######################################################################
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node-ci
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
FROM --platform=${BUILDPLATFORM} node:20-bookworm-slim AS superset-node-ci
ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}
@@ -122,10 +125,13 @@ ENV PATH="/app/.venv/bin:${PATH}"
######################################################################
FROM python-base AS python-translation-compiler
ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
# Install Python dependencies using docker/pip-install.sh
COPY requirements/translations.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
. /app/.venv/bin/activate && /app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
COPY superset/translations/ /app/translations_mo/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
@@ -202,7 +208,7 @@ RUN rm superset/translations/*/*/*.po
COPY --from=superset-node /app/superset/translations superset/translations
COPY --from=python-translation-compiler /app/translations_mo superset/translations
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
HEALTHCHECK CMD /app/docker/docker-healthcheck.sh
CMD ["/app/docker/entrypoints/run-server.sh"]
EXPOSE ${SUPERSET_PORT}

View File

@@ -20,11 +20,11 @@ under the License.
# Superset
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/license/apache-2-0)
[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/tree/latest)
[![Build Status](https://github.com/apache/superset/workflows/Python/badge.svg)](https://github.com/apache/superset/actions)
[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset)
[![Latest Release on Github](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/releases/latest)
[![Build Status](https://github.com/apache/superset/actions/workflows/superset-python-unittest.yml/badge.svg)](https://github.com/apache/superset/actions)
[![PyPI version](https://badge.fury.io/py/apache_superset.svg)](https://badge.fury.io/py/apache_superset)
[![Coverage Status](https://codecov.io/github/apache/superset/coverage.svg?branch=master)](https://codecov.io/github/apache/superset)
[![PyPI](https://img.shields.io/pypi/pyversions/apache-superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache-superset)
[![PyPI](https://img.shields.io/pypi/pyversions/apache_superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache_superset)
[![Get on Slack](https://img.shields.io/badge/slack-join-orange.svg)](http://bit.ly/join-superset-slack)
[![Documentation](https://img.shields.io/badge/docs-apache.org-blue.svg)](https://superset.apache.org)
@@ -72,8 +72,10 @@ Superset provides:
## Screenshots & Gifs
**Video Overview**
<!-- File hosted here https://github.com/apache/superset-site/raw/lfs/superset-video-4k.mp4 -->
[superset-video-4k.webm](https://github.com/apache/superset/assets/812905/da036bc2-150c-4ee7-80f9-75e63210ff76)
[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6)
<br/>
@@ -137,6 +139,7 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
</p>
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).
@@ -145,7 +148,7 @@ Want to add support for your datastore or data engine? Read more [here](https://
## Installation and Configuration
[Extended documentation for Superset](https://superset.apache.org/docs/installation/docker-compose)
Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) guide or learn about [the options for production deployments](https://superset.apache.org/docs/installation/architecture/).
## Get Involved
@@ -154,7 +157,7 @@ Want to add support for your datastore or data engine? Read more [here](https://
and please read our [Slack Community Guidelines](https://github.com/apache/superset/blob/master/CODE_OF_CONDUCT.md#slack-community-guidelines)
- [Join our dev@superset.apache.org Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org). To join, simply send an email to [dev-subscribe@superset.apache.org](mailto:dev-subscribe@superset.apache.org)
- If you want to help troubleshoot GitHub Issues involving the numerous database drivers that Superset supports, please consider adding your name and the databases you have access to on the [Superset Database Familiarity Rolodex](https://docs.google.com/spreadsheets/d/1U1qxiLvOX0kBTUGME1AHHi6Ywel6ECF8xk_Qy-V9R8c/edit#gid=0)
- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community)
- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community)
## Contributor Guide
@@ -182,14 +185,16 @@ Understanding the Superset Points of View
- [Building New Database Connectors](https://preset.io/blog/building-database-connector/)
- [Create Your First Dashboard](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/)
- [Comprehensive Tutorial for Contributing Code to Apache Superset
](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/)
- [Resources to master Superset by Preset](https://preset.io/resources/)
- Deploying Superset
- [Official Docker image](https://hub.docker.com/r/apache/superset)
- [Helm Chart](https://github.com/apache/superset/tree/master/helm/superset)
- Recordings of Past [Superset Community Events](https://preset.io/events)
- [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/)
- [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/)
- [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/)
@@ -197,6 +202,7 @@ Understanding the Superset Points of View
- [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/)
- Visualizations
- [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/)
- [Managing and Deploying Custom Viz Plugins](https://medium.com/nmc-techblog/apache-superset-manage-custom-viz-plugins-in-production-9fde1a708e55)
- [Why Apache Superset is Betting on Apache ECharts](https://preset.io/blog/2021-4-1-why-echarts/)

View File

@@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset
# Configure environment
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8
LC_ALL=C.UTF-8
RUN apt-get update -y
@@ -30,14 +30,14 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN set -eux; \
curl -sL https://deb.nodesource.com/setup_18.x | bash -; \
apt-get install -y nodejs; \
node --version;
curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
apt-get install -y nodejs; \
node --version;
RUN if ! which npm; then apt-get install -y npm; fi
RUN mkdir -p /home/superset
@@ -50,21 +50,21 @@ ARG SUPERSET_RELEASE_RC_TARBALL
# Can fetch source from svn or copy tarball from local mounted directory
COPY $SUPERSET_RELEASE_RC_TARBALL ./
RUN tar -xvf *.tar.gz
WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend
WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend
RUN npm ci \
&& npm run build \
&& rm -rf node_modules
&& npm run build \
&& rm -rf node_modules
WORKDIR /home/superset/apache-superset-$VERSION
WORKDIR /home/superset/apache_superset-$VERSION
RUN pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
RUN flask fab babel-compile --target superset/translations
ENV PATH=/home/superset/superset/bin:$PATH \
PYTHONPATH=/home/superset/superset/:$PYTHONPATH \
SUPERSET_TESTENV=true
PYTHONPATH=/home/superset/superset/ \
SUPERSET_TESTENV=true
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset
# Configure environment
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8
LC_ALL=C.UTF-8
RUN apt-get update -y
@@ -29,13 +29,16 @@ RUN apt-get install -y apt-transport-https apt-utils
# Install superset dependencies
# https://superset.apache.org/docs/installation/installing-superset-from-scratch
RUN apt-get install -y build-essential libssl-dev \
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium
RUN apt-get install -y subversion build-essential libssl-dev \
libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd
# Install nodejs for custom build
# https://nodejs.org/en/download/package-manager/
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
RUN set -eux; \
curl -sL https://deb.nodesource.com/setup_20.x | bash -; \
apt-get install -y nodejs; \
node --version;
RUN if ! which npm; then apt-get install -y npm; fi
RUN mkdir -p /home/superset
RUN chown superset /home/superset
@@ -46,22 +49,20 @@ ARG VERSION
# Can fetch source from svn or copy tarball from local mounted directory
RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./
RUN tar -xvf *.tar.gz
WORKDIR apache-superset-$VERSION
WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend
RUN cd superset-frontend \
&& npm ci \
&& npm run build \
&& rm -rf node_modules
RUN npm ci \
&& npm run build \
&& rm -rf node_modules
WORKDIR /home/superset/apache-superset-$VERSION
WORKDIR /home/superset/apache_superset-$VERSION
RUN pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
&& pip install -r requirements/base.txt \
&& pip install --no-cache-dir .
RUN flask fab babel-compile --target superset/translations
ENV PATH=/home/superset/superset/bin:$PATH \
PYTHONPATH=/home/superset/superset/:$PYTHONPATH
PYTHONPATH=/home/superset/superset/
COPY from_tarball_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -123,10 +123,10 @@ SUPERSET_RC=1
SUPERSET_GITHUB_BRANCH=1.5
SUPERSET_PGP_FULLNAME=villebro@apache.org
SUPERSET_VERSION_RC=1.5.1rc1
SUPERSET_RELEASE=apache-superset-1.5.1
SUPERSET_RELEASE_RC=apache-superset-1.5.1rc1
SUPERSET_RELEASE_TARBALL=apache-superset-1.5.1-source.tar.gz
SUPERSET_RELEASE_RC_TARBALL=apache-superset-1.5.1rc1-source.tar.gz
SUPERSET_RELEASE=apache_superset-1.5.1
SUPERSET_RELEASE_RC=apache_superset-1.5.1rc1
SUPERSET_RELEASE_TARBALL=apache_superset-1.5.1-source.tar.gz
SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz
SUPERSET_TMP_ASF_SITE_PATH=/tmp/incubator-superset-site-1.5.1
-------------------------------
```
@@ -380,7 +380,7 @@ Official instructions:
https://www.apache.org/info/verification.html
We now have a handy script for anyone validating a release to use. The core of it is in this very folder, `verify_release.py`. Just make sure you have all three release files in the same directory (`{some version}.tar.gz`, `{some version}.tar.gz.asc` and `{some version}.tar.gz.sha512`). Then you can pass this script the path to the `.gz` file like so:
`python verify_release.py ~/path/to/apache-superset-{version/candidate}-source.tar.gz`
`python verify_release.py ~/path/to/apache_superset-{version/candidate}-source.tar.gz`
If all goes well, you will see this result in your terminal:
@@ -452,10 +452,13 @@ cd ../
# Compile translations for the backend
./scripts/translations/generate_po_files.sh
./scripts/translations/generate_mo_files.sh
# update build version number
sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py
# build the python distribution
python -m build
python setup.py sdist
```
Publish to PyPI
@@ -467,7 +470,7 @@ while requesting access to push packages.
```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
twine upload dist/apache_superset-${SUPERSET_VERSION}.tar.gz
```
Set your username to `__token__`

View File

@@ -232,8 +232,7 @@ class GitChangeLog:
for log in self._logs:
yield {
"pr_number": log.pr_number,
"pr_link": f"https://github.com/{SUPERSET_REPO}/pull/"
f"{log.pr_number}",
"pr_link": f"https://github.com/{SUPERSET_REPO}/pull/{log.pr_number}",
"message": log.message,
"time": log.time,
"author": log.author,
@@ -323,9 +322,9 @@ class BaseParameters:
def print_title(message: str) -> None:
print(f"{50*'-'}")
print(f"{50 * '-'}")
print(message)
print(f"{50*'-'}")
print(f"{50 * '-'}")
@click.group()
@@ -349,14 +348,14 @@ def compare(base_parameters: BaseParameters) -> None:
previous_logs = base_parameters.previous_logs
current_logs = base_parameters.current_logs
print_title(
f"Pull requests from " f"{current_logs.git_ref} not in {previous_logs.git_ref}"
f"Pull requests from {current_logs.git_ref} not in {previous_logs.git_ref}"
)
previous_diff_logs = previous_logs.diff(current_logs)
for diff_log in previous_diff_logs:
print(f"{diff_log}")
print_title(
f"Pull requests from " f"{previous_logs.git_ref} not in {current_logs.git_ref}"
f"Pull requests from {previous_logs.git_ref} not in {current_logs.git_ref}"
)
current_diff_logs = current_logs.diff(previous_logs)
for diff_log in current_diff_logs:

View File

@@ -31,7 +31,7 @@ The official source release:
https://downloads.apache.org/{{ project_module }}/{{ version }}
The PyPI package:
https://pypi.org/project/apache-superset/{{ version }}
https://pypi.org/project/apache_superset/{{ version }}
The CHANGELOG for the release:
https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG/{{ version }}.md

View File

@@ -32,7 +32,7 @@ else
SUPERSET_VERSION="${1}"
SUPERSET_RC="${2}"
SUPERSET_PGP_FULLNAME="${3}"
SUPERSET_RELEASE_RC_TARBALL="apache-superset-${SUPERSET_VERSION_RC}-source.tar.gz"
SUPERSET_RELEASE_RC_TARBALL="apache_superset-${SUPERSET_VERSION_RC}-source.tar.gz"
fi
SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${SUPERSET_RC}"

View File

@@ -22,7 +22,7 @@ if [ -z "${SUPERSET_VERSION_RC}" ] || [ -z "${SUPERSET_SVN_DEV_PATH}" ] || [ -z
exit 1
fi
SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}"

View File

@@ -50,8 +50,8 @@ else
export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}"
export SUPERSET_PGP_FULLNAME="${2}"
export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}"
export SUPERSET_RELEASE=apache-superset-"${SUPERSET_VERSION}"
export SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
export SUPERSET_RELEASE=apache_superset-"${SUPERSET_VERSION}"
export SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz
export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}"

View File

@@ -27,7 +27,7 @@ if [ -z "${SUPERSET_SVN_DEV_PATH}" ]; then
fi
if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then
SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}"
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz
SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL}
SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz

View File

@@ -38,7 +38,7 @@ get_pip_command() {
PYTHON=$(get_python_command)
PIP=$(get_pip_command)
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache-superset-x.x.xrcx-source.tar.gz`
# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache_superset-x.x.xrcx-source.tar.gz`
RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz"
# Install dependencies from requirements.txt if the file exists

View File

@@ -49,7 +49,6 @@ These features are **finished** but currently being tested. They are usable, but
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
- ESTIMATE_QUERY_COST
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
- HORIZONTAL_FILTER_BAR
- IMPERSONATE_WITH_EMAIL_PREFIX
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
- RLS_IN_SQLLAB

View File

@@ -25,8 +25,8 @@ all you have to do is file a simple PR [like this one](https://github.com/apache
the categorization is inaccurate, please file a PR with your correction as well.
Join our growing community!
### Sharing Economy
- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
@@ -36,6 +36,7 @@ Join our growing community!
- [Ontruck](https://www.ontruck.com/)
### Financial Services
- [Aktia Bank plc](https://www.aktia.com)
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
@@ -51,9 +52,11 @@ Join our growing community!
- [Cover Genius](https://covergenius.com/)
### Gaming
- [Popoko VM Games Studio](https://popoko.live)
### E-Commerce
- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
@@ -79,12 +82,14 @@ Join our growing community!
- [Zepto](https://www.zeptonow.com/) [@gwthm-in]
### Enterprise Technology
- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Canonical](https://canonical.com)
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
@@ -96,6 +101,7 @@ Join our growing community!
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Formbricks](https://formbricks.com)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [Hydrolix](https://www.hydrolix.io/)
@@ -109,6 +115,7 @@ Join our growing community!
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Oxylabs](https://oxylabs.io/) [@rytis-ulys]
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)
@@ -116,8 +123,11 @@ Join our growing community!
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
- [Reward Gateway](https://www.rewardgateway.com)
- [RIADVICE](https://riadvice.tn) [@riadvice]
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
- [shipmnts](https://shipmnts.com)
- [Showmax](https://showmax.com) [@bobek]
- [SingleStore](https://www.singlestore.com/)
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
@@ -128,9 +138,11 @@ Join our growing community!
- [Virtuoso QA](https://www.virtuosoqa.com)
- [Whale](https://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [WinWin Network马上赢](https://brandct.cn/) [@wenbinye]
- [Zeta](https://www.zeta.tech/) [@shaikidris]
### Media & Entertainment
- [6play](https://www.6play.fr) [@CoryChaplin]
- [bilibili](https://www.bilibili.com) [@Moinheart]
- [BurdaForward](https://www.burda-forward.de/en/)
@@ -143,6 +155,7 @@ Join our growing community!
- [Zaihang](https://www.zaih.com/)
### Education
- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
- [Brilliant.org](https://brilliant.org/)
- [Open edX](https://openedx.org/)
@@ -154,6 +167,7 @@ Join our growing community!
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]
### Energy
- [Airboxlab](https://foobot.io) [@antoine-galataud]
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
@@ -161,6 +175,7 @@ Join our growing community!
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]
### Healthcare
- [Amino](https://amino.com) [@shkr]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/) [@alandao2021]
@@ -173,29 +188,36 @@ Join our growing community!
- [2070Health](https://2070health.com/)
### HR / Staffing
- [Swile](https://www.swile.co/) [@PaoloTerzi]
- [Symmetrics](https://www.symmetrics.fyi)
- [bluquist](https://bluquist.com/)
### Government / Non-Profit
### Government
- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)
### Travel
- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
- [HomeToGo](https://hometogo.com/) [@pedromartinsteenstrup]
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]
### Others
- [10Web](https://10web.io/)
- [AI inside](https://inside.ai/en/)
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
- [Flowbird](https://flowbird.com) [@EmmanuelCbd]
- [GEOTAB](https://www.geotab.com) [@JZ6]
- [Grassroot](https://www.grassrootinstitute.org/)
- [Increff](https://www.increff.com/) [@ishansinghania]
- [komoot](https://www.komoot.com/) [@christophlingg]
- [Let's Roam](https://www.letsroam.com/)
- [Machrent SA](https://www.machrent.com/)
- [Onebeat](https://1beat.com/) [@GuyAttia]
- [X](https://x.com/)
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]

View File

@@ -43,8 +43,8 @@ under the License.
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
@@ -65,7 +65,6 @@ under the License.
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can function names on Database |:heavy_check_mark:|O|O|O|
| can csv upload on Database |:heavy_check_mark:|O|O|O|
| can excel upload on Database |:heavy_check_mark:|O|O|O|
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -76,7 +75,6 @@ under the License.
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas access for csv upload on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
@@ -118,8 +116,6 @@ under the License.
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Upload a CSV |:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Upload Excel |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on SQL Lab |:heavy_check_mark:|O|O|:heavy_check_mark:|
@@ -129,13 +125,6 @@ under the License.
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
| all query access on all_query_access |:heavy_check_mark:|O|O|O|
| can edit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can list on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can show on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can userinfo on UserOAuthModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can add on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can delete on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| userinfoedit on UserOAuthModelView |:heavy_check_mark:|O|O|O|
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -192,7 +181,6 @@ under the License.
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| menu access on Upload a Columnar file |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

View File

@@ -23,20 +23,21 @@ This file documents any backwards-incompatible changes in Superset and
assists people when migrating to a new version.
## Next
- [33116](https://github.com/apache/superset/pull/33116) In Echarts Series charts (e.g. Line, Area, Bar, etc.) charts, the `x_axis_sort_series` and `x_axis_sort_series_ascending` form data items have been renamed with `x_axis_sort` and `x_axis_sort_asc`.
There's a migration added that can potentially affect a significant number of existing charts.
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
- [32000](https://github.com/apache/superset/pull/32000) Removes CSV_UPLOAD_MAX_SIZE config, use your web server to control file upload size.
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: /api/v1/database/<id>/<file type>_upload and /api/v1/database/<file type>_metadata, in favour of new one (Details on the PR). And simplifies permissions.
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of new one (Details on the PR). And simplifies permissions.
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0)
- [31794](https://github.com/apache/superset/pull/31794) Removed the previously deprecated `DASHBOARD_CROSS_FILTERS` feature flag
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration item to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts with the exception of the Event Flow and Sankey Loop charts which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes firefox binaries, only Chromium to reduce bloat/docker-build-time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. Only affects development workflows positively (not the release itself). It should cover the most important rules and be much faster, but some linting rules that were enforced before may not be enforced in exactly the same way as before.
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
- [31413](https://github.com/apache/superset/pull/31413) Enable the DATE_FORMAT_IN_EMAIL_SUBJECT feature flag to allow users to specify a date format for the email subject, which will then be replaced with the actual date.
- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
- [31503](https://github.com/apache/superset/pull/31503) Deprecating python 3.9.x support, 3.11 is now the recommended version and 3.10 is still supported over the Superset 5.0 lifecycle.
@@ -45,10 +46,16 @@ assists people when migrating to a new version.
- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
- [30284](https://github.com/apache/superset/pull/30284) Deprecated GLOBAL_ASYNC_QUERIES_REDIS_CONFIG in favor of the new GLOBAL_ASYNC_QUERIES_CACHE_BACKEND configuration. To leverage Redis Sentinel, set CACHE_TYPE to RedisSentinelCache, or use RedisCache for standalone Redis
- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python environment. Most docker-based deployments will be affected, whether you derive from one of the published images or have a custom bootstrap script that installs python libraries (drivers)
- [32432](https://github.com/apache/superset/pull/32432) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
### Potential Downtime
## 4.1.2
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
## 4.1.0
- [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your

View File

@@ -41,7 +41,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:15
image: postgres:16
container_name: superset_db
restart: unless-stopped
volumes:

View File

@@ -46,7 +46,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:15
image: postgres:16
container_name: superset_db
restart: unless-stopped
volumes:

View File

@@ -29,7 +29,6 @@ x-superset-volumes: &superset-volumes
- ./superset-frontend:/app/superset-frontend
- superset_home:/app/superset_home
- ./tests:/app/tests
x-common-build: &common-build
context: .
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
@@ -43,6 +42,11 @@ x-common-build: &common-build
services:
nginx:
env_file:
- path: docker/.env # default
required: true
- path: docker/.env-local # optional override
required: false
image: nginx:latest
container_name: superset_nginx
restart: unless-stopped
@@ -52,6 +56,8 @@ services:
- "host.docker.internal:host-gateway"
volumes:
- ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- ./docker/nginx/templates:/etc/nginx/templates:ro
redis:
image: redis:7
container_name: superset_cache
@@ -67,7 +73,7 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:15
image: postgres:16
container_name: superset_db
restart: unless-stopped
ports:
@@ -176,7 +182,7 @@ services:
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
# configuring the dev-server to use the host.docker.internal to connect to the backend
superset: "http://host.docker.internal:8088"
superset: "http://superset:8088"
ports:
- "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
container_name: superset_node

View File

@@ -54,6 +54,7 @@ REDIS_HOST=redis
REDIS_PORT=6379
FLASK_DEBUG=true
SUPERSET_APP_ROOT="/"
SUPERSET_ENV=development
SUPERSET_LOAD_EXAMPLES=yes
CYPRESS_CONFIG=false
@@ -62,7 +63,6 @@ MAPBOX_API_KEY=''
# Make sure you set this to a unique secure random value on production
SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
ENABLE_PLAYWRIGHT=false
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true

View File

@@ -50,7 +50,11 @@ fi
#
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
if command -v uv > /dev/null 2>&1; then
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
fi
else
echo "Skipping local overrides"
fi

19
docker/docker-healthcheck.sh Executable file
View File

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Container health check: probe Superset's /health endpoint and exit
# non-zero when it is unreachable or returns an HTTP error (`curl -f`
# fails on 4xx/5xx), so the orchestrator can mark the service unhealthy.
#
# ${SUPERSET_APP_ROOT/\//} removes the first "/" from the configured app
# root (e.g. "/analytics" -> "analytics") before splicing it between the
# host and "/health" in the URL.
curl -f "http://localhost:${SUPERSET_PORT}/${SUPERSET_APP_ROOT/\//}/health" || exit 1

View File

@@ -90,38 +90,5 @@ http {
client_max_body_size 10m;
upstream superset_app {
server host.docker.internal:8088;
keepalive 100;
}
upstream superset_websocket {
server host.docker.internal:8080;
keepalive 100;
}
server {
listen 80 default_server;
server_name _;
location /ws {
proxy_pass http://superset_websocket;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header Host $host;
}
location / {
proxy_pass http://superset_app;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_http_version 1.1;
port_in_redirect off;
proxy_connect_timeout 300;
}
}
include /etc/nginx/conf.d/superset.conf;
}

View File

@@ -0,0 +1,57 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Development reverse-proxy: routes browser traffic to the Superset app,
# the websocket server, and the webpack dev server (superset-node).
#
# NOTE(review): the ${SUPERSET_APP_ROOT} placeholders appear to rely on
# nginx's env-var template substitution (envsubst on files under
# /etc/nginx/templates) — confirm this file is mounted there.
upstream superset_app {
server host.docker.internal:8088;
keepalive 100;
}
# Superset's async-events websocket server.
upstream superset_websocket {
server host.docker.internal:8080;
keepalive 100;
}
server {
listen 80 default_server;
server_name _;
# Websocket traffic: keep HTTP/1.1 and forward the Upgrade handshake.
location /ws {
proxy_pass http://superset_websocket;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
proxy_set_header Host $host;
}
# Static assets come from the webpack dev server, not the Flask app.
location ${SUPERSET_APP_ROOT}/static {
proxy_pass http://host.docker.internal:9000; # Proxy to superset-node
proxy_http_version 1.1;
proxy_set_header Host $host;
}
# All remaining requests go to the Superset application.
location ${SUPERSET_APP_ROOT} {
proxy_pass http://superset_app;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_http_version 1.1;
port_in_redirect off;
proxy_connect_timeout 300;
}
}

View File

@@ -71,6 +71,7 @@ CACHE_CONFIG = {
"CACHE_REDIS_DB": REDIS_RESULTS_DB,
}
DATA_CACHE_CONFIG = CACHE_CONFIG
THUMBNAIL_CACHE_CONFIG = CACHE_CONFIG
class CeleryConfig:
@@ -100,9 +101,11 @@ CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ # noqa: E501
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
WEBDRIVER_BASEURL_USER_FRIENDLY = (
f"http://localhost:8888/{os.environ.get('SUPERSET_APP_ROOT', '/')}/"
)
SQLLAB_CTAS_NO_LIMIT = True
log_level_text = os.getenv("SUPERSET_LOG_LEVEL", "INFO")
@@ -129,7 +132,7 @@ try:
from superset_config_docker import * # noqa
logger.info(
f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]"
f"Loaded your Docker configuration at [{superset_config_docker.__file__}]"
)
except ImportError:
logger.info("Using default Docker config...")

View File

@@ -1,3 +1,4 @@
/* eslint-env node */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,41 +17,31 @@
* specific language governing permissions and limitations
* under the License.
*/
import Icons from 'src/components/Icons';
import DropdownSelectableIcon, { DropDownSelectableProps } from '.';
export default {
title: 'DropdownSelectableIcon',
component: DropdownSelectableIcon,
};
export const Component = (props: DropDownSelectableProps) => (
<DropdownSelectableIcon
{...props}
icon={<Icons.Gear name="gear" iconColor="#000000" />}
/>
);
Component.args = {
info: 'Info go here',
selectedKeys: ['vertical'],
menuItems: [
{
key: 'vertical',
label: 'Vertical',
},
{
key: 'horizontal',
label: 'Horizontal',
},
module.exports = {
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'plugin:react/recommended',
'plugin:prettier/recommended',
],
};
Component.argTypes = {
onSelect: {
action: 'onSelect',
table: {
disable: true,
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaFeatures: {
jsx: true,
},
ecmaVersion: 2020,
sourceType: 'module',
},
plugins: ['@typescript-eslint', 'react', 'prettier'],
rules: {
'react/react-in-jsx-scope': 'off',
'react/prop-types': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
},
settings: {
react: {
version: 'detect',
},
},
ignorePatterns: ['build/**/*', '.docusaurus/**/*', 'node_modules/**/*'],
};

View File

@@ -1 +1 @@
v20.16.0
v20.18.3

View File

@@ -18,6 +18,6 @@ under the License.
-->
This is the public documentation site for Superset, built using
[Docusaurus 2](https://docusaurus.io/). See
[Docusaurus 3](https://docusaurus.io/). See
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
contributing to documentation.

View File

@@ -1,3 +1,4 @@
/* eslint-env node */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file

View File

@@ -86,6 +86,7 @@
"Israel",
"Italy",
"Italy (regions)",
"Ivory Coast",
"Japan",
"Jordan",
"Kazakhstan",
@@ -143,6 +144,7 @@
"Poland",
"Portugal",
"Qatar",
"Republic Of Serbia",
"Romania",
"Russia",
"Rwanda",

View File

@@ -4,7 +4,6 @@ hide_title: true
sidebar_position: 10
---
import { Buffer } from 'buffer/index.js';
import SwaggerUI from 'swagger-ui-react';
import openapi from '/resources/openapi.json';
import 'swagger-ui-react/swagger-ui.css';

View File

@@ -92,6 +92,7 @@ You can find documentation about each field in the default `config.py` in the Gi
You need to replace default values with your custom Redis, Slack and/or SMTP config.
Superset uses Celery beat and Celery worker(s) to send alerts and reports.
- The beat is the scheduler that tells the worker when to perform its tasks. This schedule is defined when you create the alert or report.
- The worker will process the tasks that need to be performed when an alert or report is fired.
@@ -143,7 +144,7 @@ SLACK_API_TOKEN = "xoxb-"
SMTP_HOST = "smtp.sendgrid.net" # change to your host
SMTP_PORT = 2525 # your port, e.g. 587
SMTP_STARTTLS = True
SMTP_SSL_SERVER_AUTH = True # If your using an SMTP server with a valid certificate
SMTP_SSL_SERVER_AUTH = True # If you're using an SMTP server with a valid certificate
SMTP_SSL = False
SMTP_USER = "your_user" # use the empty string "" if using an unauthenticated SMTP server
SMTP_PASSWORD = "your_password" # use the empty string "" if using an unauthenticated SMTP server
@@ -187,7 +188,6 @@ ALERT_REPORTS_EXECUTORS = [FixedExecutor("admin")]
Please refer to `ExecutorType` in the codebase for other executor types.
**Important notes**
- Be mindful of the concurrency setting for celery (using `-c 4`). Selenium/webdriver instances can
@@ -199,7 +199,6 @@ Please refer to `ExecutorType` in the codebase for other executor types.
- Adjust `WEBDRIVER_BASEURL` in your configuration file if celery workers can't access Superset via
its default value of `http://0.0.0.0:8080/`.
It's also possible to specify a minimum interval between each report's execution through the config file:
``` python
@@ -305,6 +304,7 @@ One symptom of an invalid connection to an email server is receiving an error of
Confirm via testing that your outbound email configuration is correct. Here is the simplest test, for an un-authenticated email SMTP email service running on port 25. If you are sending over SSL, for instance, study how [Superset's codebase sends emails](https://github.com/apache/superset/blob/master/superset/utils/core.py#L818) and then test with those commands and arguments.
Start Python in your worker environment, replace all example values, and run:
```python
import smtplib
from email.mime.multipart import MIMEMultipart
@@ -326,6 +326,7 @@ mailserver.quit()
This should send an email.
Possible fixes:
- Some cloud hosts disable outgoing unauthenticated SMTP email to prevent spam. For instance, [Azure blocks port 25 by default on some machines](https://learn.microsoft.com/en-us/azure/virtual-network/troubleshoot-outbound-smtp-connectivity). Enable that port or use another sending method.
- Use another set of SMTP credentials that you verify works in this setup.

View File

@@ -42,13 +42,13 @@ CELERY_CONFIG = CeleryConfig
To start a Celery worker to leverage the configuration, run the following command:
```
```bash
celery --app=superset.tasks.celery_app:app worker --pool=prefork -O fair -c 4
```
To start a job which schedules periodic background jobs, run the following command:
```
```bash
celery --app=superset.tasks.celery_app:app beat
```
@@ -93,12 +93,12 @@ issues arise. Please clear your existing results cache store when upgrading an e
Flower is a web based tool for monitoring the Celery cluster which you can install from pip:
```python
```bash
pip install flower
```
You can run flower using:
```
```bash
celery --app=superset.tasks.celery_app:app flower
```

View File

@@ -17,6 +17,7 @@ Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
The following cache configurations can be customized in this way:
- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
- Explore chart form data (required): `EXPLORE_FORM_DATA_CACHE_CONFIG`
- Metadata cache (optional): `CACHE_CONFIG`
@@ -81,7 +82,7 @@ See [Async Queries via Celery](/docs/configuration/async-queries-celery) for det
## Caching Thumbnails
This is an optional feature that can be turned on by activating its [feature flag](/docs/configuration/configuring-superset#feature-flags) on config:
This is an optional feature that can be turned on by activating its [feature flag](/docs/configuration/configuring-superset#feature-flags) on config:
```
FEATURE_FLAGS = {
@@ -99,7 +100,6 @@ from superset.tasks.types import FixedExecutor
THUMBNAIL_EXECUTORS = [FixedExecutor("admin")]
```
For this feature you will need a cache system and celery workers. All thumbnails are stored on cache
and are processed asynchronously by the workers.

View File

@@ -117,7 +117,7 @@ Your deployment must use a complex, unique key.
### Rotating to a newer SECRET_KEY
If you wish to change your existing SECRET_KEY, add the existing SECRET_KEY to your `superset_config.py` file as
`PREVIOUS_SECRET_KEY = `and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
`PREVIOUS_SECRET_KEY =`and provide your new key as `SECRET_KEY =`. You can find your current SECRET_KEY with these
commands - if running Superset with Docker, execute from within the Superset application container:
```python
@@ -141,10 +141,10 @@ database engine on a separate host or container.
Superset supports the following database engines/versions:
| Database Engine | Supported Versions |
| ----------------------------------------- | ---------------------------------- |
| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X |
| [MySQL](https://www.mysql.com/) | 5.7, 8.X |
| Database Engine | Supported Versions |
| ----------------------------------------- | ---------------------------------------- |
| [PostgreSQL](https://www.postgresql.org/) | 10.X, 11.X, 12.X, 13.X, 14.X, 15.X, 16.X |
| [MySQL](https://www.mysql.com/) | 5.7, 8.X |
Use the following database drivers and connection strings:
@@ -215,6 +215,45 @@ In case the reverse proxy is used for providing SSL encryption, an explicit defi
RequestHeader set X-Forwarded-Proto "https"
```
## Configuring the application root
*Please be advised that this feature is in BETA.*
Superset supports running the application under a non-root path. The root path
prefix can be specified in one of two ways:
- Setting the `SUPERSET_APP_ROOT` environment variable to the desired prefix.
- Customizing the [Flask entrypoint](https://github.com/apache/superset/blob/master/superset/app.py#L29)
by passing the `superset_app_root` variable.
Note, the prefix should start with a `/`.
### Customizing the Flask entrypoint
To configure a prefix, e.g `/analytics`, pass the `superset_app_root` argument to
`create_app` when calling flask run either through the `FLASK_APP`
environment variable:
```sh
FLASK_APP="superset:create_app(superset_app_root='/analytics')"
```
or as part of the `--app` argument to `flask run`:
```sh
flask --app "superset.app:create_app(superset_app_root='/analytics')"
```
### Docker builds
The [docker compose](/docs/installation/docker-compose#configuring-further) developer
configuration includes an additional environmental variable,
[`SUPERSET_APP_ROOT`](https://github.com/apache/superset/blob/master/docker/.env),
to simplify the process of setting up a non-default root path across the services.
In `docker/.env-local` set `SUPERSET_APP_ROOT` to the desired prefix and then bring the
services up with `docker compose up --detach`.
## Custom OAuth2 Configuration
Superset is built on Flask-AppBuilder (FAB), which supports many providers out of the box
@@ -283,7 +322,7 @@ class CustomSsoSecurityManager(SupersetSecurityManager):
...
```
This file must be located at the same directory than `superset_config.py` with the name
This file must be located in the same directory as `superset_config.py` with the name
`custom_sso_security_manager.py`. Finally, add the following 2 lines to `superset_config.py`:
```
@@ -300,6 +339,7 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
- If an OAuth2 authorization server supports OpenID Connect 1.0, you could configure its configuration
document URL only without providing `api_base_url`, `access_token_url`, `authorize_url` and other
required options like user info endpoint, jwks uri etc. For instance:
```python
OAUTH_PROVIDERS = [
{ 'name':'egaSSO',
@@ -313,12 +353,15 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
}
]
```
### Keycloak-Specific Configuration using Flask-OIDC
If you are using Keycloak as OpenID Connect 1.0 Provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case using [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is a viable option.
Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was succesfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
The following code defines a new security manager. Add it to a new file named `keycloak_security_manager.py`, placed in the same directory as your `superset_config.py` file.
```python
from flask_appbuilder.security.manager import AUTH_OID
from superset.security import SupersetSecurityManager
@@ -373,7 +416,9 @@ class AuthOIDCView(AuthOIDView):
return redirect(
oidc.client_secrets.get('issuer') + '/protocol/openid-connect/logout?redirect_uri=' + quote(redirect_url))
```
Then add to your `superset_config.py` file:
```python
from keycloak_security_manager import OIDCSecurityManager
from flask_appbuilder.security.manager import AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH
@@ -393,7 +438,9 @@ AUTH_USER_REGISTRATION = True
# The default user self registration role
AUTH_USER_REGISTRATION_ROLE = 'Public'
```
Store your client-specific OpenID information in a file called `client_secret.json`. Create this file in the same directory as `superset_config.py`:
```json
{
"<myOpenIDProvider>": {
@@ -410,6 +457,7 @@ Store your client-specific OpenID information in a file called `client_secret.js
}
}
```
## LDAP Authentication
FAB supports authenticating user credentials against an LDAP server.
@@ -432,6 +480,7 @@ AUTH_ROLES_MAPPING = {
"superset_admins": ["Admin"],
}
```
### Mapping LDAP groups to Superset roles
The following `AUTH_ROLES_MAPPING` dictionary would map the LDAP DN "cn=superset_users,ou=groups,dc=example,dc=com" to the Superset roles "Gamma" as well as "Alpha", and the LDAP DN "cn=superset_admins,ou=groups,dc=example,dc=com" to the Superset role "Admin".
@@ -442,6 +491,7 @@ AUTH_ROLES_MAPPING = {
"cn=superset_admins,ou=groups,dc=example,dc=com": ["Admin"],
}
```
Note: This requires `AUTH_LDAP_SEARCH` to be set. For more details, please see the [FAB Security documentation](https://flask-appbuilder.readthedocs.io/en/latest/security.html).
### Syncing roles at login
@@ -475,7 +525,7 @@ def FLASK_APP_MUTATOR(app: Flask) -> None:
To support a diverse set of users, Superset has some features that are not enabled by default. For
example, some users have stronger security restrictions, while some others may not. So Superset
allow users to enable or disable some features by config. For feature owners, you can add optional
allows users to enable or disable some features by config. For feature owners, you can add optional
functionalities in Superset that will only affect a subset of users.
You can enable or disable features with flag from `superset_config.py`:

View File

@@ -31,18 +31,17 @@ install new database drivers into your Superset configuration.
### Supported Databases and Dependencies
Some of the recommended packages are shown below. Please refer to
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml) for the versions that
are compatible with Superset.
| <div style={{width: '150px'}}>Database</div> | PyPI package | Connection String |
| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ |
| [AWS Athena](/docs/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&... ` |
| [AWS Athena](/docs/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
| [AWS DynamoDB](/docs/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
| [AWS Redshift](/docs/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | ` redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [AWS Redshift](/docs/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [Apache Doris](/docs/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Apache Drill](/docs/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill:// For JDBC drill+jdbc://` |
| [Apache Drill](/docs/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
| [Apache Druid](/docs/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
| [Apache Hive](/docs/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Apache Impala](/docs/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
@@ -68,22 +67,24 @@ are compatible with Superset.
| [IBM Netezza Performance Server](/docs/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [MySQL](/docs/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [OceanBase](/docs/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://` |
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
| [Parseable](/docs/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://` |
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
| [Rockset](/docs/configuration/databases#rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
| [SQL Server](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://` |
| [SQL Server](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
| [TDengine](/docs/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
| [Teradata](/docs/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
| [TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
---
Note that many other databases are supported, the main criteria being the existence of a functional
@@ -184,7 +185,6 @@ purposes of isolating the problem.
Repeat this process for each type of database you want Superset to connect to.
### Database-specific Instructions
#### Ascend.io
@@ -210,14 +210,12 @@ You'll need the following setting values to form the connection string:
- **Catalog**: Catalog Name
- **Database**: Database Name
Here's what the connection string looks like:
```
doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```
#### AWS Athena
##### PyAthenaJDBC
@@ -247,6 +245,7 @@ awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name
```
The PyAthena library also allows you to assume a specific IAM role, which you can define by adding the following parameters in Superset's Athena database connection UI under ADVANCED --> Other --> ENGINE PARAMETERS.
```json
{
"connect_args": {
@@ -269,7 +268,6 @@ dynamodb://{aws_access_key_id}:{aws_secret_access_key}@dynamodb.{region_name}.am
To get more documentation, please visit: [PyDynamoDB WIKI](https://github.com/passren/PyDynamoDB/wiki/5.-Superset).
#### AWS Redshift
The [sqlalchemy-redshift](https://pypi.org/project/sqlalchemy-redshift/) library is the recommended
@@ -285,7 +283,6 @@ You'll need to set the following values to form the connection string:
- **Database Name**: Database Name
- **Port**: default 5439
##### psycopg2
Here's what the SQLALCHEMY URI looks like:
@@ -294,7 +291,6 @@ Here's what the SQLALCHEMY URI looks like:
redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>
```
##### redshift_connector
Here's what the SQLALCHEMY URI looks like:
@@ -303,8 +299,7 @@ Here's what the SQLALCHEMY URI looks like:
redshift+redshift_connector://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>
```
###### Using IAM-based credentials with Redshift cluster:
###### Using IAM-based credentials with Redshift cluster
[Amazon redshift cluster](https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html) also supports generating temporary IAM-based database user credentials.
@@ -315,10 +310,10 @@ You have to define the following arguments in Superset's redshift database conne
```
{"connect_args":{"iam":true,"database":"<database>","cluster_identifier":"<cluster_identifier>","db_user":"<db_user>"}}
```
and SQLALCHEMY URI should be set to `redshift+redshift_connector://`
###### Using IAM-based credentials with Redshift serverless:
###### Using IAM-based credentials with Redshift serverless
[Redshift serverless](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-whatis.html) supports connection using IAM roles.
@@ -330,8 +325,6 @@ You have to define the following arguments in Superset's redshift database conne
{"connect_args":{"iam":true,"is_serverless":true,"serverless_acct_id":"<aws account number>","serverless_work_group":"<redshift work group>","database":"<database>","user":"IAMR:<superset iam role name>"}}
```
#### ClickHouse
To use ClickHouse with Superset, you will need to install the `clickhouse-connect` Python library:
@@ -364,8 +357,6 @@ uses the default user without a password (and doesn't encrypt the connection):
clickhousedb://localhost/default
```
#### CockroachDB
The recommended connector library for CockroachDB is
@@ -377,13 +368,12 @@ The expected connection string is formatted as follows:
cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable
```
#### Couchbase
Couchbase's Superset connection is designed to support two services: Couchbase Analytics and Couchbase Columnar.
The recommended connector library for couchbase is
[couchbase-sqlalchemy](https://github.com/couchbase/couchbase-sqlalchemy).
```
pip install couchbase-sqlalchemy
```
@@ -394,22 +384,25 @@ The expected connection string is formatted as follows:
couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false}
```
#### CrateDB
The connector library for CrateDB is [sqlalchemy-cratedb].
We recommend adding the following item to your `requirements.txt` file:
```
sqlalchemy-cratedb>=0.40.1,<1
```
An SQLAlchemy connection string for [CrateDB Self-Managed] on localhost,
for evaluation purposes, looks like this:
```
crate://crate@127.0.0.1:4200
```
An SQLAlchemy connection string for connecting to [CrateDB Cloud] looks like
this:
```
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
```
@@ -417,6 +410,7 @@ crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
Follow the steps [here](/docs/configuration/databases#installing-database-drivers)
to install the CrateDB connector package when setting up Superset locally using
Docker Compose.
```
echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
```
@@ -425,7 +419,6 @@ echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
[CrateDB Self-Managed]: https://cratedb.com/product/self-managed
[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/
#### Databend
The recommended connector library for Databend is [databend-sqlalchemy](https://pypi.org/project/databend-sqlalchemy/).
@@ -443,7 +436,6 @@ Here's a connection string example of Superset connecting to a Databend database
databend://user:password@localhost:8000/default?secure=false
```
#### Databricks
Databricks now offer a native DB API 2.0 driver, `databricks-sql-connector`, that can be used with the `sqlalchemy-databricks` dialect. You can install both with:
@@ -527,7 +519,6 @@ For a connection to a SQL endpoint you need to use the HTTP path from the endpoi
{"connect_args": {"http_path": "/sql/1.0/endpoints/****", "driver_path": "/path/to/odbc/driver"}}
```
#### Denodo
The recommended connector library for Denodo is
@@ -539,7 +530,6 @@ The expected connection string is formatted as follows (default port is 9996):
denodo://{username}:{password}@{hostname}:{port}/{database}
```
#### Dremio
The recommended connector library for Dremio is
@@ -560,7 +550,6 @@ dremio+flight://{username}:{password}@{host}:{port}/dremio
This [blog post by Dremio](https://www.dremio.com/tutorials/dremio-apache-superset/) has some
additional helpful instructions on connecting Superset to Dremio.
#### Apache Drill
##### SQLAlchemy
@@ -602,8 +591,6 @@ We recommend reading the
the [GitHub README](https://github.com/JohnOmernik/sqlalchemy-drill#usage-with-odbc) to learn how to
work with Drill through ODBC.
import useBaseUrl from "@docusaurus/useBaseUrl";
#### Apache Druid
@@ -617,6 +604,7 @@ The connection string looks like:
```
druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql
```
Here's a breakdown of the key components of this connection string:
- `User`: username portion of the credentials needed to connect to your database
@@ -645,7 +633,7 @@ To disable SSL verification, add the following to the **Extras** field:
```
engine_params:
{"connect_args":
{"scheme": "https", "ssl_verify_cert": false}}
{"scheme": "https", "ssl_verify_cert": false}}
```
##### Aggregations
@@ -669,7 +657,6 @@ much like you would create an aggregation manually, but specify `postagg` as a `
then have to provide a valid json post-aggregation definition (as specified in the Druid docs) in
the JSON field.
#### Elasticsearch
The recommended connector library for Elasticsearch is
@@ -718,7 +705,7 @@ Then register your table with the alias name logstash_all
By default, Superset uses UTC time zone for elasticsearch query. If you need to specify a time zone,
please edit your Database and enter the settings of your specified time zone in the Other > ENGINE PARAMETERS:
```
```json
{
"connect_args": {
"time_zone": "Asia/Shanghai"
@@ -740,8 +727,6 @@ To disable SSL verification, add the following to the **SQLALCHEMY URI** field:
elasticsearch+https://{user}:{password}@{host}:9200/?verify_certs=False
```
#### Exasol
The recommended connector library for Exasol is
@@ -753,7 +738,6 @@ The connection string for Exasol looks like this:
exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC
```
#### Firebird
The recommended connector library for Firebird is [sqlalchemy-firebird](https://pypi.org/project/sqlalchemy-firebird/).
@@ -771,7 +755,6 @@ Here's a connection string example of Superset connecting to a local Firebird da
firebird+fdb://SYSDBA:masterkey@192.168.86.38:3050//Library/Frameworks/Firebird.framework/Versions/A/Resources/examples/empbuild/employee.fdb
```
#### Firebolt
The recommended connector library for Firebolt is [firebolt-sqlalchemy](https://pypi.org/project/firebolt-sqlalchemy/).
@@ -802,7 +785,7 @@ The recommended connector library for BigQuery is
Follow the steps [here](/docs/configuration/databases#installing-drivers-in-docker-images) about how to
install new database drivers when setting up Superset locally via docker compose.
```
```bash
echo "sqlalchemy-bigquery" >> ./docker/requirements-local.txt
```
@@ -815,7 +798,7 @@ credentials file (as a JSON).
appropriate BigQuery datasets, and download the JSON configuration file for the service account.
2. In Superset, you can either upload that JSON or add the JSON blob in the following format (this should be the content of your credential JSON file):
```
```json
{
"type": "service_account",
"project_id": "...",
@@ -843,7 +826,7 @@ credentials file (as a JSON).
Go to the **Advanced** tab, Add a JSON blob to the **Secure Extra** field in the database configuration form with
the following format:
```
```json
{
"credentials_info": <contents of credentials JSON file>
}
@@ -851,7 +834,7 @@ credentials file (as a JSON).
The resulting file should have this structure:
```
```json
{
"credentials_info": {
"type": "service_account",
@@ -878,8 +861,6 @@ To be able to upload CSV or Excel files to BigQuery in Superset, you'll need to
Currently, the Google BigQuery Python SDK is not compatible with `gevent`, due to some dynamic monkeypatching on python core library by `gevent`.
So, when you deploy Superset with `gunicorn` server, you have to use worker type except `gevent`.
#### Google Sheets
Google Sheets has a very limited
@@ -890,7 +871,6 @@ There are a few steps involved in connecting Superset to Google Sheets. This
[tutorial](https://preset.io/blog/2020-06-01-connect-superset-google-sheets/) has the most up to date
instructions on setting up this connection.
#### Hana
The recommended connector library is [sqlalchemy-hana](https://github.com/SAP/sqlalchemy-hana).
@@ -901,7 +881,6 @@ The connection string is formatted as follows:
hana://{username}:{password}@{host}:{port}
```
#### Apache Hive
The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Hive through SQLAlchemy.
@@ -912,7 +891,6 @@ The expected connection string is formatted as follows:
hive://hive@{hostname}:{port}/{database}
```
#### Hologres
Hologres is a real-time interactive analytics service developed by Alibaba Cloud. It is fully compatible with PostgreSQL 11 and integrates seamlessly with the big data ecosystem.
@@ -931,7 +909,6 @@ The connection string looks like:
postgresql+psycopg2://{username}:{password}@{host}:{port}/{database}
```
#### IBM DB2
The [IBM_DB_SA](https://github.com/ibmdb/python-ibmdbsa/tree/master/ibm_db_sa) library provides a
@@ -949,7 +926,6 @@ There are two DB2 dialect versions implemented in SQLAlchemy. If you are connect
ibm_db_sa://{username}:{passport}@{hostname}:{port}/{database}
```
#### Apache Impala
The recommended connector library to Apache Impala is [impyla](https://github.com/cloudera/impyla).
@@ -960,7 +936,6 @@ The expected connection string is formatted as follows:
impala://{hostname}:{port}/{database}
```
#### Kusto
The recommended connector library for Kusto is
@@ -981,7 +956,6 @@ kustokql+https://{cluster_url}/{database}?azure_ad_client_id={azure_ad_client_id
Make sure the user has privileges to access and use all required
databases/tables/views.
#### Apache Kylin
The recommended connector library for Apache Kylin is
@@ -993,10 +967,6 @@ The expected connection string is formatted as follows:
kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>
```
#### MySQL
The recommended connector library for MySQL is [mysqlclient](https://pypi.org/project/mysqlclient/).
@@ -1021,7 +991,6 @@ One problem with `mysqlclient` is that it will fail to connect to newer MySQL da
mysql+mysqlconnector://{username}:{password}@{host}/{database}
```
#### IBM Netezza Performance Server
The [nzalchemy](https://pypi.org/project/nzalchemy/) library provides a
@@ -1038,21 +1007,19 @@ netezza+nzpy://{username}:{password}@{hostname}:{port}/{database}
The [sqlalchemy-oceanbase](https://pypi.org/project/oceanbase_py/) library is the recommended
way to connect to OceanBase through SQLAlchemy.
The connection string for OceanBase looks like this:
```
oceanbase://<User>:<Password>@<Host>:<Port>/<Database>
```
#### Ocient DB
The recommended connector library for Ocient is [sqlalchemy-ocient](https://pypi.org/project/sqlalchemy-ocient).
##### Install the Ocient Driver
```
```bash
pip install sqlalchemy-ocient
```
@@ -1093,7 +1060,7 @@ parseable://admin:admin@demo.parseable.com:443/ingress-nginx
Note: The stream_name in the URI represents the Parseable logstream you want to query. You can use both HTTP (port 80) and HTTPS (port 443) connections.
>>>>>>>
#### Apache Pinot
The recommended connector library for Apache Pinot is [pinotdb](https://pypi.org/project/pinotdb/).
@@ -1112,7 +1079,8 @@ pinot://<username>:<password>@<pinot-broker-host>:<pinot-broker-port>/query/sql?
If you want to use explore view or joins, window functions, etc. then enable [multi-stage query engine](https://docs.pinot.apache.org/reference/multi-stage-engine).
Add below argument while creating database connection in Advanced -> Other -> ENGINE PARAMETERS
```
```json
{"connect_args":{"use_multistage_engine":"true"}}
```
@@ -1152,7 +1120,6 @@ More information about PostgreSQL connection options can be found in the
and the
[PostgreSQL docs](https://www.postgresql.org/docs/9.1/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS).
#### Presto
The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Presto through SQLAlchemy.
@@ -1178,7 +1145,7 @@ presto://datascientist:securepassword@presto.example.com:8080/hive
By default Superset assumes the most recent version of Presto is being used when querying the
datasource. If you're using an older version of Presto, you can configure it in the extra parameter:
```
```json
{
"version": "0.123"
}
@@ -1186,7 +1153,7 @@ datasource. If youre using an older version of Presto, you can configure it i
To enable SSL, add the following JSON config to the Secure Extra connection information.
```
```json
{
"connect_args":
{"protocol": "https",
@@ -1195,8 +1162,6 @@ SSL Secure extra add json config to extra connection information.
}
```
#### RisingWave
The recommended connector library for RisingWave is
@@ -1208,7 +1173,6 @@ The expected connection string is formatted as follows:
risingwave://root@{hostname}:{port}/{database}?sslmode=disable
```
#### Rockset
The connection string for Rockset is:
@@ -1228,7 +1192,6 @@ rockset://{api key}:@{api server}/{VI ID}
For more complete instructions, we recommend the [Rockset documentation](https://docs.rockset.com/apache-superset/).
#### Snowflake
##### Install Snowflake Driver
@@ -1236,7 +1199,7 @@ For more complete instructions, we recommend the [Rockset documentation](https:/
Follow the steps [here](/docs/configuration/databases#installing-database-drivers) about how to
install new database drivers when setting up Superset locally via docker compose.
```
```bash
echo "snowflake-sqlalchemy" >> ./docker/requirements-local.txt
```
@@ -1269,7 +1232,7 @@ To connect Snowflake with Key Pair Authentication, you need to add the following
***Please note that you need to merge multi-line private key content to one line and insert `\n` between each line***
```
```json
{
"auth_method": "keypair",
"auth_params": {
@@ -1281,7 +1244,7 @@ To connect Snowflake with Key Pair Authentication, you need to add the following
If your private key is stored on the server, you can replace "privatekey_body" with "privatekey_path" in the parameters.
```
```json
{
"auth_method": "keypair",
"auth_params": {
@@ -1302,7 +1265,6 @@ The connection string for Solr looks like this:
solr://{username}:{password}@{host}:{port}/{server_path}/{collection}[/?use_ssl=true|false]
```
#### Apache Spark SQL
The recommended connector library for Apache Spark SQL [pyhive](https://pypi.org/project/PyHive/).
@@ -1320,16 +1282,24 @@ The recommended connector library for SQL Server is [pymssql](https://github.com
The connection string for SQL Server looks like this:
```
mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>/?Encrypt=yes
mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>
```
It is also possible to connect using [pyodbc](https://pypi.org/project/pyodbc) with the parameter [odbc_connect](https://docs.sqlalchemy.org/en/14/dialects/mssql.html#pass-through-exact-pyodbc-string)
The connection string for SQL Server looks like this:
```
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_database%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
```
:::note
You might have noticed that some special characters are used in the above connection string. For example see the `odbc_connect` parameter. The value is `Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3B` which is a URL-encoded form of `Driver={ODBC+Driver+17+for+SQL+Server};`. It's important that the connection string you provide is URL-encoded.
For more information about this check the [sqlalchemy documentation](https://docs.sqlalchemy.org/en/20/core/engines.html#escaping-special-characters-such-as-signs-in-passwords). Which says `When constructing a fully formed URL string to pass to create_engine(), special characters such as those that may be used in the user and password need to be URL encoded to be parsed correctly. This includes the @ sign.`
:::
#### StarRocks
The [sqlalchemy-starrocks](https://pypi.org/project/starrocks/) library is the recommended
@@ -1354,6 +1324,24 @@ starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
StarRocks maintains their Superset documentation [here](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/).
:::
#### TDengine
[TDengine](https://www.tdengine.com) is a High-Performance, Scalable Time-Series Database for Industrial IoT and provides SQL-like query interface.
The recommended connector library for TDengine is [taospy](https://pypi.org/project/taospy/) and [taos-ws-py](https://pypi.org/project/taos-ws-py/)
The expected connection string is formatted as follows:
```
taosws://<user>:<password>@<host>:<port>
```
For example:
```
taosws://root:taosdata@127.0.0.1:6041
```
#### Teradata
The recommended connector library is
@@ -1375,7 +1363,7 @@ here: https://downloads.teradata.com/download/connectivity/odbc-driver/linux
Here are the required environment variables:
```
```bash
export ODBCINI=/.../teradata/client/ODBC_64/odbc.ini
export ODBCINST=/.../teradata/client/ODBC_64/odbcinst.ini
```
@@ -1384,8 +1372,8 @@ We recommend using the first library because of the
lack of requirement around ODBC drivers and
because it's more regularly updated.
#### TimescaleDB
[TimescaleDB](https://www.timescale.com) is the open-source relational database for time-series and analytics to build powerful data-intensive applications.
TimescaleDB is a PostgreSQL extension, and you can use the standard PostgreSQL connector library, [psycopg2](https://www.psycopg.org/docs/), to connect to the database.
@@ -1417,31 +1405,38 @@ postgresql://{username}:{password}@{host}:{port}/{database name}?sslmode=require
[Learn more about TimescaleDB!](https://docs.timescale.com/)
#### Trino
Trino version 352 and higher is supported.
##### Connection String
The connection string format is as follows:
```
trino://{username}:{password}@{hostname}:{port}/{catalog}
```
If you are running Trino with docker on local machine, please use the following connection URL
```
trino://trino@host.docker.internal:8080
```
##### Authentications
###### 1. Basic Authentication
You can provide `username`/`password` in the connection string or in the `Secure Extra` field at `Advanced / Security`
* In Connection String
- In Connection String
```
trino://{username}:{password}@{hostname}:{port}/{catalog}
```
* In `Secure Extra` field
- In `Secure Extra` field
```json
{
"auth_method": "basic",
@@ -1455,7 +1450,9 @@ You can provide `username`/`password` in the connection string or in the `Secure
NOTE: if both are provided, `Secure Extra` always takes higher priority.
###### 2. Kerberos Authentication
In `Secure Extra` field, config as following example:
```json
{
"auth_method": "kerberos",
@@ -1472,7 +1469,9 @@ All fields in `auth_params` are passed directly to the [`KerberosAuthentication`
NOTE: Kerberos authentication requires installing the [`trino-python-client`](https://github.com/trinodb/trino-python-client) locally with either the `all` or `kerberos` optional features, i.e., installing `trino[all]` or `trino[kerberos]` respectively.
###### 3. Certificate Authentication
In `Secure Extra` field, config as following example:
```json
{
"auth_method": "certificate",
@@ -1486,7 +1485,9 @@ In `Secure Extra` field, config as following example:
All fields in `auth_params` are passed directly to the [`CertificateAuthentication`](https://github.com/trinodb/trino-python-client/blob/0.315.0/trino/auth.py#L416) class.
###### 4. JWT Authentication
Config `auth_method` and provide token in `Secure Extra` field
```json
{
"auth_method": "jwt",
@@ -1497,8 +1498,10 @@ Config `auth_method` and provide token in `Secure Extra` field
```
###### 5. Custom Authentication
To use custom authentication, first you need to add it into
`ALLOWED_EXTRA_AUTHENTICATIONS` allow list in Superset config file:
```python
from your.module import AuthClass
from another.extra import auth_method
@@ -1512,6 +1515,7 @@ ALLOWED_EXTRA_AUTHENTICATIONS: Dict[str, Dict[str, Callable[..., Any]]] = {
```
Then in `Secure Extra` field:
```json
{
"auth_method": "custom_auth",
@@ -1527,8 +1531,8 @@ or factory function (which returns an `Authentication` instance) to `auth_method
All fields in `auth_params` are passed directly to your class/function.
**Reference**:
* [Trino-Superset-Podcast](https://trino.io/episodes/12.html)
- [Trino-Superset-Podcast](https://trino.io/episodes/12.html)
#### Vertica
@@ -1555,8 +1559,6 @@ Other parameters:
- Load Balancer - Backup Host
#### YDB
The recommended connector library for [YDB](https://ydb.tech/) is
@@ -1571,6 +1573,7 @@ ydb://{host}:{port}/{database_name}
```
##### Protocol
You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:
```
@@ -1581,9 +1584,10 @@ You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:
Default is `grpc`.
##### Authentication Methods
###### Static Credentials
To use `Static Credentials` you should provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`:
```
@@ -1595,8 +1599,8 @@ To use `Static Credentials` you should provide `username`/`password` in the `Sec
}
```
###### Access Token Credentials
To use `Access Token Credentials` you should provide `token` in the `Secure Extra` field at `Advanced / Security`:
```
@@ -1607,8 +1611,8 @@ To use `Access Token Credentials` you should provide `token` in the `Secure Extr
}
```
##### Service Account Credentials
To use Service Account Credentials, you should provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`:
```
@@ -1626,8 +1630,6 @@ To use Service Account Credentials, you should provide `service_account_json` in
}
```
#### YugabyteDB
[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL.
@@ -1642,8 +1644,6 @@ The connection string looks like:
postgresql://{username}:{password}@{host}:{port}/{database}
```
## Connecting through the UI
Here is the documentation on how to leverage the new DB Connection UI. This will provide admins the ability to enhance the UX for users who want to connect to new databases.
@@ -1716,9 +1716,6 @@ For databases like MySQL and Postgres that use the standard format of `engine+dr
For other databases you need to implement these methods yourself. The BigQuery DB engine spec is a good example of how to do that.
### Extra Database Settings
##### Deeper SQLAlchemy Integration
@@ -1782,9 +1779,7 @@ You can use the `Extra` field in the **Edit Databases** form to configure SSL:
}
```
## Misc.
## Misc
### Querying across databases

View File

@@ -10,7 +10,7 @@ version: 1
The superset cli allows you to import and export datasources from and to YAML. Datasources include
databases. The data is expected to be organized in the following hierarchy:
```
```text
├──databases
| ├──database_1
| | ├──table_1
@@ -30,13 +30,13 @@ databases. The data is expected to be organized in the following hierarchy:
You can print your current datasources to stdout by running:
```
```bash
superset export_datasources
```
To save your datasources to a ZIP file run:
```
```bash
superset export_datasources -f <filename>
```
@@ -55,7 +55,7 @@ Alternatively, you can export datasources using the UI:
In order to obtain an **exhaustive list of all fields** you can import using the YAML import run:
```
```bash
superset export_datasource_schema
```
@@ -65,13 +65,13 @@ As a reminder, you can use the `-b` flag to include back references.
In order to import datasources from a ZIP file, run:
```
```bash
superset import_datasources -p <path / filename>
```
The optional username flag **-u** sets the user used for the datasource import. The default is 'admin'. Example:
```
```bash
superset import_datasources -p <path / filename> -u 'admin'
```
@@ -81,7 +81,7 @@ superset import_datasources -p <path / filename> -u 'admin'
When using Superset version 4.x.x to import from an older version (2.x.x or 3.x.x) importing is supported as the command `legacy_import_datasources` and expects a JSON or directory of JSONs. The options are `-r` for recursive and `-u` for specifying a user. Example of legacy import without options:
```
```bash
superset legacy_import_datasources -p <path or filename>
```
@@ -89,21 +89,21 @@ superset legacy_import_datasources -p <path or filename>
When using an older Superset version (2.x.x & 3.x.x) of Superset, the command is `import_datasources`. ZIP and YAML files are supported and to switch between them the feature flag `VERSIONED_EXPORT` is used. When `VERSIONED_EXPORT` is `True`, `import_datasources` expects a ZIP file, otherwise YAML. Example:
```
```bash
superset import_datasources -p <path or filename>
```
When `VERSIONED_EXPORT` is `False`, if you supply a path all files ending with **yaml** or **yml** will be parsed. You can apply
additional flags (e.g. to search the supplied path recursively):
```
```bash
superset import_datasources -p <path> -r
```
The sync flag **-s** takes parameters in order to sync the supplied elements with your file. Be
careful this can delete the contents of your meta database. Example:
```
```bash
superset import_datasources -p <path / filename> -s columns,metrics
```
@@ -115,7 +115,7 @@ If you dont supply the sync flag (**-s**) importing will only add and update
E.g. you can add a verbose_name to the column ds in the table random_time_series from the example
datasets by saving the following YAML to file and then running the **import_datasources** command.
```
```yaml
databases:
- database_name: main
tables:

View File

@@ -11,7 +11,7 @@ version: 1
To configure CORS, or cross-origin resource sharing, the following dependency must be installed:
```python
pip install apache-superset[cors]
pip install apache_superset[cors]
```
The following keys in `superset_config.py` can be specified to configure CORS:
@@ -20,14 +20,12 @@ The following keys in `superset_config.py` can be specified to configure CORS:
- `CORS_OPTIONS`: options passed to Flask-CORS
([documentation](https://flask-cors.readthedocs.io/en/latest/api.html#extension))
## HTTP headers
Note that Superset bundles [flask-talisman](https://pypi.org/project/talisman/)
Self-described as a small Flask extension that handles setting HTTP headers that can help
protect against a few common web application security issues.
## HTML Embedding of Dashboards and Charts
There are two ways to embed a dashboard: Using the [SDK](https://www.npmjs.com/package/@superset-ui/embedded-sdk) or embedding a direct link. Note that in the latter case everybody who knows the link is able to access the dashboard.
@@ -39,14 +37,16 @@ This works by first changing the content security policy (CSP) of [flask-talisma
#### Changing flask-talisman CSP
Add to `superset_config.py` the entire `TALISMAN_CONFIG` section from `config.py` and include a `frame-ancestors` section:
```python
TALISMAN_ENABLED = True
TALISMAN_CONFIG = {
"content_security_policy": {
...
"frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
"frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
...
```
Restart Superset for this configuration change to take effect.
#### Making a Dashboard Public
@@ -69,6 +69,7 @@ Now anybody can directly access the dashboard's URL. You can embed it in an ifra
>
</iframe>
```
#### Embedding a Chart
A chart's embed code can be generated by going to a chart's edit view and then clicking at the top right on `...` > `Share` > `Embed code`
@@ -85,11 +86,10 @@ SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
## CSRF settings
Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used manage
Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used to manage
some CSRF configurations. If you need to exempt endpoints from CSRF (e.g. if you are
running a custom auth postback endpoint), you can add the endpoints to `WTF_CSRF_EXEMPT_LIST`:
## SSH Tunneling
1. Turn on feature flag
@@ -105,7 +105,6 @@ running a custom auth postback endpoint), you can add the endpoints to `WTF_CSRF
3. Verify data is flowing
- Once SSH tunneling has been enabled, go to SQL Lab and write a query to verify data is properly flowing.
## Domain Sharding
:::note
@@ -139,4 +138,4 @@ of your additional middleware classes.
For example, to use `AUTH_REMOTE_USER` from behind a proxy server like nginx, you have to add a
simple middleware class to add the value of `HTTP_X_PROXY_REMOTE_USER` (or any other custom header
from the proxy) to Gunicorns `REMOTE_USER` environment variable:
from the proxy) to Gunicorns `REMOTE_USER` environment variable.

View File

@@ -1,6 +0,0 @@
---
title: Setup SSH Tunneling
hide_title: true
sidebar_position: 8
version: 1
---

View File

@@ -77,6 +77,7 @@ In the UI you can assign a set of parameters as JSON
"my_table": "foo"
}
```
The parameters become available in your SQL (example: `SELECT * FROM {{ my_table }}` ) by using Jinja templating syntax.
SQL Lab template parameters are stored with the dataset as `TEMPLATE PARAMETERS`.
@@ -103,7 +104,6 @@ GROUP BY action
Note ``_filters`` is not stored with the dataset. It's only used within the SQL Lab UI.
Besides default Jinja templating, SQL lab also supports self-defined template processor by setting
the `CUSTOM_TEMPLATE_PROCESSORS` in your superset configuration. The values in this dictionary
overwrite the default Jinja template processors of the specified database engine. The example below
@@ -186,7 +186,7 @@ cache hit in the future and Superset can retrieve cached data.
You can disable the inclusion of the `username` value in the calculation of the
cache key by adding the following parameter to your Jinja code:
```
```python
{{ current_username(add_to_cache_keys=False) }}
```
@@ -201,7 +201,7 @@ cache hit in the future and Superset can retrieve cached data.
You can disable the inclusion of the account `id` value in the calculation of the
cache key by adding the following parameter to your Jinja code:
```
```python
{{ current_user_id(add_to_cache_keys=False) }}
```
@@ -216,10 +216,40 @@ cache hit in the future and Superset can retrieve cached data.
You can disable the inclusion of the email value in the calculation of the
cache key by adding the following parameter to your Jinja code:
```
```python
{{ current_user_email(add_to_cache_keys=False) }}
```
**Current User Roles**
The `{{ current_user_roles() }}` macro returns an array of roles for the logged in user.
If you have caching enabled in your Superset configuration, then by default the roles value will be used
by Superset when calculating the cache key. A cache key is a unique identifier that determines if there's a
cache hit in the future and Superset can retrieve cached data.
You can disable the inclusion of the roles value in the calculation of the
cache key by adding the following parameter to your Jinja code:
```python
{{ current_user_roles(add_to_cache_keys=False) }}
```
You can json-stringify the array by adding `|tojson` to your Jinja code:
```python
{{ current_user_roles()|tojson }}
```
You can use the `|where_in` filter to use your roles in a SQL statement. For example, if `current_user_roles()` returns `['admin', 'viewer']`, the following template:
```python
SELECT * FROM users WHERE role IN {{ current_user_roles()|where_in }}
```
Will be rendered as:
```sql
SELECT * FROM users WHERE role IN ('admin', 'viewer')
```
**Custom URL Parameters**
The `{{ url_param('custom_variable') }}` macro lets you define arbitrary URL
@@ -273,7 +303,7 @@ You can retrieve the value for a specific filter as a list using `{{ filter_valu
This is useful if:
- You want to use a filter component to filter a query where the name of filter component column doesn't match the one in the select statement
- You want to have the ability for filter inside the main query for performance purposes
- You want to have the ability to filter inside the main query for performance purposes
Here's a concrete example:
@@ -301,7 +331,7 @@ This is useful if:
Here's a concrete example:
```
```sql
WITH RECURSIVE
superiors(employee_id, manager_id, full_name, level, lineage) AS (
SELECT
@@ -357,6 +387,7 @@ considerably improve performance, as many databases and query engines are able t
if the temporal filter is placed on the inner query, as opposed to the outer query.
The macro takes the following parameters:
- `column`: Name of the temporal column. Leave undefined to reference the time range from a Dashboard Native Time Range
filter (when present).
- `default`: The default value to fall back to if the time filter is not present, or has the value `No filter`
@@ -370,6 +401,7 @@ The macro takes the following parameters:
filter should only apply to the inner query.
The return type has the following properties:
- `from_expr`: the start of the time filter (if any)
- `to_expr`: the end of the time filter (if any)
- `time_range`: The applied time range
@@ -410,6 +442,7 @@ LIMIT 1000;
When using the `default` parameter, the templated query can be simplified, as the endpoints will always be defined
(to use a fixed time range, you can also use something like `default="2024-08-27 : 2024-09-03"`)
```
{% set time_filter = get_time_filter("dttm", default="Last week", remove_filter=True) %}
SELECT
@@ -429,19 +462,19 @@ To use the macro, first you need to find the ID of the dataset. This can be done
Once you have the ID you can query it as if it were a table:
```
```sql
SELECT * FROM {{ dataset(42) }} LIMIT 10
```
If you want to select the metric definitions as well, in addition to the columns, you need to pass an additional keyword argument:
```
```sql
SELECT * FROM {{ dataset(42, include_metrics=True) }} LIMIT 10
```
Since metrics are aggregations, the resulting SQL expression will be grouped by all non-metric columns. You can specify a subset of columns to group by instead:
```
```sql
SELECT * FROM {{ dataset(42, include_metrics=True, columns=["ds", "category"]) }} LIMIT 10
```
@@ -458,3 +491,37 @@ This macro avoids copy/paste, allowing users to centralize the metric definition
The `dataset_id` parameter is optional, and if not provided Superset will use the current dataset from context (for example, when using this macro in the Chart Builder, by default the `macro_key` will be searched in the dataset powering the chart).
The parameter can be used in SQL Lab, or when fetching a metric from another dataset.
## Available Filters
Superset supports [builtin filters from the Jinja2 templating package](https://jinja.palletsprojects.com/en/stable/templates/#builtin-filters). Custom filters have also been implemented:
**Where In**
Parses a list into a SQL-compatible statement. This is useful with macros that return an array (for example the `filter_values` macro):
```
Dashboard filter with "First", "Second" and "Third" options selected
{{ filter_values('column') }} => ["First", "Second", "Third"]
{{ filter_values('column')|where_in }} => ('First', 'Second', 'Third')
```
By default, this filter returns `()` (as a string) in case the value is null. The `default_to_none` parameter can be set to `True` to return null in this case:
```
Dashboard filter without any value applied
{{ filter_values('column') }} => ()
{{ filter_values('column')|where_in(default_to_none=True) }} => None
```
**To Datetime**
Loads a string as a `datetime` object. This is useful when performing date operations. For example:
```
{% set from_expr = get_time_filter("dttm", strftime="%Y-%m-%d").from_expr %}
{% set to_expr = get_time_filter("dttm", strftime="%Y-%m-%d").to_expr %}
{% if (to_expr|to_datetime(format="%Y-%m-%d") - from_expr|to_datetime(format="%Y-%m-%d")).days > 100 %}
do something
{% else %}
do something else
{% endif %}
```

View File

@@ -24,7 +24,7 @@ The challenge however lies with the slew of [database engines](/docs/configurati
For example the following is a comparison of MySQL and Presto,
```
```python
import pandas as pd
from sqlalchemy import create_engine
@@ -41,7 +41,7 @@ pd.read_sql_query(
which outputs `{"ts":{"0":1640995200000}}` (which infers the UTC timezone per the Epoch time definition) and `{"ts":{"0":"2022-01-01 00:00:00.000"}}` (without an explicit timezone) respectively and thus are treated differently in JavaScript:
```
```js
new Date(1640995200000)
> Sat Jan 01 2022 13:00:00 GMT+1300 (New Zealand Daylight Time)

View File

@@ -26,9 +26,9 @@ More references:
Here's a list of repositories that contain Superset-related packages:
- [apache/superset](https://github.com/apache/superset)
is the main repository containing the `apache-superset` Python package
is the main repository containing the `apache_superset` Python package
distributed on
[pypi](https://pypi.org/project/apache-superset/). This repository
[pypi](https://pypi.org/project/apache_superset/). This repository
also includes Superset's main TypeScript/JavaScript bundles and react apps under
the [superset-frontend](https://github.com/apache/superset/tree/master/superset-frontend)
folder.

View File

@@ -52,7 +52,7 @@ Note that:
[docker-compose.yml](https://github.com/apache/superset/blob/master/docker-compose.yml)
- The local repository is mounted within the services, meaning updating
the code on the host will be reflected in the docker images
- Superset is served at localhost:8088/
- Superset is served at localhost:9000/
- You can login with admin/admin
:::note
@@ -72,10 +72,10 @@ documentation.
configured to be secure.
:::
### Supported environment variables
Affecting the Docker build process:
- **SUPERSET_BUILD_TARGET (default=dev):** which --target to build, either `lean` or `dev` are commonly used
- **INCLUDE_FIREFOX (default=false):** whether to include the Firefox headless browser in the build
- **INCLUDE_CHROMIUM (default=false):** whether to include the Chromium headless browser in the build
@@ -90,6 +90,7 @@ For more env vars that affect your configuration, see this
used in the `docker compose` context to assign env vars to the superset configuration.
### Accessing the postgres database
Sometimes it's useful to access the database in the docker container directly.
You can enter a `psql` shell (the official Postgres client) by running the following command:
@@ -269,22 +270,22 @@ If you have made changes to the FAB-managed templates, which are not built the s
If you add a new requirement or update an existing requirement (per the `install_requires` section in `setup.py`) you must recompile (freeze) the Python dependencies to ensure that for CI, testing, etc. the build is deterministic. This can be achieved via,
```bash
$ python3 -m venv venv
$ source venv/bin/activate
$ python3 -m pip install -r requirements/development.txt
$ ./scripts/uv-pip-compile.sh
python3 -m venv venv
source venv/bin/activate
python3 -m pip install -r requirements/development.txt
./scripts/uv-pip-compile.sh
```
When upgrading the version number of a single package, you should run `./scripts/uv-pip-compile.sh` with the `-P` flag:
```bash
$ ./scripts/uv-pip-compile.sh -P some-package-to-upgrade
./scripts/uv-pip-compile.sh -P some-package-to-upgrade
```
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run `./scripts/uv-pip-compile.sh --upgrade`
```bash
$ ./scripts/uv-pip-compile.sh --upgrade
./scripts/uv-pip-compile.sh --upgrade
```
This should be done periodically, but it is recommended to do thorough manual testing of the application to ensure no breaking changes have been introduced that aren't caught by the unit and integration tests.
@@ -505,12 +506,10 @@ pre-commit install
A series of checks will now run when you make a git commit.
## Linting
See [how tos](/docs/contributing/howtos#linting)
## GitHub Actions and `act`
:::tip
@@ -523,6 +522,7 @@ For more targetted iteration, see the `gh workflow run --ref {BRANCH}` subcomman
For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
can find all of the workflows and other assets under the `.github/` folder. This includes:
- running the backend unit test suites (`tests/`)
- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
@@ -564,6 +564,7 @@ act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --con
```
In the example above, notice that:
- we target a specific workflow, using `--job`
- we pass a secret using `--secret`, as many jobs require read access (public) to the repo
- we simulate a `pull_request` event by specifying it as the first arg,
@@ -785,7 +786,7 @@ To debug Flask running in POD inside a kubernetes cluster, you'll need to make s
add: ["SYS_PTRACE"]
```
See (set capabilities for a container)[https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container] for more details.
See [set capabilities for a container](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container) for more details.
Once the pod is running as root and has the SYS_PTRACE capability it will be able to debug the Flask app.

View File

@@ -10,7 +10,7 @@ version: 1
The latest documentation and tutorial are available at https://superset.apache.org/.
The documentation site is built using [Docusaurus 2](https://docusaurus.io/), a modern
The documentation site is built using [Docusaurus 3](https://docusaurus.io/), a modern
static website generator, the source for which resides in `./docs`.
### Local Development
@@ -223,9 +223,9 @@ To run a single test file:
npm run test -- path/to/file.js
```
### e2e Integration Testing
### E2E Integration Testing
For e2e testing, we recommend that you use a `docker compose` backend
For E2E testing, we recommend that you use a `docker compose` backend
```bash
CYPRESS_CONFIG=true docker compose up --build
@@ -411,7 +411,7 @@ See [set capabilities for a container](https://kubernetes.io/docs/tasks/configur
Once the pod is running as root and has the `SYS_PTRACE` capability it will be able to debug the Flask app.
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages: gdb, netstat and debugpy.
Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.
@@ -608,3 +608,27 @@ If using the eslint extension with vscode, put the following in your workspace `
"superset-frontend"
]
```
## GitHub Ephemeral Environments
On any given pull request on GitHub, it's possible to create a temporary environment/deployment
by simply adding the label `testenv-up` to the PR. Once you add the `testenv-up` label, a
GitHub Action will be triggered that will:
- build a docker image
- deploy it in EC2 (sponsored by the folks at [Preset](https://preset.io))
- write a comment on the PR with a link to the ephemeral environment
For more advanced use cases, it's possible to set a feature flag on the PR body, which will
take effect on the ephemeral environment. For example, if you want to set the `TAGGING_SYSTEM`
feature flag to `true`, you can add the following line to the PR body/description:
```
FEATURE_TAGGING_SYSTEM=true
```
Similarly, it's possible to disable feature flags with:
```
FEATURE_TAGGING_SYSTEM=false
```

View File

@@ -3,7 +3,7 @@ sidebar_position: 6
version: 1
---
# Misc.
# Miscellaneous
## Reporting a Security Vulnerability
@@ -11,7 +11,7 @@ Please report security vulnerabilities to private@superset.apache.org.
In the event a community member discovers a security flaw in Superset, it is important to follow the [Apache Security Guidelines](https://www.apache.org/security/committers.html) and release a fix as quickly as possible before public disclosure. Reporting security vulnerabilities through the usual GitHub Issues channel is not ideal as it will publicize the flaw before a fix can be applied.
### SQL Lab Async
## SQL Lab Async
It's possible to configure a local database to operate in `async` mode,
to work on `async` related features.
@@ -46,7 +46,7 @@ Note that:
to your production environment, and use the similar broker as well as
results backend configuration
### Async Chart Queries
## Async Chart Queries
It's possible to configure database queries for charts to operate in `async` mode. This is especially useful for dashboards with many charts that may otherwise be affected by browser connection limits. To enable async queries for dashboards and Explore, the following dependencies are required:

View File

@@ -4,10 +4,96 @@ version: 1
---
import InteractiveSVG from '../../src/components/InteractiveERDSVG';
import Mermaid from '@theme/Mermaid';
# Resources
## Entity-Relationship Diagram
## High Level Architecture
<div style={{ maxWidth: "600px", margin: "0 auto", marginLeft: 0, marginRight: "auto" }}>
```mermaid
flowchart TD
%% Top Level
LB["<b>Load Balancer(s)</b><br/>(optional)"]
LB -.-> WebServers
%% Web Servers
subgraph WebServers ["<b>Web Server(s)</b>"]
WS1["<b>Frontend</b><br/>(React, AntD, ECharts, AGGrid)"]
WS2["<b>Backend</b><br/>(Python, Flask, SQLAlchemy, Pandas, ...)"]
end
%% Infra
subgraph InfraServices ["<b>Infra</b>"]
DB[("<b>Metadata Database</b><br/>(Postgres / MySQL)")]
subgraph Caching ["<b>Caching Subservices<br/></b>(Redis, memcache, S3, ...)"]
direction LR
DummySpace[" "]:::invisible
QueryCache["<b>Query Results Cache</b><br/>(Accelerated Dashboards)"]
CsvCache["<b>CSV Exports Cache</b>"]
ThumbnailCache["<b>Thumbnails Cache</b>"]
AlertImageCache["<b>Alert/Report Images Cache</b>"]
QueryCache -- " " --> CsvCache
linkStyle 1 stroke:transparent;
ThumbnailCache -- " " --> AlertImageCache
linkStyle 2 stroke:transparent;
end
Broker(("<b>Message Queue</b><br/>(Redis / RabbitMQ / SQS)"))
end
AsyncBackend["<b>Async Workers (Celery)</b><br>required for Alerts & Reports, thumbnails, CSV exports, long-running workloads, ..."]
%% External DBs
subgraph ExternalDatabases ["<b>Analytics Databases</b>"]
direction LR
BigQuery[(BigQuery)]
Snowflake[(Snowflake)]
Redshift[(Redshift)]
Postgres[(Postgres)]
Postgres[(... any ...)]
end
%% Connections
LB -.-> WebServers
WebServers --> DB
WebServers -.-> Caching
WebServers -.-> Broker
WebServers -.-> ExternalDatabases
Broker -.-> AsyncBackend
AsyncBackend -.-> ExternalDatabases
AsyncBackend -.-> Caching
%% Legend styling
classDef requiredNode stroke-width:2px,stroke:black;
class Required requiredNode;
class Optional optionalNode;
%% Hide real arrow
linkStyle 0 stroke:transparent;
%% Styling
classDef optionalNode stroke-dasharray: 5 5, opacity:0.9;
class LB optionalNode;
class Caching optionalNode;
class AsyncBackend optionalNode;
class Broker optionalNode;
class QueryCache optionalNode;
class CsvCache optionalNode;
class ThumbnailCache optionalNode;
class AlertImageCache optionalNode;
class Celery optionalNode;
classDef invisible fill:transparent,stroke:transparent;
```
</div>
## Entity-Relationship Diagram
Here is our interactive ERD:

View File

@@ -66,7 +66,7 @@ For running long query from Sql Lab, by default Superset allows it run as long a
being killed by celery. If you want to increase the time for running query, you can specify the
timeout in configuration. For example:
```
```python
SQLLAB_ASYNC_TIME_LIMIT_SEC = 60 * 60 * 6
```
@@ -78,7 +78,7 @@ come back within client-side timeout (60 seconds by default), Superset will disp
to avoid gateway timeout message. If you have a longer gateway timeout limit, you can change the
timeout settings in **superset_config.py**:
```
```python
SUPERSET_WEBSERVER_TIMEOUT = 60
```
@@ -87,7 +87,7 @@ SUPERSET_WEBSERVER_TIMEOUT = 60
You need to register a free account at [Mapbox.com](https://www.mapbox.com), obtain an API key, and add it
to **.env** at the key MAPBOX_API_KEY:
```
```python
MAPBOX_API_KEY = "longstringofalphanumer1c"
```
@@ -99,7 +99,7 @@ refreshed - especially if some data is slow moving, or run heavy queries. To exc
from the timed refresh process, add the `timed_refresh_immune_slices` key to the dashboard JSON
Metadata field:
```
```json
{
"filter_immune_slices": [],
"expanded_slices": {},
@@ -115,7 +115,7 @@ Slice refresh will also be staggered over the specified period. You can turn off
setting the `stagger_refresh` to false and modify the stagger period by setting `stagger_time` to a
value in milliseconds in the JSON Metadata field:
```
```json
{
"stagger_refresh": false,
"stagger_time": 2500
@@ -125,7 +125,7 @@ value in milliseconds in the JSON Metadata field:
Here, the entire dashboard will refresh at once if periodic refresh is on. The stagger time of 2.5
seconds is ignored.
**Why does flask fab or superset freezed/hung/not responding when started (my home directory is
**Why does flask fab or superset freeze/hang/not responding when started (my home directory is
NFS mounted)?**
By default, Superset creates and uses an SQLite database at `~/.superset/superset.db`. SQLite is
@@ -137,7 +137,7 @@ You can override this path using the **SUPERSET_HOME** environment variable.
Another workaround is to change where superset stores the sqlite database by adding the following in
`superset_config.py`:
```
```python
SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db?check_same_thread=false'
```
@@ -157,12 +157,12 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s
To clarify, the database backend is an OLTP database used by Superset to store its internal
information like your list of users and dashboard definitions. While Superset supports a
[variety of databases as data *sources*](/docs/configuration/databases#installing-database-drivers),
[variety of databases as data _sources_](/docs/configuration/databases#installing-database-drivers),
only a few database engines are supported for use as the OLTP backend / metadata store.
Superset is tested using MySQL, PostgreSQL, and SQLite backends. It's recommended you install
Superset on one of these database servers for production. Installation on other OLTP databases
may work but isnt tested. It has been reported that [Microsoft SQL Server does *not*
may work but isn't tested. It has been reported that [Microsoft SQL Server does _not_
work as a Superset backend](https://github.com/apache/superset/issues/18961). Column-store,
non-OLTP databases are not designed for this type of workload.
@@ -213,7 +213,7 @@ SQLAlchemy and DBAPI scope. This includes features like:
Beyond the SQLAlchemy connector, it's also possible, though much more involved, to extend Superset
and write your own connector. The only example of this at the moment is the Druid connector, which
is getting superseded by Druid's growing SQL support and the recent availability of a DBAPI and
SQLAlchemy driver. If the database you are considering integrating has any kind of of SQL support,
SQLAlchemy driver. If the database you are considering integrating has any kind of SQL support,
it's probably preferable to go the SQLAlchemy route. Note that for a native connector to be possible
the database needs to have support for running OLAP-type queries and should be able to do things that
are typical in basic SQL:
@@ -236,7 +236,7 @@ made to cover more and more use cases.
The API available is documented using [Swagger](https://swagger.io/) and the documentation can be
made available under **/swagger/v1** by enabling the following flag in `superset_config.py`:
```
```python
FAB_API_SWAGGER_UI = True
```

View File

@@ -14,6 +14,7 @@ This page is meant to give new administrators an understanding of Superset's com
## Components
A Superset installation is made up of these components:
1. The Superset application itself
2. A metadata database
3. A caching layer (optional, but necessary for some features)
@@ -22,6 +23,7 @@ A Superset installation is made up of these components:
### Optional components and associated features
The optional components above are necessary to enable these features:
- [Alerts and Reports](/docs/configuration/alerts-reports)
- [Caching](/docs/configuration/cache)
- [Async Queries](/docs/configuration/async-queries-celery/)
@@ -36,6 +38,7 @@ Here are further details on each component.
### The Superset Application
This is the core application. Superset operates like this:
- A user visits a chart or dashboard
- That triggers a SQL query to the data warehouse holding the underlying dataset
- The resulting data is served up in a data visualization
@@ -45,13 +48,14 @@ This is the core application. Superset operates like this:
This is where chart and dashboard definitions, user information, logs, etc. are stored. Superset is tested to work with PostgreSQL and MySQL databases as the metadata database (not be confused with a data source like your data warehouse, which could be a much greater variety of options like Snowflake, Redshift, etc.).
Some installation methods like our Quickstart and PyPI come configured by default to use a SQLite on-disk database. And in a Docker Compose installation, the data would be stored in a PostgresQL container volume. Neither of these cases are recommended for production instances of Superset.
Some installation methods like our Quickstart and PyPI come configured by default to use a SQLite on-disk database. And in a Docker Compose installation, the data would be stored in a PostgreSQL container volume. Neither of these cases are recommended for production instances of Superset.
For production, a properly-configured, managed, standalone database is recommended. No matter what database you use, you should plan to back it up regularly.
### Caching Layer
The caching layer serves two main functions:
- Store the results of queries to your data warehouse so that when a chart is loaded twice, it pulls from the cache the second time, speeding up the application and reducing load on your data warehouse.
- Act as a message broker for the worker, enabling the Alerts & Reports, async queries, and thumbnail caching features.

View File

@@ -1,7 +1,7 @@
---
title: Docker Builds
hide_title: true
sidebar_position: 6
sidebar_position: 7
version: 1
---
@@ -59,14 +59,13 @@ Here are the build presets that are exposed through the `supersetbot docker` uti
this specific SHA, which could be from a `master` merge, or release.
- `websocket-latest`: The WebSocket image for use in a Superset cluster.
For insights or modifications to the build matrix and tagging conventions,
check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.
## Key ARGs in Dockerfile
- `BUILD_TRANSLATIONS`: whether to build the translations into the image. For the
frontend build this tells webpack to strip out all locales other than `en` from
the `moment-timezone` library. For the backend this skips compiling the

View File

@@ -1,7 +1,7 @@
---
title: Docker Compose
hide_title: true
sidebar_position: 4
sidebar_position: 5
version: 1
---
@@ -17,7 +17,7 @@ Since `docker compose` is primarily designed to run a set of containers on **a s
and can't support requirements for **high availability**, we do not support nor recommend
using our `docker compose` constructs to support production-type use-cases. For single host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
with our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
:::
@@ -43,7 +43,6 @@ Note that there are 3 major ways we support to run `docker compose`:
`export TAG=4.0.0-dev` or `export TAG=3.0.0-dev`, with `latest-dev` being the default.
That's because The `dev` builds happen to package the `psycopg2-binary` required to connect
to the Postgres database launched as part of the `docker compose` builds.
``
More on these two approaches after setting up the requirements for either.
@@ -113,7 +112,15 @@ docker compose -f docker-compose-non-dev.yml up
### Option #3 - boot up an official release
```bash
# Set the version you want to run
export TAG=3.1.1
# Fetch the tag you're about to check out (assuming you shallow-cloned the repo)
git fetch --depth=1 origin tag $TAG
# Could also fetch all tags too if you've got bandwidth to spare
# git fetch --tags
# Checkout the corresponding git ref
git checkout $TAG
# Fire up docker compose
docker compose -f docker-compose-image-tag.yml up
```

View File

@@ -0,0 +1,58 @@
---
title: Installation Methods
hide_title: true
sidebar_position: 2
version: 1
---
import useBaseUrl from "@docusaurus/useBaseUrl";
# Installation Methods
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/docs/installation/architecture.mdx) page to understand Superset's different components.
The fundamental trade-off is between you needing to do more of the detail work yourself vs. using a more complex deployment route that handles those details.
## [Docker Compose](/docs/installation/docker-compose.mdx)
**Summary:** This takes advantage of containerization while remaining simpler than Kubernetes. This is the best way to try out Superset; it's also useful for developing & contributing back to Superset.
If you're not just demoing the software, you'll need a moderate understanding of Docker to customize your deployment and avoid a few risks. Even when fully-optimized this is not as robust a method as Kubernetes when it comes to large-scale production deployments.
You manage a superset-config.py file and a docker-compose.yml file. Docker Compose brings up all the needed services - the Superset application, a Postgres metadata DB, Redis cache, Celery worker and beat. They are automatically connected to each other.
**Responsibilities**
You will need to back up your metadata DB. That could mean backing up the service running as a Docker container and its volume; ideally you are running Postgres as a service outside of that container and backing up that service.
You will also need to extend the Superset docker image. The default `lean` images do not contain drivers needed to access your metadata database (Postgres or MySQL), nor to access your data warehouse, nor the headless browser needed for Alerts & Reports. You could run a `-dev` image while demoing Superset, which has some of this, but you'll still need to install the driver for your data warehouse. The `-dev` images run as root, which is not recommended for production.
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs.
See [Docker Build Presets](/docs/installation/docker-builds/#build-presets) for more information about the different image versions you can extend.
## [Kubernetes (K8s)](/docs/installation/kubernetes.mdx)
**Summary:** This is the best-practice way to deploy a production instance of Superset, but has the steepest skill requirement - someone who knows Kubernetes.
You will deploy Superset into a K8s cluster. The most common method is using the community-maintained Helm chart, though work is now underway to implement [SIP-149 - a Kubernetes Operator for Superset](https://github.com/apache/superset/issues/31408).
A K8s deployment can scale up and down based on usage and deploy rolling updates with zero downtime - features that big deployments appreciate.
**Responsibilities**
You will need to build your own Docker image, and back up your metadata DB, both as described in Docker Compose above. You'll also need to customize your Helm chart values and deploy and maintain your Kubernetes cluster.
## [PyPI (Python)](/docs/installation/pypi.mdx)
**Summary:** This is the only method that requires no knowledge of containers. It requires the most hands-on work to deploy, connect, and maintain each component.
You install Superset as a Python package and run it that way, providing your own metadata database. Superset has documentation on how to install this way, but it is updated infrequently.
If you want caching, you'll set up Redis or RabbitMQ. If you want Alerts & Reports, you'll set up Celery.
**Responsibilities**
You will need to get the component services running and communicating with each other. You'll need to arrange backups of your metadata database.
When upgrading, you'll need to manage the system environment and packages and ensure all components have functional dependencies.

View File

@@ -1,7 +1,7 @@
---
title: Kubernetes
hide_title: true
sidebar_position: 2
sidebar_position: 3
version: 1
---
@@ -150,16 +150,20 @@ Superset requires a Python DB-API database driver and a SQLAlchemy
dialect to be installed for each datastore you want to connect to.
See [Install Database Drivers](/docs/configuration/databases) for more information.
It is recommended that you refer to versions listed in
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
instead of hard-coding them in your bootstrap script, as seen below.
:::
The following example installs the drivers for BigQuery and Elasticsearch, allowing you to connect to these data sources within your Superset setup:
```yaml
bootstrapScript: |
#!/bin/bash
pip install psycopg2==2.9.6 \
sqlalchemy-bigquery==1.6.1 \
elasticsearch-dbapi==0.2.5 &&\
uv pip install .[postgres] \
.[bigquery] \
.[elasticsearch] &&\
if [ ! -f ~/bootstrap ]; then echo "Running Superset with uid {{ .Values.runAsUser }}" > ~/bootstrap; fi
```

View File

@@ -1,7 +1,7 @@
---
title: PyPI
hide_title: true
sidebar_position: 3
sidebar_position: 4
version: 1
---
@@ -12,7 +12,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
<img src={useBaseUrl("/img/pypi.png" )} width="150" />
<br /><br />
This page describes how to install Superset using the `apache-superset` package [published on PyPI](https://pypi.org/project/apache-superset/).
This page describes how to install Superset using the `apache_superset` package [published on PyPI](https://pypi.org/project/apache_superset/).
## OS Dependencies
@@ -124,10 +124,10 @@ command line.
### Installing and Initializing Superset
First, start by installing `apache-superset`:
First, start by installing `apache_superset`:
```bash
pip install apache-superset
pip install apache_superset
```
Then, define mandatory configurations, SECRET_KEY and FLASK_APP:

View File

@@ -1,7 +1,7 @@
---
title: Upgrading Superset
hide_title: true
sidebar_position: 5
sidebar_position: 6
version: 1
---
@@ -32,7 +32,7 @@ docker compose up
To upgrade superset in a native installation, run the following commands:
```bash
pip install apache-superset --upgrade
pip install apache_superset --upgrade
```
## Upgrading the Metadata Database

View File

@@ -22,7 +22,7 @@ page.
### 1. Get Superset
```bash
$ git clone https://github.com/apache/superset
git clone https://github.com/apache/superset
```
### 2. Start the latest official release of Superset
@@ -32,7 +32,7 @@ $ git clone https://github.com/apache/superset
$ cd superset
# Set the repo to the state associated with the latest official version
$ git checkout tags/4.1.1
$ git checkout tags/4.1.2
# Fire up Superset using Docker Compose
$ docker compose -f docker-compose-image-tag.yml up
@@ -61,7 +61,7 @@ password: admin
Once you're done with Superset, you can stop and delete just like any other container environment:
```bash
$ docker compose down
docker compose down
```
:::tip

View File

@@ -64,6 +64,26 @@ tables in the **Permissions** dropdown. To select the data sources you want to a
You can then confirm with users assigned to the **Gamma** role that they see the
objects (dashboards and slices) associated with the tables you just extended them.
### SQL Execution Security Considerations
Apache Superset includes features designed to provide safeguards when interacting with connected databases, such as the `DISALLOWED_SQL_FUNCTIONS` configuration setting. This aims to prevent the execution of potentially harmful database functions or system variables directly from Superset interfaces like SQL Lab.
However, it is crucial to understand the following:
**Superset is Not a Database Firewall**: Superset's built-in checks, like `DISALLOWED_SQL_FUNCTIONS`, provide a layer of protection but cannot guarantee complete security against all database-level threats or advanced bypass techniques (like specific comment injection methods). They should be viewed as a supplement to, not a replacement for, robust database security.
**Configuration is Key**: The effectiveness of Superset's safeguards heavily depends on proper configuration by the Superset administrator. This includes maintaining the `DISALLOWED_SQL_FUNCTIONS` list, carefully managing feature flags (like `ENABLE_TEMPLATE_PROCESSING`), and configuring other security settings appropriately.
**Database Security is Paramount**: The ultimate responsibility for securing database access, controlling permissions, and preventing unauthorized function execution lies with the database administrators (DBAs) and security teams managing the underlying database instance.
**Recommended Database Practices**: We strongly recommend implementing security best practices at the database level, including:
* **Least Privilege**: Connecting Superset using dedicated database user accounts with the minimum permissions required for Superset's operation (typically read-only access to necessary schemas/tables).
* **Database Roles & Permissions**: Utilizing database-native roles and permissions to restrict access to sensitive functions, system variables (like `@@hostname`), schemas, or tables.
* **Network Security**: Employing network-level controls like database firewalls or proxies to restrict connections.
* **Auditing**: Enabling database-level auditing to monitor executed queries and access patterns.
By combining Superset's configurable safeguards with strong database-level security practices, you can achieve a more robust and layered security posture.
### REST API for user & role management
Flask-AppBuilder supports a REST API for user CRUD,
@@ -115,7 +135,7 @@ the models and views they can access, and that Finance role that is a collection
A user can have multiple roles associated with them. For example, an executive on the Finance
team could be granted **Gamma**, **Finance**, and the **Executive** roles. The **Executive**
role could provide access to a set of data sources and dashboards made available only to executives.
In the **Dashboards** view, a user can only see the ones they have access too
In the **Dashboards** view, a user can only see the ones they have access to
based on the roles and permissions that were attributed.
### Row Level Security
@@ -224,17 +244,17 @@ this warning using the `CONTENT_SECURITY_POLICY_WARNING` key in `config.py`.
#### CSP Requirements
* Superset needs the `style-src unsafe-inline` CSP directive in order to operate.
- Superset needs the `style-src unsafe-inline` CSP directive in order to operate.
```
style-src 'self' 'unsafe-inline'
```
* Only scripts marked with a [nonce](https://content-security-policy.com/nonce/) can be loaded and executed.
- Only scripts marked with a [nonce](https://content-security-policy.com/nonce/) can be loaded and executed.
Nonce is a random string automatically generated by Talisman on each page load.
You can get current nonce value by calling jinja macro `csp_nonce()`.
```
```html
<script nonce="{{ csp_nonce() }}">
/* my script */
</script>
@@ -256,17 +276,16 @@ You can get current nonce value by calling jinja macro `csp_nonce()`.
- Cartodiagram charts request map data (image and json) from external resources that can be edited by users,
and therefore either require a list of allowed domains to request from or a wildcard (`'*'`) for `img-src` and `connect-src`.
* Other CSP directives default to `'self'` to limit content to the same origin as the Superset server.
- Other CSP directives default to `'self'` to limit content to the same origin as the Superset server.
In order to adjust provided CSP configuration to your needs, follow the instructions and examples provided in
[Content Security Policy Reference](https://content-security-policy.com/)
#### Other Talisman security considerations
Setting `TALISMAN_ENABLED = True` will invoke Talisman's protection with its default arguments,
of which `content_security_policy` is only one. Those can be found in the
[Talisman documentation](https://pypi.org/project/flask-talisman/) under _Options_.
[Talisman documentation](https://pypi.org/project/flask-talisman/) under *Options*.
These generally improve security, but administrators should be aware of their existence.
In particular, the option of `force_https = True` (`False` by default) may break Superset's Alerts & Reports
@@ -281,6 +300,49 @@ TALISMAN_CONFIG = {
"content_security_policy": { ...
```
#### Configuring Talisman in Superset
Talisman settings in Superset can be modified using superset_config.py. If you need to adjust security policies, you can override the default configuration.
Example: Overriding Talisman Configuration in superset_config.py for loading images from S3 or other external sources.
```python
TALISMAN_CONFIG = {
"content_security_policy": {
"base-uri": ["'self'"],
"default-src": ["'self'"],
"img-src": [
"'self'",
"blob:",
"data:",
"https://apachesuperset.gateway.scarf.sh",
"https://static.scarf.sh/",
# "https://cdn.brandfolder.io", # Uncomment when SLACK_ENABLE_AVATARS is True # noqa: E501
"ows.terrestris.de",
"aws.s3.com", # Add Your Bucket or external data source
],
"worker-src": ["'self'", "blob:"],
"connect-src": [
"'self'",
"https://api.mapbox.com",
"https://events.mapbox.com",
],
"object-src": "'none'",
"style-src": [
"'self'",
"'unsafe-inline'",
],
"script-src": ["'self'", "'strict-dynamic'"],
},
"content_security_policy_nonce_in": ["script-src"],
"force_https": False,
"session_cookie_secure": False,
}
```
For more information on setting up Talisman, please refer to the [Flask-Talisman CSP configuration documentation](https://superset.apache.org/docs/configuration/networking-settings/#changing-flask-talisman-csp).
### Reporting Security Vulnerabilities
Apache Software Foundation takes a rigorous standpoint in annihilating the security issues in its

View File

@@ -12,8 +12,12 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
This section is focused on documentation for end-users who will be using Superset
for the data analysis and exploration workflow
(data analysts, business analysts, data
scientists, etc). In addition to this site, [Preset.io](http://preset.io/) maintains an updated set of end-user
scientists, etc).
:::tip
In addition to this site, [Preset.io](http://preset.io/) maintains an updated set of end-user
documentation at [docs.preset.io](https://docs.preset.io/).
:::
This tutorial targets someone who wants to create charts and dashboards in Superset. We'll show you
how to connect Superset to a new database and configure a table in that database for analysis.
@@ -48,7 +52,6 @@ Please note, if you are trying to connect to another locally running database (w
Once you've clicked that link you only need to specify two things (the database name and SQLAlchemy URI):
<img src={useBaseUrl("/img/tutorial/tutorial_03b_connection_string_details.png" )} width="600" />{" "} <br/><br/>
As noted in the text below the form, you should refer to the SQLAlchemy documentation on
@@ -104,7 +107,7 @@ Aggregate functions are allowed and encouraged for metrics.
You can also certify metrics if you'd like for your team in this view.
2. Virtual calculated columns: you can write SQL queries that
1. Virtual calculated columns: you can write SQL queries that
customize the appearance and behavior
of a specific column (e.g. `CAST(recovery_rate as float)`).
Aggregate functions aren't allowed in calculated columns.
@@ -176,26 +179,40 @@ into a position you like onto the underlying grid.
Congrats! You've successfully linked, analyzed, and visualized data in Superset. There is a wealth
of other table configuration and visualization options, so please start exploring and creating
slices and dashboards of your own
slices and dashboards of your own.
### Manage access to Dashboards
Access to dashboards is managed via owners (users that have edit permissions to the dashboard).
Access to dashboards is managed via owners (users that have edit permissions to the dashboard)
Non-owner users access can be managed in two different ways. The dashboard needs to be published to be visible to other users.
Non-owner users access can be managed two different ways:
1. Dataset permissions - if you add to the relevant role permissions to datasets it automatically grants implicit access to all dashboards that uses those permitted datasets
2. Dashboard roles - if you enable **DASHBOARD_RBAC** [feature flag](/docs/configuration/configuring-superset#feature-flags) then you be able to manage which roles can access the dashboard
1. Dataset permissions - if you add dataset permissions to the relevant role, it automatically grants implicit access to all dashboards that use those permitted datasets.
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/docs/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
- Granting a role access to a dashboard will bypass dataset level checks. Having dashboard access implicitly grants read access to all the featured charts in the dashboard, and thereby also all the associated datasets.
- If no roles are specified for a dashboard, regular **Dataset permissions** will apply.
<img src={useBaseUrl("/img/tutorial/tutorial_dashboard_access.png" )} />
### Publishing a Dashboard
If you would like to make your dashboard available to other users, click on the `Draft` button next to the
title of your dashboard.
<img src={useBaseUrl("/img/tutorial/publish_button_dashboard.png" )} />
:::warning
Draft dashboards are only visible to the dashboard owners and admins. Published dashboards are visible to all users with access to the underlying datasets or if RBAC is enabled, to the roles that have been granted access to the dashboard.
:::
### Mark a Dashboard as Favorite
You can mark a dashboard as a favorite by clicking on the star icon next to the title of your dashboard. This makes it easier to find it in the list of dashboards or on the home page.
### Customizing dashboard
The following URL parameters can be used to modify how the dashboard is rendered:
- `standalone`:
- `0` (default): dashboard is displayed normally
- `1`: Top Navigation is hidden

View File

@@ -13,7 +13,7 @@ In this tutorial, we will introduce key concepts in Apache Superset through the
real dataset which contains the flights made by employees of a UK-based organization in 2011. The
following information about each flight is given:
- The travellers department. For the purposes of this tutorial the departments have been renamed
- The travelers department. For the purposes of this tutorial the departments have been renamed
Orange, Yellow and Purple.
- The cost of the ticket.
- The travel class (Economy, Premium Economy, Business and First Class).

View File

@@ -17,14 +17,13 @@
* under the License.
*/
// @ts-check
// Note: type annotations allow type checking and IDEs autocompletion
import type { Config } from '@docusaurus/types';
import type { Options, ThemeConfig } from '@docusaurus/preset-classic';
import { themes } from 'prism-react-renderer';
const lightCodeTheme = require("prism-react-renderer").themes.github;
const darkCodeTheme = require("prism-react-renderer").themes.vsDark;
const { github: lightCodeTheme, vsDark: darkCodeTheme } = themes;
/** @type {import('@docusaurus/types').Config} */
const config = {
const config: Config = {
title: 'Superset',
tagline:
'Apache Superset is a modern data exploration and visualization platform',
@@ -32,10 +31,13 @@ const config = {
baseUrl: '/',
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'throw',
markdown: {
mermaid: true,
},
favicon: '/img/favicon.ico',
organizationName: 'apache', // Usually your GitHub org/user name.
projectName: 'superset', // Usually your repo name.
themes: ['@saucelabs/theme-github-codeblock'],
organizationName: 'apache',
projectName: 'superset',
themes: ['@saucelabs/theme-github-codeblock', '@docusaurus/theme-mermaid'],
plugins: [
[
'docusaurus-plugin-less',
@@ -199,105 +201,103 @@ const config = {
presets: [
[
'@docusaurus/preset-classic',
/** @type {import('@docusaurus/preset-classic').Options} */
({
{
docs: {
sidebarPath: require.resolve('./sidebars.js'),
editUrl:
({versionDocsDirPath, docPath}) => {
editUrl: ({ versionDocsDirPath, docPath }) => {
if (docPath === 'intro.md') {
return 'https://github.com/apache/superset/edit/master/README.md'
return 'https://github.com/apache/superset/edit/master/README.md';
}
return `https://github.com/apache/superset/edit/master/docs/${versionDocsDirPath}/${docPath}`
}
return `https://github.com/apache/superset/edit/master/docs/${versionDocsDirPath}/${docPath}`;
},
},
blog: {
showReadingTime: true,
// Please change this to your repo.
editUrl: 'https://github.com/facebook/docusaurus/edit/main/website/blog/',
editUrl:
'https://github.com/facebook/docusaurus/edit/main/website/blog/',
},
theme: {
customCss: require.resolve('./src/styles/custom.css'),
},
}),
} satisfies Options,
],
],
themeConfig:
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
({
colorMode: {
defaultMode: 'light',
disableSwitch: true,
themeConfig: {
colorMode: {
defaultMode: 'dark',
disableSwitch: false,
respectPrefersColorScheme: true,
},
algolia: {
appId: 'WR5FASX5ED',
apiKey: 'd0d22810f2e9b614ffac3a73b26891fe',
indexName: 'superset-apache',
},
navbar: {
logo: {
alt: 'Superset Logo',
src: '/img/superset-logo-horiz.svg',
srcDark: '/img/superset-logo-horiz-dark.svg',
},
algolia: {
appId: 'WR5FASX5ED',
apiKey: 'd0d22810f2e9b614ffac3a73b26891fe',
indexName: 'superset-apache',
},
navbar: {
logo: {
alt: 'Superset Logo',
src: '/img/superset-logo-horiz.svg',
srcDark: '/img/superset-logo-horiz-dark.svg',
items: [
{
label: 'Documentation',
to: '/docs/intro',
items: [
{
label: 'Getting Started',
to: '/docs/intro',
},
{
label: 'FAQ',
to: '/docs/faq',
},
],
},
items: [
{
label: 'Documentation',
to: '/docs/intro',
items: [
{
label: 'Getting Started',
to: '/docs/intro',
},
{
label: 'FAQ',
to: '/docs/faq',
},
],
},
{
label: 'Community',
to: '/community',
items: [
{
label: 'Resources',
href: '/community',
},
{
label: 'GitHub',
href: 'https://github.com/apache/superset',
},
{
label: 'Slack',
href: 'http://bit.ly/join-superset-slack',
},
{
label: 'Mailing List',
href: 'https://lists.apache.org/list.html?dev@superset.apache.org',
},
{
label: 'Stack Overflow',
href: 'https://stackoverflow.com/questions/tagged/apache-superset',
},
],
},
{
href: '/docs/intro',
position: 'right',
className: 'default-button-theme get-started-button',
label: 'Get Started',
},
{
href: 'https://github.com/apache/superset',
position: 'right',
className: 'github-button',
},
],
},
footer: {
links: [],
copyright: `
{
label: 'Community',
to: '/community',
items: [
{
label: 'Resources',
href: '/community',
},
{
label: 'GitHub',
href: 'https://github.com/apache/superset',
},
{
label: 'Slack',
href: 'http://bit.ly/join-superset-slack',
},
{
label: 'Mailing List',
href: 'https://lists.apache.org/list.html?dev@superset.apache.org',
},
{
label: 'Stack Overflow',
href: 'https://stackoverflow.com/questions/tagged/apache-superset',
},
],
},
{
href: '/docs/intro',
position: 'right',
className: 'default-button-theme get-started-button',
label: 'Get Started',
},
{
href: 'https://github.com/apache/superset',
position: 'right',
className: 'github-button',
},
],
},
footer: {
links: [],
copyright: `
<div class="footer__applitools">
We use &nbsp;<a href="https://applitools.com/" target="_blank" rel="nofollow"><img src="/img/applitools.png" title="Applitools" /></a>
</div>
@@ -320,24 +320,51 @@ const config = {
<!-- telemetry/analytics pixel: -->
<img referrerPolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=39ae6855-95fc-4566-86e5-360d542b0a68" />
`,
},
prism: {
theme: lightCodeTheme,
darkTheme: darkCodeTheme,
},
docs: {
sidebar: {
hideable: true,
},
prism: {
theme: lightCodeTheme,
darkTheme: darkCodeTheme,
},
docs: {
sidebar: {
hideable: true,
}
},
}),
},
} satisfies ThemeConfig,
scripts: [
'/script/matomo.js',
// {
// src: 'https://www.bugherd.com/sidebarv2.js?apikey=enilpiu7bgexxsnoqfjtxa',
// async: true,
// },
'/script/matomo.js',
{
src: 'https://widget.kapa.ai/kapa-widget.bundle.js',
async: true,
'data-website-id': 'c6a8a8b8-3127-48f9-97a7-51e9e10d20d0',
'data-project-name': 'Apache Superset',
'data-project-color': '#FFFFFF',
'data-project-logo':
'https://images.seeklogo.com/logo-png/50/2/superset-icon-logo-png_seeklogo-500354.png',
'data-modal-override-open-id': 'ask-ai-input',
'data-modal-override-open-class': 'search-input',
'data-modal-disclaimer':
'This is a custom LLM for Apache Superset with access to all [documentation](superset.apache.org/docs/intro/), [GitHub Open Issues, PRs and READMEs](github.com/apache/superset).&#10;&#10;Companies deploy assistants like this ([built by kapa.ai](https://kapa.ai)) on docs via [website widget](https://docs.kapa.ai/integrations/website-widget) (Docker, Reddit), in [support forms](https://docs.kapa.ai/integrations/support-form-deflector) for ticket deflection (Monday.com, Mapbox), or as [Slack bots](https://docs.kapa.ai/integrations/slack-bot) with private sources.',
'data-modal-example-questions':
'How do I install Superset?,How can I contribute to Superset?',
'data-button-text-color': 'rgb(81,166,197)',
'data-modal-header-bg-color': '#ffffff',
'data-modal-title-color': 'rgb(81,166,197)',
'data-modal-title': 'Apache Superset AI',
'data-modal-disclaimer-text-color': '#000000',
'data-consent-required': 'true',
'data-consent-screen-disclaimer':
"By clicking \"I agree, let's chat\", you consent to the use of the AI assistant in accordance with kapa.ai's [Privacy Policy](https://www.kapa.ai/content/privacy-policy). This service uses reCAPTCHA, which requires your consent to Google's [Privacy Policy](https://policies.google.com/privacy) and [Terms of Service](https://policies.google.com/terms). By proceeding, you explicitly agree to both kapa.ai's and Google's privacy policies.",
},
],
customFields: {
matomoUrl: 'https://analytics.apache.org',
matomoSiteId: '22',
},
};
module.exports = config;
export default config;

View File

@@ -14,43 +14,42 @@
"serve": "yarn run _init && docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids",
"typecheck": "tsc"
"typecheck": "tsc",
"eslint": "eslint . --ext .js,.jsx,.ts,.tsx"
},
"dependencies": {
"@algolia/client-search": "^5.18.0",
"@ant-design/icons": "^5.5.2",
"@docsearch/react": "^3.8.2",
"@docusaurus/core": "^3.5.2",
"@docusaurus/plugin-client-redirects": "^3.5.2",
"@docusaurus/preset-classic": "^3.5.2",
"@emotion/core": "^10.1.1",
"@docusaurus/core": "3.7.0",
"@docusaurus/plugin-client-redirects": "3.7.0",
"@docusaurus/preset-classic": "3.7.0",
"@docusaurus/theme-mermaid": "3.7.0",
"@emotion/styled": "^10.0.27",
"@mdx-js/react": "^3.1.0",
"@saucelabs/theme-github-codeblock": "^0.3.0",
"@superset-ui/style": "^0.14.23",
"@svgr/webpack": "^8.1.0",
"antd": "^5.22.7",
"buffer": "^6.0.3",
"clsx": "^2.1.1",
"antd": "^5.24.5",
"docusaurus-plugin-less": "^2.0.2",
"file-loader": "^6.2.0",
"less": "^4.2.1",
"less": "^4.2.2",
"less-loader": "^11.0.0",
"prism-react-renderer": "^2.4.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-github-btn": "^1.4.0",
"react-svg-pan-zoom": "^3.13.1",
"stream": "^0.0.3",
"swagger-ui-react": "^5.18.2",
"url-loader": "^4.1.1"
"swagger-ui-react": "^5.20.2"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.6.3",
"@docusaurus/tsconfig": "^3.6.3",
"@docusaurus/module-type-aliases": "^3.7.0",
"@docusaurus/tsconfig": "^3.7.0",
"@types/react": "^18.3.12",
"typescript": "^5.7.2",
"webpack": "^5.97.1"
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^8.0.0",
"eslint-config-prettier": "^10.1.1",
"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-react": "^7.0.0",
"prettier": "^2.0.0",
"typescript": "~5.8.2",
"webpack": "^5.98.0"
},
"browserslist": {
"production": [

View File

@@ -1,3 +1,4 @@
/* eslint-env node */
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -39,42 +40,52 @@ const sidebars = {
{
type: 'category',
label: 'Installation',
items: [{
type: 'autogenerated',
dirName: 'installation',
}]
items: [
{
type: 'autogenerated',
dirName: 'installation',
},
],
},
{
type: 'category',
label: 'Configuration',
items: [{
type: 'autogenerated',
dirName: 'configuration',
}]
items: [
{
type: 'autogenerated',
dirName: 'configuration',
},
],
},
{
type: 'category',
label: 'Using Superset',
items: [{
type: 'autogenerated',
dirName: 'using-superset',
}]
items: [
{
type: 'autogenerated',
dirName: 'using-superset',
},
],
},
{
type: 'category',
label: 'Contributing',
items: [{
type: 'autogenerated',
dirName: 'contributing',
}]
items: [
{
type: 'autogenerated',
dirName: 'contributing',
},
],
},
{
type: 'category',
label: 'Security',
items: [{
type: 'autogenerated',
dirName: 'security',
}]
items: [
{
type: 'autogenerated',
dirName: 'security',
},
],
},
{
type: 'doc',
@@ -87,7 +98,6 @@ const sidebars = {
id: 'api',
},
],
};
module.exports = sidebars;

View File

@@ -94,7 +94,7 @@ const StyledSectionHeaderH2 = styled(StyledSectionHeader)`
`;
interface SectionHeaderProps {
level: any;
level: 'h1' | 'h2';
title: string;
subtitle?: string | ReactNode;
dark?: boolean;
@@ -115,7 +115,7 @@ const SectionHeader = ({
<StyledRoot dark={!!dark}>
<Heading className="title">{title}</Heading>
<img className="line" src="/img/community/line.png" alt="line" />
{subtitle && <p className="subtitle">{subtitle}</p>}
{subtitle && <div className="subtitle">{subtitle}</div>}
</StyledRoot>
);
};

View File

@@ -86,7 +86,7 @@ const communityLinks = [
];
const StyledJoinCommunity = styled('section')`
background-color: var(--ifm-off-section-background);
background-color: var(--ifm-background-color);
border-bottom: 1px solid var(--ifm-border-color);
.list {
max-width: 540px;
@@ -118,7 +118,7 @@ const StyledJoinCommunity = styled('section')`
.description {
font-size: 14px;
line-height: 20px;
color: var(--ifm-secondary-text);
color: var(--ifm-font-base-color);
margin-top: -8px;
margin-bottom: 23px;
${mq[1]} {
@@ -143,22 +143,6 @@ const StyledCalendarIframe = styled('iframe')`
}
`;
const StyledNewsletterIframe = styled('iframe')`
display: block;
max-width: 1080px;
width: calc(100% - 40px);
height: 285px;
margin: 30px auto 20px;
border: 0;
@media (max-width: 1200px) {
height: 380px;
}
@media (max-width: 679px) {
height: 680px;
margin-top: 15px;
}
`;
const StyledLink = styled('a')`
display: inline-flex;
align-items: center;
@@ -182,10 +166,9 @@ const StyledLink = styled('a')`
const FinePrint = styled('div')`
font-size: 14px;
color: var(--ifm-secondary-text);
`
`;
const Community = () => {
const [showCalendar, setShowCalendar] = useState(false); // State to control calendar visibility
const toggleCalendar = () => {
@@ -218,14 +201,17 @@ const Community = () => {
className="title"
href={url}
target="_blank"
rel="noreferrer"
aria-label={ariaLabel}
>
<img className="icon" src={`/img/community/${image}`} />
</a>
}
title={
<a className="title" href={url} target="_blank">
{title}
<a href={url} target="_blank" rel="noreferrer">
<p className="title" style={{ marginBottom: 0 }}>
{title}
</p>
</a>
}
description={<p className="description">{description}</p>}
@@ -246,16 +232,22 @@ const Community = () => {
<StyledLink
href="https://calendar.google.com/calendar/u/0/r?cid=superset.committers@gmail.com"
target="_blank"
rel="noreferrer"
>
<img src="/img/calendar-icon.svg" alt="calendar-icon" />
Subscribe to the Superset Community Calendar
</StyledLink>
<br />
<StyledLink onClick={toggleCalendar}>
<img src="/img/calendar-icon.svg" alt="calendar-icon" />
<img src="/img/calendar-icon.svg" alt="calendar-icon" />
{showCalendar ? 'Hide Calendar' : 'Display Calendar*'}
</StyledLink>
{!showCalendar && <FinePrint><sup>*</sup>Clicking on this link will load and send data from and to Google.</FinePrint>}
{!showCalendar && (
<FinePrint>
<sup>*</sup>Clicking on this link will load and send data
from and to Google.
</FinePrint>
)}
</>
}
/>

View File

@@ -16,7 +16,6 @@
* specific language governing permissions and limitations
* under the License.
*/
// @ts-nocheck
import { useRef, useState, useEffect } from 'react';
import Layout from '@theme/Layout';
import Link from '@docusaurus/Link';
@@ -29,8 +28,6 @@ import SectionHeader from '../components/SectionHeader';
import BlurredSection from '../components/BlurredSection';
import '../styles/main.less';
// @ts-ignore
const features = [
{
image: 'powerful-yet-easy.jpg',
@@ -114,7 +111,7 @@ const StyledTitleContainer = styled('div')`
}
`;
const StyledButton = styled(Link)`
const StyledButton = styled(Link as React.ComponentType<any>)`
border-radius: 10px;
font-size: 20px;
font-weight: bold;
@@ -207,7 +204,6 @@ const StyledFeaturesList = styled('ul')`
.item {
text-align: left;
border: 1px solid var(--ifm-border-color);
background-color: #ffffff;
border-radius: 10px;
overflow: hidden;
display: flex;
@@ -230,7 +226,6 @@ const StyledFeaturesList = styled('ul')`
}
.title {
font-size: 24px;
color: var(--ifm-primary-text);
margin: 10px 0 0;
${mq[1]} {
font-size: 23px;
@@ -240,7 +235,6 @@ const StyledFeaturesList = styled('ul')`
.description {
font-size: 17px;
line-height: 23px;
color: var(--ifm-secondary-text);
margin: 5px 0 0;
${mq[1]} {
font-size: 16px;
@@ -647,7 +641,10 @@ export default function Home(): JSX.Element {
</div>
</Carousel>
<video autoPlay muted controls loop>
<source src="https://superset.staged.apache.org/superset-video-4k.mp4" type="video/mp4" />
<source
src="https://superset.staged.apache.org/superset-video-4k.mp4"
type="video/mp4"
/>
</video>
</StyledSliderSection>
<StyledKeyFeatures>

View File

@@ -137,4 +137,9 @@ export const Databases = [
href: 'https://www.denodo.com/',
imgName: 'denodo.png',
},
{
title: 'TDengine',
href: 'https://www.tdengine.com/',
imgName: 'tdengine.png',
},
];

View File

@@ -58,7 +58,6 @@ ul.dropdown__menu svg {
--ifm-code-font-size: 95%;
--ifm-menu-link-padding-vertical: 12px;
--doc-sidebar-width: 350px !important;
--ifm-navbar-height: none;
--ifm-font-family-base: Roboto;
--ifm-footer-background-color: #173036;
--ifm-footer-color: #87939a;
@@ -69,3 +68,15 @@ ul.dropdown__menu svg {
--ifm-code-padding-vertical: 3px;
--ifm-code-padding-horizontal: 5px;
}
[data-theme='dark'] {
--ifm-color-primary: #25c2a0;
--ifm-color-primary-dark: #21af90;
--ifm-color-primary-darker: #1fa588;
--ifm-color-primary-darkest: #1a8870;
--ifm-color-primary-light: #29d5b0;
--ifm-color-primary-lighter: #32d8b4;
--ifm-color-primary-lightest: #4fddbf;
--ifm-font-base-color: #bbb5ac;
--ifm-border-color: #797063;
}

View File

@@ -114,7 +114,6 @@ a > span > svg {
.navbar {
font-size: 14px;
font-weight: 400;
background-color: #fff;
transition: all 0.5s;
.get-started-button {
@@ -190,7 +189,7 @@ a > span > svg {
.navbar .DocSearch {
--docsearch-text-color: #187384;
--docsearch-muted-color: #187384;
--docsearch-searchbox-background: #fff;
--docsearch-searchbox-background: var(--ifm-navbar-background-color);
border: 1px solid #187384;
border-radius: 10px;
@@ -257,13 +256,3 @@ a > span > svg {
height: 28px;
}
}
/* Edit Button */
.edit-page-link {
position: sticky;
bottom: 0px;
right: 0px;
border-radius: 10px;
background-color: #ccc;
}

View File

@@ -1,57 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import styled from '@emotion/styled';
import DocItem from '@theme-original/DocItem';
// Floating "Edit this page on GitHub" pill anchored to the bottom-right
// corner of the viewport, with a GitHub mark as a background image.
const EditPageLink = styled('a')`
  position: fixed;
  bottom: 40px;
  right: 10px;
  padding: 1rem;
  padding-left: 4rem;
  background-color: #444;
  border-radius: 10px;
  z-index: 9999;
  background-image: url('/img/github-dark.png');
  background-size: 2rem;
  background-position: 1rem center;
  background-repeat: no-repeat;
  /* Start with no shadow so the hover shadow can fade in smoothly.
     (Was misspelled "bpx-shadow", which browsers silently ignored.) */
  box-shadow: 0 0 0 0 rgba(0,0,0,0);
  scale: .9;
  /* Single transition covering color, shadow and scale; the original had
     two conflicting "transition" declarations and the later one won. */
  transition: all 0.3s;
  transform-origin: bottom right;
  &:hover {
    background-color: #333;
    box-shadow: 5px 5px 10px 0 rgba(0,0,0,0.3);
    scale: 1;
  }
`;
/**
 * Swizzled wrapper around the theme's DocItem that overlays an
 * "Edit this page on GitHub" link on every docs page.
 *
 * @param {object} props - standard DocItem props; `props.content.metadata.editUrl`
 *   is used as the link target (assumes Docusaurus populated it — TODO confirm
 *   for docs that disable editUrl).
 */
export default function DocItemWrapper(props) {
  return (
    <>
      {/* rel="noopener noreferrer" guards the target="_blank" window */}
      <EditPageLink href={props.content.metadata.editUrl} target="_blank" rel="noopener noreferrer">
        Edit this page on GitHub
      </EditPageLink>
      <DocItem {...props} />
    </>
  );
}

125
docs/src/theme/Root.js Normal file
View File

@@ -0,0 +1,125 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable no-undef */
import { useEffect } from 'react';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
/**
 * Docusaurus Root wrapper that wires up Matomo analytics.
 *
 * Reads `matomoUrl` and `matomoSiteId` from `siteConfig.customFields`,
 * loads matomo.js, then tracks an initial page view and every subsequent
 * SPA route change. Route changes are detected three ways: Docusaurus
 * route-update events (the event name has varied across versions), a
 * patched `history.pushState`, and the `popstate` event.
 *
 * @param {{children: React.ReactNode}} props
 * @returns the children unchanged; this component only has side effects.
 */
export default function Root({ children }) {
  const { siteConfig } = useDocusaurusContext();
  const { customFields } = siteConfig;
  useEffect(() => {
    const { matomoUrl, matomoSiteId } = customFields;
    if (typeof window === 'undefined') {
      return undefined; // SSR build: no window, nothing to set up.
    }
    // Extra console logging only when running the docs site locally.
    const devMode = window.location.hostname === 'localhost';
    // Initialize the _paq command queue before matomo.js loads.
    window._paq = window._paq || [];
    // Configure the tracker before loading matomo.js.
    window._paq.push(['enableHeartBeatTimer']);
    window._paq.push(['enableLinkTracking']);
    window._paq.push(['setTrackerUrl', `${matomoUrl}/matomo.php`]);
    window._paq.push(['setSiteId', matomoSiteId]);
    // Initial page view is handled by handleRouteChange below.
    const script = document.createElement('script');
    script.async = true;
    script.src = `${matomoUrl}/matomo.js`;
    document.head.appendChild(script);
    // Track the currently-displayed page. The timeout lets the page (and
    // document.title) finish rendering after a client-side navigation.
    const handleRouteChange = () => {
      devMode && console.log('Route changed to:', window.location.pathname);
      setTimeout(() => {
        const currentTitle = document.title;
        const currentPath = window.location.pathname;
        devMode &&
          console.log('Tracking page view:', currentPath, currentTitle);
        if (devMode) {
          // For testing: impersonate the real domain - ONLY FOR DEVELOPMENT.
          window._paq.push(['setDomains', ['superset.apache.org']]);
          window._paq.push([
            'setCustomUrl',
            'https://superset.apache.org' + currentPath,
          ]);
        } else {
          window._paq.push(['setCustomUrl', currentPath]);
        }
        window._paq.push(['setReferrerUrl', window.location.href]);
        window._paq.push(['setDocumentTitle', currentTitle]);
        window._paq.push(['trackPageView']);
      }, 100); // Delay to ensure the page has fully rendered.
    };
    // Docusaurus has renamed its route-update event between versions, so
    // listen for every known name. Keep the exact listener references:
    // the previous code removed `handleRouteChange` on cleanup but had
    // *added* anonymous wrappers, so the listeners were never removed.
    const possibleEvents = [
      'docusaurus.routeDidUpdate',
      'docusaurusRouteDidUpdate',
      'routeDidUpdate',
    ];
    devMode && console.log('Setting up Docusaurus route listeners');
    const eventListeners = possibleEvents.map(eventName => {
      const listener = () => {
        devMode &&
          console.log(`Docusaurus route update detected via ${eventName}`);
        handleRouteChange();
      };
      document.addEventListener(eventName, listener);
      return [eventName, listener];
    });
    // Also set up manual history tracking as a fallback.
    devMode && console.log('Setting up manual history tracking as fallback');
    const originalPushState = window.history.pushState;
    window.history.pushState = function () {
      originalPushState.apply(this, arguments);
      handleRouteChange();
    };
    window.addEventListener('popstate', handleRouteChange);
    // Initial page tracking.
    handleRouteChange();
    return () => {
      // Remove exactly the listeners that were registered above.
      eventListeners.forEach(([eventName, listener]) => {
        document.removeEventListener(eventName, listener);
      });
      window.history.pushState = originalPushState;
      window.removeEventListener('popstate', handleRouteChange);
    };
  }, []);
  return children;
}

View File

@@ -19,4 +19,4 @@
// Responsive breakpoint widths (px) shared across the docs site styles.
const breakpoints = [576, 768, 992, 1200];

// CSS-in-JS media-query helpers, one per breakpoint:
// mq[0] === '@media (max-width: 576px)', etc.
// NOTE: the diff artifact left two `export const mq` declarations, which
// is a redeclaration SyntaxError; only the single definition is kept.
export const mq = breakpoints.map(bp => `@media (max-width: ${bp}px)`);

View File

@@ -22,7 +22,7 @@ RewriteRule ^(.*)$ https://superset.apache.org/$1 [R,L]
RewriteCond %{HTTP_HOST} ^superset.incubator.apache.org$ [NC]
RewriteRule ^(.*)$ https://superset.apache.org/$1 [R=301,L]
Header set Content-Security-Policy "default-src data: blob: 'self' *.apache.org *.githubusercontent.com *.scarf.sh *.googleapis.com *.github.com *.algolia.net *.algolianet.com 'unsafe-inline' 'unsafe-eval'; frame-src *; frame-ancestors 'self' *.google.com https://sidebar.bugherd.com; form-action 'self'; worker-src blob:; img-src 'self' blob: data: https:; font-src 'self'; object-src 'none'"
Header set Content-Security-Policy "default-src data: blob: 'self' *.apache.org widget.kapa.ai *.githubusercontent.com *.scarf.sh *.googleapis.com *.google.com *.run.app *.gstatic.com *.github.com *.algolia.net *.algolianet.com 'unsafe-inline' 'unsafe-eval'; frame-src *; frame-ancestors 'self' *.google.com https://sidebar.bugherd.com; form-action 'self'; worker-src blob:; img-src 'self' blob: data: https:; font-src 'self'; object-src 'none'"
# REDIRECTS
@@ -64,7 +64,5 @@ RewriteRule ^docs/installation/event-logging/$ /docs/configuration/event-logging
RewriteRule ^docs/databases.*$ /docs/configuration/databases [R=301,L]
RewriteRule ^docs/configuration/setup-ssh-tunneling$ /docs/configuration/networking-settings [R=301,L]
# pre-commit hooks documentation
RewriteRule ^docs/contributing/hooks-and-linting/$ /docs/contributing/development/#git-hooks-1

BIN
docs/static/img/databases/tdengine.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 84 KiB

Some files were not shown because too many files have changed in this diff Show More