Compare commits

...

239 Commits

Author SHA1 Message Date
Michael S. Molina
c83fb2bb1d chore: CHANGELOG.md for 6.1.0 RC3 2026-05-01 10:02:35 -03:00
Michael S. Molina
0260de3e92 chore: Bump core packages to 0.1.0 RC3 (#39823)
(cherry picked from commit d23b0cad92)
2026-05-01 09:56:22 -03:00
Luiz Otavio
261f0ecafd fix(drill-to-detail): drill to detail by correctly filtering by metric (#39766)
Co-authored-by: Michael S. Molina <michael.s.molina@gmail.com>
(cherry picked from commit df396aa6e9)
2026-04-30 08:44:46 -03:00
Evan Rusackas
e33f676b8b docs: Superset 6.1 documentation catch-up — batch 4 (#39446)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com>
(cherry picked from commit 979f60a6d4)
2026-04-29 15:43:04 -03:00
Michael S. Molina
1bff24aa34 chore(build): remove thread-loader from webpack build (#39763)
(cherry picked from commit 6ce3885f2e)
2026-04-29 15:42:47 -03:00
Evan Rusackas
848809630d docs: Superset 6.1 documentation catch-up — batch 3 (#39445)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com>
(cherry picked from commit b4f595953e)
2026-04-29 15:41:20 -03:00
Evan Rusackas
6c53dffb09 docs: Superset 6.1 documentation catch-up — batch 2 (#39441)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 2b623fd09a)
2026-04-29 15:41:09 -03:00
Evan Rusackas
861f8d1bb8 docs(mcp): update MCP server docs for 6.1 (#39422)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit fe074c0d76)
2026-04-29 15:40:51 -03:00
JUST.in DO IT
687f168042 fix(dashboard): escape emoji in position_json before saving to prevent truncation (#39737)
Co-authored-by: Michael S. Molina <michael.s.molina@gmail.com>
(cherry picked from commit 54f1e32763)
2026-04-29 10:34:52 -03:00
Jean Massucatto
2f6254bfb4 fix(chart): use categorical axis for bar charts with numeric x-axis (#39141)
Co-authored-by: Enzo Martellucci <52219496+EnxDev@users.noreply.github.com>
(cherry picked from commit 171414f165)
2026-04-29 10:33:48 -03:00
Maxime Beauchemin
3159ec0356 fix(dashboard): apply full transitive ancestor chain for dependent filters (#39504)
(cherry picked from commit 18d89f25ce)
2026-04-28 11:06:05 -03:00
Michael S. Molina
fa2e1600cd fix(theme): set color-scheme on html to fix dark mode scrollbars (#39704)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 7bee2afa8e)
2026-04-28 10:57:31 -03:00
Michael S. Molina
b55bb4e3ce fix(query-history): enable sorting by Duration column (#39637)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit c4a8b34b11)
2026-04-28 09:13:15 -03:00
Sam Firke
87e5450cbe fix(table chart): fix rerender bug that continuously cleared search box (#39707)
(cherry picked from commit 3395620b6e)
2026-04-28 09:13:03 -03:00
Evan Rusackas
61bcf578a6 fix(explore): ensure unsaved-changes dialog renders above View SQL modal (#39569)
Co-authored-by: yousoph <sophieyou12@gmail.com>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 7c4b2b137c)
2026-04-27 15:13:05 -03:00
JUST.in DO IT
4db4ac41c4 fix(sqllab): explore to chart is disabled (#39630)
(cherry picked from commit 78950fc18e)
2026-04-27 09:57:18 -03:00
Michael S. Molina
84a8df2beb fix(sql-lab): show table expand/collapse arrow only on hover (#39627)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit d6bbe6da9b)
2026-04-27 09:57:04 -03:00
Evan Rusackas
62f568d643 docs: Superset 6.1 documentation catch-up — batch 5 (#39454)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit e1ed5003a8)
2026-04-27 09:47:33 -03:00
Maxime Beauchemin
ffc242b5df fix(table): ensure dimensions appear before metrics in column order (#39346)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 4f19bc4c5f)
2026-04-27 09:43:37 -03:00
Michael S. Molina
78d2fcbf49 perf(sql-lab): debounce schema browser search (#39489)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 9fe3f634ec)
2026-04-27 09:39:47 -03:00
Michael S. Molina
68bbc68cce chore: CHANGELOG.md for 6.1.0 RC2 2026-04-17 14:41:26 -03:00
JUST.in DO IT
7a558f7ceb fix(sqllab): enhance table explore tree with schema pinning, column sorting, and table schema refresh (#39396)
Co-authored-by: Michael S. Molina <michael.s.molina@gmail.com>
(cherry picked from commit be680408c9)
2026-04-17 13:11:57 -03:00
Michael S. Molina
3b7ddf88e9 Revert "fix(embedded-sdk): wire hideTab to actually hide the dashboard tab (#38846)"
This reverts commit 9619fa2156.
2026-04-17 13:11:01 -03:00
JUST.in DO IT
232a555d55 fix(sqllab): Relocate schema display on table preview (#39420)
(cherry picked from commit 4bdc8d4c68)
2026-04-17 10:27:33 -03:00
Gabriel Torres Ruiz
ba6ec40fed fix(mcp): support explicit query_mode in TableChartConfig (#39412)
(cherry picked from commit 2e0d482ccf)
2026-04-17 10:27:19 -03:00
Gabriel Torres Ruiz
0648f78785 fix(mcp): replace inputSchema with parameters_hint in search_tools results by default (#39411)
(cherry picked from commit e5b3a9c25d)
2026-04-17 10:26:52 -03:00
Gabriel Torres Ruiz
129fcb5451 fix(mcp): prevent LLM from creating new dashboard instead of adding chart to existing one (#39353)
(cherry picked from commit c289731212)
2026-04-17 10:26:38 -03:00
Gabriel Torres Ruiz
6048e2f2e1 fix(parallel-coordinates): improve dark mode visibility for labels, axis text, and data lines (#39415)
(cherry picked from commit f850c6b1b1)
2026-04-17 10:26:24 -03:00
Michael S. Molina
1a537420dc chore: Bump core packages to 0.1.0 RC2 (#39406)
(cherry picked from commit e5820b6b2b)
2026-04-16 15:32:50 -03:00
Mehmet Salih Yavuz
4e9a6db9b3 feat(mcp): add a preview flow to mcp chart updates (#39383)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit 69f062b804)
2026-04-16 15:32:37 -03:00
Mehmet Salih Yavuz
02e6d671b4 fix(SelectFilter): auto clear search input (#39157)
(cherry picked from commit 7c76fd3d81)
2026-04-16 15:32:13 -03:00
Luiz Otavio
3cbc83449f fix: add comments to SQL clause validation (#39167)
(cherry picked from commit 0b419a07f5)
2026-04-16 09:22:22 -03:00
JUST.in DO IT
954902db1d fix(sqllab): format_sql to apply db dialect by database_id (#39393)
(cherry picked from commit 0b51e9cd5e)
2026-04-16 09:15:22 -03:00
Amin Ghadersohi
8b9a74a3d4 fix(mcp): always push fresh app context per tool call to prevent g.user race (#39385)
(cherry picked from commit e7b9fb277e)
2026-04-16 09:15:09 -03:00
Amin Ghadersohi
2dca313e41 fix(mcp): update instructions to use correct request wrapper and identifier params (#39392)
(cherry picked from commit 838ee870d0)
2026-04-16 09:14:51 -03:00
innovark
b196e11af2 fix(EmptyState): prevent SVG cropping in empty state images (#37287)
Co-authored-by: Joe Li <joe@preset.io>
(cherry picked from commit eaccb2e471)
2026-04-16 09:11:55 -03:00
Grégoire Gailly
b3c2965eaf fix(i18n): typo in fr language (#36982)
Co-authored-by: Sam Firke <sfirke@users.noreply.github.com>
(cherry picked from commit b11d4f3ef0)
2026-04-16 09:10:54 -03:00
Michael S. Molina
b424683c74 fix(ListView): empty state not filling available width (#39387)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 998b9e387b)
2026-04-15 16:38:22 -03:00
Maxime Beauchemin
119f173009 fix(table): use column label instead of SQL expression for orderby in downloads (#39332)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit b3e88db87e)
2026-04-15 16:38:11 -03:00
Abdul Rehman
89231addf4 fix(css-templates): add missing height to CSS editor in CssTemplateModal (#39221)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit 8471e82342)
2026-04-15 16:37:57 -03:00
Richard Fogaca Nienkotter
04565fa232 fix(dashboard): apply dynamic groupby display controls to scoped charts (#39356)
(cherry picked from commit c3a0f2749b)
2026-04-15 16:37:45 -03:00
Maxime Beauchemin
ea040d3136 fix(table): fix cross-filter not clearing on second click in Interactive Table (#39253)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit c2d96e0dce)
2026-04-15 16:37:28 -03:00
Abdul Rehman
1afa47bf80 fix(explore): dispatch onChange immediately on NumberControl stepper arrow clicks (#39220)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit 44e77fdf2b)
2026-04-15 16:37:15 -03:00
Gabriel Torres Ruiz
94dc92df02 feat(mcp): add create_virtual_dataset tool to save SQL queries as datasets (#39279)
(cherry picked from commit 18d6feb499)
2026-04-15 16:36:59 -03:00
Beto Dealmeida
c85d515af3 fix: do_ping takes a connection, not engine (#39013)
(cherry picked from commit 84f7b4a973)
2026-04-15 16:36:29 -03:00
Michael S. Molina
18a8f6cb04 fix(sqllab): show schema refresh icon only on hover (#39367)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit ddcb9be9a7)
2026-04-15 16:36:05 -03:00
Alexandru Soare
9e313ba82f fix(MCP): fix MCP logs (#39159)
(cherry picked from commit ffcc6e8b63)
2026-04-15 16:35:30 -03:00
Luiz Otavio
305927df7b fix(native-filters): prevent infinite recursion in filter scope tree traversal (#39355)
(cherry picked from commit 86575e129b)
2026-04-15 09:47:41 -03:00
JUST.in DO IT
bd7b53e341 fix(native-filter): infinite filter loading by deps (#39175)
(cherry picked from commit 499e27ea54)
2026-04-15 09:09:45 -03:00
Alexandru Soare
3f157d9ca1 fix(popup): Dropdown popup width doesn't match input width when tags collapse in oneLine mode (#39136)
(cherry picked from commit c2a35e2eea)
2026-04-14 08:47:11 -03:00
Alexandru Soare
15fa218529 fix(select): select all button cutoff (#39005)
(cherry picked from commit 5138aa2c11)
2026-04-14 08:45:59 -03:00
Alexandru Soare
9499ccf52c fix(explore): Prevent error toast when navigating away from Explore page (#39065)
(cherry picked from commit 66a9e2e16e)
2026-04-14 08:45:25 -03:00
Richard Fogaca Nienkotter
2b4e9909db fix(dashboard): preserve dynamic group by column order (#39333)
(cherry picked from commit 0f417f0040)
2026-04-14 08:45:13 -03:00
Alexandru Soare
2c163576a7 fix(dashboard): Ensure screenshot downloads always generate fresh images/pdfs (#38880)
(cherry picked from commit 1462ac9282)
2026-04-14 08:45:02 -03:00
Maxime Beauchemin
33531088d6 fix(explore): replace TableView with virtualized GridTable, add row limit controls, restore sample filters (#39212)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit fa1f12a0b5)
2026-04-13 17:01:28 -03:00
Maxime Beauchemin
c14f042185 fix(tests): fix async teardown leak in FiltersConfigModal.test.tsx (#39281)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit de40b58e10)
2026-04-13 15:54:13 -03:00
Mike Bridge
af0f8176e4 fix(dashboard): hide "Filters out of scope" section when empty (#39201)
Co-authored-by: Mike Bridge <michael.bridge@ext.preset.io>
(cherry picked from commit eea3557f61)
2026-04-13 15:53:44 -03:00
Mike Bridge
81ecb7f42b fix(dashboard): allow filter list to scroll in filter config modal sidebar (#39203)
Co-authored-by: Mike Bridge <michael.bridge@ext.preset.io>
(cherry picked from commit 7a243d329e)
2026-04-13 15:52:49 -03:00
Maxime Beauchemin
4e16086a9b fix(tests): improve ShareMenuItems test isolation to fix intermittent suite failure (#39280)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 98146251c4)
2026-04-13 15:52:34 -03:00
Maxime Beauchemin
c78a599ca6 fix(dataset-editor): fix SQL expression editor extra spaces and height expansion (#39248)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 0aa8cace1b)
2026-04-13 15:52:11 -03:00
Maxime Beauchemin
1d9dbe0697 fix(SqlLab): improve SQL diff modal — responsive width, padding, tabs, and copy button (#39246)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 450701ecec)
2026-04-13 15:51:54 -03:00
Richard Fogaca Nienkotter
0d5c7cb9b0 fix(echarts): prevent tooltip crash during dashboard auto-refresh (#39277)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit e9911fbac4)
2026-04-13 15:51:15 -03:00
Gabriel Torres Ruiz
dd9724daae fix(ag-grid): jpeg export of ag-grid tables (#38781)
(cherry picked from commit 69c8eef78e)
2026-04-13 15:51:01 -03:00
Enzo Martellucci
1f7838367f fix(table): cross-filtering breaks after renaming column labels via Custom SQL (#38858)
(cherry picked from commit aba7e6dae4)
2026-04-13 15:50:41 -03:00
Mike Bridge
3fdbbb6e7e fix(dashboard): Vertical filter bar gradient is extending past the filter bar area (#39204)
Co-authored-by: Mike Bridge <michael.bridge@ext.preset.io>
(cherry picked from commit 8bcc90c766)
2026-04-13 15:50:16 -03:00
venkateshwaran shanmugham
59d67fb786 fix: implement native browser fullscreen for dashboard charts (#38819)
Signed-off-by: Venkateshwaran Shanmugham <venkateshwaracholan@gmail.com>
Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com>
Co-authored-by: Mehmet Salih Yavuz <salih.yavuz@proton.me>
Co-authored-by: Richard Fogaça <richardfogaca@gmail.com>
Co-authored-by: Richard Fogaca Nienkotter <63572350+richardfogaca@users.noreply.github.com>
(cherry picked from commit e39dd1afce)
2026-04-13 15:50:02 -03:00
Amin Ghadersohi
e2d6afa38d fix(mcp): strip json_metadata and position_json from get_dashboard_info response (#39101)
(cherry picked from commit 680cef0ee0)
2026-04-13 15:49:48 -03:00
Amin Ghadersohi
252044702a fix(mcp): wire up compact schema serialization for search_tools results (#39229)
(cherry picked from commit e17cf3c808)
2026-04-13 15:49:29 -03:00
Shaitan
f35654179f fix(sql-lab): apply access check in SqlExecutionResultsCommand (#38952)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit f49310b8ff)
2026-04-13 15:49:15 -03:00
Vitor Avila
8a915c4783 fix: Drill to Detail for Embedded (#39214)
Co-authored-by: Maxime Beauchemin <maximebeauchemin@gmail.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit c7955a38ef)
2026-04-13 15:48:33 -03:00
Amin Ghadersohi
f0d7945ae9 fix(mcp): handle OAuth-authenticated databases in execute_sql (#39166)
(cherry picked from commit 68067d7f44)
2026-04-13 15:47:07 -03:00
Enzo Martellucci
32bd0caec3 fix(reports): escape SQL LIKE wildcards in find_by_extra_metadata (#38738)
Co-authored-by: Mehmet Salih Yavuz <salih.yavuz@proton.me>
(cherry picked from commit 6649f35a0d)
2026-04-13 15:46:51 -03:00
Mehmet Salih Yavuz
7ee70d9aba fix(AlertsReports): untie filters from alerts reports tabs flag (#38722)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit 5263abdc60)
2026-04-13 15:46:26 -03:00
Maxime Beauchemin
ccfe29e83f fix(tags): fix Bulk tag modal dropdown clipping and stale tag cache (#39210)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit d915e4f3ff)
2026-04-13 15:40:32 -03:00
Maxime Beauchemin
f59eaff400 fix(explore): constrain Edit Dataset modal height to prevent footer cutoff (#39211)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit bad5a35fce)
2026-04-13 15:40:18 -03:00
Amin Ghadersohi
25fb118883 fix(mcp): resolve null fields in list_datasets, list_databases, and save_sql_query (#39206)
(cherry picked from commit 1bde6f3bfd)
2026-04-13 15:39:05 -03:00
Maxime Beauchemin
09292ba228 fix(frontend): fix loading spinner positioning in Save modal and filters panel (#39205)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: yousoph <sophieyou12@gmail.com>
(cherry picked from commit d63308ca37)
2026-04-13 15:38:34 -03:00
Maxime Beauchemin
e4ced6ad82 fix(ace-editor): style bracket matching to blend with theme (#39182)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit b8b2bdedf9)
2026-04-13 15:38:07 -03:00
Maxime Beauchemin
07c548100d fix(sqllab): fix table navigator schema list, pin/unpin UX, copy actions, icons, and toolbar colors (#39173)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit d5017e60c3)
2026-04-13 15:37:54 -03:00
Luiz Otavio
23759b9a02 fix: add template_processor so Jinja gets rendered before SQLGlot parse (#39207)
(cherry picked from commit 2e80f2a473)
2026-04-13 15:37:40 -03:00
JUST.in DO IT
6017b1a3b5 fix(sqllab): Update style for code viewer container (#39075)
(cherry picked from commit 4c2dd63464)
2026-04-13 15:37:25 -03:00
Maxime Beauchemin
6848970d0d fix(sqllab): use monospace font for SQL in database error messages (#39181)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit ed659958f3)
2026-04-13 15:37:05 -03:00
Maxime Beauchemin
a8daa0d4dd fix(plugin-chart-handlebars): improve CSS sanitization tooltip and hide when not needed (#39180)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 36de05fe36)
2026-04-13 15:36:49 -03:00
Maxime Beauchemin
04ca466c77 fix(explore): add left-indentation to control panel hierarchy (#39177)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit a64609f4f3)
2026-04-13 15:36:32 -03:00
Maxime Beauchemin
374cc592ac fix(sqllab): demote "Save as new" button from primary to secondary (#39179)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 140f0001f2)
2026-04-13 15:36:16 -03:00
Elizabeth Thompson
e9ca6e04d5 fix(reports): propagate PlaywrightTimeout so execution transitions to ERROR state (#39176)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 587fe4af63)
2026-04-13 15:36:02 -03:00
Michael S. Molina
fcd2bac21d fix(explore): Unnecessary scroll bars appearing on charts in Explore (#39160)
Co-authored-by: Đỗ Trọng Hải <41283691+hainenber@users.noreply.github.com>
(cherry picked from commit 3a3a6536b7)
2026-04-13 15:35:14 -03:00
Alexandru Soare
c41894ac0b fix(filterReports): _generate_native_filter() crashes on null/empty filterValues (#38954)
(cherry picked from commit 4f695e1b4d)
2026-04-13 15:34:57 -03:00
Maxime Beauchemin
1e4ad1ba52 fix(explore): handle boolean false values correctly in control rendering (#39172)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 6ba9096870)
2026-04-13 15:32:57 -03:00
Amin Ghadersohi
1423387010 feat(mcp): add get_chart_type_schema tool for on-demand schema discovery (#39142)
(cherry picked from commit 5f9fc31ae2)
2026-04-13 15:32:24 -03:00
Amin Ghadersohi
685d9a0652 fix(mcp): compress chart config schemas to reduce search_tools token usage (#39018)
(cherry picked from commit bf9aff19b5)
2026-04-13 15:32:03 -03:00
Amin Ghadersohi
b9b48fb1e4 fix(mcp): add description and certification fields to default list tool columns (#39017)
(cherry picked from commit 7be2acb2f3)
2026-04-13 15:31:43 -03:00
Amin Ghadersohi
4764eea1dc fix(mcp): add dynamic response truncation for oversized info tool responses (#39107)
(cherry picked from commit 83ad1eca26)
2026-04-13 15:31:26 -03:00
Amin Ghadersohi
bc8690187f fix(mcp): remove JWT ValueError g.user fallback in auth layer (#39106)
(cherry picked from commit 92747246fc)
2026-04-13 15:31:09 -03:00
Amin Ghadersohi
dcca02dd16 fix(mcp): fix form_data null, dataset URL, ASCII preview, and chart rename (#39109)
(cherry picked from commit 7380a59ab8)
2026-04-13 15:30:21 -03:00
Ville Brofeldt
53d427aa79 fix(security_manager): custom auth_view issue (#39098)
(cherry picked from commit e56f8cc4fb)
2026-04-13 15:29:51 -03:00
Ville Brofeldt
a8ff6e463b fix(migrations): check pre-existing foreign keys on create util (#39099)
(cherry picked from commit 7c79b9ab61)
2026-04-13 15:29:27 -03:00
Maxime Beauchemin
a881c20598 feat(mcp): add database connection listing and info tools (#39111)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Amin Ghadersohi <amin.ghadersohi@gmail.com>
(cherry picked from commit a62be684a0)
2026-04-13 15:29:12 -03:00
Sam Firke
096ea4ee42 fix(SQL Lab): handle columns without names (#38986)
(cherry picked from commit 12eb40db01)
2026-04-13 15:28:20 -03:00
Amin Ghadersohi
7eb702e99e fix(mcp): handle stale SSL connections, heatmap duplicate labels, and session rollback (#39015)
(cherry picked from commit b3a402d936)
2026-04-06 09:56:06 -03:00
JUST.in DO IT
9689739e1a fix(dashboard): remove opacity on filter dropdown (#39074)
(cherry picked from commit c7d175b842)
2026-04-06 09:53:10 -03:00
Amin Ghadersohi
4a10bf4ca7 fix(mcp): improve execute_sql response-too-large error to suggest limit parameter (#39003)
(cherry picked from commit 851bbeea48)
2026-04-06 09:52:59 -03:00
Kamil Gabryjelski
42fe0f755a fix(mcp): Created dashboard should be in draft state by default (#39011)
(cherry picked from commit 135e0f8099)
2026-04-06 09:52:09 -03:00
Geidō
b0adfc64c7 fix(reports): log exception traceback in _get_csv_data (#39069)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 25eea295f6)
2026-04-06 09:51:55 -03:00
Rayan Salhab
4f9ec243b3 fix(ace-editor): fix cursor misalignment in markdown editor (#38928)
(cherry picked from commit ff3b8d8398)
2026-04-06 09:51:09 -03:00
Michael S. Molina
bf399b9f97 fix(echarts): fix stacked horizontal bar chart clipping and duplicate x-axis labels (#39012)
(cherry picked from commit 022342839a)
2026-04-01 16:12:44 -03:00
Michael S. Molina
b1eb6ac7c9 fix(dataset-editor): improve modal layout and fix Settings tab horizontal scroll (#39009)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: codeant-ai-for-open-source[bot] <244253245+codeant-ai-for-open-source[bot]@users.noreply.github.com>
(cherry picked from commit 38f0dc74f7)
2026-04-01 15:43:07 -03:00
Amin Ghadersohi
0f676a8e67 fix(mcp): handle table chart raw mode in query builders and sanitize dashboard titles (#38990)
(cherry picked from commit 0bae05d4a9)
2026-04-01 15:42:51 -03:00
Amin Ghadersohi
d4832fac72 fix(mcp): fix dashboard owners Pydantic crash and preserve chart preview filters (#38987)
(cherry picked from commit 190f1a59c5)
2026-04-01 15:42:37 -03:00
Michael S. Molina
81dbfad95a fix(query): pass datasource table to template processor for schema-aware Jinja rendering (#38984)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 94d8735d4b)
2026-04-01 15:42:23 -03:00
madhushreeag
389897bb9c fix(dashboard): dashboard filters not inherited in charts in Safari sometimes due to race condition (#38851)
Co-authored-by: madhushree agarwal <madhushree_agarwal@apple.com>
(cherry picked from commit 1e2d0faa55)
2026-04-01 15:41:56 -03:00
Michael S. Molina
0f4184db2f fix(mixed-timeseries): apply same axis formatting options as timeseries charts (#38979)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 8559786cc2)
2026-04-01 15:41:36 -03:00
Michael S. Molina
5d8d170093 fix(dashboard): live CSS preview in PropertiesModal (#38960)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit d4d22909cb)
2026-04-01 15:41:13 -03:00
Rui Zhao
5a348c0dae fix(presto): Fix presto timestamp (#26467)
Co-authored-by: Claude Code <noreply@anthropic.com>
Co-authored-by: Rui Zhao <zhaorui@dropbox.com>
Co-authored-by: Joe Li <joe@preset.io>
(cherry picked from commit 53b1d1097c)
2026-04-01 15:40:22 -03:00
Jessica Morris
fa6801a525 fix(bubble): Fix Bubble chart axis label rotation (#38917)
(cherry picked from commit f6cd8066ab)
2026-04-01 15:39:52 -03:00
Amin Ghadersohi
f8368404cf fix(mcp): batch fix for execute_sql crashes, null timestamps, and deck.gl errors (#38977)
(cherry picked from commit daefedebcd)
2026-04-01 15:39:39 -03:00
Amin Ghadersohi
27c9d806f1 fix(mcp): add TEMPORAL_RANGE filter for temporal x-axis in generate_chart (#38978)
(cherry picked from commit c37a3ec292)
2026-04-01 15:39:26 -03:00
Amin Ghadersohi
0bff78e45b feat(mcp): add Big Number chart type support to MCP service (#38403)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 4245720851)
2026-04-01 15:39:11 -03:00
Enzo Martellucci
2ca54b418f fix(echarts): adapt y-axis ticks and padding for compact timeseries charts (#38673)
(cherry picked from commit f0b20dc445)
2026-04-01 15:38:55 -03:00
Đỗ Trọng Hải
6cff944b7f fix(AlteredSliceTag): not display undefined filter value for chart change record (#38883)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 11f2140c37)
2026-04-01 15:38:01 -03:00
Michael S. Molina
ec9ffc4c31 fix(pivot-table): safely cast numeric strings to numbers for date formatting (#38953)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit f1cd1ae710)
2026-03-31 09:21:23 -03:00
Enzo Martellucci
ddb285b4c3 fix(charts): add X Axis Number Format control for numeric X-axis columns (#38809)
Co-authored-by: codeant-ai-for-open-source[bot] <244253245+codeant-ai-for-open-source[bot]@users.noreply.github.com>
(cherry picked from commit e0a0a22542)
2026-03-31 08:58:06 -03:00
Amin Ghadersohi
850c33fa6b fix(mcp): enforce MAX_PAGE_SIZE limit on list tools to prevent oversized responses (#38959)
(cherry picked from commit 2c9cf0bd55)
2026-03-31 08:56:48 -03:00
Kamil Gabryjelski
c853d4df63 feat(mcp): support saved metrics from datasets in chart generation (#38955)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit 15bab227bb)
2026-03-31 08:54:38 -03:00
Amin Ghadersohi
bda02a3fdc fix(mcp): prevent PendingRollbackError from poisoned sessions after SSL drops (#38934)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit d331a043a3)
2026-03-31 08:54:21 -03:00
Enzo Martellucci
f1f757b5c5 fix(select): ensure filter dropdown matches input field width (#38886)
(cherry picked from commit 41d401a879)
2026-03-31 08:54:09 -03:00
Amin Ghadersohi
bf8e927f2a fix(mcp): validate dataset exists in generate_explore_link before generating URL (#38951)
(cherry picked from commit 89f7e5e7ba)
2026-03-31 08:53:32 -03:00
Amin Ghadersohi
875d0c062f fix(mcp): prevent GRID_ID injection into ROOT_ID on tabbed dashboards (#38890)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit aa1a69555b)
2026-03-30 09:43:35 -03:00
Amin Ghadersohi
e08c305396 fix(mcp): remove @parse_request decorator for cleaner tool schemas (#38918)
(cherry picked from commit d1903afc69)
2026-03-30 09:19:48 -03:00
JUST.in DO IT
df2a0416eb fix(sqllab): invalid treeview folder expansion (#38897)
(cherry picked from commit a5d2324e21)
2026-03-30 09:18:39 -03:00
Mehmet Salih Yavuz
2aaf935db8 fix(Timeseries): dedup x axis labels (#38733)
(cherry picked from commit f832f9b0d5)
2026-03-30 09:17:28 -03:00
Kamil Gabryjelski
54cb0c1fb4 fix(explore): migrate from window.history to React Router history API (#38887)
(cherry picked from commit fc705d94e3)
2026-03-30 09:16:44 -03:00
JUST.in DO IT
807505a89a fix(sqllab): long cell content overflooding (#38855)
(cherry picked from commit 65eae027fa)
2026-03-30 09:14:25 -03:00
Krishna Chaitanya
f968c5cc78 fix(dataset): add email field to owners_data for Chart Explore path (#38836)
(cherry picked from commit ac96f46c76)
2026-03-30 09:14:12 -03:00
Enzo Martellucci
c512e30b17 fix(reports): PUT with empty recipients list does not persist the change (#38711)
(cherry picked from commit 40387d5daa)
2026-03-30 09:13:12 -03:00
Richard Fogaca Nienkotter
9fa0b68575 fix(dataset): add missing currency_code_column to DatasetPostSchema (#38853)
(cherry picked from commit 9c288d66b5)
2026-03-30 09:09:43 -03:00
Beto Dealmeida
e2d103f9a5 fix: type probing (#38889)
(cherry picked from commit 8983edea66)
2026-03-30 09:09:11 -03:00
Amin Ghadersohi
8d47bd7f42 fix(mcp): add try/except around DAO re-fetch to handle session errors in multi-tenant (#38859)
(cherry picked from commit 6dc3d7ad9f)
2026-03-30 09:08:06 -03:00
JUST.in DO IT
bd552209e7 fix(sqllab): inactive leftbar selector on empty state (#38833)
(cherry picked from commit cfa1aba1e0)
2026-03-26 14:01:07 -03:00
JUST.in DO IT
686fa44a5f fix(echart): multiple time shift line pattern (#38866)
(cherry picked from commit e045f49787)
2026-03-26 13:33:09 -03:00
Amin Ghadersohi
77df7c2add fix(mcp): add permission checks to generate_dashboard and update_chart tools (#38845)
(cherry picked from commit 23a5e95884)
2026-03-26 13:32:57 -03:00
Kamil Gabryjelski
6a866335b6 fix(mcp): detect unknown chart config fields and suggest correct ones (#38848)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
(cherry picked from commit 16f5a2a41a)
2026-03-26 13:32:47 -03:00
Đỗ Trọng Hải
26c3021b66 fix(ci): install missing msgcat used for Babel translation update (#38830)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 3506773f51)
2026-03-26 13:31:59 -03:00
Mayank Aggarwal
bd8b02c1c0 fix(dashboard): larger JSON metadata editor for better editing UX (#38728) (#38745)
(cherry picked from commit 403f4ad78c)
2026-03-26 13:31:18 -03:00
Shaitan
446358749e fix(datasource): align access validation in legacy views (#38647)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
Co-authored-by: Daniel Vaz Gaspar <danielvazgaspar@gmail.com>
(cherry picked from commit a93e319716)
2026-03-26 13:31:06 -03:00
Richard Fogaca Nienkotter
1e7d781354 fix(echarts): prevent plain legend clipping in dashboards (#38675)
(cherry picked from commit 12aca72074)
2026-03-26 13:30:54 -03:00
Michael S. Molina
9619fa2156 fix(embedded-sdk): wire hideTab to actually hide the dashboard tab (#38846)
(cherry picked from commit 3fb903fdc6)
2026-03-26 13:30:41 -03:00
Joe Li
c4af1cbca7 fix(models): correct TabState.latest_query_id column type to match FK target (#38837)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 4b26f8c712)
2026-03-26 13:30:28 -03:00
Alexandru Soare
9a35a4f43a fix(report): raise warning when filter type not recognized (#38676)
(cherry picked from commit 37c4a36fdb)
2026-03-26 13:26:31 -03:00
Amin Ghadersohi
a5d4348fb1 feat(mcp): add Handlebars chart type support to MCP service (#38402)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit c596df9294)
2026-03-26 13:25:16 -03:00
Michael S. Molina
dd1f946962 fix(extensions-cli): remove publisher prefix from bundle filename (#38823)
(cherry picked from commit 6852349d24)
2026-03-26 13:25:00 -03:00
JUST.in DO IT
27206fc892 fix(sqllab): FilterText does not apply accordingly (#38813)
(cherry picked from commit 7c9d75b69e)
2026-03-26 13:24:50 -03:00
Amin Ghadersohi
861ce50473 fix(mcp): fix generate_dashboard cross-session SQLAlchemy error (#38827)
(cherry picked from commit 09594b32f9)
2026-03-26 13:24:37 -03:00
Alexandru Soare
1d170f0da1 fix(keys): Unsafe dict access in get_native_filters_params() crashes execution (#38272)
(cherry picked from commit 89d1b80ce7)
2026-03-24 11:59:29 -03:00
Shaitan
a33c2a9c0e fix(sqllab): add authorization check to query cost estimation (#38648)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 962abf6904)
2026-03-24 11:26:27 -03:00
Amin Ghadersohi
ac8d6b0c53 fix(mcp): prevent encoding errors and fix tool bugs on MCP client transports (#38786)
(cherry picked from commit ed3c5280a9)
2026-03-24 11:26:11 -03:00
Mehmet Salih Yavuz
de42d4a986 fix(Matrixify): readd matrixify_enable guard missing (#38759)
(cherry picked from commit 7222327992)
2026-03-24 11:25:56 -03:00
Levis Mbote
11b510ecad fix(explore): display actual data type instead of "column" in column tooltip (#38554)
(cherry picked from commit e67bc5bee5)
2026-03-23 13:40:39 -03:00
Mehmet Salih Yavuz
a16f0c81b5 fix(MainNav): display all menu items on smaller screens (#38732)
(cherry picked from commit fdcb942f3c)
2026-03-23 13:39:24 -03:00
Mehmet Salih Yavuz
155f55dbe1 fix(AlertsReports): validate anchor_list is a list (#38723)
(cherry picked from commit 100ad7d9ee)
2026-03-23 13:38:49 -03:00
Mehmet Salih Yavuz
deeedef0dc feat(matrixify): Revamp control panel (#38519) 2026-03-23 13:37:06 -03:00
Mehmet Salih Yavuz
967e78a716 fix(DropdownContainer): add fresh to avoid stale data (#38702)
(cherry picked from commit cc34d19d24)
2026-03-23 10:16:46 -03:00
João Pedro Alves Barbosa
a541d69019 fix(table): improve conditional formatting text contrast (#38705)
(cherry picked from commit 02ffb52f4a)
2026-03-23 09:53:59 -03:00
Đỗ Trọng Hải
493f6c0aed feat(sec): harden GHA ref by using its SHA ID to prevent accidental usage of compromised actions (#38782)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 83823911b5)
2026-03-23 09:53:34 -03:00
Đỗ Trọng Hải
c26d2de616 fix(sec): remove compromised Trivy actions (#38780)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 7004369c68)
2026-03-23 09:45:45 -03:00
Amin Ghadersohi
6d1a1b1863 fix(mcp): convert adhoc filters to QueryObject format before query compilation (#38774)
(cherry picked from commit 44c2c765ae)
2026-03-23 09:43:36 -03:00
Amin Ghadersohi
68cc13cfbf fix(mcp): normalize call_tool proxy arguments to prevent encoding TypeError (#38775)
(cherry picked from commit 0d5721910e)
2026-03-23 09:43:22 -03:00
Amin Ghadersohi
935226c736 feat(mcp): add horizontal bar chart orientation support to generate_chart (#38390)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:40:56 -03:00
Kamil Gabryjelski
9239db5a32 fix(mcp): fix detached Slice instance error in chart/dashboard serialization (#38767)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:34:03 -03:00
Enzo Martellucci
cf493a928a fix(reports): validate nativeFilters on report create/update and deactivate on dashboard filter deletion (#38715) 2026-03-20 16:34:03 -03:00
Kamil Gabryjelski
67b2bf0646 fix(mcp): use correct permission class for save_sql_query tool (#38764)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-20 16:34:03 -03:00
Amin Ghadersohi
af54788b5e fix(mcp): fix dashboard slug null and execute_sql encoding error (#38710) 2026-03-20 16:34:03 -03:00
Alexandru Soare
0915e675df fix(button): Theming configurations for button elements is not consistent (#38604) 2026-03-20 16:34:03 -03:00
Levis Mbote
884f8f7a35 fix(timeseries-table): enable proper column sorting in timeseries-table chart (#38579) 2026-03-20 16:34:03 -03:00
Levis Mbote
da5f968a8e fix(theme): persist local theme id so "Local" tag remains after navigation (#38527) 2026-03-20 16:34:03 -03:00
Levis Mbote
e9150afba6 fix(dashboard): correct tab underline width for newly added dashboard tabs. (#38524) 2026-03-20 16:34:03 -03:00
Levis Mbote
519e7c58de fix(theme): ensure colorLink follows colorPrimary when not explicitly set (#38517) 2026-03-20 16:34:03 -03:00
Alexandru Soare
1f94c88025 fix(firebolt): Firebolt SQL entered with EXCLUDE is rewritten to EXCEPT (#38742) 2026-03-20 16:34:03 -03:00
Kamil Gabryjelski
d7633752bd fix(mcp): Chart schema followups - DRY extraction, template fix, alias and test gaps (#38746)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-20 16:34:03 -03:00
Kamil Gabryjelski
c2b3d36435 fix: Add aliases and groupby list to chart schemas (#38740) 2026-03-20 16:33:58 -03:00
Luiz Otavio
c68dee2caf fix(sql): remove WHERE 1 = 1 when temporal filter has "No filter" selected (#38704) 2026-03-20 16:33:58 -03:00
Joe Li
30690aaf7b fix(dashboard): use inline theme data to prevent 403 for non-admin users (#38384)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:58 -03:00
Michael S. Molina
25915f016b fix(chart): prevent chart list from failing when a datasource lacks explore_url (#38721) 2026-03-20 16:33:58 -03:00
Kamil Gabryjelski
b9259db772 fix: Row limit support for chart mcp tools (#38717) 2026-03-20 16:33:58 -03:00
Amin Ghadersohi
9277f85d65 fix(mcp): expose individual tool parameters when MCP_PARSE_REQUEST_ENABLED=False (#38714)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:53 -03:00
Evan Rusackas
f7e4fe9d0f fix(tests): restore 100% TypeScript coverage for core packages (#38682)
Co-authored-by: Claude Code <noreply@anthropic.com>
Co-authored-by: Joe Li <joe@preset.io>
2026-03-20 16:33:53 -03:00
João Pedro Alves Barbosa
2e09934bb5 fix(map-box): prevent clusters from being smaller than individual points (#38458)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:53 -03:00
Michael S. Molina
435e405263 fix: Simplify extension folder name (#38690) 2026-03-20 16:33:53 -03:00
mcdogg17
54b313c659 fix(dashboard): overload issue in dashboard export to excel (#29418)
Co-authored-by: Evan Rusackas <evan@preset.io>
Co-authored-by: Claude <claude@anthropic.com>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-20 16:33:53 -03:00
Beto Dealmeida
e2495119a3 feat: apply RLS conservatively (#38683) 2026-03-20 16:33:53 -03:00
Amin Ghadersohi
87a63a81e2 feat(mcp): Add tool annotations for MCP directory compliance (#38641) 2026-03-20 16:33:53 -03:00
Amin Ghadersohi
b95e556840 feat(mcp): add save_sql_query tool for SQL Lab saved queries (#38414)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:47 -03:00
Amin Ghadersohi
134e77b7c2 feat(mcp): add BM25 tool search transform to reduce initial context size (#38562)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:47 -03:00
Amin Ghadersohi
5adf07aef3 feat(mcp): add extra_form_data param to get_chart_data for dashboard filters (#38531)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:43 -03:00
Amin Ghadersohi
9b6c4486c0 refactor(mcp): use serialize_user_object in get_instance_info (#38613) 2026-03-20 16:33:43 -03:00
Amin Ghadersohi
4ce0627c73 fix(mcp): extract role names as strings in UserInfo serialization (#38612) 2026-03-20 16:33:43 -03:00
Amin Ghadersohi
530ad25c90 docs: move MCP deployment guide to admin docs, add user-facing AI guide (#38585) 2026-03-20 16:33:43 -03:00
Amin Ghadersohi
f11a4834d2 feat(mcp): implement RBAC permission checking for MCP tools (#38407)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:43 -03:00
Amin Ghadersohi
50ff9fdb3e feat(mcp): auto-generate dashboard title from chart names when omitted (#38410)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 16:33:37 -03:00
Đỗ Trọng Hải
a83ff0057c fix(ci): allow docs testing to run despite absence of db diagnostics data (#38655)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit ca403dc45d)
2026-03-17 11:58:50 -03:00
João Pedro Alves Barbosa
cffca7367c fix(map-box): make opacity, lon, lat, and zoom controls functional (#38374)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: codeant-ai-for-open-source[bot] <244253245+codeant-ai-for-open-source[bot]@users.noreply.github.com>
(cherry picked from commit 96705c156a)
2026-03-17 11:57:36 -03:00
Joe Li
e855fbf71d fix(embedded): default to light theme instead of system preference (#38644)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit aa5adb0fce)
2026-03-17 11:56:19 -03:00
Đỗ Trọng Hải
1726169c1a fix(FilterBar): reduce padded space between filter header and first filter (#38646)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit afe093f1ca)
2026-03-17 11:55:46 -03:00
Mayank Aggarwal
8b20d95366 fix(docs): use absolute API doc links in developer docs (#38649)
(cherry picked from commit cc066b3576)
2026-03-17 11:55:32 -03:00
amaannawab923
41371e4816 fix(ag-grid-table): fix failing buildQuery test expectation (#38636)
(cherry picked from commit 6e7d6a85b4)
2026-03-17 11:53:16 -03:00
Luiz Otavio
185c0eb3bf style(metadata-bar): use bold font weight for metadata bar title (#38608)
(cherry picked from commit e8061a9c2b)
2026-03-17 11:53:02 -03:00
Amin Ghadersohi
4a153a0ec3 fix(mcp): return all statement results for multi-statement SQL queries (#38388)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit b6c3b3ef46)
2026-03-17 11:52:09 -03:00
Rafael Benitez
aaaead02f1 fix(world-map): add fallback fill color when colorFn returns null (#38602)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit ba7271b4d8)
2026-03-17 11:51:23 -03:00
amaannawab923
4baedcc794 fix(ag-grid-table): fix AND filter conditions not applied (#38369)
(cherry picked from commit ca2d26a1e2)
2026-03-17 11:51:10 -03:00
Alexandru Soare
825b284492 fix(timeshiftcolor): Time shift color to match the original color (#38473)
(cherry picked from commit f6106cd26f)
2026-03-17 11:50:54 -03:00
Michael S. Molina
33b0b6b3bb fix(editor): implement missing methods, fix cursor position clearing (#38603)
(cherry picked from commit 1867336907)
2026-03-17 11:50:14 -03:00
Ville Brofeldt
e42f6c3a1c fix(extensions): fix gitignore template and bump version (#38614)
(cherry picked from commit f5383263bc)
2026-03-17 11:49:57 -03:00
Amin Ghadersohi
b3eaa5ad01 fix(mcp): fix crashes in list tools, dataset info, chart preview, and add owner/favorite filters (#38277)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit d5cf77cd60)
2026-03-17 11:49:40 -03:00
Kamil Gabryjelski
e41653faff fix(mcp): Support form_data_key without chart identifier for unsaved charts (#38628)
(cherry picked from commit af5e05db2e)
2026-03-17 11:48:15 -03:00
Enzo Martellucci
39bdccd7ee fix(deckgl): polygon chart not rendering when boundary column contains nested geometry JSON (#38595)
(cherry picked from commit 32a64d02c7)
2026-03-17 11:45:42 -03:00
Enzo Martellucci
ba2cfac373 fix(explore/dashboard): fix CSV/Excel downloads for legacy chart types (#38513)
Co-authored-by: Diego Pucci <diegopucci.me@gmail.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: codeant-ai-for-open-source[bot] <244253245+codeant-ai-for-open-source[bot]@users.noreply.github.com>
(cherry picked from commit 9516d1a306)
2026-03-17 11:45:20 -03:00
Kamil Gabryjelski
d170decfc5 fix(mcp): Improve validation errors and field aliases to reduce failed LLM tool calls (#38625)
(cherry picked from commit d91b96814e)
2026-03-17 11:44:42 -03:00
Amin Ghadersohi
b791979c37 fix(mcp): replace uuid with url and changed_on_humanized in default list columns (#38566)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit fc156d0014)
2026-03-17 11:43:52 -03:00
Mehmet Salih Yavuz
3ad8d484d7 fix: add parent_slice_id for multilayer charts to embed (#38243)
(cherry picked from commit 95f61bd223)
2026-03-17 11:41:47 -03:00
Mehmet Salih Yavuz
343e7f33af fix: add embedded box sizing rule for layout (#38351)
Co-authored-by: Joe Li <joe@preset.io>
(cherry picked from commit 7f476a79b3)
2026-03-17 11:41:12 -03:00
Yuriy Krasilnikov
3521504b05 fix(embedded): prevent double RLS application in virtual datasets (#37395)
(cherry picked from commit 09e9c6a522)
2026-03-17 11:40:36 -03:00
Ville Brofeldt
f63c0d4caa fix: support nested function calls in cache_key_wrapper (#38569)
(cherry picked from commit a9def2fc15)
2026-03-17 11:40:05 -03:00
Alexandru Soare
cfcaaa9db4 fix(matrixify): Matrixify to not override slice id (#38515)
Co-authored-by: Evan Rusackas <evan@preset.io>
(cherry picked from commit 27197faba9)
2026-03-17 11:39:41 -03:00
amaannawab923
224e9ea411 fix(ag-grid): persist AG Grid column filters in explore permalinks (#38393)
(cherry picked from commit 9215eb5e45)
2026-03-17 11:38:59 -03:00
Amin Ghadersohi
ec8177fe15 fix(charts): set reasonable default y-axis title margin to prevent label overlap (#38389)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit fe7f220c21)
2026-03-17 11:37:41 -03:00
Amin Ghadersohi
88195cdb54 fix(mcp): honor target_tab parameter when adding charts to tabbed dashboards (#38409)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 3bb9704cd5)
2026-03-17 11:36:22 -03:00
Amin Ghadersohi
a3dba466cb fix(mcp): wrap LoggingMiddleware.on_message event_logger in try/except (#38560)
(cherry picked from commit 6d7cfac8b2)
2026-03-17 11:35:39 -03:00
Đỗ Trọng Hải
1354dc6881 fix(i18n): correct variable name for translated SQL Lab query message (#38494)
Signed-off-by: hainenber <dotronghai96@gmail.com>
Co-authored-by: Evan Rusackas <evan@preset.io>
(cherry picked from commit 31754a39c9)
2026-03-17 11:35:00 -03:00
Michael S. Molina
249c21655f chore: CHANGELOG.md and UPDATING.md for 6.1.0 RC1 2026-03-11 10:32:14 -03:00
Michael S. Molina
224a922341 fix: SQL Lab tab content padding (#38561)
(cherry picked from commit bde48e563e)
2026-03-10 11:45:24 -03:00
Amin Ghadersohi
a9b24da0a2 fix(mcp): improve default chart names with descriptive format (#38406)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 0cfd760a36)
2026-03-10 08:25:44 -03:00
Amin Ghadersohi
ab64ad7ac7 fix(mcp): add missing __init__.py for chart, dashboard, dataset packages (#38400)
(cherry picked from commit 5c2cbb58bc)
2026-03-10 08:24:46 -03:00
Amin Ghadersohi
d266146bbb fix(mcp): add guardrails to prevent LLM artifact generation (#38391)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 5fa70bdbd8)
2026-03-10 08:24:00 -03:00
Amin Ghadersohi
10415fe8be fix(mcp): add missing command.validate() to MCP chart data tools (#38521)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 2a876e8b86)
2026-03-10 08:23:47 -03:00
Amin Ghadersohi
d42caf744f feat(mcp): register GlobalErrorHandlerMiddleware and LoggingMiddleware (#38523)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
(cherry picked from commit 0533ca9941)
2026-03-10 08:23:38 -03:00
Enzo Martellucci
d8e346d52d fix(embedded): add CurrentUserRestApi read permission to Public role defaults (#38474)
(cherry picked from commit a17f38a4e2)
2026-03-10 08:22:45 -03:00
Amin Ghadersohi
8d7a36df5a fix(mcp): resolve chatbot tool call flakiness with URL and instruction fixes (#38532)
(cherry picked from commit 6ef4794778)
2026-03-10 08:22:27 -03:00
Amin Ghadersohi
77c7f9b5e8 fix(mcp): make fastmcp truly optional during Superset startup (#38534)
(cherry picked from commit 4cd3ce164d)
2026-03-10 08:22:09 -03:00
Evan Rusackas
6ebaf5919f fix(docs): swizzle MethodEndpoint to fix SSG crash on all API pages (#38533)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
(cherry picked from commit 8e3e57c1c8)
2026-03-10 08:21:54 -03:00
579 changed files with 41596 additions and 5087 deletions

View File

@@ -26,16 +26,16 @@ runs:
- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}

View File

@@ -32,7 +32,7 @@ jobs:
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: true
ref: master
@@ -41,7 +41,7 @@ jobs:
uses: ./.github/actions/setup-supersetbot/
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v6
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.10"

View File

@@ -31,7 +31,7 @@ jobs:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
if: steps.check_queued.outputs.count >= 20
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Cancel duplicate workflow runs
if: steps.check_queued.outputs.count >= 20

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -25,9 +25,9 @@ jobs:
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Check and notify
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ github.token }}
script: |

View File

@@ -44,7 +44,7 @@ jobs:
pull-requests: write
steps:
- name: Comment access denied
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const message = `👋 Hi @${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}!
@@ -71,12 +71,12 @@ jobs:
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 1
- name: Run Claude PR Action
uses: anthropics/claude-code-action@beta
uses: anthropics/claude-code-action@28f83620103c48a57093dcc2837eec89e036bb9f # beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
timeout_minutes: "60"

View File

@@ -31,7 +31,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Check for file changes
id: check

View File

@@ -27,9 +27,9 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: "Dependency Review"
uses: actions/dependency-review-action@v4
uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
continue-on-error: true
with:
fail-on-severity: critical
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Setup Python
uses: ./.github/actions/setup-backend/

View File

@@ -14,7 +14,6 @@ concurrency:
cancel-in-progress: true
jobs:
setup_matrix:
runs-on: ubuntu-24.04
outputs:
@@ -40,9 +39,8 @@ jobs:
IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
@@ -91,7 +89,7 @@ jobs:
# in the context of push (using multi-platform build), we need to pull the image locally
- name: Docker pull
if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
run: docker pull $IMAGE_TAG
run: docker pull $IMAGE_TAG
- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
@@ -101,23 +99,6 @@ jobs:
docker images $IMAGE_TAG
docker history $IMAGE_TAG
# Scan for vulnerabilities in built container image after pushes to mainline branch.
- name: Run Trivy container image vulnerabity scan
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2
with:
image-ref: ${{ env.IMAGE_TAG }}
format: 'sarif'
output: 'trivy-results.sarif'
vuln-type: 'os'
severity: 'CRITICAL,HIGH'
ignore-unfixed: true
- name: Upload Trivy scan results to GitHub Security tab
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'lean'
uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
with:
sarif_file: 'trivy-results.sarif'
- name: docker-compose sanity check
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && matrix.build_preset == 'dev'
shell: bash
@@ -134,7 +115,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Check for file changes

View File

@@ -28,8 +28,8 @@ jobs:
run:
working-directory: superset-embedded-sdk
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-embedded-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'

View File

@@ -18,8 +18,8 @@ jobs:
run:
working-directory: superset-embedded-sdk
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-embedded-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'

View File

@@ -69,7 +69,7 @@ jobs:
- name: Comment (success)
if: steps.describe-services.outputs.active == 'true'
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{github.token}}
script: |

View File

@@ -63,7 +63,7 @@ jobs:
- name: Get event SHA
id: get-sha
if: steps.eval-label.outputs.result == 'up'
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -94,7 +94,7 @@ jobs:
core.setOutput("sha", prSha);
- name: Looking for feature flags in PR description
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
id: eval-feature-flags
if: steps.eval-label.outputs.result == 'up'
with:
@@ -116,7 +116,7 @@ jobs:
return results;
- name: Reply with confirmation comment
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
if: steps.eval-label.outputs.result == 'up'
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
@@ -160,7 +160,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
persist-credentials: false
@@ -189,7 +189,7 @@ jobs:
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v6
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -197,7 +197,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
uses: aws-actions/amazon-ecr-login@c962da2960ed15f492addc26fffa274485265950 # v2
- name: Load, tag and push image to ECR
id: push-image
@@ -220,12 +220,12 @@ jobs:
pull-requests: write
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v6
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -233,7 +233,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
uses: aws-actions/amazon-ecr-login@c962da2960ed15f492addc26fffa274485265950 # v2
- name: Check target image exists in ECR
id: check-image
@@ -248,7 +248,7 @@ jobs:
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ github.token }}
script: |
@@ -263,7 +263,7 @@ jobs:
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
uses: aws-actions/amazon-ecs-render-task-definition@77954e213ba1f9f9cb016b86a1d4f6fcdea0d57e # v1
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
@@ -296,7 +296,7 @@ jobs:
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@cbf54ec46642b86ff78c2f5793da6746954cf8ff # v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
@@ -318,7 +318,7 @@ jobs:
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{github.token}}
script: |
@@ -331,7 +331,7 @@ jobs:
});
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{github.token}}
script: |

View File

@@ -27,12 +27,12 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Setup Java
uses: actions/setup-java@v5
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
with:
distribution: "temurin"
java-version: "11"

View File

@@ -14,10 +14,10 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Checkout Repository
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '20'

View File

@@ -17,7 +17,7 @@ jobs:
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false

View File

@@ -12,7 +12,7 @@ jobs:
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -15,12 +15,12 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Setup Java
uses: actions/setup-java@v5
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
with:
distribution: 'temurin'
java-version: '11'

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |

View File

@@ -16,7 +16,7 @@ jobs:
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -24,7 +24,7 @@ jobs:
python-version: ["current", "previous", "next"]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -42,7 +42,7 @@ jobs:
echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
brew install norwoodj/tap/helm-docs
- name: Setup Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '20'
@@ -57,7 +57,7 @@ jobs:
yarn install --immutable
- name: Cache pre-commit environments
uses: actions/cache@v5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ~/.cache/pre-commit
key: pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}

View File

@@ -26,7 +26,7 @@ jobs:
name: Bump version and publish package(s)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
# pulls all commits (needed for lerna / semantic release to correctly version)
fetch-depth: 0
@@ -42,13 +42,13 @@ jobs:
- name: Install Node.js
if: env.HAS_TAGS
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Cache npm
if: env.HAS_TAGS
uses: actions/cache@v5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -62,7 +62,7 @@ jobs:
run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
- name: Cache npm
if: env.HAS_TAGS
uses: actions/cache@v5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.npm-cache-dir-path.outputs.dir }}

View File

@@ -37,7 +37,7 @@ jobs:
steps:
- name: Security Check - Authorize Maintainers Only
id: auth
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
@@ -147,7 +147,7 @@ jobs:
- name: Checkout PR code (only if build needed)
if: steps.auth.outputs.authorized == 'true' && steps.check.outputs.build_needed == 'true'
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ steps.check.outputs.target_sha }}
persist-credentials: false

View File

@@ -37,7 +37,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -38,21 +38,21 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
persist-credentials: false
submodules: recursive
- name: Set up Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './docs/.nvmrc'
node-version-file: "./docs/.nvmrc"
- name: Setup Python
uses: ./.github/actions/setup-backend/
- uses: actions/setup-java@v5
- uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
with:
distribution: 'zulu'
java-version: '21'
distribution: "zulu"
java-version: "21"
- name: Install Graphviz
run: sudo apt-get install -y graphviz
- name: Compute Entity Relationship diagram (ERD)
@@ -68,7 +68,7 @@ jobs:
yarn install --check-cache
- name: Download database diagnostics (if triggered by integration tests)
if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
uses: dawidd6/action-download-artifact@v16
uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
continue-on-error: true
with:
workflow: superset-python-integrationtest.yml
@@ -77,7 +77,7 @@ jobs:
path: docs/src/data/
- name: Try to download latest diagnostics (for push/dispatch triggers)
if: github.event_name != 'workflow_run'
uses: dawidd6/action-download-artifact@v16
uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
continue-on-error: true
with:
workflow: superset-python-integrationtest.yml

View File

@@ -24,10 +24,10 @@ jobs:
name: Link Checking
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
# Do not bump this linkinator-action version without opening
# an ASF Infra ticket to allow the new version first!
- uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
- uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
with:
paths: "**/*.md, **/*.mdx"
@@ -67,14 +67,14 @@ jobs:
working-directory: docs
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './docs/.nvmrc'
node-version-file: "./docs/.nvmrc"
- name: yarn install
run: |
yarn install --check-cache
@@ -98,25 +98,26 @@ jobs:
working-directory: docs
steps:
- name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event.workflow_run.head_sha }}
persist-credentials: false
submodules: recursive
- name: Set up Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './docs/.nvmrc'
node-version-file: "./docs/.nvmrc"
- name: yarn install
run: |
yarn install --check-cache
- name: Download database diagnostics from integration tests
uses: dawidd6/action-download-artifact@v16
uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
with:
workflow: superset-python-integrationtest.yml
run_id: ${{ github.event.workflow_run.id }}
name: database-diagnostics
path: docs/src/data/
if_no_artifact_found: "warning"
- name: Use fresh diagnostics
run: |
if [ -f "src/data/databases-diagnostics.json" ]; then

View File

@@ -69,21 +69,21 @@ jobs:
# Conditional checkout based on context
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -109,7 +109,7 @@ jobs:
run: testdata
- name: Setup Node.js
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
@@ -146,7 +146,7 @@ jobs:
SAFE_APP_ROOT=${APP_ROOT//\//_}
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
- name: Upload Artifacts
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
if: failure()
with:
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
@@ -186,21 +186,21 @@ jobs:
# Conditional checkout based on context (same as Cypress workflow)
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -226,7 +226,7 @@ jobs:
run: playwright_testdata
- name: Setup Node.js
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
@@ -259,7 +259,7 @@ jobs:
SAFE_APP_ROOT=${APP_ROOT//\//_}
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
- name: Upload Playwright Artifacts
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
if: failure()
with:
path: |

View File

@@ -24,7 +24,7 @@ jobs:
working-directory: superset-extensions-cli
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -49,7 +49,7 @@ jobs:
- name: Upload coverage reports to Codecov
if: steps.check.outputs.superset-extensions-cli
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
file: ./coverage.xml
flags: superset-extensions-cli
@@ -58,7 +58,7 @@ jobs:
- name: Upload HTML coverage report
if: steps.check.outputs.superset-extensions-cli
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: superset-extensions-cli-coverage-html
path: htmlcov/

View File

@@ -23,7 +23,7 @@ jobs:
should-run: ${{ steps.check.outputs.frontend }}
steps:
- name: Checkout Code
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
fetch-depth: 0
@@ -58,7 +58,7 @@ jobs:
- name: Upload Docker Image Artifact
if: steps.check.outputs.frontend
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: docker-image
path: docker-image.tar.gz
@@ -73,7 +73,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v8
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image
@@ -90,7 +90,7 @@ jobs:
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json"
- name: Upload Coverage Artifact
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: coverage-artifacts-${{ matrix.shard }}
path: superset-frontend/coverage
@@ -103,14 +103,14 @@ jobs:
id-token: write
steps:
- name: Checkout Code
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
fetch-depth: 0
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Download Coverage Artifacts
uses: actions/download-artifact@v8
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
pattern: coverage-artifacts-*
path: coverage/
@@ -127,7 +127,7 @@ jobs:
run: npx nyc merge coverage/ merged-output/coverage-summary.json
- name: Upload Code Coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: javascript
use_oidc: true
@@ -142,7 +142,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v8
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image
@@ -166,7 +166,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v8
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image
@@ -184,7 +184,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v8
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image

View File

@@ -16,14 +16,14 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4
with:
version: v3.16.4

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
@@ -42,7 +42,7 @@ jobs:
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Install Helm
uses: azure/setup-helm@v4
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4
with:
version: v3.5.4
@@ -101,7 +101,7 @@ jobs:
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
- name: Open Pull Request
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const branchName = '${{ env.branch_name }}';

View File

@@ -60,21 +60,21 @@ jobs:
# Conditional checkout based on context (same as Cypress workflow)
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -100,7 +100,7 @@ jobs:
run: playwright_testdata
- name: Setup Node.js
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
@@ -133,7 +133,7 @@ jobs:
SAFE_APP_ROOT=${APP_ROOT//\//_}
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
- name: Upload Playwright Artifacts
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
if: failure()
with:
path: |

View File

@@ -41,7 +41,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -68,7 +68,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,mysql
token: ${{ secrets.CODECOV_TOKEN }}
@@ -98,7 +98,7 @@ jobs:
"
- name: Upload database diagnostics artifact
if: steps.check.outputs.python
uses: actions/upload-artifact@v7
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: database-diagnostics
path: databases-diagnostics.json
@@ -129,7 +129,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -159,7 +159,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,postgres
token: ${{ secrets.CODECOV_TOKEN }}
@@ -182,7 +182,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -211,7 +211,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,sqlite
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -48,7 +48,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -77,7 +77,7 @@ jobs:
run: |
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,presto
token: ${{ secrets.CODECOV_TOKEN }}
@@ -108,7 +108,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -145,7 +145,7 @@ jobs:
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,hive
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -24,7 +24,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -53,7 +53,7 @@ jobs:
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,unit
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -31,7 +31,7 @@ jobs:
- name: Setup Node.js
if: steps.check.outputs.frontend
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install dependencies
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -62,6 +62,10 @@ jobs:
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Install msgcat
run: sudo apt update && sudo apt install -y gettext
- name: Test babel extraction
if: steps.check.outputs.python
run: ./scripts/translations/babel_update.sh

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Install dependencies

View File

@@ -26,7 +26,7 @@ jobs:
steps:
- name: Quickly add thumbs up!
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
uses: actions/github-script@v8
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
@@ -38,7 +38,7 @@ jobs:
});
- name: "Checkout ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false

View File

@@ -47,7 +47,7 @@ jobs:
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
@@ -60,7 +60,7 @@ jobs:
build: "true"
- name: Use Node.js 20
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 20
@@ -107,12 +107,12 @@ jobs:
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
- name: Use Node.js 20
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 20

View File

@@ -30,10 +30,10 @@ jobs:
name: Generate Reports
steps:
- name: Checkout Repository
uses: actions/checkout@v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version-file: './superset-frontend/.nvmrc'

1
.gitignore vendored
View File

@@ -133,6 +133,7 @@ CLAUDE.local.md
PROJECT.md
.aider*
.claude_rc*
.claude/settings.local.json
.env.local
oxc-custom-build/
*.code-workspace

View File

@@ -50,3 +50,4 @@ under the License.
- [4.1.4](./CHANGELOG/4.1.4.md)
- [5.0.0](./CHANGELOG/5.0.0.md)
- [6.0.0](./CHANGELOG/6.0.0.md)
- [6.1.0](./CHANGELOG/6.1.0.md)

1564
CHANGELOG/6.1.0.md Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -24,46 +24,12 @@ assists people when migrating to a new version.
## Next
## 6.1.0
### ClickHouse minimum driver version bump
The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
### MCP Tool Observability
MCP (Model Context Protocol) tools now include enhanced observability instrumentation for monitoring and debugging:
**Two-layer instrumentation:**
1. **Middleware layer** (`LoggingMiddleware`): Automatically logs all MCP tool calls with `duration_ms` and `success` status in the audit log (Action Log UI, logs table)
2. **Sub-operation tracking**: All 19 MCP tools include granular `event_logger.log_context()` blocks for tracking individual operations like validation, database writes, and query execution
**Action naming convention:**
- Tool-level logs: `mcp_tool_call` (via middleware)
- Sub-operation logs: `mcp.{tool_name}.{operation}` (e.g., `mcp.generate_chart.validation`, `mcp.execute_sql.query_execution`)
**Querying MCP logs:**
```sql
-- Top slowest MCP operations
SELECT action, COUNT(*) as calls, AVG(duration_ms) as avg_ms
FROM logs
WHERE action LIKE 'mcp.%'
GROUP BY action
ORDER BY avg_ms DESC
LIMIT 20;
-- MCP tool success rate
SELECT
json_extract(curated_payload, '$.tool') as tool,
COUNT(*) as total_calls,
SUM(CASE WHEN json_extract(curated_payload, '$.success') = 'true' THEN 1 ELSE 0 END) as successful,
ROUND(100.0 * SUM(CASE WHEN json_extract(curated_payload, '$.success') = 'true' THEN 1 ELSE 0 END) / COUNT(*), 2) as success_rate
FROM logs
WHERE action = 'mcp_tool_call'
GROUP BY tool
ORDER BY total_calls DESC;
```
**Security note:** Sensitive parameters (passwords, API keys, tokens) are automatically redacted in logs as `[REDACTED]`.
### Distributed Coordination Backend
A new `DISTRIBUTED_COORDINATION_CONFIG` configuration provides a unified Redis-based backend for real-time coordination features in Superset. This backend enables:
@@ -75,6 +41,7 @@ A new `DISTRIBUTED_COORDINATION_CONFIG` configuration provides a unified Redis-b
The distributed coordination is used by the Global Task Framework (GTF) for abort notifications and task completion signaling, and will eventually replace `GLOBAL_ASYNC_QUERIES_CACHE_BACKEND` as the standard signaling backend. Configuring this is recommended for Redis enabled production deployments.
Example configuration in `superset_config.py`:
```python
DISTRIBUTED_COORDINATION_CONFIG = {
"CACHE_TYPE": "RedisCache",
@@ -89,9 +56,11 @@ See `superset/config.py` for complete configuration options.
### WebSocket config for GAQ with Docker
[35896](https://github.com/apache/superset/pull/35896) and [37624](https://github.com/apache/superset/pull/37624) updated documentation on how to run and configure Superset with Docker. Specifically for the WebSocket configuration, a new `docker/superset-websocket/config.example.json` was added to the repo, so that users could copy it to create a `docker/superset-websocket/config.json` file. The existing `docker/superset-websocket/config.json` was removed and git-ignored, so if you're using GAQ / WebSocket make sure to:
- Stash/backup your existing `config.json` file, to re-apply it after (will get git-ignored going forward)
- Update the `volumes` configuration for the `superset-websocket` service in your `docker-compose.override.yml` file, to include the `docker/superset-websocket/config.json` file. For example:
``` yaml
```yaml
services:
superset-websocket:
volumes:
@@ -104,7 +73,9 @@ services:
### Example Data Loading Improvements
#### New Directory Structure
Examples are now organized by name with data and configs co-located:
```
superset/examples/
├── _shared/ # Shared database & metadata configs
@@ -116,31 +87,12 @@ superset/examples/
└── ...
```
#### Simplified Parquet-based Loading
- Auto-discovery: create `superset/examples/my_dataset/data.parquet` to add a new example
- Parquet is an Apache project format: compressed (~27% smaller), self-describing schema
- YAML configs define datasets, charts, and dashboards declaratively
- Removed Python-based data generation from individual example files
#### Test Data Reorganization
- Moved `big_data.py` to `superset/cli/test_loaders.py` - better reflects its purpose as a test utility
- Fixed inverted logic for `--load-test-data` flag (now correctly includes .test.yaml files when flag is set)
- Clarified CLI flags:
- `--force` / `-f`: Force reload even if tables exist
- `--only-metadata` / `-m`: Create table metadata without loading data
- `--load-test-data` / `-t`: Include test dashboards and .test.yaml configs
- `--load-big-data` / `-b`: Generate synthetic stress-test data
#### Bug Fixes
- Fixed numpy array serialization for PostgreSQL (converts complex types to JSON strings)
- Fixed KeyError for `allow_csv_upload` field in database configs (now optional with default)
- Fixed test data loading logic that was incorrectly filtering files
### MCP Service
The MCP (Model Context Protocol) service enables AI assistants and automation tools to interact programmatically with Superset.
#### New Features
- MCP service infrastructure with FastMCP framework
- Tools for dashboards, charts, datasets, SQL Lab, and instance metadata
- Optional dependency: install with `pip install apache-superset[fastmcp]`
@@ -150,6 +102,7 @@ The MCP (Model Context Protocol) service enables AI assistants and automation to
#### New Configuration Options
**Development** (single-user, local testing):
```python
# superset_config.py
MCP_DEV_USERNAME = "admin" # User for MCP authentication
@@ -158,6 +111,7 @@ MCP_SERVICE_PORT = 5008
```
**Production** (JWT-based, multi-user):
```python
# superset_config.py
MCP_AUTH_ENABLED = True
@@ -203,12 +157,14 @@ superset mcp run --port 5008 --use-factory-config
The MCP service runs as a **separate process** from the Superset web server.
**Important**:
- Requires same Python environment and configuration as Superset
- Shares database connections with main Superset app
- Can be scaled independently from web server
- Requires `fastmcp` package (optional dependency)
**Installation**:
```bash
# Install with MCP support
pip install apache-superset[fastmcp]
@@ -222,6 +178,7 @@ Use systemd, supervisord, or Kubernetes to manage the MCP service process.
See `superset/mcp_service/PRODUCTION.md` for deployment guides.
**Security**:
- Development: Uses `MCP_DEV_USERNAME` for single-user access
- Production: **MUST** configure JWT authentication
- See `superset/mcp_service/SECURITY.md` for details
@@ -234,14 +191,50 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
- Developer Guide: `superset/mcp_service/CLAUDE.md`
- Quick Start: `superset/mcp_service/README.md`
---
### MCP Tool Observability
- [35621](https://github.com/apache/superset/pull/35621): The default hash algorithm has changed from MD5 to SHA-256 for improved security and FedRAMP compliance. This affects cache keys for thumbnails, dashboard digests, chart digests, and filter option names. Existing cached data will be invalidated upon upgrade. To opt out of this change and maintain backward compatibility, set `HASH_ALGORITHM = "md5"` in your `superset_config.py`.
- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.
MCP (Model Context Protocol) tools now include enhanced observability instrumentation for monitoring and debugging:
**Two-layer instrumentation:**
1. **Middleware layer** (`LoggingMiddleware`): Automatically logs all MCP tool calls with `duration_ms` and `success` status in the audit log (Action Log UI, logs table)
2. **Sub-operation tracking**: All 19 MCP tools include granular `event_logger.log_context()` blocks for tracking individual operations like validation, database writes, and query execution
**Action naming convention:**
- Tool-level logs: `mcp_tool_call` (via middleware)
- Sub-operation logs: `mcp.{tool_name}.{operation}` (e.g., `mcp.generate_chart.validation`, `mcp.execute_sql.query_execution`)
**Querying MCP logs:**
```sql
-- Top slowest MCP operations
SELECT action, COUNT(*) as calls, AVG(duration_ms) as avg_ms
FROM logs
WHERE action LIKE 'mcp.%'
GROUP BY action
ORDER BY avg_ms DESC
LIMIT 20;
-- MCP tool success rate
SELECT
json_extract(curated_payload, '$.tool') as tool,
COUNT(*) as total_calls,
SUM(CASE WHEN json_extract(curated_payload, '$.success') = 'true' THEN 1 ELSE 0 END) as successful,
ROUND(100.0 * SUM(CASE WHEN json_extract(curated_payload, '$.success') = 'true' THEN 1 ELSE 0 END) / COUNT(*), 2) as success_rate
FROM logs
WHERE action = 'mcp_tool_call'
GROUP BY tool
ORDER BY total_calls DESC;
```
**Security note:** Sensitive parameters (passwords, API keys, tokens) are automatically redacted in logs as `[REDACTED]`.
### APP_NAME configuration
### Breaking Changes
- [37370](https://github.com/apache/superset/pull/37370): The `APP_NAME` configuration variable no longer controls the browser window/tab title or other frontend branding. Application names should now be configured using the theme system with the `brandAppName` token. The `APP_NAME` config is still used for backend contexts (MCP service, logs, etc.) and serves as a fallback if `brandAppName` is not set.
- **Migration:**
```python
# Before (Superset 5.x)
APP_NAME = "My Custom App"
@@ -260,16 +253,22 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
APP_NAME = "My Custom App"
# But you should migrate to THEME_DEFAULT.token.brandAppName
```
- **Note:** For dark mode, set the same tokens in `THEME_DARK` configuration.
### CUSTOM_FONT_URLS configuration
- [36317](https://github.com/apache/superset/pull/36317): The `CUSTOM_FONT_URLS` configuration option has been removed. Use the new per-theme `fontUrls` token in `THEME_DEFAULT` or database-managed themes instead.
- **Before:**
```python
CUSTOM_FONT_URLS = [
"https://fonts.example.com/myfont.css",
]
```
- **After:**
```python
THEME_DEFAULT = {
"token": {
@@ -281,7 +280,13 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
}
```
### Other
- [35621](https://github.com/apache/superset/pull/35621): The default hash algorithm has changed from MD5 to SHA-256 for improved security and FedRAMP compliance. This affects cache keys for thumbnails, dashboard digests, chart digests, and filter option names. Existing cached data will be invalidated upon upgrade. To opt out of this change and maintain backward compatibility, set `HASH_ALGORITHM = "md5"` in your `superset_config.py`.
- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.
## 6.0.0
- [33055](https://github.com/apache/superset/pull/33055): Upgrades Flask-AppBuilder to 5.0.0. The AUTH_OID authentication type has been deprecated and is no longer available as an option in Flask-AppBuilder. OpenID (OID) is considered a deprecated authentication protocol - if you are using AUTH_OID, you will need to migrate to an alternative authentication method such as OAuth, LDAP, or database authentication before upgrading.
- [34871](https://github.com/apache/superset/pull/34871): Fixed Jest test hanging issue from Ant Design v5 upgrade. MessageChannel is now mocked in test environment to prevent rc-overflow from causing Jest to hang. Test environment only - no production impact.
- [34782](https://github.com/apache/superset/pull/34782): Dataset exports now include the dataset ID in their file name (similar to charts and dashboards). If managing assets as code, make sure to rename existing dataset YAMLs to include the ID (and avoid duplicated files).
@@ -290,8 +295,8 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
- Change any hex color values to one of: `"success"`, `"processing"`, `"error"`, `"warning"`, `"default"`
- Custom colors are no longer supported to maintain consistency with Ant Design components
- [34561](https://github.com/apache/superset/pull/34561) Added tiled screenshot functionality for Playwright-based reports to handle large dashboards more efficiently. When enabled (default: `SCREENSHOT_TILED_ENABLED = True`), dashboards with 20+ charts or height exceeding 5000px will be captured using multiple viewport-sized tiles and combined into a single image. This improves report generation performance and reliability for large dashboards.
Note: Pillow is now a required dependency (previously optional) to support image processing for tiled screenshots.
`thumbnails` optional dependency is now deprecated and will be removed in the next major release (7.0).
- [33084](https://github.com/apache/superset/pull/33084) The DISALLOWED_SQL_FUNCTIONS configuration now includes additional potentially sensitive database functions across PostgreSQL, MySQL, SQLite, MS SQL Server, and ClickHouse. Existing queries using these functions may now be blocked. Review your SQL Lab queries and dashboards if you encounter "disallowed function" errors after upgrading
- [34235](https://github.com/apache/superset/pull/34235) CSV exports now use `utf-8-sig` encoding by default to include a UTF-8 BOM, improving compatibility with Excel.
- [34258](https://github.com/apache/superset/pull/34258) changing the default in Dockerfile to INCLUDE_CHROMIUM="false" (previously "true"). This ensures the `lean` layer is lean by default, and people can opt-in to the `chromium` layer by setting the build arg `INCLUDE_CHROMIUM=true`. This is a breaking change for anyone using the `lean` layer, as it will no longer include Chromium by default.
@@ -681,7 +686,6 @@ Note: Pillow is now a required dependency (previously optional) to support image
- [11509](https://github.com/apache/superset/pull/11509): Dataset metadata updates check user ownership, only owners or an Admin are allowed.
- Security simplification (SIP-19), the following permission domains were simplified:
- [12072](https://github.com/apache/superset/pull/12072): `Query` with `can_read`, `can_write`
- [12036](https://github.com/apache/superset/pull/12036): `Database` with `can_read`, `can_write`.
- [12012](https://github.com/apache/superset/pull/12012): `Dashboard` with `can_read`, `can_write`.

View File

@@ -0,0 +1,162 @@
{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}
---
title: AWS IAM Authentication
sidebar_label: AWS IAM Authentication
sidebar_position: 15
---
# AWS IAM Authentication for AWS Databases
Superset supports IAM-based authentication for **Amazon Aurora** (PostgreSQL and MySQL) and **Amazon Redshift**. IAM auth eliminates the need for database passwords — Superset generates a short-lived auth token using temporary AWS credentials instead.
Cross-account IAM role assumption via STS `AssumeRole` is supported, allowing a Superset deployment in one AWS account to connect to databases in a different account.
## Prerequisites
- Enable the `AWS_DATABASE_IAM_AUTH` feature flag in `superset_config.py`. IAM authentication is gated behind this flag; if it is disabled, connections using `aws_iam` fail with *"AWS IAM database authentication is not enabled."*
```python
FEATURE_FLAGS = {
"AWS_DATABASE_IAM_AUTH": True,
}
```
- `boto3` must be installed in your Superset environment:
```bash
pip install boto3
```
- The Superset server's IAM role (or static credentials) must have permission to call `sts:AssumeRole` (for cross-account) or the same-account permissions for the target service:
- **Aurora (RDS)**: `rds-db:connect`
- **Redshift provisioned**: `redshift:GetClusterCredentials`
- **Redshift Serverless**: `redshift-serverless:GetCredentials` and `redshift-serverless:GetWorkgroup`
- SSL must be enabled on the Aurora / Redshift endpoint (required for IAM token auth).
## Configuration
IAM authentication is configured via the **encrypted_extra** field of the database connection. Access this field in the **Advanced** → **Security** section of the database connection form, under **Secure Extra**.
### Aurora PostgreSQL or Aurora MySQL
```json
{
"aws_iam": {
"enabled": true,
"role_arn": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess",
"external_id": "superset-prod-12345",
"region": "us-east-1",
"db_username": "superset_iam_user",
"session_duration": 3600
}
}
```
| Field | Required | Description |
|-------|----------|-------------|
| `enabled` | Yes | Set to `true` to activate IAM auth |
| `role_arn` | No | ARN of the cross-account IAM role to assume via STS. Omit for same-account auth |
| `external_id` | No | External ID for the STS `AssumeRole` call, if required by the target role's trust policy |
| `region` | Yes | AWS region of the database cluster |
| `db_username` | Yes | The database username associated with the IAM identity |
| `session_duration` | No | STS session duration in seconds (default: `3600`) |
### Redshift (Serverless)
```json
{
"aws_iam": {
"enabled": true,
"role_arn": "arn:aws:iam::222222222222:role/SupersetRedshiftAccess",
"region": "us-east-1",
"workgroup_name": "my-workgroup",
"db_name": "dev"
}
}
```
### Redshift (Provisioned Cluster)
```json
{
"aws_iam": {
"enabled": true,
"role_arn": "arn:aws:iam::222222222222:role/SupersetRedshiftAccess",
"region": "us-east-1",
"cluster_identifier": "my-cluster",
"db_username": "superset_iam_user",
"db_name": "dev"
}
}
```
## Cross-Account IAM Setup
To connect to a database in Account B from a Superset deployment in Account A:
**1. In Account B — create a database-access role:**
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": ["rds-db:connect"],
"Resource": "arn:aws:rds-db:us-east-1:222222222222:dbuser/db-XXXXXXXXXXXX/superset_iam_user"
}
]
}
```
**Trust policy** (allows Account A's Superset role to assume it):
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"AWS": "arn:aws:iam::111111111111:role/SupersetInstanceRole"
},
"Action": "sts:AssumeRole",
"Condition": {
"StringEquals": {
"sts:ExternalId": "superset-prod-12345"
}
}
}
]
}
```
**2. In Account A — grant Superset's role permission to assume the Account B role:**
```json
{
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Resource": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess"
}
```
**3. Configure the database connection in Superset** using the `role_arn` and `external_id` from the trust policy (as shown in the configuration example above).
## Credential Caching
STS credentials are cached in memory keyed by `(role_arn, region, external_id)` with a 10-minute TTL. This reduces the number of STS API calls when multiple queries are executed with the same connection. Tokens are refreshed automatically before expiry.

View File

@@ -138,14 +138,33 @@ THUMBNAIL_CACHE_CONFIG = init_thumbnail_cache
```
Using the above example cache keys for dashboards will be `superset_thumb__dashboard__{ID}`. You can
override the base URL for Selenium using:
```
WEBDRIVER_BASEURL = "https://superset.company.com"
```
To control which user account is used for rendering thumbnails and warming up caches, configure
`THUMBNAIL_EXECUTORS` and `CACHE_WARMUP_EXECUTORS`. Each accepts a list of executor types (which
resolve to an owner, creator, modifier, or the currently-logged-in user) and/or a `FixedExecutor`
pinned to a specific username. By default, thumbnails render as the current user
(`ExecutorType.CURRENT_USER`) and cache warmup runs as the chart/dashboard owner
(`ExecutorType.OWNER`).
To force both to run as a dedicated service account (`admin` in this example):
```python
from superset.tasks.types import ExecutorType, FixedExecutor
THUMBNAIL_EXECUTORS = [FixedExecutor("admin")]
CACHE_WARMUP_EXECUTORS = [FixedExecutor("admin")]
```
Use a dedicated read-only service account here rather than a personal admin account, so that
thumbnail rendering and cache warmup tasks don't fail if a specific user's credentials change.
Additional Selenium WebDriver configuration can be set using `WEBDRIVER_CONFIGURATION`. You can
implement a custom function to authenticate Selenium. The default function uses the `flask-login`
session cookie. Here's an example of a custom function signature:
```python
@@ -159,6 +178,20 @@ Then on configuration:
WEBDRIVER_AUTH_FUNC = auth_driver
```
## ETag Support for Thumbnails
Thumbnail and screenshot endpoints return `ETag` response headers based on the cached content digest. Clients can use conditional requests to avoid downloading unchanged images:
```
GET /api/v1/chart/42/thumbnail/
If-None-Match: "abc123..."
→ 304 Not Modified (if unchanged)
→ 200 OK (with new image if changed)
```
This is particularly useful for embedded dashboards and external integrations that periodically poll for updated screenshots — unchanged thumbnails return immediately with no payload.
## Distributed Coordination Backend
Superset supports an optional distributed coordination (`DISTRIBUTED_COORDINATION_CONFIG`) for

View File

@@ -364,6 +364,26 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
]
```
### PKCE Support
For public OAuth2 clients that cannot securely store a client secret, enable Proof Key for Code Exchange (PKCE) by adding `code_challenge_method` to the `remote_app` configuration:
```python
OAUTH_PROVIDERS = [
{
'name': 'myProvider',
'remote_app': {
'client_id': 'myClientId',
'client_secret': 'mySecret', # may be empty for pure public clients
'code_challenge_method': 'S256', # enables PKCE
'server_metadata_url': 'https://myAuthorizationServer/.well-known/openid-configuration'
}
}
]
```
PKCE (`S256`) is recommended for all OAuth2 flows, even when a client secret is present, as it protects against authorization code interception attacks.
## LDAP Authentication
FAB supports authenticating user credentials against an LDAP server.

View File

@@ -10,6 +10,10 @@ version: 1
The Superset CLI allows you to import and export datasources from and to YAML. Datasources include
databases. The data is expected to be organized in the following hierarchy:
:::info
Superset's ZIP-based import/export also covers **dashboards**, **charts**, and **saved queries**, exercised through the UI and REST API. The [Dashboard Import Overwrite Behavior](#dashboard-import-overwrite-behavior) and [UUIDs in API Responses](#uuids-in-api-responses) sections below document the behavior shared across all asset types.
:::
```text
├──databases
| ├──database_1
@@ -26,6 +30,10 @@ databases. The data is expected to be organized in the following hierarchy:
| └── ... (more databases)
```
:::note
When you export a database connection, the `masked_encrypted_extra` field (used for sensitive connection parameters such as service account JSON, OAuth tokens, and other encrypted credentials) is included in the export. When importing on another instance, these values are decrypted and re-encrypted using the destination instance's `SECRET_KEY`. Ensure the receiving instance has a valid `SECRET_KEY` configured before importing.
:::
## Exporting Datasources to YAML
You can print your current datasources to stdout by running:
@@ -75,6 +83,29 @@ The optional username flag **-u** sets the user used for the datasource import.
superset import_datasources -p <path / filename> -u 'admin'
```
## Dashboard Import Overwrite Behavior
When importing a dashboard ZIP with the **overwrite** option enabled, any existing charts that are part of the dashboard are **replaced** rather than duplicated. This applies to:
- Charts whose UUID matches a chart already present in the target instance
- The full chart configuration (query, visualization type, columns, metrics) is replaced by the imported version
If you import without the overwrite flag, existing charts with conflicting UUIDs are left unchanged and the import skips those objects. Use overwrite when you want to push a fully updated dashboard (including chart definitions) from a development or staging environment to production.
## UUIDs in API Responses
The REST API POST endpoints for **datasets**, **charts**, and **dashboards** include the auto-generated `uuid` field in the response body:
```json
{
"id": 42,
"uuid": "b8a8d5c3-1234-4abc-8def-0123456789ab",
...
}
```
UUIDs remain stable across import/export cycles and can be used for cross-environment workflows — for example, recording a UUID when creating a chart in development and using it to identify the matching chart after importing into production.
## Legacy Importing Datasources
### From older versions of Superset to current version

View File

@@ -1,7 +1,7 @@
---
title: MCP Server Deployment & Authentication
hide_title: true
sidebar_position: 14
version: 1
---
@@ -30,6 +30,10 @@ Superset includes a built-in [Model Context Protocol (MCP)](https://modelcontext
This guide covers how to run, secure, and deploy the MCP server.
:::tip Looking for user docs?
See **[Using AI with Superset](/user-docs/using-superset/using-ai-with-superset)** for a guide on what AI can do with Superset and how to connect your AI client.
:::
```mermaid
flowchart LR
A["AI Client<br/>(Claude, ChatGPT, etc.)"] -- "MCP protocol<br/>(HTTP + JSON-RPC)" --> B["MCP Server<br/>(:5008/mcp)"]
@@ -497,6 +501,7 @@ All MCP settings go in `superset_config.py`. Defaults are defined in `superset/m
| `MCP_SERVICE_URL` | `None` | Public base URL for MCP-generated links (set this when behind a reverse proxy) |
| `MCP_DEBUG` | `False` | Enable debug logging |
| `MCP_DEV_USERNAME` | -- | Superset username for development mode (no auth) |
| `MCP_RBAC_ENABLED` | `True` | Enforce Superset's role-based access control on MCP tool calls. When `True`, each tool checks that the authenticated user has the required FAB permission before executing. Disable only for testing or trusted-network deployments. |
### Authentication
@@ -512,6 +517,7 @@ All MCP settings go in `superset_config.py`. Defaults are defined in `superset/m
| `MCP_REQUIRED_SCOPES` | `[]` | Required JWT scopes |
| `MCP_JWT_DEBUG_ERRORS` | `False` | Log detailed JWT errors server-side (never exposed in HTTP responses per RFC 6750) |
| `MCP_AUTH_FACTORY` | `None` | Custom auth provider factory `(flask_app) -> auth_provider`. Takes precedence over built-in JWT |
| `MCP_USER_RESOLVER` | `None` | Custom function `(app, access_token) -> username` to extract a Superset username from a validated JWT token. When `None`, the default resolver checks `preferred_username`, `username`, `email`, and `sub` claims in that order. |
### Response Size Guard
@@ -595,6 +601,43 @@ MCP_STORE_CONFIG = {
| `event_store_max_events` | `100` | Maximum events retained per session |
| `event_store_ttl` | `3600` | Event TTL in seconds |
### Tool Search
By default the MCP server exposes a lightweight tool-search interface instead of advertising every tool at once. This reduces the initial context sent to the LLM by ~70%, which lowers cost and latency. The AI client discovers tools on demand by calling `search_tools` and then invokes them via `call_tool`.
```python
MCP_TOOL_SEARCH_CONFIG = {
"enabled": True,
"strategy": "bm25", # "bm25" (natural language) or "regex"
"max_results": 5,
"always_visible": [ # Tools always listed (pinned)
"health_check",
"get_instance_info",
],
"search_tool_name": "search_tools",
"call_tool_name": "call_tool",
"include_schemas": False, # False=summary mode (name + parameters_hint)
"compact_schemas": True, # Strip $defs (only applies when include_schemas=True)
"max_description_length": 300,
}
```
| Key | Default | Description |
|-----|---------|-------------|
| `enabled` | `True` | Enable tool search. When `False`, all tools are listed upfront |
| `strategy` | `"bm25"` | Search ranking algorithm. `"bm25"` supports natural language; `"regex"` supports pattern matching |
| `max_results` | `5` | Maximum tools returned per search query |
| `always_visible` | See above | Tools that always appear in `list_tools`, regardless of search |
| `include_schemas` | `False` | When `False` (default, "summary mode"), search results omit `inputSchema` entirely and include a lightweight `parameters_hint` listing top-level parameter names. Set to `True` to include the full `inputSchema` in search results. Full schemas are always used when a tool is actually invoked via `call_tool`. |
| `compact_schemas` | `True` | Strip `$defs` / `$ref` and replace with `{"type": "object"}` in search results to reduce token cost. Only takes effect when `include_schemas=True` — ignored in summary mode. |
| `max_description_length` | `300` | Truncate tool descriptions in search results (0 = no truncation). Applies in both summary and full-schema modes. |
:::tip
Set `enabled: False` to revert to the traditional "show all tools at once" behavior, which some clients or workflows may prefer.
:::
Tool search reduces the initial token cost from ~150K tokens (full catalog) down to ~45K tokens (pinned tools + search interface) — roughly 70% savings at the start of each conversation.
### Session & CSRF
These values are flat-merged into the Flask app config used by the MCP server process:
@@ -616,6 +659,102 @@ MCP_CSRF_CONFIG = {
---
## Access Control
### RBAC Enforcement
The MCP server respects Superset's full role-based access control (RBAC). Every authenticated user can only access the data and operations their Superset roles permit — the same rules that apply in the Superset UI apply through MCP.
Each tool declares one or more required FAB permissions. The table below maps tool groups to their permission requirements:
| Tool group | Required FAB permission |
|------------|------------------------|
| `list_charts`, `get_chart_info`, `get_chart_data`, `get_chart_preview`, `generate_chart`, `update_chart` | `can_read` on `Chart` (read), `can_write` on `Chart` (mutate) |
| `list_dashboards`, `get_dashboard_info`, `generate_dashboard`, `add_chart_to_existing_dashboard` | `can_read` on `Dashboard` (read), `can_write` on `Dashboard` (mutate) |
| `list_datasets`, `get_dataset_info`, `create_virtual_dataset` | `can_read` on `Dataset` (read), `can_write` on `Dataset` (mutate) |
| `list_databases`, `get_database_info` | `can_read` on `Database` |
| `execute_sql` | `can_execute_sql_query` on `SQLLab` |
| `open_sql_lab_with_context` | `can_read` on `SQLLab` |
| `save_sql_query` | `can_write` on `SavedQuery` |
| `health_check` | None (public) |
To disable RBAC checking globally (for trusted-network deployments or testing), set:
```python
# superset_config.py
MCP_RBAC_ENABLED = False
```
:::warning
Disabling RBAC removes all permission checks from MCP tool calls. Only do this on isolated, internal deployments where all MCP users are trusted admins.
:::
### Audit Log
All MCP tool calls are recorded in Superset's action log. You can view them at **Settings → Action Log** (admin only). Each log entry records:
- The tool name (e.g., `mcp.generate_chart.db_write`)
- The authenticated user
- A timestamp
This makes MCP activity fully auditable alongside regular Superset activity. The action log uses the same event logger as the rest of Superset, so existing log ingestion pipelines (e.g., sending logs to Elasticsearch or a SIEM) capture MCP events automatically.
### Middleware Pipeline
Every MCP request passes through a middleware stack before reaching the tool function. The default stack (assembled in `build_middleware_list()` in `server.py`) is:
| Middleware | Purpose | Default |
|------------|---------|---------|
| `StructuredContentStripperMiddleware` | Strips `structuredContent` from responses for Claude.ai bridge compatibility | Enabled |
| `LoggingMiddleware` | Logs each tool call with user, parameters, and duration | Enabled |
| `GlobalErrorHandlerMiddleware` | Catches unhandled exceptions and sanitizes sensitive data before it reaches the client | Enabled |
| `ResponseSizeGuardMiddleware` | Estimates token count, warns at 80% of limit, blocks at limit | Enabled (configurable via `MCP_RESPONSE_SIZE_CONFIG`) |
| `ResponseCachingMiddleware` | Caches read-heavy tool responses (in-memory or Redis) | Disabled (enable via `MCP_CACHE_CONFIG`) |
Additional middleware classes (`RateLimitMiddleware`, `FieldPermissionsMiddleware`, `PrivateToolMiddleware`) are implemented in `superset/mcp_service/middleware.py` but are not added to the default pipeline. They are available for operators who want to layer them in via a custom startup path.
### Error Sanitization
The `GlobalErrorHandlerMiddleware` automatically redacts sensitive information from all error messages before they reach the LLM client. The following are replaced with generic messages:
- **Database connection strings** — replaced with a generic connection error message
- **API keys and tokens** — redacted from error traces
- **File system paths** — stripped to prevent information disclosure
- **IP addresses** — removed from error context
This ensures that a misconfigured database connection or an unexpected exception never leaks credentials or internal topology to the LLM or its users. All regex patterns used for redaction are bounded to prevent ReDoS attacks.
---
## Performance
### Connection Pooling
Each MCP server process maintains its own SQLAlchemy connection pool to the database. For multi-worker deployments, total open connections = **workers × pool size**.
```python
# superset_config.py
SQLALCHEMY_POOL_SIZE = 5
SQLALCHEMY_MAX_OVERFLOW = 10
SQLALCHEMY_POOL_TIMEOUT = 30
SQLALCHEMY_POOL_RECYCLE = 3600 # Recycle connections after 1 hour
```
For a 3-pod Kubernetes deployment with the defaults above, expect up to 3 × (5 + 10) = 45 connections. Size your database's `max_connections` accordingly.
### Response Caching
Enable response caching for read-heavy workloads (dashboards/datasets that don't change frequently). With the in-memory backend (default when `MCP_STORE_CONFIG` is disabled), caching is per-process. Use Redis-backed caching for consistent cache hits across multiple pods:
```python
MCP_CACHE_CONFIG = {"enabled": True, "call_tool_ttl": 3600}
MCP_STORE_CONFIG = {"enabled": True, "CACHE_REDIS_URL": "redis://redis:6379/0"}
```
Mutating tools (`generate_chart`, `update_chart`, `execute_sql`, `generate_dashboard`) are always excluded from caching regardless of this setting.
---
## Troubleshooting
### Server won't start
@@ -660,6 +799,32 @@ MCP_CSRF_CONFIG = {
---
## Audit Events
All MCP tool calls are logged to Superset's event logger, the same system used by the web UI (viewable at **Settings → Action Log**). Each event captures:
- **Action**: `mcp.<tool_name>.<phase>` (e.g., `mcp.list_databases.query`)
- **User**: the resolved Superset username from the JWT or dev config
- **Timestamp**: when the operation ran
This means MCP activity is auditable alongside normal user activity. No additional configuration is required — logging is on by default whenever the event logger is enabled in your Superset deployment.
## Tool Pagination
MCP list tools (`list_datasets`, `list_charts`, `list_dashboards`, `list_databases`) use **offset pagination** via `page` (1-based) and `page_size` parameters. Responses include `page`, `page_size`, `total_count`, `total_pages`, `has_previous`, and `has_next`. To iterate through all results:
```python
# Example: fetch all charts across pages
all_charts = []
page = 1
while True:
result = mcp.list_charts(page=page, page_size=50)
all_charts.extend(result["charts"])
if not result.get("has_next"):
break
page += 1
```
## Security Best Practices
- **Use TLS** for all production MCP endpoints -- place the server behind a reverse proxy with HTTPS
@@ -668,12 +833,13 @@ MCP_CSRF_CONFIG = {
- **Secrets management** -- Store `MCP_JWT_SECRET`, database credentials, and API keys in environment variables or a secrets manager, never in config files committed to version control
- **Scoped tokens** -- Use `MCP_REQUIRED_SCOPES` to limit what operations a token can perform
- **Network isolation** -- In Kubernetes, restrict MCP pod network policies to only allow traffic from your AI client endpoints
- Review the **[Security documentation](/developer-docs/extensions/security)** for additional extension security guidance
---
## Next Steps
- **[Using AI with Superset](/user-docs/using-superset/using-ai-with-superset)** -- What AI can do with Superset and how to get started
- **[MCP Integration](/developer-docs/extensions/mcp)** -- Build custom MCP tools and prompts via Superset extensions
- **[Security](/developer-docs/extensions/security)** -- Security best practices for extensions
- **[Deployment](/developer-docs/extensions/deployment)** -- Package and deploy Superset extensions

View File

@@ -84,6 +84,35 @@ THEME_DARK = {
# - OS preference detection is automatically enabled
```
### App Branding
The application name shown in the browser title bar and navigation can be
set via the `brandAppName` theme token:
```python
THEME_DEFAULT = {
"token": {
"brandAppName": "Acme Analytics",
# ... other tokens
}
}
```
Or in the theme CRUD UI JSON editor:
```json
{
"token": {
"brandAppName": "Acme Analytics"
}
}
```
The existing `APP_NAME` Python config key continues to work for backward compatibility.
`brandAppName` takes precedence when both are set, and allows different themes to carry different brand names.
Email and alert/report notification subjects are driven by backend settings such as
`EMAIL_REPORTS_SUBJECT_PREFIX` and `APP_NAME`, not by this theme token.
### Migration from Configuration to UI
When `ENABLE_UI_THEME_ADMINISTRATION = True`:
@@ -312,11 +341,25 @@ Available chart types for `echartsOptionsOverridesByChartType`:
- `echarts_heatmap` - Heatmaps
- `echarts_mixed_timeseries` - Mixed time series
### Array Property Overrides
Array properties (such as color palettes) are fully supported in overrides. Arrays are **replaced entirely** rather than merged, so specify the complete array:
```python
THEME_DEFAULT = {
"token": { ... },
"echartsOptionsOverrides": {
# Replace the default color palette for all ECharts visualizations
"color": ["#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd", "#8c564b"]
}
}
```
### Best Practices
1. **Start with global overrides** for consistent styling across all charts
2. **Use chart-specific overrides** for unique requirements per visualization type
3. **Test thoroughly** as overrides use deep merge for objects, but arrays are completely replaced — always specify the full array value
4. **Document your overrides** to help team members understand custom styling
5. **Consider performance** - complex overrides may impact chart rendering speed

View File

@@ -44,6 +44,15 @@ only see the objects that they have access to.
The **sql_lab** role grants access to SQL Lab. Note that while **Admin** users have access
to all databases by default, both **Alpha** and **Gamma** users need to be given access on a per database basis.
Beyond the base `sql_lab` role, two additional SQL Lab permissions must be explicitly granted for users who need these capabilities:
| Permission | Feature |
|------------|---------|
| `can_estimate_query_cost` on `SQLLab` | Estimate query cost before running |
| `can_format_sql` on `SQLLab` | Format SQL using the database's dialect |
Grant these in **Security → List Roles** by adding the permissions to the relevant role.
### Public
The **Public** role is the most restrictive built-in role, designed specifically for anonymous/unauthenticated
@@ -174,6 +183,8 @@ However, it is crucial to understand the following:
By combining Superset's configurable safeguards with strong database-level security practices, you can achieve a more robust and layered security posture.
**Dataset Sample Access**: The `get_samples()` endpoint now enforces datasource-level access control. Users can only fetch sample rows from datasets they have been explicitly granted access to — the same permission check applied when running chart queries. This closes a prior gap where unauthenticated or under-privileged access could retrieve sample data.
### REST API for user & role management
Flask-AppBuilder supports a REST API for user CRUD,

View File

@@ -47,10 +47,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get the CSRF token](/developer-docs/api/get-the-csrf-token) | `/api/v1/security/csrf_token/` |
| `POST` | [Get a guest token](/developer-docs/api/get-a-guest-token) | `/api/v1/security/guest_token/` |
| `POST` | [Create security login](/developer-docs/api/create-security-login) | `/api/v1/security/login` |
| `POST` | [Create security refresh](/developer-docs/api/create-security-refresh) | `/api/v1/security/refresh` |
---
@@ -63,32 +63,32 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete dashboards](./api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
| `GET` | [Get a list of dashboards](./api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
| `POST` | [Create a new dashboard](./api/create-a-new-dashboard) | `/api/v1/dashboard/` |
| `GET` | [Get metadata information about this API resource (dashboard--info)](./api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
| `GET` | [Get a dashboard detail information](./api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
| `GET` | [Get a dashboard's chart definitions.](./api/get-a-dashboard-s-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
| `POST` | [Create a copy of an existing dashboard](./api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
| `GET` | [Get dashboard's datasets](./api/get-dashboard-s-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
| `DELETE` | [Delete a dashboard's embedded configuration](./api/delete-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `GET` | [Get the dashboard's embedded configuration](./api/get-the-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `POST` | [Set a dashboard's embedded configuration](./api/set-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `PUT` | [Update dashboard by id_or_slug embedded](./api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `GET` | [Get dashboard's tabs](./api/get-dashboard-s-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
| `DELETE` | [Delete a dashboard](./api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
| `PUT` | [Update a dashboard](./api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](./api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
| `PUT` | [Update colors configuration for a dashboard.](./api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
| `DELETE` | [Remove the dashboard from the user favorite list](./api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
| `POST` | [Mark the dashboard as favorite for the current user](./api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
| `PUT` | [Update native filters configuration for a dashboard.](./api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
| `GET` | [Get dashboard's thumbnail](./api/get-dashboard-s-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
| `GET` | [Download multiple dashboards as YAML files](./api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
| `GET` | [Check favorited dashboards for current user](./api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](./api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
| `GET` | [Get related fields data (dashboard-related-column-name)](./api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |
| `DELETE` | [Bulk delete dashboards](/developer-docs/api/bulk-delete-dashboards) | `/api/v1/dashboard/` |
| `GET` | [Get a list of dashboards](/developer-docs/api/get-a-list-of-dashboards) | `/api/v1/dashboard/` |
| `POST` | [Create a new dashboard](/developer-docs/api/create-a-new-dashboard) | `/api/v1/dashboard/` |
| `GET` | [Get metadata information about this API resource (dashboard--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-dashboard-info) | `/api/v1/dashboard/_info` |
| `GET` | [Get a dashboard detail information](/developer-docs/api/get-a-dashboard-detail-information) | `/api/v1/dashboard/{id_or_slug}` |
| `GET` | [Get a dashboard's chart definitions.](/developer-docs/api/get-a-dashboard-s-chart-definitions) | `/api/v1/dashboard/{id_or_slug}/charts` |
| `POST` | [Create a copy of an existing dashboard](/developer-docs/api/create-a-copy-of-an-existing-dashboard) | `/api/v1/dashboard/{id_or_slug}/copy/` |
| `GET` | [Get dashboard's datasets](/developer-docs/api/get-dashboard-s-datasets) | `/api/v1/dashboard/{id_or_slug}/datasets` |
| `DELETE` | [Delete a dashboard's embedded configuration](/developer-docs/api/delete-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `GET` | [Get the dashboard's embedded configuration](/developer-docs/api/get-the-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `POST` | [Set a dashboard's embedded configuration](/developer-docs/api/set-a-dashboard-s-embedded-configuration) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `PUT` | [Update dashboard by id_or_slug embedded](/developer-docs/api/update-dashboard-by-id-or-slug-embedded) | `/api/v1/dashboard/{id_or_slug}/embedded` |
| `GET` | [Get dashboard's tabs](/developer-docs/api/get-dashboard-s-tabs) | `/api/v1/dashboard/{id_or_slug}/tabs` |
| `DELETE` | [Delete a dashboard](/developer-docs/api/delete-a-dashboard) | `/api/v1/dashboard/{pk}` |
| `PUT` | [Update a dashboard](/developer-docs/api/update-a-dashboard) | `/api/v1/dashboard/{pk}` |
| `POST` | [Compute and cache a screenshot (dashboard-pk-cache-dashboard-screenshot)](/developer-docs/api/compute-and-cache-a-screenshot-dashboard-pk-cache-dashboard-screenshot) | `/api/v1/dashboard/{pk}/cache_dashboard_screenshot/` |
| `PUT` | [Update colors configuration for a dashboard.](/developer-docs/api/update-colors-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/colors` |
| `DELETE` | [Remove the dashboard from the user favorite list](/developer-docs/api/remove-the-dashboard-from-the-user-favorite-list) | `/api/v1/dashboard/{pk}/favorites/` |
| `POST` | [Mark the dashboard as favorite for the current user](/developer-docs/api/mark-the-dashboard-as-favorite-for-the-current-user) | `/api/v1/dashboard/{pk}/favorites/` |
| `PUT` | [Update native filters configuration for a dashboard.](/developer-docs/api/update-native-filters-configuration-for-a-dashboard) | `/api/v1/dashboard/{pk}/filters` |
| `GET` | [Get a computed screenshot from cache (dashboard-pk-screenshot-digest)](/developer-docs/api/get-a-computed-screenshot-from-cache-dashboard-pk-screenshot-digest) | `/api/v1/dashboard/{pk}/screenshot/{digest}/` |
| `GET` | [Get dashboard's thumbnail](/developer-docs/api/get-dashboard-s-thumbnail) | `/api/v1/dashboard/{pk}/thumbnail/{digest}/` |
| `GET` | [Download multiple dashboards as YAML files](/developer-docs/api/download-multiple-dashboards-as-yaml-files) | `/api/v1/dashboard/export/` |
| `GET` | [Check favorited dashboards for current user](/developer-docs/api/check-favorited-dashboards-for-current-user) | `/api/v1/dashboard/favorite_status/` |
| `POST` | [Import dashboard(s) with associated charts/datasets/databases](/developer-docs/api/import-dashboard-s-with-associated-charts-datasets-databases) | `/api/v1/dashboard/import/` |
| `GET` | [Get related fields data (dashboard-related-column-name)](/developer-docs/api/get-related-fields-data-dashboard-related-column-name) | `/api/v1/dashboard/related/{column_name}` |
</details>
@@ -97,26 +97,26 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete charts](./api/bulk-delete-charts) | `/api/v1/chart/` |
| `GET` | [Get a list of charts](./api/get-a-list-of-charts) | `/api/v1/chart/` |
| `POST` | [Create a new chart](./api/create-a-new-chart) | `/api/v1/chart/` |
| `GET` | [Get metadata information about this API resource (chart--info)](./api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
| `DELETE` | [Delete a chart](./api/delete-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Get a chart detail information](./api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
| `PUT` | [Update a chart](./api/update-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](./api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
| `GET` | [Return payload data response for a chart](./api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
| `DELETE` | [Remove the chart from the user favorite list](./api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
| `POST` | [Mark the chart as favorite for the current user](./api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](./api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
| `GET` | [Get chart thumbnail](./api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
| `POST` | [Return payload data response for the given query (chart-data)](./api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](./api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
| `GET` | [Download multiple charts as YAML files](./api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
| `GET` | [Check favorited charts for current user](./api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
| `POST` | [Import chart(s) with associated datasets and databases](./api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
| `GET` | [Get related fields data (chart-related-column-name)](./api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
| `PUT` | [Warm up the cache for the chart](./api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |
| `DELETE` | [Bulk delete charts](/developer-docs/api/bulk-delete-charts) | `/api/v1/chart/` |
| `GET` | [Get a list of charts](/developer-docs/api/get-a-list-of-charts) | `/api/v1/chart/` |
| `POST` | [Create a new chart](/developer-docs/api/create-a-new-chart) | `/api/v1/chart/` |
| `GET` | [Get metadata information about this API resource (chart--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-chart-info) | `/api/v1/chart/_info` |
| `DELETE` | [Delete a chart](/developer-docs/api/delete-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Get a chart detail information](/developer-docs/api/get-a-chart-detail-information) | `/api/v1/chart/{pk}` |
| `PUT` | [Update a chart](/developer-docs/api/update-a-chart) | `/api/v1/chart/{pk}` |
| `GET` | [Compute and cache a screenshot (chart-pk-cache-screenshot)](/developer-docs/api/compute-and-cache-a-screenshot-chart-pk-cache-screenshot) | `/api/v1/chart/{pk}/cache_screenshot/` |
| `GET` | [Return payload data response for a chart](/developer-docs/api/return-payload-data-response-for-a-chart) | `/api/v1/chart/{pk}/data/` |
| `DELETE` | [Remove the chart from the user favorite list](/developer-docs/api/remove-the-chart-from-the-user-favorite-list) | `/api/v1/chart/{pk}/favorites/` |
| `POST` | [Mark the chart as favorite for the current user](/developer-docs/api/mark-the-chart-as-favorite-for-the-current-user) | `/api/v1/chart/{pk}/favorites/` |
| `GET` | [Get a computed screenshot from cache (chart-pk-screenshot-digest)](/developer-docs/api/get-a-computed-screenshot-from-cache-chart-pk-screenshot-digest) | `/api/v1/chart/{pk}/screenshot/{digest}/` |
| `GET` | [Get chart thumbnail](/developer-docs/api/get-chart-thumbnail) | `/api/v1/chart/{pk}/thumbnail/{digest}/` |
| `POST` | [Return payload data response for the given query (chart-data)](/developer-docs/api/return-payload-data-response-for-the-given-query-chart-data) | `/api/v1/chart/data` |
| `GET` | [Return payload data response for the given query (chart-data-cache-key)](/developer-docs/api/return-payload-data-response-for-the-given-query-chart-data-cache-key) | `/api/v1/chart/data/{cache_key}` |
| `GET` | [Download multiple charts as YAML files](/developer-docs/api/download-multiple-charts-as-yaml-files) | `/api/v1/chart/export/` |
| `GET` | [Check favorited charts for current user](/developer-docs/api/check-favorited-charts-for-current-user) | `/api/v1/chart/favorite_status/` |
| `POST` | [Import chart(s) with associated datasets and databases](/developer-docs/api/import-chart-s-with-associated-datasets-and-databases) | `/api/v1/chart/import/` |
| `GET` | [Get related fields data (chart-related-column-name)](/developer-docs/api/get-related-fields-data-chart-related-column-name) | `/api/v1/chart/related/{column_name}` |
| `PUT` | [Warm up the cache for the chart](/developer-docs/api/warm-up-the-cache-for-the-chart) | `/api/v1/chart/warm_up_cache` |
</details>
@@ -125,24 +125,24 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete datasets](./api/bulk-delete-datasets) | `/api/v1/dataset/` |
| `GET` | [Get a list of datasets](./api/get-a-list-of-datasets) | `/api/v1/dataset/` |
| `POST` | [Create a new dataset](./api/create-a-new-dataset) | `/api/v1/dataset/` |
| `GET` | [Get metadata information about this API resource (dataset--info)](./api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
| `DELETE` | [Delete a dataset](./api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
| `GET` | [Get a dataset](./api/get-a-dataset) | `/api/v1/dataset/{pk}` |
| `PUT` | [Update a dataset](./api/update-a-dataset) | `/api/v1/dataset/{pk}` |
| `DELETE` | [Delete a dataset column](./api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
| `DELETE` | [Delete a dataset metric](./api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
| `PUT` | [Refresh and update columns of a dataset](./api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
| `GET` | [Get charts and dashboards count associated to a dataset](./api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](./api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
| `POST` | [Duplicate a dataset](./api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
| `GET` | [Download multiple datasets as YAML files](./api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
| `POST` | [Retrieve a table by name, or create it if it does not exist](./api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
| `POST` | [Import dataset(s) with associated databases](./api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
| `GET` | [Get related fields data (dataset-related-column-name)](./api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
| `PUT` | [Warm up the cache for each chart powered by the given table](./api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |
| `DELETE` | [Bulk delete datasets](/developer-docs/api/bulk-delete-datasets) | `/api/v1/dataset/` |
| `GET` | [Get a list of datasets](/developer-docs/api/get-a-list-of-datasets) | `/api/v1/dataset/` |
| `POST` | [Create a new dataset](/developer-docs/api/create-a-new-dataset) | `/api/v1/dataset/` |
| `GET` | [Get metadata information about this API resource (dataset--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-dataset-info) | `/api/v1/dataset/_info` |
| `DELETE` | [Delete a dataset](/developer-docs/api/delete-a-dataset) | `/api/v1/dataset/{pk}` |
| `GET` | [Get a dataset](/developer-docs/api/get-a-dataset) | `/api/v1/dataset/{pk}` |
| `PUT` | [Update a dataset](/developer-docs/api/update-a-dataset) | `/api/v1/dataset/{pk}` |
| `DELETE` | [Delete a dataset column](/developer-docs/api/delete-a-dataset-column) | `/api/v1/dataset/{pk}/column/{column_id}` |
| `DELETE` | [Delete a dataset metric](/developer-docs/api/delete-a-dataset-metric) | `/api/v1/dataset/{pk}/metric/{metric_id}` |
| `PUT` | [Refresh and update columns of a dataset](/developer-docs/api/refresh-and-update-columns-of-a-dataset) | `/api/v1/dataset/{pk}/refresh` |
| `GET` | [Get charts and dashboards count associated to a dataset](/developer-docs/api/get-charts-and-dashboards-count-associated-to-a-dataset) | `/api/v1/dataset/{pk}/related_objects` |
| `GET` | [Get distinct values from field data (dataset-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-dataset-distinct-column-name) | `/api/v1/dataset/distinct/{column_name}` |
| `POST` | [Duplicate a dataset](/developer-docs/api/duplicate-a-dataset) | `/api/v1/dataset/duplicate` |
| `GET` | [Download multiple datasets as YAML files](/developer-docs/api/download-multiple-datasets-as-yaml-files) | `/api/v1/dataset/export/` |
| `POST` | [Retrieve a table by name, or create it if it does not exist](/developer-docs/api/retrieve-a-table-by-name-or-create-it-if-it-does-not-exist) | `/api/v1/dataset/get_or_create/` |
| `POST` | [Import dataset(s) with associated databases](/developer-docs/api/import-dataset-s-with-associated-databases) | `/api/v1/dataset/import/` |
| `GET` | [Get related fields data (dataset-related-column-name)](/developer-docs/api/get-related-fields-data-dataset-related-column-name) | `/api/v1/dataset/related/{column_name}` |
| `PUT` | [Warm up the cache for each chart powered by the given table](/developer-docs/api/warm-up-the-cache-for-each-chart-powered-by-the-given-table) | `/api/v1/dataset/warm_up_cache` |
</details>
@@ -151,37 +151,37 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get a list of databases](./api/get-a-list-of-databases) | `/api/v1/database/` |
| `POST` | [Create a new database](./api/create-a-new-database) | `/api/v1/database/` |
| `GET` | [Get metadata information about this API resource (database--info)](./api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
| `DELETE` | [Delete a database](./api/delete-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get a database](./api/get-a-database) | `/api/v1/database/{pk}` |
| `PUT` | [Change a database](./api/change-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get all catalogs from a database](./api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
| `GET` | [Get a database connection info](./api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
| `GET` | [Get function names supported by a database](./api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
| `GET` | [Get charts and dashboards count associated to a database](./api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
| `GET` | [The list of the database schemas where to upload information](./api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
| `GET` | [Get all schemas from a database](./api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](./api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
| `DELETE` | [Delete a SSH tunnel](./api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
| `POST` | [Re-sync all permissions for a database connection](./api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](./api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
| `GET` | [Get table metadata](./api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](./api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
| `GET` | [Get database table metadata](./api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
| `GET` | [Get a list of tables for given database](./api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
| `POST` | [Upload a file to a database table](./api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
| `POST` | [Validate arbitrary SQL](./api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
| `GET` | [Get names of databases currently available](./api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
| `GET` | [Download database(s) and associated dataset(s) as a zip file](./api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
| `POST` | [Import database(s) with associated datasets](./api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
| `GET` | [Receive personal access tokens from OAuth2](./api/receive-personal-access-tokens-from-oauth2) | `/api/v1/database/oauth2/` |
| `GET` | [Get related fields data (database-related-column-name)](./api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
| `POST` | [Test a database connection](./api/test-a-database-connection) | `/api/v1/database/test_connection/` |
| `POST` | [Upload a file and returns file metadata](./api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
| `POST` | [Validate database connection parameters](./api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |
| `GET` | [Get a list of databases](/developer-docs/api/get-a-list-of-databases) | `/api/v1/database/` |
| `POST` | [Create a new database](/developer-docs/api/create-a-new-database) | `/api/v1/database/` |
| `GET` | [Get metadata information about this API resource (database--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-database-info) | `/api/v1/database/_info` |
| `DELETE` | [Delete a database](/developer-docs/api/delete-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get a database](/developer-docs/api/get-a-database) | `/api/v1/database/{pk}` |
| `PUT` | [Change a database](/developer-docs/api/change-a-database) | `/api/v1/database/{pk}` |
| `GET` | [Get all catalogs from a database](/developer-docs/api/get-all-catalogs-from-a-database) | `/api/v1/database/{pk}/catalogs/` |
| `GET` | [Get a database connection info](/developer-docs/api/get-a-database-connection-info) | `/api/v1/database/{pk}/connection` |
| `GET` | [Get function names supported by a database](/developer-docs/api/get-function-names-supported-by-a-database) | `/api/v1/database/{pk}/function_names/` |
| `GET` | [Get charts and dashboards count associated to a database](/developer-docs/api/get-charts-and-dashboards-count-associated-to-a-database) | `/api/v1/database/{pk}/related_objects/` |
| `GET` | [The list of the database schemas where to upload information](/developer-docs/api/the-list-of-the-database-schemas-where-to-upload-information) | `/api/v1/database/{pk}/schemas_access_for_file_upload/` |
| `GET` | [Get all schemas from a database](/developer-docs/api/get-all-schemas-from-a-database) | `/api/v1/database/{pk}/schemas/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name)](/developer-docs/api/get-database-select-star-for-table-database-pk-select-star-table-name) | `/api/v1/database/{pk}/select_star/{table_name}/` |
| `GET` | [Get database select star for table (database-pk-select-star-table-name-schema-name)](/developer-docs/api/get-database-select-star-for-table-database-pk-select-star-table-name-schema-name) | `/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/` |
| `DELETE` | [Delete a SSH tunnel](/developer-docs/api/delete-a-ssh-tunnel) | `/api/v1/database/{pk}/ssh_tunnel/` |
| `POST` | [Re-sync all permissions for a database connection](/developer-docs/api/re-sync-all-permissions-for-a-database-connection) | `/api/v1/database/{pk}/sync_permissions/` |
| `GET` | [Get table extra metadata (database-pk-table-extra-table-name-schema-name)](/developer-docs/api/get-table-extra-metadata-database-pk-table-extra-table-name-schema-name) | `/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/` |
| `GET` | [Get table metadata](/developer-docs/api/get-table-metadata) | `/api/v1/database/{pk}/table_metadata/` |
| `GET` | [Get table extra metadata (database-pk-table-metadata-extra)](/developer-docs/api/get-table-extra-metadata-database-pk-table-metadata-extra) | `/api/v1/database/{pk}/table_metadata/extra/` |
| `GET` | [Get database table metadata](/developer-docs/api/get-database-table-metadata) | `/api/v1/database/{pk}/table/{table_name}/{schema_name}/` |
| `GET` | [Get a list of tables for given database](/developer-docs/api/get-a-list-of-tables-for-given-database) | `/api/v1/database/{pk}/tables/` |
| `POST` | [Upload a file to a database table](/developer-docs/api/upload-a-file-to-a-database-table) | `/api/v1/database/{pk}/upload/` |
| `POST` | [Validate arbitrary SQL](/developer-docs/api/validate-arbitrary-sql) | `/api/v1/database/{pk}/validate_sql/` |
| `GET` | [Get names of databases currently available](/developer-docs/api/get-names-of-databases-currently-available) | `/api/v1/database/available/` |
| `GET` | [Download database(s) and associated dataset(s) as a zip file](/developer-docs/api/download-database-s-and-associated-dataset-s-as-a-zip-file) | `/api/v1/database/export/` |
| `POST` | [Import database(s) with associated datasets](/developer-docs/api/import-database-s-with-associated-datasets) | `/api/v1/database/import/` |
| `GET` | [Receive personal access tokens from OAuth2](/developer-docs/api/receive-personal-access-tokens-from-oauth2) | `/api/v1/database/oauth2/` |
| `GET` | [Get related fields data (database-related-column-name)](/developer-docs/api/get-related-fields-data-database-related-column-name) | `/api/v1/database/related/{column_name}` |
| `POST` | [Test a database connection](/developer-docs/api/test-a-database-connection) | `/api/v1/database/test_connection/` |
| `POST` | [Upload a file and returns file metadata](/developer-docs/api/upload-a-file-and-returns-file-metadata) | `/api/v1/database/upload_metadata/` |
| `POST` | [Validate database connection parameters](/developer-docs/api/validate-database-connection-parameters) | `/api/v1/database/validate_parameters/` |
</details>
@@ -192,7 +192,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Assemble Explore related information in a single endpoint](./api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |
| `GET` | [Assemble Explore related information in a single endpoint](/developer-docs/api/assemble-explore-related-information-in-a-single-endpoint) | `/api/v1/explore/` |
</details>
@@ -201,12 +201,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get the bootstrap data for SqlLab page](./api/get-the-bootstrap-data-for-sqllab-page) | `/api/v1/sqllab/` |
| `POST` | [Estimate the SQL query execution cost](./api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
| `POST` | [Execute a SQL query](./api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
| `GET` | [Export the SQL query results to a CSV](./api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
| `POST` | [Format SQL code](./api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
| `GET` | [Get the result of a SQL query execution](./api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |
| `GET` | [Get the bootstrap data for SqlLab page](/developer-docs/api/get-the-bootstrap-data-for-sqllab-page) | `/api/v1/sqllab/` |
| `POST` | [Estimate the SQL query execution cost](/developer-docs/api/estimate-the-sql-query-execution-cost) | `/api/v1/sqllab/estimate/` |
| `POST` | [Execute a SQL query](/developer-docs/api/execute-a-sql-query) | `/api/v1/sqllab/execute/` |
| `GET` | [Export the SQL query results to a CSV](/developer-docs/api/export-the-sql-query-results-to-a-csv) | `/api/v1/sqllab/export/{client_id}/` |
| `POST` | [Format SQL code](/developer-docs/api/format-sql-code) | `/api/v1/sqllab/format_sql/` |
| `GET` | [Get the result of a SQL query execution](/developer-docs/api/get-the-result-of-a-sql-query-execution) | `/api/v1/sqllab/results/` |
</details>
@@ -215,23 +215,23 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get a list of queries](./api/get-a-list-of-queries) | `/api/v1/query/` |
| `GET` | [Get query detail information](./api/get-query-detail-information) | `/api/v1/query/{pk}` |
| `GET` | [Get distinct values from field data (query-distinct-column-name)](./api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
| `GET` | [Get related fields data (query-related-column-name)](./api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
| `POST` | [Manually stop a query with client_id](./api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
| `GET` | [Get a list of queries that changed after last_updated_ms](./api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
| `DELETE` | [Bulk delete saved queries](./api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
| `GET` | [Get a list of saved queries](./api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
| `POST` | [Create a saved query](./api/create-a-saved-query) | `/api/v1/saved_query/` |
| `GET` | [Get metadata information about this API resource (saved-query--info)](./api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
| `DELETE` | [Delete a saved query](./api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get a saved query](./api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `PUT` | [Update a saved query](./api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](./api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
| `GET` | [Download multiple saved queries as YAML files](./api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
| `POST` | [Import saved queries with associated databases](./api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
| `GET` | [Get related fields data (saved-query-related-column-name)](./api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |
| `GET` | [Get a list of queries](/developer-docs/api/get-a-list-of-queries) | `/api/v1/query/` |
| `GET` | [Get query detail information](/developer-docs/api/get-query-detail-information) | `/api/v1/query/{pk}` |
| `GET` | [Get distinct values from field data (query-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-query-distinct-column-name) | `/api/v1/query/distinct/{column_name}` |
| `GET` | [Get related fields data (query-related-column-name)](/developer-docs/api/get-related-fields-data-query-related-column-name) | `/api/v1/query/related/{column_name}` |
| `POST` | [Manually stop a query with client_id](/developer-docs/api/manually-stop-a-query-with-client-id) | `/api/v1/query/stop` |
| `GET` | [Get a list of queries that changed after last_updated_ms](/developer-docs/api/get-a-list-of-queries-that-changed-after-last-updated-ms) | `/api/v1/query/updated_since` |
| `DELETE` | [Bulk delete saved queries](/developer-docs/api/bulk-delete-saved-queries) | `/api/v1/saved_query/` |
| `GET` | [Get a list of saved queries](/developer-docs/api/get-a-list-of-saved-queries) | `/api/v1/saved_query/` |
| `POST` | [Create a saved query](/developer-docs/api/create-a-saved-query) | `/api/v1/saved_query/` |
| `GET` | [Get metadata information about this API resource (saved-query--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-saved-query-info) | `/api/v1/saved_query/_info` |
| `DELETE` | [Delete a saved query](/developer-docs/api/delete-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get a saved query](/developer-docs/api/get-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `PUT` | [Update a saved query](/developer-docs/api/update-a-saved-query) | `/api/v1/saved_query/{pk}` |
| `GET` | [Get distinct values from field data (saved-query-distinct-column-name)](/developer-docs/api/get-distinct-values-from-field-data-saved-query-distinct-column-name) | `/api/v1/saved_query/distinct/{column_name}` |
| `GET` | [Download multiple saved queries as YAML files](/developer-docs/api/download-multiple-saved-queries-as-yaml-files) | `/api/v1/saved_query/export/` |
| `POST` | [Import saved queries with associated databases](/developer-docs/api/import-saved-queries-with-associated-databases) | `/api/v1/saved_query/import/` |
| `GET` | [Get related fields data (saved-query-related-column-name)](/developer-docs/api/get-related-fields-data-saved-query-related-column-name) | `/api/v1/saved_query/related/{column_name}` |
</details>
@@ -240,7 +240,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get possible values for a datasource column](./api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |
| `GET` | [Get possible values for a datasource column](/developer-docs/api/get-possible-values-for-a-datasource-column) | `/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/` |
</details>
@@ -249,8 +249,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Return an AdvancedDataTypeResponse](./api/return-an-advanceddatatyperesponse) | `/api/v1/advanced_data_type/convert` |
| `GET` | [Return a list of available advanced data types](./api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |
| `GET` | [Return an AdvancedDataTypeResponse](/developer-docs/api/return-an-advanceddatatyperesponse) | `/api/v1/advanced_data_type/convert` |
| `GET` | [Return a list of available advanced data types](/developer-docs/api/return-a-list-of-available-advanced-data-types) | `/api/v1/advanced_data_type/types` |
</details>
@@ -261,21 +261,21 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete tags](./api/bulk-delete-tags) | `/api/v1/tag/` |
| `GET` | [Get a list of tags](./api/get-a-list-of-tags) | `/api/v1/tag/` |
| `POST` | [Create a tag](./api/create-a-tag) | `/api/v1/tag/` |
| `GET` | [Get metadata information about tag API endpoints](./api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
| `POST` | [Add tags to an object](./api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
| `DELETE` | [Delete a tagged object](./api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
| `DELETE` | [Delete a tag](./api/delete-a-tag) | `/api/v1/tag/{pk}` |
| `GET` | [Get a tag detail information](./api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
| `PUT` | [Update a tag](./api/update-a-tag) | `/api/v1/tag/{pk}` |
| `DELETE` | [Delete tag by pk favorites](./api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Create tag by pk favorites](./api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Bulk create tags and tagged objects](./api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
| `GET` | [Get tag favorite status](./api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
| `GET` | [Get all objects associated with a tag](./api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
| `GET` | [Get related fields data (tag-related-column-name)](./api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |
| `DELETE` | [Bulk delete tags](/developer-docs/api/bulk-delete-tags) | `/api/v1/tag/` |
| `GET` | [Get a list of tags](/developer-docs/api/get-a-list-of-tags) | `/api/v1/tag/` |
| `POST` | [Create a tag](/developer-docs/api/create-a-tag) | `/api/v1/tag/` |
| `GET` | [Get metadata information about tag API endpoints](/developer-docs/api/get-metadata-information-about-tag-api-endpoints) | `/api/v1/tag/_info` |
| `POST` | [Add tags to an object](/developer-docs/api/add-tags-to-an-object) | `/api/v1/tag/{object_type}/{object_id}/` |
| `DELETE` | [Delete a tagged object](/developer-docs/api/delete-a-tagged-object) | `/api/v1/tag/{object_type}/{object_id}/{tag}/` |
| `DELETE` | [Delete a tag](/developer-docs/api/delete-a-tag) | `/api/v1/tag/{pk}` |
| `GET` | [Get a tag detail information](/developer-docs/api/get-a-tag-detail-information) | `/api/v1/tag/{pk}` |
| `PUT` | [Update a tag](/developer-docs/api/update-a-tag) | `/api/v1/tag/{pk}` |
| `DELETE` | [Delete tag by pk favorites](/developer-docs/api/delete-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Create tag by pk favorites](/developer-docs/api/create-tag-by-pk-favorites) | `/api/v1/tag/{pk}/favorites/` |
| `POST` | [Bulk create tags and tagged objects](/developer-docs/api/bulk-create-tags-and-tagged-objects) | `/api/v1/tag/bulk_create` |
| `GET` | [Get tag favorite status](/developer-docs/api/get-tag-favorite-status) | `/api/v1/tag/favorite_status/` |
| `GET` | [Get all objects associated with a tag](/developer-docs/api/get-all-objects-associated-with-a-tag) | `/api/v1/tag/get_objects/` |
| `GET` | [Get related fields data (tag-related-column-name)](/developer-docs/api/get-related-fields-data-tag-related-column-name) | `/api/v1/tag/related/{column_name}` |
</details>
@@ -284,20 +284,20 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Delete multiple annotation layers in a bulk operation](./api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
| `GET` | [Get a list of annotation layers (annotation-layer)](./api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
| `POST` | [Create an annotation layer (annotation-layer)](./api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](./api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](./api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `GET` | [Get an annotation layer (annotation-layer-pk)](./api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk)](./api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `DELETE` | [Bulk delete annotation layers](./api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](./api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](./api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](./api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get related fields data (annotation-layer-related-column-name)](./api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |
| `DELETE` | [Delete multiple annotation layers in a bulk operation](/developer-docs/api/delete-multiple-annotation-layers-in-a-bulk-operation) | `/api/v1/annotation_layer/` |
| `GET` | [Get a list of annotation layers (annotation-layer)](/developer-docs/api/get-a-list-of-annotation-layers-annotation-layer) | `/api/v1/annotation_layer/` |
| `POST` | [Create an annotation layer (annotation-layer)](/developer-docs/api/create-an-annotation-layer-annotation-layer) | `/api/v1/annotation_layer/` |
| `GET` | [Get metadata information about this API resource (annotation-layer--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-annotation-layer-info) | `/api/v1/annotation_layer/_info` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk)](/developer-docs/api/delete-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `GET` | [Get an annotation layer (annotation-layer-pk)](/developer-docs/api/get-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk)](/developer-docs/api/update-an-annotation-layer-annotation-layer-pk) | `/api/v1/annotation_layer/{pk}` |
| `DELETE` | [Bulk delete annotation layers](/developer-docs/api/bulk-delete-annotation-layers) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `GET` | [Get a list of annotation layers (annotation-layer-pk-annotation)](/developer-docs/api/get-a-list-of-annotation-layers-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `POST` | [Create an annotation layer (annotation-layer-pk-annotation)](/developer-docs/api/create-an-annotation-layer-annotation-layer-pk-annotation) | `/api/v1/annotation_layer/{pk}/annotation/` |
| `DELETE` | [Delete annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/delete-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get an annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/get-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `PUT` | [Update an annotation layer (annotation-layer-pk-annotation-annotation-id)](/developer-docs/api/update-an-annotation-layer-annotation-layer-pk-annotation-annotation-id) | `/api/v1/annotation_layer/{pk}/annotation/{annotation_id}` |
| `GET` | [Get related fields data (annotation-layer-related-column-name)](/developer-docs/api/get-related-fields-data-annotation-layer-related-column-name) | `/api/v1/annotation_layer/related/{column_name}` |
</details>
@@ -306,14 +306,14 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete CSS templates](./api/bulk-delete-css-templates) | `/api/v1/css_template/` |
| `GET` | [Get a list of CSS templates](./api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
| `POST` | [Create a CSS template](./api/create-a-css-template) | `/api/v1/css_template/` |
| `GET` | [Get metadata information about this API resource (css-template--info)](./api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
| `DELETE` | [Delete a CSS template](./api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get a CSS template](./api/get-a-css-template) | `/api/v1/css_template/{pk}` |
| `PUT` | [Update a CSS template](./api/update-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get related fields data (css-template-related-column-name)](./api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |
| `DELETE` | [Bulk delete CSS templates](/developer-docs/api/bulk-delete-css-templates) | `/api/v1/css_template/` |
| `GET` | [Get a list of CSS templates](/developer-docs/api/get-a-list-of-css-templates) | `/api/v1/css_template/` |
| `POST` | [Create a CSS template](/developer-docs/api/create-a-css-template) | `/api/v1/css_template/` |
| `GET` | [Get metadata information about this API resource (css-template--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-css-template-info) | `/api/v1/css_template/_info` |
| `DELETE` | [Delete a CSS template](/developer-docs/api/delete-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get a CSS template](/developer-docs/api/get-a-css-template) | `/api/v1/css_template/{pk}` |
| `PUT` | [Update a CSS template](/developer-docs/api/update-a-css-template) | `/api/v1/css_template/{pk}` |
| `GET` | [Get related fields data (css-template-related-column-name)](/developer-docs/api/get-related-fields-data-css-template-related-column-name) | `/api/v1/css_template/related/{column_name}` |
</details>
@@ -324,8 +324,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | [Create a new dashboard's permanent link](./api/create-a-new-dashboard-s-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
| `GET` | [Get dashboard's permanent link state](./api/get-dashboard-s-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |
| `POST` | [Create a new dashboard's permanent link](/developer-docs/api/create-a-new-dashboard-s-permanent-link) | `/api/v1/dashboard/{pk}/permalink` |
| `GET` | [Get dashboard's permanent link state](/developer-docs/api/get-dashboard-s-permanent-link-state) | `/api/v1/dashboard/permalink/{key}` |
</details>
@@ -334,8 +334,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | [Create a new permanent link (explore-permalink)](./api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
| `GET` | [Get chart's permanent link state](./api/get-chart-s-permanent-link-state) | `/api/v1/explore/permalink/{key}` |
| `POST` | [Create a new permanent link (explore-permalink)](/developer-docs/api/create-a-new-permanent-link-explore-permalink) | `/api/v1/explore/permalink` |
| `GET` | [Get chart's permanent link state](/developer-docs/api/get-chart-s-permanent-link-state) | `/api/v1/explore/permalink/{key}` |
</details>
@@ -344,8 +344,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | [Create a new permanent link (sqllab-permalink)](./api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
| `GET` | [Get permanent link state for SQLLab editor.](./api/get-permanent-link-state-for-sqllab-editor) | `/api/v1/sqllab/permalink/{key}` |
| `POST` | [Create a new permanent link (sqllab-permalink)](/developer-docs/api/create-a-new-permanent-link-sqllab-permalink) | `/api/v1/sqllab/permalink` |
| `GET` | [Get permanent link state for SQLLab editor.](/developer-docs/api/get-permanent-link-state-for-sqllab-editor) | `/api/v1/sqllab/permalink/{key}` |
</details>
@@ -354,7 +354,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](./api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |
| `GET` | [Get a report schedule log (embedded-dashboard-uuid)](/developer-docs/api/get-a-report-schedule-log-embedded-dashboard-uuid) | `/api/v1/embedded_dashboard/{uuid}` |
</details>
@@ -363,10 +363,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | [Create a dashboard's filter state](./api/create-a-dashboard-s-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
| `DELETE` | [Delete a dashboard's filter state value](./api/delete-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `GET` | [Get a dashboard's filter state value](./api/get-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `PUT` | [Update a dashboard's filter state value](./api/update-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `POST` | [Create a dashboard's filter state](/developer-docs/api/create-a-dashboard-s-filter-state) | `/api/v1/dashboard/{pk}/filter_state` |
| `DELETE` | [Delete a dashboard's filter state value](/developer-docs/api/delete-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `GET` | [Get a dashboard's filter state value](/developer-docs/api/get-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
| `PUT` | [Update a dashboard's filter state value](/developer-docs/api/update-a-dashboard-s-filter-state-value) | `/api/v1/dashboard/{pk}/filter_state/{key}` |
</details>
@@ -375,10 +375,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | [Create a new form_data](./api/create-a-new-form-data) | `/api/v1/explore/form_data` |
| `DELETE` | [Delete a form_data](./api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `GET` | [Get a form_data](./api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `PUT` | [Update an existing form_data](./api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |
| `POST` | [Create a new form_data](/developer-docs/api/create-a-new-form-data) | `/api/v1/explore/form_data` |
| `DELETE` | [Delete a form_data](/developer-docs/api/delete-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `GET` | [Get a form_data](/developer-docs/api/get-a-form-data) | `/api/v1/explore/form_data/{key}` |
| `PUT` | [Update an existing form_data](/developer-docs/api/update-an-existing-form-data) | `/api/v1/explore/form_data/{key}` |
</details>
@@ -389,17 +389,17 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete report schedules](./api/bulk-delete-report-schedules) | `/api/v1/report/` |
| `GET` | [Get a list of report schedules](./api/get-a-list-of-report-schedules) | `/api/v1/report/` |
| `POST` | [Create a report schedule](./api/create-a-report-schedule) | `/api/v1/report/` |
| `GET` | [Get metadata information about this API resource (report--info)](./api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
| `DELETE` | [Delete a report schedule](./api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a report schedule](./api/get-a-report-schedule) | `/api/v1/report/{pk}` |
| `PUT` | [Update a report schedule](./api/update-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a list of report schedule logs](./api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
| `GET` | [Get a report schedule log (report-pk-log-log-id)](./api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
| `GET` | [Get related fields data (report-related-column-name)](./api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
| `GET` | [Get slack channels](./api/get-slack-channels) | `/api/v1/report/slack_channels/` |
| `DELETE` | [Bulk delete report schedules](/developer-docs/api/bulk-delete-report-schedules) | `/api/v1/report/` |
| `GET` | [Get a list of report schedules](/developer-docs/api/get-a-list-of-report-schedules) | `/api/v1/report/` |
| `POST` | [Create a report schedule](/developer-docs/api/create-a-report-schedule) | `/api/v1/report/` |
| `GET` | [Get metadata information about this API resource (report--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-report-info) | `/api/v1/report/_info` |
| `DELETE` | [Delete a report schedule](/developer-docs/api/delete-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a report schedule](/developer-docs/api/get-a-report-schedule) | `/api/v1/report/{pk}` |
| `PUT` | [Update a report schedule](/developer-docs/api/update-a-report-schedule) | `/api/v1/report/{pk}` |
| `GET` | [Get a list of report schedule logs](/developer-docs/api/get-a-list-of-report-schedule-logs) | `/api/v1/report/{pk}/log/` |
| `GET` | [Get a report schedule log (report-pk-log-log-id)](/developer-docs/api/get-a-report-schedule-log-report-pk-log-log-id) | `/api/v1/report/{pk}/log/{log_id}` |
| `GET` | [Get related fields data (report-related-column-name)](/developer-docs/api/get-related-fields-data-report-related-column-name) | `/api/v1/report/related/{column_name}` |
| `GET` | [Get slack channels](/developer-docs/api/get-slack-channels) | `/api/v1/report/slack_channels/` |
</details>
@@ -410,16 +410,16 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get security roles](./api/get-security-roles) | `/api/v1/security/roles/` |
| `POST` | [Create security roles](./api/create-security-roles) | `/api/v1/security/roles/` |
| `GET` | [Get security roles info](./api/get-security-roles-info) | `/api/v1/security/roles/_info` |
| `DELETE` | [Delete security roles by pk](./api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `GET` | [Get security roles by pk](./api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `PUT` | [Update security roles by pk](./api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `POST` | [Create security roles by role_id permissions](./api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
| `GET` | [Get security roles by role_id permissions](./api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
| `PUT` | [Update security roles by role_id users](./api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
| `GET` | [List roles](./api/list-roles) | `/api/v1/security/roles/search/` |
| `GET` | [Get security roles](/developer-docs/api/get-security-roles) | `/api/v1/security/roles/` |
| `POST` | [Create security roles](/developer-docs/api/create-security-roles) | `/api/v1/security/roles/` |
| `GET` | [Get security roles info](/developer-docs/api/get-security-roles-info) | `/api/v1/security/roles/_info` |
| `DELETE` | [Delete security roles by pk](/developer-docs/api/delete-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `GET` | [Get security roles by pk](/developer-docs/api/get-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `PUT` | [Update security roles by pk](/developer-docs/api/update-security-roles-by-pk) | `/api/v1/security/roles/{pk}` |
| `POST` | [Create security roles by role_id permissions](/developer-docs/api/create-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions` |
| `GET` | [Get security roles by role_id permissions](/developer-docs/api/get-security-roles-by-role-id-permissions) | `/api/v1/security/roles/{role_id}/permissions/` |
| `PUT` | [Update security roles by role_id users](/developer-docs/api/update-security-roles-by-role-id-users) | `/api/v1/security/roles/{role_id}/users` |
| `GET` | [List roles](/developer-docs/api/list-roles) | `/api/v1/security/roles/search/` |
</details>
@@ -428,12 +428,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get security users](./api/get-security-users) | `/api/v1/security/users/` |
| `POST` | [Create security users](./api/create-security-users) | `/api/v1/security/users/` |
| `GET` | [Get security users info](./api/get-security-users-info) | `/api/v1/security/users/_info` |
| `DELETE` | [Delete security users by pk](./api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `GET` | [Get security users by pk](./api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `PUT` | [Update security users by pk](./api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `GET` | [Get security users](/developer-docs/api/get-security-users) | `/api/v1/security/users/` |
| `POST` | [Create security users](/developer-docs/api/create-security-users) | `/api/v1/security/users/` |
| `GET` | [Get security users info](/developer-docs/api/get-security-users-info) | `/api/v1/security/users/_info` |
| `DELETE` | [Delete security users by pk](/developer-docs/api/delete-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `GET` | [Get security users by pk](/developer-docs/api/get-security-users-by-pk) | `/api/v1/security/users/{pk}` |
| `PUT` | [Update security users by pk](/developer-docs/api/update-security-users-by-pk) | `/api/v1/security/users/{pk}` |
</details>
@@ -442,9 +442,9 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get security permissions](./api/get-security-permissions) | `/api/v1/security/permissions/` |
| `GET` | [Get security permissions info](./api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
| `GET` | [Get security permissions by pk](./api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |
| `GET` | [Get security permissions](/developer-docs/api/get-security-permissions) | `/api/v1/security/permissions/` |
| `GET` | [Get security permissions info](/developer-docs/api/get-security-permissions-info) | `/api/v1/security/permissions/_info` |
| `GET` | [Get security permissions by pk](/developer-docs/api/get-security-permissions-by-pk) | `/api/v1/security/permissions/{pk}` |
</details>
@@ -453,12 +453,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get security resources](./api/get-security-resources) | `/api/v1/security/resources/` |
| `POST` | [Create security resources](./api/create-security-resources) | `/api/v1/security/resources/` |
| `GET` | [Get security resources info](./api/get-security-resources-info) | `/api/v1/security/resources/_info` |
| `DELETE` | [Delete security resources by pk](./api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `GET` | [Get security resources by pk](./api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `PUT` | [Update security resources by pk](./api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `GET` | [Get security resources](/developer-docs/api/get-security-resources) | `/api/v1/security/resources/` |
| `POST` | [Create security resources](/developer-docs/api/create-security-resources) | `/api/v1/security/resources/` |
| `GET` | [Get security resources info](/developer-docs/api/get-security-resources-info) | `/api/v1/security/resources/_info` |
| `DELETE` | [Delete security resources by pk](/developer-docs/api/delete-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `GET` | [Get security resources by pk](/developer-docs/api/get-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
| `PUT` | [Update security resources by pk](/developer-docs/api/update-security-resources-by-pk) | `/api/v1/security/resources/{pk}` |
</details>
@@ -467,12 +467,12 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get security permissions resources](./api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
| `POST` | [Create security permissions resources](./api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
| `GET` | [Get security permissions resources info](./api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
| `DELETE` | [Delete security permissions resources by pk](./api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
| `GET` | [Get security permissions resources by pk](./api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
| `PUT` | [Update security permissions resources by pk](./api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
| `GET` | [Get security permissions resources](/developer-docs/api/get-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
| `POST` | [Create security permissions resources](/developer-docs/api/create-security-permissions-resources) | `/api/v1/security/permissions-resources/` |
| `GET` | [Get security permissions resources info](/developer-docs/api/get-security-permissions-resources-info) | `/api/v1/security/permissions-resources/_info` |
| `DELETE` | [Delete security permissions resources by pk](/developer-docs/api/delete-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
| `GET` | [Get security permissions resources by pk](/developer-docs/api/get-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
| `PUT` | [Update security permissions resources by pk](/developer-docs/api/update-security-permissions-resources-by-pk) | `/api/v1/security/permissions-resources/{pk}` |
</details>
@@ -481,14 +481,14 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `DELETE` | [Bulk delete RLS rules](./api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
| `GET` | [Get a list of RLS](./api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
| `POST` | [Create a new RLS rule](./api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](./api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
| `DELETE` | [Delete an RLS](./api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
| `GET` | [Get an RLS](./api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
| `PUT` | [Update an RLS rule](./api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](./api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
| `DELETE` | [Bulk delete RLS rules](/developer-docs/api/bulk-delete-rls-rules) | `/api/v1/rowlevelsecurity/` |
| `GET` | [Get a list of RLS](/developer-docs/api/get-a-list-of-rls) | `/api/v1/rowlevelsecurity/` |
| `POST` | [Create a new RLS rule](/developer-docs/api/create-a-new-rls-rule) | `/api/v1/rowlevelsecurity/` |
| `GET` | [Get metadata information about this API resource (rowlevelsecurity--info)](/developer-docs/api/get-metadata-information-about-this-api-resource-rowlevelsecurity-info) | `/api/v1/rowlevelsecurity/_info` |
| `DELETE` | [Delete an RLS](/developer-docs/api/delete-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
| `GET` | [Get an RLS](/developer-docs/api/get-an-rls) | `/api/v1/rowlevelsecurity/{pk}` |
| `PUT` | [Update an RLS rule](/developer-docs/api/update-an-rls-rule) | `/api/v1/rowlevelsecurity/{pk}` |
| `GET` | [Get related fields data (rowlevelsecurity-related-column-name)](/developer-docs/api/get-related-fields-data-rowlevelsecurity-related-column-name) | `/api/v1/rowlevelsecurity/related/{column_name}` |
</details>
@@ -499,8 +499,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Export all assets](./api/export-all-assets) | `/api/v1/assets/export/` |
| `POST` | [Import multiple assets](./api/import-multiple-assets) | `/api/v1/assets/import/` |
| `GET` | [Export all assets](/developer-docs/api/export-all-assets) | `/api/v1/assets/export/` |
| `POST` | [Import multiple assets](/developer-docs/api/import-multiple-assets) | `/api/v1/assets/import/` |
</details>
@@ -509,7 +509,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `POST` | [Invalidate cache records and remove the database records](./api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
| `POST` | [Invalidate cache records and remove the database records](/developer-docs/api/invalidate-cache-records-and-remove-the-database-records) | `/api/v1/cachekey/invalidate` |
</details>
@@ -518,10 +518,10 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get a list of logs](./api/get-a-list-of-logs) | `/api/v1/log/` |
| `POST` | [Create log](./api/create-log) | `/api/v1/log/` |
| `GET` | [Get a log detail information](./api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
| `GET` | [Get recent activity data for a user](./api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
| `GET` | [Get a list of logs](/developer-docs/api/get-a-list-of-logs) | `/api/v1/log/` |
| `POST` | [Create log](/developer-docs/api/create-log) | `/api/v1/log/` |
| `GET` | [Get a log detail information](/developer-docs/api/get-a-log-detail-information) | `/api/v1/log/{pk}` |
| `GET` | [Get recent activity data for a user](/developer-docs/api/get-recent-activity-data-for-a-user) | `/api/v1/log/recent_activity/` |
</details>
@@ -532,8 +532,8 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get the user object](./api/get-the-user-object) | `/api/v1/me/` |
| `GET` | [Get the user roles](./api/get-the-user-roles) | `/api/v1/me/roles/` |
| `GET` | [Get the user object](/developer-docs/api/get-the-user-object) | `/api/v1/me/` |
| `GET` | [Get the user roles](/developer-docs/api/get-the-user-roles) | `/api/v1/me/roles/` |
</details>
@@ -542,7 +542,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get the user avatar](./api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
| `GET` | [Get the user avatar](/developer-docs/api/get-the-user-avatar) | `/api/v1/user/{user_id}/avatar.png` |
</details>
@@ -551,7 +551,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get menu](./api/get-menu) | `/api/v1/menu/` |
| `GET` | [Get menu](/developer-docs/api/get-menu) | `/api/v1/menu/` |
</details>
@@ -560,7 +560,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get all available domains](./api/get-all-available-domains) | `/api/v1/available_domains/` |
| `GET` | [Get all available domains](/developer-docs/api/get-all-available-domains) | `/api/v1/available_domains/` |
</details>
@@ -569,7 +569,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Read off of the Redis events stream](./api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
| `GET` | [Read off of the Redis events stream](/developer-docs/api/read-off-of-the-redis-events-stream) | `/api/v1/async_event/` |
</details>
@@ -578,7 +578,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
| Method | Endpoint | Description |
|--------|----------|-------------|
| `GET` | [Get api by version openapi](./api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
| `GET` | [Get api by version openapi](/developer-docs/api/get-api-by-version-openapi) | `/api/{version}/_openapi` |
</details>

View File

@@ -485,7 +485,7 @@ Frontend assets (TypeScript, JavaScript, CSS, and images) must be compiled in or
First, be sure you are using the following versions of Node.js and npm:
- `Node.js`: Version 20
- `Node.js`: Version 22 (LTS)
- `npm`: Version 10
We recommend using [nvm](https://github.com/nvm-sh/nvm) to manage your node environment:

View File

@@ -45,7 +45,7 @@ superset-extensions validate: Validates the extension structure and metadata.
When creating a new extension with `superset-extensions init`, the CLI generates a standardized folder structure:
```
my-org.dataset-references/
dataset-references/
├── extension.json
├── frontend/
│ ├── src/
@@ -76,7 +76,7 @@ my-org.dataset-references/
```
**Note**: With publisher `my-org` and name `dataset-references`, the technical names are:
- Directory name: `my-org.dataset-references` (kebab-case)
- Directory name: `dataset-references` (kebab-case)
- Backend Python namespace: `my_org.dataset_references`
- Backend distribution package: `my_org-dataset_references`
- Frontend package name: `@my-org/dataset-references` (scoped)

View File

@@ -75,7 +75,7 @@ This approach ensures that extensions from different organizations cannot confli
This creates a complete project structure:
```
my-org.hello-world/
hello-world/
├── extension.json # Extension metadata and configuration
├── backend/ # Backend Python code
│ ├── src/

View File

@@ -52,7 +52,6 @@ module.exports = {
'extensions/development',
'extensions/deployment',
'extensions/mcp',
'extensions/mcp-server',
'extensions/security',
'extensions/tasks',
'extensions/registry',

View File

@@ -63,6 +63,12 @@ by clicking the **Connect** button in the bottom right corner of the modal windo
Congratulations, you've just added a new data source in Superset!
### Sharing a Database Connection
When adding a new database, you can share the connection with other Superset users. Shared connections appear in other users' database lists, making it easier to collaborate on the same data without requiring each user to configure the same connection separately.
To share a connection, enable the **Share connection with other users** option in the **Advanced** tab of the database connection modal before saving. You can change sharing settings later by editing the database connection.
### Registering a new table
Now that you've configured a data source, you can select specific tables (called **Datasets** in Superset)
@@ -80,6 +86,22 @@ we register the **cleaned_sales_data** table from the **examples** database.
To finish, click the **Add** button in the bottom right corner. You should now see your dataset in the list of datasets.
### Organizing Datasets into Folders
The Datasets list view supports **folders** for organizing datasets into groups. To create and manage folders:
1. In the **Datasets** list, click the **Folders** panel on the left sidebar.
2. Click **+ New Folder** to create a top-level folder, or drag an existing folder to nest it.
3. Drag dataset rows onto a folder to move them in, or right-click a dataset and select **Move to folder**.
Folders are per-user organizational aids — they do not affect dataset access permissions or how other users see the datasets.
### Uploading Files via the OS File Manager (PWA)
When Superset is installed as a **Progressive Web App (PWA)** from your browser, your operating system will offer Superset as an option when opening CSV, Excel (`.xls`/`.xlsx`), and Parquet files. Double-clicking or right-clicking a supported file and selecting "Open with Superset" navigates directly to the upload workflow for that file.
To install Superset as a PWA, look for the install icon in your browser's address bar (Chrome, Edge) when visiting your Superset instance over HTTPS. PWA installation requires HTTPS and a valid manifest — your admin needs to confirm the app manifest is served correctly.
### Customizing column properties
Now that you've registered your dataset, you can configure column properties
@@ -234,6 +256,112 @@ For example, when running the local development build, the following will disabl
Top Nav and remove the Filter Bar:
`http://localhost:8088/superset/dashboard/my-dashboard/?standalone=1&show_filters=0`
### Table Chart Features
The **Table** chart type has several advanced capabilities worth knowing:
#### Conditional Formatting
Conditional formatting rules highlight cells based on their values. Rules can be applied to:
- **Numeric columns** — color cells above/below a threshold, or use a gradient across a range
- **String columns** — highlight cells matching specific text values or patterns
- **Boolean columns** — color cells that are `true` or `false`, or `null`/`not null`
Each rule has a **"Use gradient"** toggle: enabled applies a varying opacity (lighter = further from threshold), disabled applies a solid fill at full opacity regardless of value.
#### HTML Rendering in Table Cells
Table chart cells can render raw HTML, enabling rich formatting such as hyperlinks, colored badges, and icons directly in the data. Enable this per-column in the chart's **Column Configuration** panel by toggling **Render HTML**.
:::caution
Only enable HTML rendering for columns sourced from data you control. Rendering untrusted HTML can expose users to cross-site scripting (XSS) risks.
:::
#### Column Header Tooltips
Column headers display a tooltip with the column's **Description** from the dataset editor when the user hovers over them. Keep dataset column descriptions up to date to improve chart discoverability.
#### Display Controls
In dashboard view mode (without entering Edit mode), charts with configurable display options expose a **Display Controls** panel accessible from the chart's context menu. This surfaces controls such as Time Grain, Time Column, and layer visibility for applicable chart types — making it easy to adjust a chart's view without going to Explore.
### AG Grid Interactive Table
The **AG Grid Interactive Table** chart type is Superset's fully-featured data grid, suitable for large paginated datasets where the standard Table chart is not enough.
#### Server-Side Column Filters
AG Grid supports server-side column filters that query the full dataset — not just the loaded page. Filters are applied before data is sent to the browser, so results are correct even across millions of rows.
**Available filter types:**
| Column type | Filter options |
|---|---|
| Text | Contains, equals, starts with, ends with |
| Number | Equals, not equal, less than, greater than, between |
| Date | Before, after, between, blank |
| Set | Select from a list of distinct values |
**AND / OR logic:** Each column supports combining multiple conditions with AND or OR. Filters from different columns are always combined with AND.
**Interaction with pagination:** Server-side filters run as WHERE clauses in the underlying SQL query, so pagination always operates over the already-filtered result set.
#### Time Shift (Time Comparison)
AG Grid Interactive Table supports **Time Shift** (time comparison), matching the behavior of the standard Table chart. In the **Advanced Analytics** → **Time Comparison** section of the chart configuration, enter a shift expression (e.g., `1 year ago`, `minus 7 days`) to add comparison columns showing values from the offset period. Dashboard-level time range overrides apply to both the base and comparison periods.
### Dynamic Currency Formatting
Chart metric values can display currencies dynamically rather than using a fixed currency code. To enable:
1. Open the dataset editor for your dataset (**Datasets → Edit**).
2. In the **Advanced** tab, set **Currency Code Column** to the name of a column in your dataset that contains ISO 4217 currency codes (e.g., `USD`, `EUR`, `GBP`).
3. In the Explore chart configuration, open the metric's **Number format** section and select **Auto-detect** for currency.
When Auto-detect is active, each row uses the currency code from the designated column, so a single chart can display values in multiple currencies — each formatted correctly for its currency.
### ECharts Option Editor
For ECharts-based chart types (line, bar, area, scatter, pie, and others), Explore includes an advanced **ECharts Option Editor** that accepts raw JSON overrides for the underlying ECharts configuration.
Access it via the **Customize** tab → **ECharts Options** section at the bottom of the panel. The JSON you enter is deep-merged on top of Superset's generated ECharts config, so you can override specific options without rewriting the entire config.
**Example:** override the legend position and add a custom title:
```json
{
"legend": { "orient": "vertical", "right": "5%", "top": "middle" },
"title": { "text": "My Custom Title", "left": "center" }
}
```
:::caution
ECharts option overrides bypass Superset's validation layer. Invalid option keys are silently ignored by ECharts. Overrides that conflict with Superset-generated options (e.g., `series`) may produce unexpected results.
:::
### Table Chart: Exporting Filtered Data
When the **Search Box** is visible in a Table chart, the **Download** action exports only the rows currently visible after the search filter is applied — not the full underlying dataset. This matches the visual output and is intentional. To export the full dataset regardless of search state, use the **Download as CSV** option from the chart's three-dot menu in the dashboard or from the Explore chart toolbar before applying a search filter.
### Sharing a Specific Tab
When a dashboard has tabs, each tab gets its own shareable URL. Navigate to the tab you want to share and copy the URL from your browser's address bar — the tab anchor is encoded in the URL so that anyone opening the link lands directly on that tab.
### Auto-Refresh
Dashboards can be configured to refresh automatically at a fixed interval without user interaction. Open a dashboard, click the **⋮** (more options) menu in the top-right, and select **Set auto-refresh interval**. Choose an interval (e.g., every 10 seconds, 1 minute, or 10 minutes). The setting is per-session and resets when you close the tab.
:::note
Auto-refresh triggers a full data reload for all charts on the dashboard. For dashboards with expensive queries, choose longer intervals to avoid overloading your database.
:::
### Last Queried Timestamp
Charts can display a "Last queried at" timestamp showing when the chart data was last fetched. This is useful on auto-refreshing dashboards to confirm data freshness. Enable it in **Dashboard Properties → Styling → Show last queried time**.
### Saving a Chart to a Specific Tab
When saving or adding a chart to a dashboard from Explore, you can select which tab it should land on using the tab tree-select dropdown in the "Add to dashboard" modal.
:::resources
- [Dashboard Customization](https://docs.preset.io/docs/dashboard-customization) - Advanced dashboard styling and layout options
- [Blog: BI Dashboard Best Practices](https://preset.io/blog/bi-dashboard-best-practices/)

View File

@@ -0,0 +1,130 @@
{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}
---
title: Embedding Superset
sidebar_position: 6
---
# Embedding Superset
Superset dashboards can be embedded directly in host applications using the `@superset-ui/embedded-sdk` package.
:::info Prerequisites
- The `EMBEDDED_SUPERSET` feature flag must be enabled.
- The embedding domain and allowed origins must be configured by an admin.
:::
## Quick Start
Install the SDK:
```bash
npm install @superset-ui/embedded-sdk
```
Embed a dashboard:
```javascript
import { embedDashboard } from '@superset-ui/embedded-sdk';
embedDashboard({
id: 'dashboard-uuid-here', // from Dashboard → Embed
supersetDomain: 'https://superset.example.com',
mountPoint: document.getElementById('superset-container'),
fetchGuestToken: () => fetchTokenFromYourBackend(),
dashboardUiConfig: {
hideTitle: true,
filters: { expanded: false },
},
});
```
`fetchGuestToken` must return a **guest token** obtained from your server by calling Superset's `/api/v1/security/guest_token/` endpoint with a service account. Do not call this endpoint from client-side code.
---
## Callbacks
### `resolvePermalinkUrl`
When a user copies a permalink from an embedded dashboard, Superset generates a URL on its own domain. In an embedded context this URL is usually not meaningful to the host application's users — the dashboard is rendered inside the host app, not at the Superset URL.
The `resolvePermalinkUrl` callback lets the host app intercept permalink generation and return a URL on the host domain instead:
```javascript
embedDashboard({
id: 'my-dashboard-uuid',
supersetDomain: 'https://superset.example.com',
mountPoint: document.getElementById('superset-container'),
fetchGuestToken: () => fetchGuestToken(),
/**
* Called when Superset generates a permalink.
* @param {Object} args - { key: string } — the permalink key
* @returns {string | null} - your host URL, or null to use Superset's default
*/
resolvePermalinkUrl: ({ key }) => {
return `https://myapp.example.com/dashboard?permalink=${key}`;
},
});
```
If the callback returns `null` or is not provided, Superset uses its own permalink URL as a fallback.
---
## Feature Flags for Embedded Mode
### `DISABLE_EMBEDDED_SUPERSET_LOGOUT`
Hides the logout button when Superset is embedded in a host application. This is useful when the host application manages the session lifecycle and you do not want users to accidentally log out of the embedded Superset session:
```python
# superset_config.py
FEATURE_FLAGS = {
"EMBEDDED_SUPERSET": True,
"DISABLE_EMBEDDED_SUPERSET_LOGOUT": True,
}
```
When enabled, the **Logout** menu item is removed from the user avatar dropdown in the embedded view. The session can still be invalidated server-side by revoking the guest token.
### `EMBEDDED_SUPERSET`
Must be `True` to enable the embedded SDK and the guest token endpoint. Without this flag, `embedDashboard` will fail to load.
---
## URL Parameters
The following URL parameters can be passed through the `urlParams` option in `dashboardUiConfig` or appended to the embedded iframe URL:
| Parameter | Values | Effect |
|-----------|--------|--------|
| `standalone` | `0`, `1`, `2`, `3` | `0`: normal; `1`: hide nav; `2`: hide nav + title; `3`: hide nav + title + tabs |
| `show_filters` | `0`, `1` | Show or hide the native filter bar |
| `expand_filters` | `0`, `1` | Start with filter bar expanded or collapsed |
---
## Security Notes
- **Guest tokens expire** — their lifetime is controlled by the `GUEST_TOKEN_JWT_EXP_SECONDS` config (default: 5 minutes). Refresh tokens before they expire using a token refresh mechanism in your host app.
- **Row-level security** — pass `rls` rules in the guest token request to restrict which rows are visible to the embedded user.
- **Allowed domains** — restrict which host origins can embed a dashboard by setting **Allowed Domains** per-dashboard in the *Embed* settings modal. Superset checks the request's `Referer` header against this list before serving the embedded view; an empty list allows any origin, so configure this explicitly for production.

View File

@@ -329,6 +329,27 @@ various options in this section, refer to the
Lastly, save your chart as Tutorial Resample and add it to the Tutorial Dashboard. Go to the
tutorial dashboard to see the four charts side by side and compare the different outputs.
### SQL Lab Tips
**Schema and table browser**: The left-side table browser uses a collapsible treeview — click a schema to expand its tables, and click a table to see its columns and sample data inline. This makes navigating large schemas much faster than the previous flat list.
**Find in editor**: Press **Ctrl+F** (or **Cmd+F** on Mac) to open the Monaco find/replace widget inside the SQL editor without leaving the editor.
**Resizable panels**: The dividers between the SQL editor, schema browser, and results pane are draggable. Adjust them to match your workflow and screen size.
**Dialect-aware Format SQL**: The **Format SQL** button applies the SQL dialect of the currently selected database — Trino, Presto, MySQL, PostgreSQL, etc. — rather than a generic formatter. Switch to a different database in the toolbar and re-format to get dialect-specific output. Jinja template syntax (`{{ }}`, `{% %}`) is preserved during formatting and will not cause format errors.
### Time Range Natural Language Expressions
The **Custom** time range picker accepts natural language expressions alongside specific dates:
- **Relative**: `Last 7 days`, `Last month`, `Last quarter`, `Last year`
- **Anchored**: `previous calendar week`, `previous calendar month`
- **"First of" expressions**: `first day of this week`, `first day of this month`, `first day of this quarter`, `first day of this year`, `first week of this year`
- **Offsets**: `30 days ago`, `1 year ago`, `next week`
These expressions are evaluated at query time, so saved charts always display data relative to the current date.
:::resources
- [Chart Walkthroughs](https://docs.preset.io/docs/chart-walkthroughs) - Detailed guides for most chart types
- [Blog: Why Apache ECharts is the Future of Apache Superset](https://preset.io/blog/2021-4-1-why-echarts/)

View File

@@ -33,6 +33,29 @@ SQL templating must be enabled by your administrator via the `ENABLE_TEMPLATE_PR
For advanced configuration options, see the [SQL Templating Configuration Guide](/admin-docs/configuration/sql-templating).
:::
## Using Jinja in Calculated Columns
Jinja template macros are available in calculated column expressions in the dataset editor — not just in SQL Lab queries and virtual datasets. This allows column expressions to reference the current user or dynamic context.
**Example: User-scoped calculated column**
```sql
CASE WHEN sales_rep = '{{ current_username() }}' THEN amount ELSE 0 END
```
**Example: Conditional display based on role**
Because `current_user_roles()` returns a Python list, test role membership with a Jinja
conditional at template time rather than matching against the list's string representation:
```sql
{% if 'Finance' in current_user_roles() %}revenue{% else %}NULL{% endif %} AS finance_revenue
```
:::note
The `ENABLE_TEMPLATE_PROCESSING` feature flag must be enabled by your administrator for Jinja in calculated columns to work.
:::
## Basic Usage
Jinja templates use double curly braces `{{ }}` for expressions and `{% %}` for logic blocks.
@@ -243,6 +266,7 @@ Using `remove_filter=True` applies the filter in the inner query for better perf
- Use `|tojson` to serialize arrays as JSON strings
- Test queries with explicit parameter values before relying on filter context
- For complex templating needs, ask your administrator about custom Jinja macros
- **Format SQL is Jinja-aware**: The "Format SQL" button in SQL Lab correctly preserves `{{ }}` and `{% %}` template syntax and applies your selected database's SQL dialect when formatting.
:::resources
- [Admin Guide: SQL Templating Configuration](/admin-docs/configuration/sql-templating)

View File

@@ -0,0 +1,312 @@
---
title: Using AI with Superset
hide_title: true
sidebar_position: 5
version: 1
---
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Using AI with Superset
Superset supports AI assistants through the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/). Connect Claude, ChatGPT, or other MCP-compatible clients to explore your data, build charts, create dashboards, and run SQL -- all through natural language.
:::info
Requires Superset 5.0+. Your admin must enable and deploy the MCP server before you can connect.
See the **[MCP Server admin guide](/admin-docs/configuration/mcp-server)** for setup instructions.
:::
---
## What Can AI Do with Superset?
### Explore Your Data
Ask your AI assistant to browse what's available in your Superset instance:
- **List datasets** -- see all datasets you have access to, with filtering and search
- **Get dataset details** -- column names, types, available metrics, and filters
- **List charts and dashboards** -- find existing visualizations by name or keyword
- **Get chart and dashboard details** -- understand what a chart shows, its query, and configuration
**Example prompts:**
> "What datasets are available?"
> "Show me the columns in the sales_orders dataset"
> "Find dashboards related to revenue"
### Build Charts
Describe the visualization you want and AI creates it for you:
- **Preview-first workflow** -- by default AI generates an Explore link so you can review the chart before it is saved. Say "save it" to commit permanently
- **Create charts from natural language** -- describe what you want to see and AI picks the right chart type, metrics, and dimensions
- **Preview before saving** -- `generate_chart` defaults to `save_chart=False`, showing the chart in Explore before it's committed. Ask AI to save once you're satisfied.
- **Modify existing charts** -- `update_chart` also supports preview mode so you can review changes before saving (update filters, change chart types, add metrics)
- **Get Explore links** -- open any chart in Superset's Explore view for further refinement
**Example prompts:**
> "Create a bar chart showing monthly revenue by region from the sales dataset"
> "Update chart 42 to use a line chart instead"
> "Give me a link to explore this chart further"
:::tip Preview-first workflow
Charts are **not saved by default**. The workflow is intentionally iterative:
1. **Explore** — AI generates an Explore link so you can see the chart before it exists in Superset
2. **Iterate** — ask the AI to adjust the chart; changes are previewed without touching the database
3. **Save** — when you're happy, say "save it" and the chart is permanently stored
To skip the preview and save immediately, include "and save it" in your prompt.
:::
### Create Dashboards
Build dashboards from a collection of charts:
- **Generate dashboards** -- create a new dashboard with a set of charts, automatically laid out
- **Add charts to existing dashboards** -- place a chart on an existing dashboard with automatic positioning
**Example prompts:**
> "Create a dashboard called 'Q4 Sales Overview' with charts 10, 15, and 22"
> "Add the revenue trend chart to the executive dashboard"
### Browse Databases
Discover what database connections are configured in your Superset instance:
- **List databases** -- see all database connections you have access to
- **Get database details** -- name, backend type (PostgreSQL, Snowflake, etc.), and connection status
**Example prompts:**
> "What databases are connected to Superset?"
> "Show me details about the data warehouse connection"
### Create Virtual Datasets
Build ad-hoc SQL datasets that can be used as the basis for charts:
- **Create virtual datasets** -- write a SQL query and save it as a reusable dataset
- **Use immediately in charts** -- the returned dataset ID can be passed directly to chart creation
**Example prompts:**
> "Create a dataset from: SELECT region, SUM(revenue) as total_revenue FROM orders GROUP BY region"
> "Make a virtual dataset called 'monthly_signups' from the users table filtered to last 12 months"
### Run SQL Queries
Execute SQL directly through your AI assistant:
- **Run queries** -- execute SQL with full Superset RBAC enforcement (you can only query data your roles allow)
- **Open SQL Lab** -- get a link to SQL Lab pre-populated with a query, ready to run and explore
- **Save queries** -- save a SQL query to SQL Lab's Saved Queries for later reuse
**Example prompts:**
> "Run this query: SELECT region, SUM(revenue) FROM sales GROUP BY region"
> "Open SQL Lab with a query to show the top 10 customers by order count"
> "Save this query as 'Weekly Revenue Report'"
### Analyze Chart Data
Pull the raw data behind any chart:
- **Get chart data** -- retrieve the data a chart displays, with support for JSON, CSV, and Excel export formats
- **Inspect results** -- useful for verifying what a visualization shows or feeding data into other tools
**Example prompts:**
> "Get the data behind chart 42"
> "Export chart 15 data as CSV"
### Check Instance Status
- **Health check** -- verify your Superset instance is up and the MCP connection is working
- **Instance info** -- get high-level statistics about your Superset instance (number of datasets, charts, dashboards)
**Example prompts:**
> "Is Superset healthy?"
> "How many dashboards are in this instance?"
---
## Connecting Your AI Client
Once your admin has deployed the MCP server, connect your AI client using the instructions below.
### Claude Desktop
Edit your Claude Desktop config file:
- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
- **Linux**: `~/.config/Claude/claude_desktop_config.json`
```json
{
"mcpServers": {
"superset": {
"url": "http://localhost:5008/mcp"
}
}
}
```
Restart Claude Desktop. The hammer icon in the chat bar confirms the connection.
If your admin has enabled JWT authentication, you may need to include a token:
```json
{
"mcpServers": {
"superset": {
"command": "npx",
"args": [
"-y",
"mcp-remote@latest",
"http://your-superset-host:5008/mcp",
"--header",
"Authorization: Bearer YOUR_TOKEN"
]
}
}
}
```
### Claude Code (CLI)
Add to your project's `.mcp.json`:
```json
{
"mcpServers": {
"superset": {
"type": "url",
"url": "http://localhost:5008/mcp"
}
}
}
```
### ChatGPT
1. Click your profile icon > **Settings** > **Apps and Connectors**
2. Enable **Developer Mode** in Advanced Settings
3. In the chat composer, press **+** > **Add sources** > **App** > **Connect more** > **Create app**
4. Enter a name and your MCP server URL
5. Click **I understand and continue**
:::info
ChatGPT MCP connectors require a Pro, Team, Enterprise, or Edu plan.
:::
Ask your admin for the MCP server URL and any authentication tokens you need.
---
## Tips for Best Results
- **Be specific** -- "Create a bar chart of monthly revenue by region from the sales dataset" works better than "Make me a chart"
- **Start with exploration** -- ask what datasets and charts exist before creating new ones
- **Review AI-generated content** -- always check chart configurations and SQL before saving or sharing
- **Use Explore for refinement** -- ask AI for an Explore link, then fine-tune interactively in the Superset UI
- **Check permissions if you get errors** -- AI respects Superset's RBAC, so you can only access data your roles allow
---
## Available Tools Reference
### Exploration & Discovery
| Tool | Description |
|------|-------------|
| `health_check` | Verify the MCP server is running and connected |
| `get_instance_info` | Get instance statistics (dataset, chart, dashboard counts) |
| `get_schema` | Discover available charts, datasets, and dashboards with schema info |
### Datasets
| Tool | Description |
|------|-------------|
| `list_datasets` | List datasets with filtering and search |
| `get_dataset_info` | Get dataset metadata (columns, metrics, filters) |
| `create_virtual_dataset` | Create a virtual dataset from a SQL query |
### Charts
| Tool | Description |
|------|-------------|
| `list_charts` | List charts with filtering and search |
| `get_chart_info` | Get chart metadata and configuration |
| `get_chart_data` | Retrieve chart data (JSON, CSV, or Excel) |
| `get_chart_preview` | Generate a chart preview (URL, ASCII, table, or Vega-Lite) |
| `get_chart_type_schema` | Get the configuration schema for a chart type |
| `generate_chart` | Create a new chart from a specification (defaults to preview mode — review before saving) |
| `update_chart` | Modify an existing chart's configuration (pass `generate_preview=False` to persist immediately instead of returning a preview URL) |
| `update_chart_preview` | Update a cached chart preview without saving |
| `generate_explore_link` | Generate an Explore URL for interactive visualization |
### Dashboards
| Tool | Description |
|------|-------------|
| `list_dashboards` | List dashboards with filtering and search |
| `get_dashboard_info` | Get dashboard metadata and layout |
| `generate_dashboard` | Create a new dashboard with specified charts |
| `add_chart_to_existing_dashboard` | Add a chart to an existing dashboard |
### SQL
| Tool | Description |
|------|-------------|
| `execute_sql` | Run a SQL query with RBAC enforcement |
| `save_sql_query` | Persist a SQL query to SQL Lab's saved queries |
| `open_sql_lab_with_context` | Open SQL Lab with a pre-populated query |
### Databases
| Tool | Description |
|------|-------------|
| `list_databases` | List configured database connections |
| `get_database_info` | Get details about a specific database connection |
---
## Troubleshooting
### "Connection refused" or "Cannot connect"
- Confirm the MCP server URL with your admin
- For Claude Desktop: fully quit the app (not just close the window) and restart after config changes
- Check that the URL path ends with `/mcp` (e.g., `http://localhost:5008/mcp`)
### "Permission denied" or missing data
- Superset's RBAC controls what you can access through AI, just like in the Superset UI
- Ask your admin to verify your roles and permissions
- Try accessing the same data through the Superset web UI to confirm your access
### "Response too large"
- Ask for smaller result sets: use filters, reduce `page_size`, or request specific columns
- Example: "Show me the top 10 rows from the sales dataset" instead of "Show me all sales data"
### AI doesn't see Superset tools
- Verify the connection in your AI client (e.g., the hammer icon in Claude Desktop)
- Ask the AI "What Superset tools are available?" to confirm the connection
- Restart your AI client if you recently changed the configuration

View File

@@ -202,7 +202,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
mdx += `| Method | Endpoint | Description |\n`;
mdx += `|--------|----------|-------------|\n`;
for (const ep of tagEndpoints['Security']) {
mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
}
mdx += '\n';
renderedTags.add('Security');
@@ -229,7 +229,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
mdx += `|--------|----------|-------------|\n`;
for (const ep of endpoints) {
mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
}
mdx += `\n</details>\n\n`;
@@ -252,7 +252,7 @@ curl -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \\
mdx += `|--------|----------|-------------|\n`;
for (const ep of endpoints) {
mdx += `| \`${ep.method}\` | [${ep.summary}](./api/${ep.slug}) | \`${ep.path}\` |\n`;
mdx += `| \`${ep.method}\` | [${ep.summary}](/developer-docs/api/${ep.slug}) | \`${ep.path}\` |\n`;
}
mdx += `\n</details>\n\n`;

View File

@@ -0,0 +1,120 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* Swizzled from docusaurus-theme-openapi-docs to fix SSG crash.
*
* The original component calls useTypedSelector (Redux) at the top level,
* which fails during static site generation because no Redux store is
* available. This version moves the hook into a browser-only child component
* so SSG can render the page without a store context.
*/
import React from "react";
import BrowserOnly from "@docusaurus/BrowserOnly";
import { useSelector } from "react-redux";
// Minimal typings for the slice of the docusaurus-theme-openapi-docs Redux
// store that this swizzled component reads.

// A single OpenAPI server variable; only `default` is consumed below.
interface ServerVariable {
  default?: string;
}

// An OpenAPI server entry: a URL template plus optional substitution variables.
interface ServerValue {
  url: string;
  variables?: Record<string, ServerVariable>;
}

// Shape of the Redux state accessed via useSelector in ServerUrl.
interface StoreState {
  server: { value: ServerValue | null };
}
/**
 * Map an HTTP method (or the special "event" webhook marker) to the
 * Infima badge color suffix used for the method pill.
 * Returns undefined for unrecognized methods, yielding a plain badge.
 */
function colorForMethod(method: string) {
  const badgeColors = new Map<string, string>([
    ["get", "primary"],
    ["post", "success"],
    ["delete", "danger"],
    ["put", "info"],
    ["patch", "warning"],
    ["head", "secondary"],
    ["event", "secondary"],
  ]);
  // Map.get returns undefined for unknown keys, matching the original
  // switch's default branch (a plain object lookup could collide with
  // Object.prototype keys).
  return badgeColors.get(method.toLowerCase());
}
export interface Props {
  // HTTP method ("get", "post", ...) or "event" for webhook entries.
  method: string;
  // OpenAPI path template, e.g. "/api/v1/chart/{id}".
  path: string;
  // "callback" suppresses the server URL prefix; "endpoint" (or unset)
  // renders it via the browser-only ServerUrl child.
  context?: "endpoint" | "callback";
}
// Inner component rendered only in the browser, where the Redux store exists.
function ServerUrl() {
const serverValue = useSelector((state: StoreState) => state.server.value);
if (serverValue && serverValue.variables) {
let serverUrlWithVariables = serverValue.url.replace(/\/$/, "");
Object.keys(serverValue.variables).forEach((variable) => {
serverUrlWithVariables = serverUrlWithVariables.replace(
`{${variable}}`,
serverValue.variables?.[variable].default ?? ""
);
});
return <>{serverUrlWithVariables}</>;
}
if (serverValue && serverValue.url) {
return <>{serverValue.url}</>;
}
return null;
}
function MethodEndpoint({ method, path, context }: Props) {
const renderServerUrl = () => {
if (context === "callback") {
return "";
}
return <BrowserOnly>{() => <ServerUrl />}</BrowserOnly>;
};
return (
<>
<pre className="openapi__method-endpoint">
<span className={"badge badge--" + colorForMethod(method)}>
{method === "event" ? "Webhook" : method.toUpperCase()}
</span>{" "}
{method !== "event" && (
<h2 className="openapi__method-endpoint-path">
{renderServerUrl()}
{`${path.replace(/{([a-z0-9-_]+)}/gi, ":$1")}`}
</h2>
)}
</pre>
<div className="openapi__divider" />
</>
);
}
export default MethodEndpoint;

View File

@@ -144,7 +144,7 @@ solr = ["sqlalchemy-solr >= 0.2.0"]
elasticsearch = ["elasticsearch-dbapi>=0.2.12, <0.3.0"]
exasol = ["sqlalchemy-exasol >= 2.4.0, <3.0"]
excel = ["xlrd>=1.2.0, <1.3"]
fastmcp = ["fastmcp==2.14.3"]
fastmcp = ["fastmcp>=3.1.0,<4.0"]
firebird = ["sqlalchemy-firebird>=0.7.0, <0.8"]
firebolt = ["firebolt-sqlalchemy>=1.0.0, <2"]
gevent = ["gevent>=23.9.1"]
@@ -372,6 +372,7 @@ unfixable = []
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
[tool.ruff.lint.per-file-ignores]
"superset/mcp_service/app.py" = ["S608", "E501"] # LLM instruction text: SQL examples (S608) and long lines in multiline string (E501)
"scripts/*" = ["TID251"]
"setup.py" = ["TID251"]
"superset/config.py" = ["TID251"]

View File

@@ -10,6 +10,8 @@
# via
# -r requirements/development.in
# apache-superset
aiofile==3.9.0
# via py-key-value-aio
alembic==1.15.2
# via
# -c requirements/base-constraint.txt
@@ -26,8 +28,10 @@ anyio==4.11.0
# via
# httpx
# mcp
# py-key-value-aio
# sse-starlette
# starlette
# watchfiles
apispec==6.6.1
# via
# -c requirements/base-constraint.txt
@@ -65,9 +69,7 @@ bcrypt==4.3.0
# -c requirements/base-constraint.txt
# paramiko
beartype==0.22.5
# via
# py-key-value-aio
# py-key-value-shared
# via py-key-value-aio
billiard==4.2.1
# via
# -c requirements/base-constraint.txt
@@ -100,6 +102,8 @@ cachetools==6.2.1
# -c requirements/base-constraint.txt
# google-auth
# py-key-value-aio
caio==0.9.25
# via aiofile
cattrs==25.1.1
# via
# -c requirements/base-constraint.txt
@@ -138,7 +142,6 @@ click==8.2.1
# click-repl
# flask
# flask-appbuilder
# typer
# uvicorn
click-didyoumean==0.3.1
# via
@@ -156,8 +159,6 @@ click-repl==0.3.0
# via
# -c requirements/base-constraint.txt
# celery
cloudpickle==3.1.2
# via pydocket
cmdstanpy==1.1.0
# via prophet
colorama==0.4.6
@@ -206,8 +207,6 @@ deprecation==2.1.0
# apache-superset
dill==0.4.0
# via pylint
diskcache==5.6.3
# via py-key-value-aio
distlib==0.3.8
# via virtualenv
dnspython==2.7.0
@@ -237,9 +236,7 @@ et-xmlfile==2.0.0
# openpyxl
exceptiongroup==1.3.0
# via fastmcp
fakeredis==2.32.1
# via pydocket
fastmcp==2.14.3
fastmcp==3.1.0
# via apache-superset
filelock==3.20.3
# via
@@ -474,6 +471,8 @@ jsonpath-ng==1.7.0
# via
# -c requirements/base-constraint.txt
# apache-superset
jsonref==1.1.0
# via fastmcp
jsonschema==4.23.0
# via
# -c requirements/base-constraint.txt
@@ -504,8 +503,6 @@ limits==5.1.0
# via
# -c requirements/base-constraint.txt
# flask-limiter
lupa==2.6
# via fakeredis
mako==1.3.10
# via
# -c requirements/base-constraint.txt
@@ -603,7 +600,7 @@ openpyxl==3.1.5
# -c requirements/base-constraint.txt
# pandas
opentelemetry-api==1.39.1
# via pydocket
# via fastmcp
ordered-set==4.1.0
# via
# -c requirements/base-constraint.txt
@@ -622,6 +619,7 @@ packaging==25.0
# deprecation
# docker
# duckdb-engine
# fastmcp
# google-cloud-bigquery
# gunicorn
# limits
@@ -653,8 +651,6 @@ parsedatetime==2.6
# apache-superset
pathable==0.4.3
# via jsonschema-path
pathvalidate==3.3.1
# via py-key-value-aio
pgsanity==0.2.9
# via
# -c requirements/base-constraint.txt
@@ -691,8 +687,6 @@ prison==0.2.1
# flask-appbuilder
progress==1.6
# via apache-superset
prometheus-client==0.23.1
# via pydocket
prompt-toolkit==3.0.51
# via
# -c requirements/base-constraint.txt
@@ -714,12 +708,8 @@ psutil==6.1.0
# via apache-superset
psycopg2-binary==2.9.9
# via apache-superset
py-key-value-aio==0.3.0
# via
# fastmcp
# pydocket
py-key-value-shared==0.3.0
# via py-key-value-aio
py-key-value-aio==0.4.4
# via fastmcp
pyarrow==16.1.0
# via
# -c requirements/base-constraint.txt
@@ -758,8 +748,6 @@ pydantic-settings==2.10.1
# via mcp
pydata-google-auth==1.9.0
# via pandas-gbq
pydocket==0.17.1
# via fastmcp
pydruid==0.6.9
# via apache-superset
pyfakefs==5.3.5
@@ -844,8 +832,6 @@ python-dotenv==1.1.0
# apache-superset
# fastmcp
# pydantic-settings
python-json-logger==4.0.0
# via pydocket
python-ldap==3.4.4
# via apache-superset
python-multipart==0.0.20
@@ -866,15 +852,13 @@ pyyaml==6.0.2
# -c requirements/base-constraint.txt
# apache-superset
# apispec
# fastmcp
# jsonschema-path
# pre-commit
redis==5.3.1
# via
# -c requirements/base-constraint.txt
# apache-superset
# fakeredis
# py-key-value-aio
# pydocket
referencing==0.36.2
# via
# -c requirements/base-constraint.txt
@@ -910,9 +894,7 @@ rich==13.9.4
# cyclopts
# fastmcp
# flask-limiter
# pydocket
# rich-rst
# typer
rich-rst==1.3.1
# via cyclopts
rpds-py==0.25.0
@@ -944,8 +926,6 @@ setuptools==80.9.0
# pydata-google-auth
# zope-event
# zope-interface
shellingham==1.5.4
# via typer
shillelagh==1.4.3
# via
# -c requirements/base-constraint.txt
@@ -973,7 +953,6 @@ sniffio==1.3.1
sortedcontainers==2.4.0
# via
# -c requirements/base-constraint.txt
# fakeredis
# trio
sqlalchemy==1.4.54
# via
@@ -1034,8 +1013,6 @@ trio-websocket==0.12.2
# via
# -c requirements/base-constraint.txt
# selenium
typer==0.20.0
# via pydocket
typing-extensions==4.15.0
# via
# -c requirements/base-constraint.txt
@@ -1048,16 +1025,14 @@ typing-extensions==4.15.0
# limits
# mcp
# opentelemetry-api
# py-key-value-shared
# py-key-value-aio
# pydantic
# pydantic-core
# pydocket
# pyopenssl
# referencing
# selenium
# shillelagh
# starlette
# typer
# typing-inspection
typing-inspection==0.4.1
# via
@@ -1072,6 +1047,8 @@ tzdata==2025.2
# pandas
tzlocal==5.2
# via trino
uncalled-for==0.2.0
# via fastmcp
url-normalize==2.2.1
# via
# -c requirements/base-constraint.txt
@@ -1101,6 +1078,8 @@ watchdog==6.0.0
# -c requirements/base-constraint.txt
# apache-superset
# apache-superset-extensions-cli
watchfiles==1.1.1
# via fastmcp
wcwidth==0.2.13
# via
# -c requirements/base-constraint.txt

View File

@@ -45,7 +45,17 @@ if [ ${#js_ts_files[@]} -gt 0 ]; then
# Skip custom OXC build in pre-commit for speed
export SKIP_CUSTOM_OXC=true
# Use quiet mode in pre-commit to reduce noise (only show errors)
npx oxlint --config oxlint.json --fix --quiet "${js_ts_files[@]}"
# Capture output so we can treat "No files found" (all files ignored by
# ignorePatterns) as success rather than a false-positive failure.
output=$(npx oxlint --config oxlint.json --fix --quiet "${js_ts_files[@]}" 2>&1) || {
if echo "$output" | grep -q "No files found"; then
echo "No files to lint after applying ignore patterns"
exit 0
fi
echo "$output" >&2
exit 1
}
[ -n "$output" ] && echo "$output"
else
echo "No JavaScript/TypeScript files to lint"
fi

View File

@@ -18,20 +18,20 @@
[project]
name = "apache-superset-core"
version = "0.1.0rc1"
version = "0.1.0rc3"
description = "Core Python package for building Apache Superset backend extensions and integrations"
readme = "README.md"
authors = [
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
]
license = { file="LICENSE.txt" }
license = "Apache-2.0"
license-files = ["LICENSE.txt"]
requires-python = ">=3.10"
keywords = ["superset", "apache", "analytics", "business-intelligence", "extensions", "visualization"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",

View File

@@ -37,6 +37,13 @@ Usage:
from typing import Any, Callable, TypeVar
try:
from mcp.types import ToolAnnotations
except (
ImportError
): # MCP extras may not be installed in superset-core-only environments
ToolAnnotations = dict
# Type variable for decorated functions
F = TypeVar("F", bound=Callable[..., Any])
@@ -48,11 +55,15 @@ def tool(
description: str | None = None,
tags: list[str] | None = None,
protect: bool = True,
class_permission_name: str | None = None,
method_permission_name: str | None = None,
annotations: ToolAnnotations | None = None,
) -> Any: # Use Any to avoid mypy issues with dependency injection
"""
Decorator to register an MCP tool with optional authentication.
This decorator combines FastMCP tool registration with optional authentication.
This decorator combines FastMCP tool registration with optional authentication
and RBAC permission checking.
Can be used as:
@tool
@@ -69,6 +80,13 @@ def tool(
description: Tool description (defaults to function docstring)
tags: List of tags for categorizing the tool (defaults to empty list)
protect: Whether to require Superset authentication (defaults to True)
class_permission_name: FAB view/resource name for RBAC checking
(e.g., "Chart", "Dashboard", "SQLLab"). When set, enables
permission checking via security_manager.can_access().
method_permission_name: FAB action name (e.g., "read", "write").
Defaults to "write" if tags includes "mutate", else "read".
annotations: MCP tool annotations (title, readOnlyHint, destructiveHint, etc.)
These hints help MCP clients understand tool behavior and safety.
Returns:
Decorator function that registers and wraps the tool, or the wrapped function
@@ -90,6 +108,18 @@ def tool(
def public_tool() -> str:
'''Public tool accessible without auth'''
return "Hello world"
@tool(class_permission_name="Chart") # RBAC: requires can_read on Chart
def list_charts() -> list:
'''List charts the user can access'''
return []
@tool( # RBAC: can_write on Chart
tags=["mutate"], class_permission_name="Chart",
)
def create_chart(name: str) -> dict:
'''Create a new chart'''
return {"name": name}
"""
raise NotImplementedError(
"MCP tool decorator not initialized. "
@@ -158,4 +188,5 @@ def prompt(
__all__ = [
"tool",
"prompt",
"ToolAnnotations",
]

View File

@@ -17,20 +17,20 @@
[project]
name = "apache-superset-extensions-cli"
version = "0.1.0rc1"
version = "0.1.0rc3"
description = "Official command-line interface for building, bundling, and managing Apache Superset extensions"
readme = "README.md"
authors = [
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
]
license = { file="LICENSE.txt" }
license = "Apache-2.0"
license-files = ["LICENSE.txt"]
requires-python = ">=3.10"
keywords = ["superset", "apache", "cli", "extensions", "analytics", "business-intelligence", "development-tools"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",

View File

@@ -426,9 +426,9 @@ def bundle(ctx: click.Context, output: Path | None) -> None:
sys.exit(1)
manifest = json.loads(manifest_path.read_text())
id_ = manifest["id"]
name = manifest["name"]
version = manifest["version"]
default_filename = f"{id_}-{version}.supx"
default_filename = f"{name}-{version}.supx"
if output is None:
zip_path = Path(default_filename)
@@ -663,7 +663,7 @@ def init(
else click.confirm("Include backend?", default=True)
)
target_dir = Path.cwd() / names["id"]
target_dir = Path.cwd() / names["name"]
if target_dir.exists():
click.secho(f"❌ Directory {target_dir} already exists.", fg="red")
sys.exit(1)
@@ -686,7 +686,7 @@ def init(
click.secho("✅ Created extension.json", fg="green")
# Create .gitignore
gitignore = env.get_template(".gitignore.j2").render(ctx)
gitignore = env.get_template("gitignore.j2").render(ctx)
(target_dir / ".gitignore").write_text(gitignore)
click.secho("✅ Created .gitignore", fg="green")

View File

@@ -43,10 +43,10 @@ def test_bundle_command_creates_zip_with_default_name(
result = cli_runner.invoke(app, ["bundle"])
assert result.exit_code == 0
assert "✅ Bundle created: test-org.test-extension-1.0.0.supx" in result.output
assert "✅ Bundle created: test-extension-1.0.0.supx" in result.output
# Verify zip file was created
zip_path = isolated_filesystem / "test-org.test-extension-1.0.0.supx"
zip_path = isolated_filesystem / "test-extension-1.0.0.supx"
assert_file_exists(zip_path)
# Verify zip contents
@@ -100,7 +100,7 @@ def test_bundle_command_with_output_directory(
assert result.exit_code == 0
# Verify zip file was created in output directory
expected_path = output_dir / "test-org.test-extension-1.0.0.supx"
expected_path = output_dir / "test-extension-1.0.0.supx"
assert_file_exists(expected_path)
assert f"✅ Bundle created: {expected_path}" in result.output
@@ -193,7 +193,7 @@ def test_bundle_includes_all_files_recursively(
assert result.exit_code == 0
# Verify zip file and contents
zip_path = isolated_filesystem / "complex-org.complex-extension-2.1.0.supx"
zip_path = isolated_filesystem / "complex-extension-2.1.0.supx"
assert_file_exists(zip_path)
with zipfile.ZipFile(zip_path, "r") as zipf:

View File

@@ -48,12 +48,12 @@ def test_init_creates_extension_with_both_frontend_and_backend(
)
# Verify directory structure
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
assert_directory_exists(extension_path, "main extension directory")
expected_structure = create_test_extension_structure(
isolated_filesystem,
"test-org.test-extension",
"test-extension",
include_frontend=True,
include_backend=True,
)
@@ -74,7 +74,7 @@ def test_init_creates_extension_with_frontend_only(
assert result.exit_code == 0, f"Command failed with output: {result.output}"
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
assert_directory_exists(extension_path)
# Should have frontend directory and package.json
@@ -97,7 +97,7 @@ def test_init_creates_extension_with_backend_only(
assert result.exit_code == 0, f"Command failed with output: {result.output}"
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
assert_directory_exists(extension_path)
# Should have backend directory and pyproject.toml
@@ -120,7 +120,7 @@ def test_init_creates_extension_with_neither_frontend_nor_backend(
assert result.exit_code == 0, f"Command failed with output: {result.output}"
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
assert_directory_exists(extension_path)
# Should only have extension.json
@@ -138,8 +138,8 @@ def test_init_accepts_valid_display_name(cli_runner, isolated_filesystem):
result = cli_runner.invoke(app, ["init"], input=cli_input)
assert result.exit_code == 0, f"Should accept display name: {result.output}"
assert Path("test-org.my-awesome-extension").exists(), (
"Directory for generated composite ID should be created"
assert Path("my-awesome-extension").exists(), (
"Directory with extension name should be created"
)
@@ -152,23 +152,21 @@ def test_init_accepts_mixed_alphanumeric_name(cli_runner, isolated_filesystem):
assert result.exit_code == 0, (
f"Mixed alphanumeric display name should be valid: {result.output}"
)
assert Path("test-org.tool-123").exists(), (
"Directory for 'test-org.tool-123' should be created"
)
assert Path("tool-123").exists(), "Directory for 'tool-123' should be created"
@pytest.mark.cli
@pytest.mark.parametrize(
"display_name,expected_id",
"display_name,expected_dir",
[
("Test Extension", "test-org.test-extension"),
("My Tool v2", "test-org.my-tool-v2"),
("Dashboard Helper", "test-org.dashboard-helper"),
("Chart Builder Pro", "test-org.chart-builder-pro"),
("Test Extension", "test-extension"),
("My Tool v2", "my-tool-v2"),
("Dashboard Helper", "dashboard-helper"),
("Chart Builder Pro", "chart-builder-pro"),
],
)
def test_init_with_various_display_names(cli_runner, display_name, expected_id):
"""Test that init accepts various display names and generates proper IDs."""
def test_init_with_various_display_names(cli_runner, display_name, expected_dir):
"""Test that init accepts various display names and creates directory named after extension."""
with cli_runner.isolated_filesystem():
cli_input = f"{display_name}\n\ntest-org\n0.1.0\nApache-2.0\ny\ny\n"
result = cli_runner.invoke(app, ["init"], input=cli_input)
@@ -176,8 +174,8 @@ def test_init_with_various_display_names(cli_runner, display_name, expected_id):
assert result.exit_code == 0, (
f"Valid display name '{display_name}' was rejected: {result.output}"
)
assert Path(expected_id).exists(), (
f"Directory for '{expected_id}' was not created"
assert Path(expected_dir).exists(), (
f"Directory '{expected_dir}' was not created"
)
@@ -187,7 +185,7 @@ def test_init_fails_when_directory_already_exists(
):
"""Test that init fails gracefully when target directory already exists."""
# Create the directory first
existing_dir = isolated_filesystem / "test-org.test-extension"
existing_dir = isolated_filesystem / "test-extension"
existing_dir.mkdir()
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
@@ -204,7 +202,7 @@ def test_extension_json_content_is_correct(
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
assert result.exit_code == 0
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
extension_json_path = extension_path / "extension.json"
# Verify the JSON structure and values
@@ -238,7 +236,7 @@ def test_frontend_package_json_content_is_correct(
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
assert result.exit_code == 0
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
package_json_path = extension_path / "frontend" / "package.json"
# Verify the package.json structure and values
@@ -267,7 +265,7 @@ def test_backend_pyproject_toml_is_created(
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
assert result.exit_code == 0
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
pyproject_path = extension_path / "backend" / "pyproject.toml"
assert_file_exists(pyproject_path, "backend pyproject.toml")
@@ -305,7 +303,7 @@ def test_gitignore_content_is_correct(cli_runner, isolated_filesystem, cli_input
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
assert result.exit_code == 0
extension_path = isolated_filesystem / "test-org.test-extension"
extension_path = isolated_filesystem / "test-extension"
gitignore_path = extension_path / ".gitignore"
assert_file_exists(gitignore_path, ".gitignore")
@@ -330,7 +328,7 @@ def test_init_with_custom_version_and_license(cli_runner, isolated_filesystem):
assert result.exit_code == 0
extension_path = isolated_filesystem / "test-org.my-extension"
extension_path = isolated_filesystem / "my-extension"
extension_json_path = extension_path / "extension.json"
assert_json_content(
@@ -357,10 +355,10 @@ def test_full_init_workflow_integration(cli_runner, isolated_filesystem):
assert result.exit_code == 0
# Verify complete directory structure
extension_path = isolated_filesystem / "awesome-org.awesome-charts"
extension_path = isolated_filesystem / "awesome-charts"
expected_structure = create_test_extension_structure(
isolated_filesystem,
"awesome-org.awesome-charts",
"awesome-charts",
include_frontend=True,
include_backend=True,
)
@@ -412,7 +410,7 @@ def test_init_non_interactive_with_all_options(cli_runner, isolated_filesystem):
assert result.exit_code == 0, f"Command failed with output: {result.output}"
assert "🎉 Extension My Extension (ID: my-org.my-ext) initialized" in result.output
extension_path = isolated_filesystem / "my-org.my-ext"
extension_path = isolated_filesystem / "my-ext"
assert_directory_exists(extension_path)
assert_directory_exists(extension_path / "frontend")
assert_directory_exists(extension_path / "backend")
@@ -449,7 +447,7 @@ def test_init_frontend_only_with_cli_options(cli_runner, isolated_filesystem):
assert result.exit_code == 0, f"Command failed with output: {result.output}"
extension_path = isolated_filesystem / "frontend-org.frontend-ext"
extension_path = isolated_filesystem / "frontend-ext"
assert_directory_exists(extension_path / "frontend")
assert not (extension_path / "backend").exists()
@@ -478,7 +476,7 @@ def test_init_backend_only_with_cli_options(cli_runner, isolated_filesystem):
assert result.exit_code == 0, f"Command failed with output: {result.output}"
extension_path = isolated_filesystem / "backend-org.backend-ext"
extension_path = isolated_filesystem / "backend-ext"
assert not (extension_path / "frontend").exists()
assert_directory_exists(extension_path / "backend")
@@ -505,7 +503,7 @@ def test_init_prompts_for_missing_options(cli_runner, isolated_filesystem):
assert result.exit_code == 0, f"Command failed with output: {result.output}"
extension_path = isolated_filesystem / "default-org.default-ext"
extension_path = isolated_filesystem / "default-ext"
extension_json = load_json_file(extension_path / "extension.json")
assert extension_json["version"] == "0.1.0"
assert extension_json["license"] == "Apache-2.0"

View File

@@ -242,6 +242,7 @@
"eslint-plugin-import": "^2.32.0",
"eslint-plugin-jest-dom": "^5.5.0",
"eslint-plugin-lodash": "^7.4.0",
"eslint-plugin-no-only-tests": "^3.3.0",
"eslint-plugin-prettier": "^5.5.5",
"eslint-plugin-react-prefer-function-component": "^5.0.0",
"eslint-plugin-react-you-might-not-need-an-effect": "^0.9.1",
@@ -279,7 +280,6 @@
"style-loader": "^4.0.0",
"swc-loader": "^0.2.7",
"terser-webpack-plugin": "^5.3.17",
"thread-loader": "^4.0.4",
"ts-jest": "^29.4.6",
"tscw-config": "^1.1.2",
"tsx": "^4.21.0",
@@ -24351,6 +24351,16 @@
"eslint": ">=2"
}
},
"node_modules/eslint-plugin-no-only-tests": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-no-only-tests/-/eslint-plugin-no-only-tests-3.3.0.tgz",
"integrity": "sha512-brcKcxGnISN2CcVhXJ/kEQlNa0MEfGRtwKtWA16SkqXHKitaKIMrfemJKLKX1YqDU5C/5JY3PvZXd5jEW04e0Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=5.0.0"
}
},
"node_modules/eslint-plugin-prettier": {
"version": "5.5.5",
"resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.5.5.tgz",
@@ -46715,29 +46725,6 @@
"tslib": "^2"
}
},
"node_modules/thread-loader": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/thread-loader/-/thread-loader-4.0.4.tgz",
"integrity": "sha512-tXagu6Hivd03wB2tiS1bqvw345sc7mKei32EgpYpq31ZLes9FN0mEK2nKzXLRFgwt3PsBB0E/MZDp159rDoqwg==",
"dev": true,
"license": "MIT",
"dependencies": {
"json-parse-better-errors": "^1.0.2",
"loader-runner": "^4.1.0",
"neo-async": "^2.6.2",
"schema-utils": "^4.2.0"
},
"engines": {
"node": ">= 16.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
},
"peerDependencies": {
"webpack": "^5.0.0"
}
},
"node_modules/throttle-debounce": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-5.0.2.tgz",
@@ -51390,8 +51377,8 @@
},
"packages/superset-core": {
"name": "@apache-superset/core",
"version": "0.1.0-rc1",
"license": "ISC",
"version": "0.1.0-rc3",
"license": "Apache-2.0",
"devDependencies": {
"@babel/cli": "^7.28.6",
"@babel/core": "^7.29.0",
@@ -51434,7 +51421,8 @@
"dependencies": {
"@apache-superset/core": "*",
"@types/react": "*",
"lodash": "^4.17.23"
"lodash": "^4.17.23",
"tinycolor2": "*"
},
"peerDependencies": {
"@ant-design/icons": "^5.2.6",

View File

@@ -1,6 +1,6 @@
{
"name": "superset",
"version": "0.0.0-dev",
"version": "6.1.0",
"description": "Superset is a data exploration platform designed to be visual, intuitive, and interactive.",
"keywords": [
"big",
@@ -323,6 +323,7 @@
"eslint-plugin-import": "^2.32.0",
"eslint-plugin-jest-dom": "^5.5.0",
"eslint-plugin-lodash": "^7.4.0",
"eslint-plugin-no-only-tests": "^3.3.0",
"eslint-plugin-prettier": "^5.5.5",
"eslint-plugin-react-prefer-function-component": "^5.0.0",
"eslint-plugin-react-you-might-not-need-an-effect": "^0.9.1",
@@ -360,7 +361,6 @@
"style-loader": "^4.0.0",
"swc-loader": "^0.2.7",
"terser-webpack-plugin": "^5.3.17",
"thread-loader": "^4.0.4",
"ts-jest": "^29.4.6",
"tscw-config": "^1.1.2",
"tsx": "^4.21.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@apache-superset/core",
"version": "0.1.0-rc1",
"version": "0.1.0-rc3",
"description": "This package contains UI elements, APIs, and utility functions used by Superset.",
"sideEffects": false,
"main": "lib/index.js",
@@ -70,8 +70,8 @@
"files": [
"lib"
],
"author": "",
"license": "ISC",
"author": "Apache Software Foundation",
"license": "Apache-2.0",
"devDependencies": {
"@babel/cli": "^7.28.6",
"@babel/core": "^7.29.0",

View File

@@ -369,6 +369,28 @@ export interface EditorProps {
theme?: SupersetTheme;
}
/**
* A single text change expressed as an offset-based replacement.
*/
export interface ContentChange {
/** Character offset in the document where the replaced range starts */
rangeOffset: number;
/** Length in characters of the replaced range (0 for pure insertions) */
rangeLength: number;
/** Text inserted at rangeOffset (empty string for pure deletions) */
text: string;
}
/**
* Payload delivered to `onDidChangeContent` listeners.
*/
export interface ContentChangeEvent {
/** Returns the full current content of the editor */
getValue(): string;
/** The individual changes that occurred in this event */
changes: ReadonlyArray<ContentChange>;
}
/**
* Imperative API for controlling the editor programmatically.
*
@@ -492,6 +514,27 @@ export interface EditorHandle {
* - CodeMirror: editor.requestMeasure()
*/
resize(): void;
/**
* Subscribe to content changes in the editor.
*
* The listener receives a {@link ContentChangeEvent} with:
* - `getValue()` — lazy accessor for the full content (call only when needed
* to avoid unnecessary O(n) string allocation on every keystroke)
* - `changes` — the individual edits that occurred, as offset-based replacements
*
* @param listener Called with a ContentChangeEvent on every change
* @param thisArgs Optional `this` context for the listener
* @returns A Disposable that unsubscribes the listener when disposed
*
* @example
* const disposable = editor.onDidChangeContent(e => {
* setStatements(parseStatements(e.getValue()));
* });
* // Later, to unsubscribe:
* disposable.dispose();
*/
onDidChangeContent: Event<ContentChangeEvent>;
}
/**

View File

@@ -252,6 +252,22 @@ export interface QueryResult {
*/
export declare const getActivePanel: () => Panel;
/**
* Switches the active panel in the SQL Lab south pane.
* Built-in panel IDs are 'Results' and 'History'.
* Pinned table panels use the table's ID as their panel ID.
*
* @param panelId The ID of the panel to activate
* @returns Promise that resolves when the panel is activated
*
* @example
* ```typescript
* // Focus the Results panel after running a query
* await setActivePanel('Results');
* ```
*/
export declare function setActivePanel(panelId: string): Promise<void>;
/**
* Gets the currently active tab in SQL Lab.
*

View File

@@ -29,14 +29,20 @@ import '@fontsource/ibm-plex-mono/600.css';
/* eslint-enable import/extensions */
import { css, useTheme, Global } from '@emotion/react';
import { useThemeMode } from './utils/themeUtils';
export const GlobalStyles = () => {
const theme = useTheme();
const isDark = useThemeMode();
return (
<Global
key={`global-${theme.colorLink}`}
styles={css`
// SPA
html {
color-scheme: ${isDark ? 'dark' : 'light'};
}
html,
body,
#app {

View File

@@ -839,3 +839,73 @@ test('Theme includes both echartsOptionsOverrides and echartsOptionsOverridesByC
},
});
});
test('colorLink derives from colorPrimary when merging with base theme', () => {
const baseTheme: AnyThemeConfig = {
token: {
colorPrimary: '#2893B3',
colorLink: '#2893B3',
colorInfo: '#66bcfe',
},
};
const userTheme: AnyThemeConfig = {
token: {
colorPrimary: '#f759ab',
},
};
const theme = Theme.fromConfig(userTheme, baseTheme);
expect(theme.theme.colorPrimary).toBe('#f759ab');
expect(theme.theme.colorLink).toBe('#f759ab');
expect(theme.theme.colorInfo).toBe('#66bcfe');
});
test('colorLink is not overridden when user explicitly sets it', () => {
const baseTheme: AnyThemeConfig = {
token: {
colorPrimary: '#2893B3',
colorLink: '#2893B3',
},
};
const userTheme: AnyThemeConfig = {
token: {
colorPrimary: '#f759ab',
colorLink: '#ff0000',
},
};
const theme = Theme.fromConfig(userTheme, baseTheme);
expect(theme.theme.colorPrimary).toBe('#f759ab');
expect(theme.theme.colorLink).toBe('#ff0000');
});
test('colorLink derives from colorPrimary in setConfig when not explicitly set', () => {
const theme = Theme.fromConfig();
theme.setConfig({
token: {
colorPrimary: '#f759ab',
},
});
expect(theme.theme.colorPrimary).toBe('#f759ab');
expect(theme.theme.colorLink).toBe('#f759ab');
});
test('colorLink is preserved in setConfig when explicitly set', () => {
const theme = Theme.fromConfig();
theme.setConfig({
token: {
colorPrimary: '#f759ab',
colorLink: '#ff0000',
},
});
expect(theme.theme.colorPrimary).toBe('#f759ab');
expect(theme.theme.colorLink).toBe('#ff0000');
});

View File

@@ -66,6 +66,17 @@ export class Theme {
mergedConfig = mergeWith({}, baseTheme, config, (objValue, srcValue) =>
Array.isArray(srcValue) ? srcValue : undefined,
);
// In Ant Design v5, colorLink derives from colorInfo, not colorPrimary.
// Currently we expect links to follow the brand/primary color. When the user
// overrides colorPrimary without explicitly setting colorLink, update the
// merged colorLink so links match the new primary palette.
if (config.token?.colorPrimary && !config.token?.colorLink) {
const mToken = mergedConfig?.token;
if (mToken) {
mToken.colorLink = mToken.colorPrimary;
}
}
} else if (baseTheme && !config) {
mergedConfig = baseTheme;
}
@@ -98,6 +109,10 @@ export class Theme {
setConfig(config: AnyThemeConfig): void {
const antdConfig = normalizeThemeConfig(config);
if (antdConfig.token?.colorPrimary && !antdConfig.token?.colorLink) {
antdConfig.token.colorLink = antdConfig.token.colorPrimary;
}
// First phase: Let Ant Design compute the tokens
const tokens = Theme.getFilteredAntdTheme(antdConfig);

View File

@@ -168,6 +168,55 @@ export interface SupersetSpecificTokens {
* Defaults to colorPrimaryBgHover if not specified.
*/
colorEditorSelection?: string;
// Secondary button tokens (Superset-specific)
// Ant Design's filled variant has no component tokens, so we provide our own.
// These fallback to colorPrimary* derived tokens when not set.
/**
* Text color for secondary buttons.
* Fallback: colorPrimary
*/
buttonSecondaryColor?: string;
/**
* Background color for secondary buttons.
* Fallback: colorPrimaryBg
*/
buttonSecondaryBg?: string;
/**
* Border color for secondary buttons.
* Fallback: transparent
*/
buttonSecondaryBorderColor?: string;
/**
* Text color for secondary buttons on hover.
* Fallback: colorPrimary
*/
buttonSecondaryHoverColor?: string;
/**
* Background color for secondary buttons on hover.
* Fallback: colorPrimaryBgHover
*/
buttonSecondaryHoverBg?: string;
/**
* Border color for secondary buttons on hover.
* Fallback: transparent
*/
buttonSecondaryHoverBorderColor?: string;
/**
* Text color for secondary buttons when active/pressed.
* Fallback: colorPrimary
*/
buttonSecondaryActiveColor?: string;
/**
* Background color for secondary buttons when active/pressed.
* Fallback: colorPrimaryBorder
*/
buttonSecondaryActiveBg?: string;
/**
* Border color for secondary buttons when active/pressed.
* Fallback: transparent
*/
buttonSecondaryActiveBorderColor?: string;
}
/**
@@ -426,6 +475,7 @@ export interface ThemeControllerOptions {
canUpdateTheme?: () => boolean;
canUpdateMode?: () => boolean;
isGlobalContext?: boolean;
initialMode?: ThemeMode;
}
export interface ThemeContextType {

View File

@@ -26,7 +26,8 @@
"dependencies": {
"@apache-superset/core": "*",
"@types/react": "*",
"lodash": "^4.17.23"
"lodash": "^4.17.23",
"tinycolor2": "*"
},
"peerDependencies": {
"@ant-design/icons": "^5.2.6",

View File

@@ -59,7 +59,7 @@ export function ColumnOption({
const type = hasExpression ? 'expression' : type_generic;
const [tooltipText, setTooltipText] = useState<ReactNode>(column.column_name);
const [columnTypeTooltipText, setcolumnTypeTooltipText] = useState<ReactNode>(
column.type,
getColumnTypeTooltipNode(column),
);
useLayoutEffect(() => {

View File

@@ -21,7 +21,11 @@ import { styled, css } from '@apache-superset/core/theme';
export const ControlSubSectionHeader = styled.div`
${({ theme }) => css`
font-weight: ${theme.fontWeightStrong};
margin-top: ${theme.sizeUnit * 3}px;
margin-bottom: ${theme.sizeUnit}px;
font-size: ${theme.fontSizeSM}px;
text-transform: uppercase;
letter-spacing: 0.05em;
color: ${theme.colorTextSecondary};
`}
`;

View File

@@ -20,6 +20,7 @@ import { ReactNode, RefObject } from 'react';
import { t } from '@apache-superset/core/translation';
import { css, styled } from '@apache-superset/core/theme';
import { GenericDataType } from '@apache-superset/core/common';
import { ColumnMeta, Metric } from '@superset-ui/chart-controls';
const TooltipSectionWrapper = styled.div`
@@ -64,11 +65,29 @@ export const getColumnLabelText = (column: ColumnMeta): string =>
column.verbose_name || column.column_name;
export const getColumnTypeTooltipNode = (column: ColumnMeta): ReactNode => {
if (!column.type) {
const rawType = typeof column.type === 'string' ? column.type.trim() : '';
let typeLabel: ReactNode | null = null;
if (rawType && rawType.toLowerCase() !== 'column') {
typeLabel = rawType;
} else if (typeof column.type_generic === 'number') {
if (column.type_generic === GenericDataType.String) {
typeLabel = t('string');
} else if (column.type_generic === GenericDataType.Numeric) {
typeLabel = t('numeric');
} else if (column.type_generic === GenericDataType.Temporal) {
typeLabel = t('timestamp');
} else if (column.type_generic === GenericDataType.Boolean) {
typeLabel = t('boolean');
}
}
if (!typeLabel) {
return null;
}
return <TooltipSection label={t('Column type')} text={column.type} />;
return <TooltipSection label={t('Column type')} text={typeLabel} />;
};
export const getColumnTooltipNode = (

View File

@@ -28,7 +28,9 @@ export const getTimeOffset = (
// offset is represented as <offset>, group by list
series.name.includes(`${timeOffset},`) ||
// offset is represented as <metric>__<offset>
series.name.includes(`__${timeOffset}`),
series.name.includes(`__${timeOffset}`) ||
// offset is represented as <metric>, <offset>
series.name.includes(`, ${timeOffset}`),
);
export const hasTimeOffset = (
@@ -45,10 +47,14 @@ export const getOriginalSeries = (
): string => {
let result = seriesName;
timeCompare.forEach(compare => {
// offset is represented as <offset>, group by list
// offset in the middle: <metric>, <offset>, <dimension>
result = result.replace(`, ${compare},`, ',');
// offset at start: <offset>, <dimension>
result = result.replace(`${compare},`, '');
// offset is represented as <metric>__<offset>
// offset with double underscore: <metric>__<offset>
result = result.replace(`__${compare}`, '');
// offset at end: <metric>, <offset>
result = result.replace(`, ${compare}`, '');
});
return result.trim();
};

View File

@@ -25,30 +25,17 @@ export const matrixifyEnableSection: ControlPanelSectionConfig = {
controlSetRows: [
[
{
name: 'matrixify_enable_horizontal_layout',
name: 'matrixify_enable',
config: {
type: 'CheckboxControl',
label: t('Enable horizontal layout (columns)'),
description: t(
'Create matrix columns by placing charts side-by-side',
),
default: false,
renderTrigger: true,
},
},
],
[
{
name: 'matrixify_enable_vertical_layout',
config: {
type: 'CheckboxControl',
label: t('Enable vertical layout (rows)'),
description: t('Create matrix rows by stacking charts vertically'),
type: 'SwitchControl',
label: t('Enable matrixify'),
default: false,
renderTrigger: true,
},
},
],
['matrixify_mode_columns'],
['matrixify_mode_rows'],
],
tabOverride: 'matrixify',
};
@@ -57,8 +44,11 @@ export const matrixifySection: ControlPanelSectionConfig = {
label: t('Cell layout & styling'),
expanded: false,
visibility: ({ controls }) =>
controls?.matrixify_enable_vertical_layout?.value === true ||
controls?.matrixify_enable_horizontal_layout?.value === true,
controls?.matrixify_enable?.value === true &&
(controls?.matrixify_mode_rows?.value === 'metrics' ||
controls?.matrixify_mode_rows?.value === 'dimensions' ||
controls?.matrixify_mode_columns?.value === 'metrics' ||
controls?.matrixify_mode_columns?.value === 'dimensions'),
controlSetRows: [
[
{
@@ -119,13 +109,13 @@ export const matrixifySection: ControlPanelSectionConfig = {
};
export const matrixifyRowSection: ControlPanelSectionConfig = {
label: t('Vertical layout (rows)'),
expanded: false,
visibility: ({ controls }) =>
controls?.matrixify_enable_vertical_layout?.value === true,
controls?.matrixify_enable?.value === true &&
(controls?.matrixify_mode_rows?.value === 'metrics' ||
controls?.matrixify_mode_rows?.value === 'dimensions'),
controlSetRows: [
['matrixify_show_row_labels'],
['matrixify_mode_rows'],
['matrixify_rows'],
['matrixify_dimension_rows'],
['matrixify_dimension_selection_mode_rows'],
@@ -137,13 +127,13 @@ export const matrixifyRowSection: ControlPanelSectionConfig = {
};
export const matrixifyColumnSection: ControlPanelSectionConfig = {
label: t('Horizontal layout (columns)'),
expanded: false,
visibility: ({ controls }) =>
controls?.matrixify_enable_horizontal_layout?.value === true,
controls?.matrixify_enable?.value === true &&
(controls?.matrixify_mode_columns?.value === 'metrics' ||
controls?.matrixify_mode_columns?.value === 'dimensions'),
controlSetRows: [
['matrixify_show_column_headers'],
['matrixify_mode_columns'],
['matrixify_columns'],
['matrixify_dimension_columns'],
['matrixify_dimension_selection_mode_columns'],

View File

@@ -0,0 +1,139 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { isMatrixifyVisible } from './matrixifyControls';
/**
* Helper to build a controls object matching the shape used by
* control panel visibility callbacks.
*/
function makeControls(
  overrides: Record<string, unknown> = {},
): Record<string, { value: unknown }> {
  // Baseline control values mirroring a pristine (matrixify-off) form_data.
  const baseValues: Record<string, unknown> = {
    matrixify_enable: false,
    matrixify_mode_rows: 'disabled',
    matrixify_mode_columns: 'disabled',
    matrixify_dimension_selection_mode_rows: 'members',
    matrixify_dimension_selection_mode_columns: 'members',
  };
  // Wrap each raw value in the `{ value }` shape that visibility
  // callbacks read from the controls state.
  const wrapped: Record<string, { value: unknown }> = {};
  for (const [name, value] of Object.entries({ ...baseValues, ...overrides })) {
    wrapped[name] = { value };
  }
  return wrapped;
}
// ── matrixify_enable guard ──────────────────────────────────────────
test('returns false when matrixify_enable is false, even with active axis modes', () => {
const controls = makeControls({
matrixify_enable: false,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'dimensions',
});
expect(isMatrixifyVisible(controls, 'rows')).toBe(false);
expect(isMatrixifyVisible(controls, 'columns')).toBe(false);
});
test('returns false when matrixify_enable is undefined (old form_data without the field)', () => {
const controls = makeControls({
matrixify_mode_rows: 'metrics',
});
delete (controls as any).matrixify_enable;
expect(isMatrixifyVisible(controls, 'rows')).toBe(false);
});
test('returns false when controls object is undefined', () => {
expect(isMatrixifyVisible(undefined, 'rows')).toBe(false);
});
// ── axis mode checks ────────────────────────────────────────────────
test('returns false when axis mode is disabled', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_rows: 'disabled',
});
expect(isMatrixifyVisible(controls, 'rows')).toBe(false);
});
test('returns true when matrixify_enable is true and axis mode is metrics', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
});
expect(isMatrixifyVisible(controls, 'rows')).toBe(true);
});
test('returns true when matrixify_enable is true and axis mode is dimensions', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_columns: 'dimensions',
});
expect(isMatrixifyVisible(controls, 'columns')).toBe(true);
});
// ── mode filter ─────────────────────────────────────────────────────
test('returns false when mode filter does not match axis value', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
});
expect(isMatrixifyVisible(controls, 'rows', 'dimensions')).toBe(false);
});
test('returns true when mode filter matches axis value', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
});
expect(isMatrixifyVisible(controls, 'rows', 'dimensions')).toBe(true);
});
// ── selectionMode filter ────────────────────────────────────────────
test('returns true when selectionMode matches', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'topn',
});
expect(isMatrixifyVisible(controls, 'rows', 'dimensions', 'topn')).toBe(true);
});
test('returns false when selectionMode does not match', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'members',
});
expect(isMatrixifyVisible(controls, 'rows', 'dimensions', 'topn')).toBe(
false,
);
});
test('ignores selectionMode filter when mode is metrics', () => {
const controls = makeControls({
matrixify_enable: true,
matrixify_mode_columns: 'metrics',
});
// selectionMode only applies to dimensions mode, should be ignored
expect(isMatrixifyVisible(controls, 'columns', 'metrics', 'topn')).toBe(true);
});

View File

@@ -34,19 +34,20 @@ const isMatrixifyVisible = (
controls: any,
axis: 'rows' | 'columns',
mode?: 'metrics' | 'dimensions',
selectionMode?: 'members' | 'topn',
selectionMode?: 'members' | 'topn' | 'all',
) => {
const layoutControl = `matrixify_enable_${axis === 'rows' ? 'vertical' : 'horizontal'}_layout`;
if (controls?.matrixify_enable?.value !== true) return false;
const modeControl = `matrixify_mode_${axis}`;
const selectionModeControl = `matrixify_dimension_selection_mode_${axis}`;
const isLayoutEnabled = controls?.[layoutControl]?.value === true;
const modeValue = controls?.[modeControl]?.value;
const isLayoutEnabled = modeValue === 'metrics' || modeValue === 'dimensions';
if (!isLayoutEnabled) return false;
if (mode) {
const isModeMatch = controls?.[modeControl]?.value === mode;
if (!isModeMatch) return false;
if (modeValue !== mode) return false;
if (selectionMode && mode === 'dimensions') {
return controls?.[selectionModeControl]?.value === selectionMode;
@@ -66,22 +67,20 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
matrixifyControls[`matrixify_mode_${axis}`] = {
type: 'RadioButtonControl',
label: t(`Metrics / Dimensions`),
default: axis === 'columns' ? 'metrics' : 'dimensions',
default: 'disabled',
renderTrigger: true,
tabOverride: 'matrixify',
visibility: ({ controls }) => isMatrixifyVisible(controls, axis),
mapStateToProps: ({ controls }) => {
const otherAxisControlName = `matrixify_mode_${otherAxis}`;
const otherAxisValue =
controls?.[otherAxisControlName]?.value ??
(otherAxis === 'columns' ? 'metrics' : 'dimensions');
controls?.[otherAxisControlName]?.value ?? 'disabled';
const isMetricsDisabled = otherAxisValue === 'metrics';
return {
options: [
{ value: 'disabled', label: t('Disabled') },
{
value: 'metrics',
label: t('Metrics'),
@@ -92,7 +91,7 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
)
: undefined,
},
{ value: 'dimensions', label: t('Dimension members') },
{ value: 'dimensions', label: t('Dimensions') },
],
};
},
@@ -125,6 +124,7 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
`matrixify_topn_metric_${axis}`,
`matrixify_topn_order_${axis}`,
`matrixify_dimension_selection_mode_${axis}`,
`matrixify_all_sort_by_${axis}`,
];
return fieldsToCheck.some(
@@ -161,7 +161,10 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
selectionMode,
topNMetric: getValue(`matrixify_topn_metric_${axis}`),
topNValue: getValue(`matrixify_topn_value_${axis}`),
topNOrder: getValue(`matrixify_topn_order_${axis}`),
topNOrder: getValue(`matrixify_topn_order_${axis}`, true)
? 'DESC'
: 'ASC',
allSortBy: getValue(`matrixify_all_sort_by_${axis}`, 'a_to_z'),
formData: form_data,
validators,
};
@@ -187,19 +190,24 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
visibility: () => false,
};
// Add selection mode control (Dimension Members vs TopN)
// Add selection mode control (Dimension Members / Top N / All)
matrixifyControls[`matrixify_dimension_selection_mode_${axis}`] = {
type: 'RadioButtonControl',
type: 'VerticalRadioControl',
label: t(`Selection method`),
default: 'members',
options: [
['members', t('Dimension members')],
['topn', t('Top n')],
],
renderTrigger: true,
tabOverride: 'matrixify',
visibility: ({ controls }) =>
isMatrixifyVisible(controls, axis, 'dimensions'),
options: [
{ value: 'members', label: t('Dimension members') },
{ value: 'topn', label: t('Top n') },
{
value: 'all',
label: t('All dimensions'),
tooltip: t('Uses the first 25 values if the dimension has more.'),
},
],
};
// TopN controls
@@ -236,15 +244,15 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
description: t(`Metric to use for ordering Top N values`),
tabOverride: 'matrixify',
visibility: ({ controls }) =>
isMatrixifyVisible(controls, axis, 'dimensions', 'topn'),
isMatrixifyVisible(controls, axis, 'dimensions', 'topn') ||
(isMatrixifyVisible(controls, axis, 'dimensions', 'all') &&
controls?.[`matrixify_all_sort_by_${axis}`]?.value === 'metric'),
mapStateToProps: (state, controlState) => {
const { controls, datasource } = state;
const isVisible = isMatrixifyVisible(
controls,
axis,
'dimensions',
'topn',
);
const isVisible =
isMatrixifyVisible(controls, axis, 'dimensions', 'topn') ||
(isMatrixifyVisible(controls, axis, 'dimensions', 'all') &&
controls?.[`matrixify_all_sort_by_${axis}`]?.value === 'metric');
const originalProps =
dndAdhocMetricControl.mapStateToProps?.(state, controlState) || {};
@@ -261,17 +269,31 @@ const matrixifyControls: Record<string, SharedControlConfig<any>> = {};
};
matrixifyControls[`matrixify_topn_order_${axis}`] = {
type: 'RadioButtonControl',
label: t(`Sort order`),
default: 'desc',
options: [
['asc', t('Ascending')],
['desc', t('Descending')],
],
type: 'CheckboxControl',
label: t('Sort descending'),
default: true,
renderTrigger: true,
tabOverride: 'matrixify',
visibility: ({ controls }) =>
isMatrixifyVisible(controls, axis, 'dimensions', 'topn'),
isMatrixifyVisible(controls, axis, 'dimensions', 'topn') ||
(isMatrixifyVisible(controls, axis, 'dimensions', 'all') &&
controls?.[`matrixify_all_sort_by_${axis}`]?.value === 'metric'),
};
matrixifyControls[`matrixify_all_sort_by_${axis}`] = {
type: 'SelectControl',
label: t('Sort by'),
default: 'a_to_z',
clearable: false,
renderTrigger: true,
tabOverride: 'matrixify',
visibility: ({ controls }) =>
isMatrixifyVisible(controls, axis, 'dimensions', 'all'),
choices: [
['a_to_z', t('A-Z')],
['z_to_a', t('Z-A')],
['metric', t('Metric')],
],
};
});
@@ -317,24 +339,6 @@ matrixifyControls.matrixify_charts_per_row = {
!controls?.matrixify_fit_columns_dynamically?.value,
};
matrixifyControls.matrixify_enable_vertical_layout = {
type: 'CheckboxControl',
label: t('Enable vertical layout (rows)'),
description: t('Create matrix rows by stacking charts vertically'),
default: false,
renderTrigger: true,
tabOverride: 'matrixify',
};
matrixifyControls.matrixify_enable_horizontal_layout = {
type: 'CheckboxControl',
label: t('Enable horizontal layout (columns)'),
description: t('Create matrix columns by placing charts side-by-side'),
default: false,
renderTrigger: true,
tabOverride: 'matrixify',
};
// Cell title control for Matrixify
matrixifyControls.matrixify_cell_title_template = {
type: 'TextControl',
@@ -345,8 +349,8 @@ matrixifyControls.matrixify_cell_title_template = {
default: '',
renderTrigger: true,
visibility: ({ controls }) =>
controls?.matrixify_enable_vertical_layout?.value === true ||
controls?.matrixify_enable_horizontal_layout?.value === true,
isMatrixifyVisible(controls, 'rows') ||
isMatrixifyVisible(controls, 'columns'),
};
// Matrix display controls
@@ -357,8 +361,7 @@ matrixifyControls.matrixify_show_row_labels = {
default: true,
renderTrigger: true,
tabOverride: 'matrixify',
visibility: ({ controls }) =>
controls?.matrixify_enable_vertical_layout?.value === true,
visibility: ({ controls }) => isMatrixifyVisible(controls, 'rows'),
};
matrixifyControls.matrixify_show_column_headers = {
@@ -368,8 +371,7 @@ matrixifyControls.matrixify_show_column_headers = {
default: true,
renderTrigger: true,
tabOverride: 'matrixify',
visibility: ({ controls }) =>
controls?.matrixify_enable_horizontal_layout?.value === true,
visibility: ({ controls }) => isMatrixifyVisible(controls, 'columns'),
};
export { matrixifyControls };
export { matrixifyControls, isMatrixifyVisible };

View File

@@ -507,6 +507,11 @@ export type ColorFormatters = {
) => string | undefined;
}[];
export type ResolvedColorFormatterResult = {
backgroundColor?: string;
color?: string;
};
export default {};
export function isColumnMeta(column: AnyDict): column is ColumnMeta {

View File

@@ -20,11 +20,13 @@ import memoizeOne from 'memoize-one';
import { isString, isBoolean } from 'lodash';
import { isBlank } from '@apache-superset/core/utils';
import { addAlpha, DataRecord } from '@superset-ui/core';
import tinycolor from 'tinycolor2';
import {
ColorFormatters,
Comparator,
ConditionalFormattingConfig,
MultipleValueComparators,
ResolvedColorFormatterResult,
} from '../types';
export const round = (num: number, precision = 0) =>
@@ -33,6 +35,11 @@ export const round = (num: number, precision = 0) =>
const MIN_OPACITY_BOUNDED = 0.05;
const MIN_OPACITY_UNBOUNDED = 0;
const MAX_OPACITY = 1;
const READABLE_TEXT_COLORS = [
{ r: 0, g: 0, b: 0 },
{ r: 255, g: 255, b: 255 },
];
export const getOpacity = (
value: number | string | boolean | null,
cutoffPoint: number | string,
@@ -325,3 +332,59 @@ export const getColorFormatters = memoizeOne(
[],
) ?? [],
);
export const getReadableTextColor = (
backgroundColor: string | undefined,
surfaceColor: string,
): string | undefined => {
if (!backgroundColor) {
return undefined;
}
const background = tinycolor(backgroundColor);
const surface = tinycolor(surfaceColor);
if (!background.isValid() || !surface.isValid()) {
return undefined;
}
const { r: bgR, g: bgG, b: bgB, a: bgAlpha } = background.toRgb();
const { r: surfaceR, g: surfaceG, b: surfaceB } = surface.toRgb();
const alpha = bgAlpha;
const compositeColor = tinycolor({
r: bgR * alpha + surfaceR * (1 - alpha),
g: bgG * alpha + surfaceG * (1 - alpha),
b: bgB * alpha + surfaceB * (1 - alpha),
});
return tinycolor
.mostReadable(compositeColor, READABLE_TEXT_COLORS, {
includeFallbackColors: true,
level: 'AA',
size: 'small',
})
.toRgbString();
};
export const getNormalizedTextColor = (
color: string | undefined,
): string | undefined => {
if (!color) {
return undefined;
}
const parsedColor = tinycolor(color);
if (!parsedColor.isValid()) {
return color;
}
return parsedColor.setAlpha(1).toRgbString();
};
export const getTextColorForBackground = (
result: ResolvedColorFormatterResult,
surfaceColor: string,
): string | undefined =>
getNormalizedTextColor(result.color) ??
getReadableTextColor(result.backgroundColor, surfaceColor);

View File

@@ -24,6 +24,7 @@ import {
getMetricTooltipNode,
getColumnTypeTooltipNode,
} from '../../src/components/labelUtils';
import { GenericDataType } from '@apache-superset/core/common';
test("should get column name when column doesn't have verbose_name", () => {
expect(
@@ -89,6 +90,24 @@ test('should get column datatype rendered as tooltip when column has a type', ()
expect(screen.getByText('text')).toBeVisible();
});
test('should fall back to generic data type label when type is "column"', () => {
render(
<>
{getColumnTypeTooltipNode({
id: 123,
column_name: 'column name',
verbose_name: '',
description: '',
type: 'column',
type_generic: GenericDataType.String,
})}
</>,
);
expect(screen.getByText('Column type')).toBeVisible();
expect(screen.getByText('string')).toBeVisible();
});
test('should get column name, verbose name and description when it has a verbose name', () => {
const ref = { current: { scrollWidth: 100, clientWidth: 100 } };
render(

View File

@@ -16,15 +16,101 @@
* specific language governing permissions and limitations
* under the License.
*/
import { getOriginalSeries } from '@superset-ui/chart-controls';
import {
getOriginalSeries,
getTimeOffset,
hasTimeOffset,
} from '@superset-ui/chart-controls';
test('returns the series name when time compare is empty', () => {
test('getOriginalSeries returns the series name when time compare is empty', () => {
const seriesName = 'sum';
expect(getOriginalSeries(seriesName, [])).toEqual(seriesName);
});
test('returns the original series name', () => {
test('getOriginalSeries returns the original series name with __ pattern', () => {
const seriesName = 'sum__1_month_ago';
const timeCompare = ['1_month_ago'];
expect(getOriginalSeries(seriesName, timeCompare)).toEqual('sum');
});
test('getOriginalSeries returns the original series name with <offset>, pattern', () => {
const seriesName = '1 year ago, groupby_value';
const timeCompare = ['1 year ago'];
expect(getOriginalSeries(seriesName, timeCompare)).toEqual('groupby_value');
});
test('getOriginalSeries returns the original series name with , <offset> pattern', () => {
const seriesName = 'AVG(price_each), 1 year ago';
const timeCompare = ['1 year ago'];
expect(getOriginalSeries(seriesName, timeCompare)).toEqual('AVG(price_each)');
});
test('getOriginalSeries handles multiple time compares', () => {
const seriesName = 'count, 1 year ago';
const timeCompare = ['1 month ago', '1 year ago'];
expect(getOriginalSeries(seriesName, timeCompare)).toEqual('count');
});
test('getOriginalSeries strips offset in the middle with dimension', () => {
const seriesName = 'SUM(sales), 28 days ago, Medium';
const timeCompare = ['28 days ago'];
expect(getOriginalSeries(seriesName, timeCompare)).toEqual(
'SUM(sales), Medium',
);
});
test('getOriginalSeries strips offset in the middle with multiple dimensions', () => {
const seriesName = 'SUM(sales), 1 year ago, Medium, 11';
const timeCompare = ['1 year ago'];
expect(getOriginalSeries(seriesName, timeCompare)).toEqual(
'SUM(sales), Medium, 11',
);
});
test('getTimeOffset returns undefined when no time offset pattern matches', () => {
const series = { name: 'count' };
const timeCompare = ['1 year ago'];
expect(getTimeOffset(series, timeCompare)).toBeUndefined();
});
test('getTimeOffset detects __ pattern', () => {
const series = { name: 'count__1 year ago' };
const timeCompare = ['1 year ago'];
expect(getTimeOffset(series, timeCompare)).toEqual('1 year ago');
});
test('getTimeOffset detects <offset>, pattern', () => {
const series = { name: '1 year ago, groupby_value' };
const timeCompare = ['1 year ago'];
expect(getTimeOffset(series, timeCompare)).toEqual('1 year ago');
});
test('getTimeOffset detects , <offset> pattern', () => {
const series = { name: 'AVG(price_each), 1 year ago' };
const timeCompare = ['1 year ago'];
expect(getTimeOffset(series, timeCompare)).toEqual('1 year ago');
});
test('getTimeOffset detects , <offset>, pattern (offset in middle)', () => {
const series = { name: 'SUM(sales), 28 days ago, Medium' };
const timeCompare = ['28 days ago'];
expect(getTimeOffset(series, timeCompare)).toEqual('28 days ago');
});
test('hasTimeOffset returns false for original series', () => {
const series = { name: 'count' };
const timeCompare = ['1 year ago'];
expect(hasTimeOffset(series, timeCompare)).toBe(false);
});
test('hasTimeOffset returns true for derived series with , <offset> pattern', () => {
const series = { name: 'AVG(price_each), 1 year ago' };
const timeCompare = ['1 year ago'];
expect(hasTimeOffset(series, timeCompare)).toBe(true);
});
test('hasTimeOffset returns false when series name is not a string', () => {
const series = { name: 123 };
const timeCompare = ['1 year ago'];
expect(hasTimeOffset(series, timeCompare)).toBe(false);
});

View File

@@ -24,6 +24,11 @@ import {
getColorFormatters,
getColorFunction,
} from '../../src';
import {
getReadableTextColor,
getNormalizedTextColor,
getTextColorForBackground,
} from '../../src/utils/getColorFormatters';
configure();
const mockData = [
@@ -107,6 +112,64 @@ test('getColorFunction LESS_THAN', () => {
expect(colorFunction(50)).toEqual('#FF0000FF');
});
test('getReadableTextColor returns white for dark backgrounds', () => {
expect(getReadableTextColor('#111111', '#ffffff')).toBe('rgb(255, 255, 255)');
});
test('getReadableTextColor returns black for light backgrounds', () => {
expect(getReadableTextColor('#f5f5f5', '#ffffff')).toBe('rgb(0, 0, 0)');
});
test('getReadableTextColor blends alpha over the provided surface', () => {
expect(getReadableTextColor('rgba(0, 0, 0, 0.6)', '#ffffff')).toBe(
'rgb(255, 255, 255)',
);
expect(getReadableTextColor('rgba(255, 255, 255, 0.6)', '#000000')).toBe(
'rgb(0, 0, 0)',
);
});
test('getReadableTextColor returns undefined for invalid colors', () => {
expect(getReadableTextColor('not-a-color', '#ffffff')).toBeUndefined();
expect(getReadableTextColor('#111111', 'not-a-color')).toBeUndefined();
});
test('getTextColorForBackground prefers explicit text color', () => {
expect(
getTextColorForBackground(
{ backgroundColor: '#111111', color: '#ace1c4ff' },
'#ffffff',
),
).toBe('rgb(172, 225, 196)');
});
test('getNormalizedTextColor removes alpha from explicit text colors', () => {
expect(getNormalizedTextColor('#ace1c40d')).toBe('rgb(172, 225, 196)');
expect(getNormalizedTextColor('rgba(172, 225, 196, 0.2)')).toBe(
'rgb(172, 225, 196)',
);
});
test('getNormalizedTextColor preserves invalid explicit text colors', () => {
expect(getNormalizedTextColor('not-a-color')).toBe('not-a-color');
});
test('getTextColorForBackground normalizes explicit text color alpha', () => {
expect(
getTextColorForBackground(
{ backgroundColor: '#111111', color: '#ace1c40d' },
'#ffffff',
),
).toBe('rgb(172, 225, 196)');
});
test('getTextColorForBackground falls back to adaptive contrast', () => {
expect(
getTextColorForBackground({ backgroundColor: '#111111' }, '#ffffff'),
).toBe('rgb(255, 255, 255)');
expect(getTextColorForBackground({}, '#ffffff')).toBeUndefined();
});
test('getColorFunction GREATER_OR_EQUAL', () => {
const colorFunction = getColorFunction(
{

View File

@@ -39,6 +39,7 @@ const createSqlMetric = (label: string, sql: string): AdhocMetric => ({
const baseFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createAdhocMetric('Revenue'), createAdhocMetric('Profit')],
@@ -77,6 +78,7 @@ test('should generate grid for dimensions mode', () => {
const dimensionFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: {
@@ -117,6 +119,7 @@ test('should generate grid for mixed mode (metrics rows, dimensions columns)', (
const mixedFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'dimensions',
matrixify_rows: [createAdhocMetric('Total Sales')],
@@ -139,6 +142,7 @@ test('should handle empty configuration', () => {
const emptyFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [],
@@ -157,6 +161,7 @@ test('should handle single row and column', () => {
const singleCellFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createAdhocMetric('Count')],
@@ -177,6 +182,7 @@ test('should handle string metrics', () => {
const stringMetricFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: ['count', 'sum'],
@@ -190,10 +196,30 @@ test('should handle string metrics', () => {
expect(grid!.colHeaders).toEqual(['avg', 'max']);
});
test('should skip missing column metrics when generating cell form data', () => {
const missingColumnMetricFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createAdhocMetric('Revenue')],
matrixify_columns: [null],
};
const grid = generateMatrixifyGrid(missingColumnMetricFormData);
expect(grid).not.toBeNull();
expect(grid!.cells[0][0]!.formData.metrics).toEqual([
createAdhocMetric('Revenue'),
]);
});
test('should not escape HTML entities in cell titles', () => {
const formDataWithSpecialChars: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createAdhocMetric('Sales & Revenue')],
@@ -309,6 +335,7 @@ test('should generate single-column grid when only rows are configured', () => {
const rowsOnlyFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_rows: [createAdhocMetric('Revenue'), createAdhocMetric('Profit')],
// No column config
@@ -326,6 +353,7 @@ test('should generate single-row grid when only columns are configured', () => {
const colsOnlyFormData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_columns: 'metrics',
matrixify_columns: [
createSqlMetric('Q1', 'SUM(q1)'),
@@ -359,6 +387,7 @@ test('should return empty string header for null metric in array (line 76)', ()
const formData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [null],
@@ -373,6 +402,7 @@ test('should return empty string header for empty-string dimension value (line 8
const formData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: { dimension: 'country', values: [''] },
@@ -387,6 +417,7 @@ test('should skip dimension filter when value is undefined (lines 151, 165)', ()
const formData: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: {
@@ -418,6 +449,7 @@ test('should handle metrics without labels', () => {
const metricsWithoutLabels: TestFormData = {
viz_type: 'table',
datasource: '1__table',
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [
@@ -438,3 +470,23 @@ test('should handle metrics without labels', () => {
expect(grid!.rowHeaders).toEqual(['']);
expect(grid!.colHeaders).toEqual(['count']);
});
test('should preserve slice_id and dashboardId for embedded dashboard permissions', () => {
const formDataWithDashboardContext: TestFormData = {
...baseFormData,
slice_id: 42,
dashboardId: 123,
};
const grid = generateMatrixifyGrid(formDataWithDashboardContext);
expect(grid).not.toBeNull();
const cell = grid!.cells[0][0];
// slice_id must be preserved for embedded dashboard permission checks
// The backend uses slice_id to verify the chart belongs to the dashboard
expect(cell!.formData.slice_id).toBe(42);
// dashboardId must be preserved for embedded dashboard context
expect(cell!.formData.dashboardId).toBe(123);
});

View File

@@ -125,9 +125,9 @@ function generateCellFormData(
});
// Override fields that could cause issues in grid cells
// Note: slice_id is intentionally preserved for embedded dashboard permission checks
const overrides: Partial<QueryFormData> = {
slice_name: undefined,
slice_id: undefined,
header_font_size: undefined,
subheader: undefined,
show_title: undefined,

View File

@@ -22,6 +22,7 @@ import '@testing-library/jest-dom';
import { ThemeProvider } from '@apache-superset/core/theme';
import { supersetTheme } from '@apache-superset/core/theme';
import MatrixifyGridRenderer from './MatrixifyGridRenderer';
import type { MatrixifyMode } from '../../types/matrixify';
import { generateMatrixifyGrid } from './MatrixifyGridGenerator';
// Mock the MatrixifyGridGenerator
@@ -74,8 +75,9 @@ test('should create single group when fitting columns dynamically', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: true,
matrixify_charts_per_row: 3,
matrixify_show_row_labels: true,
@@ -124,8 +126,9 @@ test('should create multiple groups when not fitting columns dynamically', () =>
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: false,
matrixify_charts_per_row: 3,
matrixify_show_row_labels: true,
@@ -160,8 +163,9 @@ test('should handle exact division of columns', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: false,
matrixify_charts_per_row: 2,
matrixify_show_row_labels: true,
@@ -189,8 +193,9 @@ test('should handle case where charts_per_row exceeds total columns', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: false,
matrixify_charts_per_row: 5,
matrixify_show_row_labels: true,
@@ -220,8 +225,9 @@ test('should show headers for each group when wrapping occurs', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: false,
matrixify_charts_per_row: 2,
matrixify_show_row_labels: true,
@@ -255,8 +261,9 @@ test('should show headers only on first row when not wrapping', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: true, // No wrapping
matrixify_show_row_labels: true,
matrixify_show_column_headers: true,
@@ -285,8 +292,9 @@ test('should hide headers when disabled', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_show_row_labels: false,
matrixify_show_column_headers: false,
};
@@ -313,8 +321,9 @@ test('should place cells correctly in wrapped layout', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
matrixify_fit_columns_dynamically: false,
matrixify_charts_per_row: 2,
matrixify_show_row_labels: true,
@@ -344,8 +353,9 @@ test('should handle null grid gracefully', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
};
const { container } = renderWithTheme(
@@ -366,8 +376,9 @@ test('should handle empty grid gracefully', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
};
const { container } = renderWithTheme(
@@ -391,8 +402,9 @@ test('should use default values for missing configuration', () => {
const formData = {
viz_type: 'test_chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics' as MatrixifyMode,
matrixify_mode_columns: 'metrics' as MatrixifyMode,
// Missing optional configurations
};

View File

@@ -130,10 +130,12 @@ function MatrixifyGridRenderer({
// Determine layout parameters - only show headers/labels if layout is enabled
const showRowLabels =
formData.matrixify_enable_vertical_layout === true &&
formData.matrixify_mode_rows !== undefined &&
formData.matrixify_mode_rows !== 'disabled' &&
(formData.matrixify_show_row_labels ?? true);
const showColumnHeaders =
formData.matrixify_enable_horizontal_layout === true &&
formData.matrixify_mode_columns !== undefined &&
formData.matrixify_mode_columns !== 'disabled' &&
(formData.matrixify_show_column_headers ?? true);
const rowHeight = formData.matrixify_row_height || DEFAULT_ROW_HEIGHT;
const fitColumnsDynamically =

View File

@@ -37,12 +37,11 @@ test('isMatrixifyEnabled should return false when no matrixify configuration exi
expect(isMatrixifyEnabled(formData)).toBe(false);
});
test('isMatrixifyEnabled should return false when layout controls are false', () => {
test('isMatrixifyEnabled should return false when layout controls are disabled', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: false,
matrixify_enable_horizontal_layout: false,
matrixify_mode_rows: 'metrics',
matrixify_mode_rows: 'disabled',
matrixify_mode_columns: 'disabled',
matrixify_rows: [createMetric('Revenue')],
} as MatrixifyFormData;
@@ -52,7 +51,7 @@ test('isMatrixifyEnabled should return false when layout controls are false', ()
test('isMatrixifyEnabled should return true for valid metrics mode configuration', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createMetric('Revenue')],
@@ -65,7 +64,7 @@ test('isMatrixifyEnabled should return true for valid metrics mode configuration
test('isMatrixifyEnabled should return true for valid dimensions mode configuration', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: { dimension: 'country', values: ['USA'] },
@@ -78,7 +77,7 @@ test('isMatrixifyEnabled should return true for valid dimensions mode configurat
test('isMatrixifyEnabled should return true for mixed mode configuration', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'dimensions',
matrixify_rows: [createMetric('Revenue')],
@@ -91,7 +90,7 @@ test('isMatrixifyEnabled should return true for mixed mode configuration', () =>
test('isMatrixifyEnabled should return true for topn dimension selection mode', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: {
@@ -110,7 +109,7 @@ test('isMatrixifyEnabled should return true for topn dimension selection mode',
test('isMatrixifyEnabled should return false when both axes have empty metrics arrays', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [],
@@ -123,7 +122,7 @@ test('isMatrixifyEnabled should return false when both axes have empty metrics a
test('isMatrixifyEnabled should return false when both dimensions have empty values and no topn mode', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: { dimension: 'country', values: [] },
@@ -138,10 +137,31 @@ test('getMatrixifyConfig should return null when no matrixify configuration exis
expect(getMatrixifyConfig(formData)).toBeNull();
});
test('getMatrixifyConfig should return null when matrixify_enable is false', () => {
const formData = {
viz_type: 'table',
matrixify_enable: false,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createMetric('Revenue')],
matrixify_columns: [createMetric('Q1')],
} as MatrixifyFormData;
expect(getMatrixifyConfig(formData)).toBeNull();
});
test('getMatrixifyConfig should return null when no axes are enabled', () => {
const formData = {
viz_type: 'table',
matrixify_enable: true,
} as MatrixifyFormData;
expect(getMatrixifyConfig(formData)).toBeNull();
});
test('getMatrixifyConfig should return valid config for metrics mode', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createMetric('Revenue')],
@@ -159,7 +179,7 @@ test('getMatrixifyConfig should return valid config for metrics mode', () => {
test('getMatrixifyConfig should return valid config for dimensions mode', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: { dimension: 'country', values: ['USA'] },
@@ -183,7 +203,7 @@ test('getMatrixifyConfig should return valid config for dimensions mode', () =>
test('getMatrixifyConfig should handle topn selection mode', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: {
@@ -201,11 +221,50 @@ test('getMatrixifyConfig should handle topn selection mode', () => {
expect(config!.rows.dimension).toEqual(formData.matrixify_dimension_rows);
});
test('getMatrixifyConfig should preserve ascending topn order when explicitly disabled', () => {
const formData = {
viz_type: 'table',
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'dimensions',
matrixify_dimension_rows: { dimension: 'country', values: ['USA'] },
matrixify_dimension_columns: { dimension: 'product', values: ['Widget'] },
matrixify_topn_order_rows: false,
matrixify_topn_order_columns: false,
} as MatrixifyFormData;
const config = getMatrixifyConfig(formData);
expect(config).not.toBeNull();
expect(config!.rows.topnOrder).toBe('asc');
expect(config!.columns.topnOrder).toBe('asc');
});
test('getMatrixifyValidationErrors should return empty array when matrixify is not enabled', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: false,
matrixify_enable_horizontal_layout: false,
matrixify_mode_rows: 'disabled',
matrixify_mode_columns: 'disabled',
} as MatrixifyFormData;
expect(getMatrixifyValidationErrors(formData)).toEqual([]);
});
test('getMatrixifyValidationErrors should return empty array when matrixify_enable is false even with stale mode values', () => {
const formData = {
viz_type: 'table',
matrixify_enable: false,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'dimensions',
} as MatrixifyFormData;
expect(getMatrixifyValidationErrors(formData)).toEqual([]);
});
test('getMatrixifyValidationErrors should return empty array when matrixify_enable is undefined with stale defaults', () => {
const formData = {
viz_type: 'bar',
matrixify_mode_rows: 'metrics',
matrixify_rows: [],
} as MatrixifyFormData;
expect(getMatrixifyValidationErrors(formData)).toEqual([]);
@@ -214,7 +273,7 @@ test('getMatrixifyValidationErrors should return empty array when matrixify is n
test('getMatrixifyValidationErrors should return empty array when properly configured', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_mode_columns: 'metrics',
matrixify_rows: [createMetric('Revenue')],
@@ -224,20 +283,30 @@ test('getMatrixifyValidationErrors should return empty array when properly confi
expect(getMatrixifyValidationErrors(formData)).toEqual([]);
});
test('getMatrixifyValidationErrors should return empty array when enabled with no active axes', () => {
const formData = {
viz_type: 'table',
matrixify_enable: true,
} as MatrixifyFormData;
expect(getMatrixifyValidationErrors(formData)).toEqual([]);
});
test('getMatrixifyValidationErrors should return error when enabled but no configuration exists', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
} as MatrixifyFormData;
const errors = getMatrixifyValidationErrors(formData);
expect(errors).toContain('Please configure at least one row or column axis');
expect(errors.length).toBeGreaterThan(0);
});
test('getMatrixifyValidationErrors should return error when metrics mode has no metrics', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_rows: [],
matrixify_columns: [],
@@ -260,19 +329,29 @@ test('should handle empty form data object', () => {
expect(isMatrixifyEnabled(formData)).toBe(false);
});
test('isMatrixifyEnabled should return false when layout enabled but no axis modes configured', () => {
test('isMatrixifyEnabled should return false when no axis modes configured', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
// No matrixify_mode_rows or matrixify_mode_columns set
} as MatrixifyFormData;
expect(isMatrixifyEnabled(formData)).toBe(false);
});
test('isMatrixifyEnabled should return false when switch is off even with valid axis config', () => {
const formData = {
viz_type: 'table',
matrixify_enable: false,
matrixify_mode_rows: 'metrics',
matrixify_rows: [createMetric('Revenue')],
} as MatrixifyFormData;
expect(isMatrixifyEnabled(formData)).toBe(false);
});
test('getMatrixifyValidationErrors should return dimension error for rows when dimension has no data', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
// No matrixify_dimension_rows set
matrixify_mode_columns: 'metrics',
@@ -286,7 +365,7 @@ test('getMatrixifyValidationErrors should return dimension error for rows when d
test('getMatrixifyValidationErrors should return metric error for columns when metrics array is empty', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_rows: [createMetric('Revenue')],
matrixify_mode_columns: 'metrics',
@@ -300,7 +379,7 @@ test('getMatrixifyValidationErrors should return metric error for columns when m
test('getMatrixifyValidationErrors should return dimension error for columns when no dimension data', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_rows: [createMetric('Revenue')],
matrixify_mode_columns: 'dimensions',
@@ -311,10 +390,10 @@ test('getMatrixifyValidationErrors should return dimension error for columns whe
expect(errors).toContain('Please select a dimension and values for columns');
});
test('getMatrixifyValidationErrors skips row check when matrixify_mode_rows is not set (line 240 false, line 279 || false)', () => {
test('getMatrixifyValidationErrors skips row check when matrixify_mode_rows is not set', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
// No matrixify_mode_rows — hasRowMode = false
matrixify_mode_columns: 'metrics',
matrixify_columns: [createMetric('Q1')],
@@ -324,10 +403,10 @@ test('getMatrixifyValidationErrors skips row check when matrixify_mode_rows is n
expect(errors).toEqual([]);
});
test('getMatrixifyValidationErrors evaluates full && expression when dimension is set but values are empty (lines 244, 264, 283, 291 true branches)', () => {
test('getMatrixifyValidationErrors evaluates full && expression when dimension is set but values are empty', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_rows: { dimension: 'country', values: [] },
matrixify_mode_columns: 'dimensions',
@@ -345,7 +424,7 @@ test('getMatrixifyValidationErrors evaluates full && expression when dimension i
test('should handle partial configuration with one axis only', () => {
const formData = {
viz_type: 'table',
matrixify_enable_vertical_layout: true,
matrixify_enable: true,
matrixify_mode_rows: 'metrics',
matrixify_rows: [createMetric('Revenue')],
// No columns configuration

Some files were not shown because too many files have changed in this diff Show More