Compare commits

..

162 Commits

Author SHA1 Message Date
Maxime Beauchemin
774910f40d touching superset/ 2024-12-02 12:09:33 -08:00
Maxime Beauchemin
dbbcc11a98 fix: add python-base layer to fix CI builds in 4.1 2024-12-02 12:02:09 -08:00
Geido
cebd45778f fix(Dashboard): Native & Cross-Filters Scoping Performance (#30881)
(cherry picked from commit f4c36a6d05)
2024-11-22 11:23:37 -08:00
Linden
1b2ecc6955 fix(imports): import query_context for imports with charts (#30887)
(cherry picked from commit 8905508d8f)
2024-11-22 11:19:02 -08:00
JUST.in DO IT
1f5e567645 fix(explore): verified props is not updated (#31008)
(cherry picked from commit 9e5b568cc9)
2024-11-22 11:18:33 -08:00
Geido
c467fb566d fix(Dashboard): Retain colors when color scheme not set (#30646)
(cherry picked from commit 90572be95a)
2024-11-22 11:17:55 -08:00
Geido
cc0ed0fef4 fix(Dashboard): Exclude edit param in async screenshot (#30962)
(cherry picked from commit 1b63b8f3c7)
2024-11-22 11:17:17 -08:00
yousoph
4f463399a7 docs: Updating 4.1 Release Notes (#30865)
(cherry picked from commit 88eb95c39a)
2024-11-22 11:16:43 -08:00
Joe Li
6264ff5165 chore: Adds 4.1.1 RC1 data to CHANGELOG.md and update frontend package 2024-11-15 14:35:11 -08:00
Sukuna
1410e528a4 fix: blocks UI elements on right side (#30886)
Co-authored-by: Evan Rusackas <evan@preset.io>
(cherry picked from commit df479940a6)
2024-11-15 09:27:39 -08:00
Geido
f704b0f556 fix(package.json): Pin luxon version to unblock master (#30859)
(cherry picked from commit de8282cea0)
2024-11-13 15:50:29 -08:00
Maxime Beauchemin
bdfd5cd4ec fix(explore): column data type tooltip format (#30588)
(cherry picked from commit 73768f6313)
2024-11-13 15:48:42 -08:00
Elizabeth Thompson
af44b14fbe chore: add link to Superset when report error (#30576)
(cherry picked from commit 4d5f70c694)
2024-11-13 12:23:27 -08:00
Ayush Tripathi
29c76ef1d5 fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (#29911)
(cherry picked from commit f5d614d80d)
2024-11-13 12:21:04 -08:00
Geido
7953c89d51 fix(TimezoneSelector): Failing unit tests due to timezone change (#30828)
(cherry picked from commit 5820d31b5c)
2024-11-13 12:17:14 -08:00
Joe Li
fd4c3dce44 fix: don't show metadata for embedded dashboards (#30875)
(cherry picked from commit ac3a10d8f1)
2024-11-13 12:10:59 -08:00
Michael S. Molina
234f8c94d1 fix: Graph chart colors (#30851)
(cherry picked from commit 0e165c1a21)
2024-11-13 12:10:17 -08:00
Evan Rusackas
bc2e51d8d0 fix(capitalization): Capitalizing a button. (#29867)
(cherry picked from commit 052b38bdf3)
2024-11-13 12:08:26 -08:00
Geido
2787167abe refactor(Slider): Upgrade Slider to Antd 5 (#29786)
(cherry picked from commit d877d46557)
2024-11-13 12:07:26 -08:00
Ross Mabbett
9e84e13888 refactor(ChartCreation): Migrate tests to RTL (#29674)
(cherry picked from commit 6bc8567802)
2024-11-13 12:06:44 -08:00
Ross Mabbett
cfd24e3ccd refactor(controls): Migrate AdhocMetricOption.test to RTL (#29843)
(cherry picked from commit 819597faf6)
2024-11-13 12:06:19 -08:00
Ross Mabbett
aaecec2e03 refactor(controls): Migrate MetricDefinitionValue.test to RTL (#29845)
(cherry picked from commit 27c08d0e0e)
2024-11-13 12:05:47 -08:00
Evan Rusackas
66fe0b0594 fix(translations): Translate embedded errors (#29782)
(cherry picked from commit 0d62bb2261)
2024-11-13 12:01:13 -08:00
Evan Rusackas
0d0b43062e fix: Fixing incomplete string escaping. (#29772)
(cherry picked from commit 2bce20f790)
2024-11-13 12:00:03 -08:00
Đỗ Trọng Hải
72df46a729 fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (#29725)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 8891f04f11)
2024-11-13 11:58:44 -08:00
Evan Rusackas
f7cfd9182a docs: Check markdown files for bad links using linkinator (#28424)
(cherry picked from commit c3702be9d4)
2024-11-13 11:57:22 -08:00
Sam Firke
5faaaf978b docs(contributing): fix broken link to translations sub-section (#29768)
(cherry picked from commit c22dfa1abb)
2024-11-13 11:56:47 -08:00
Joe Li
855f4c4897 chore: Adds 4.1.0 RC4 data to CHANGELOG.md 2024-11-01 16:49:06 -07:00
Ville Brofeldt
008ab202f3 fix(plugin-chart-echarts): sort tooltip correctly (#30819)
(cherry picked from commit b02d18a39e)
2024-11-01 16:11:26 -07:00
Daniel Vaz Gaspar
db311eb376 chore: bump werkzeug to address vulnerability (#30729)
(cherry picked from commit f19c4280c0)
2024-11-01 10:25:05 -07:00
Geido
d5f33c4c02 fix(Dashboard): Sync/Async Dashboard Screenshot Generation and Default Cache (#30755)
Co-authored-by: Michael S. Molina <michael.s.molina@gmail.com>
Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com>
(cherry picked from commit 3e29777526)
2024-11-01 10:08:29 -07:00
Beto Dealmeida
ad82a8c14e fix: catalog migration w/o connection (#30773)
(cherry picked from commit 402c29c2bc)
2024-11-01 09:57:26 -07:00
Phil Dumbreck
45c18368f6 ci: Add Python 3.11 images to Docker Hub (#30733)
(cherry picked from commit eecb537808)
2024-10-31 09:48:44 -07:00
Maxime Beauchemin
6f656914fe fix: CI remove cypress command --headed (#30429)
(cherry picked from commit 63e17ca546)
2024-10-30 15:55:58 -07:00
Maxime Beauchemin
6706d1308f chore: alter scripts/cypress_run to run one file per command + retry (#30397)
Co-authored-by: Joe Li <joe@preset.io>
(cherry picked from commit a3bfbd0186)
2024-10-30 15:55:02 -07:00
Elizabeth Thompson
cbf1aeec7d chore: split cypress files for less memory (#30354)
(cherry picked from commit 43721f1206)
2024-10-30 15:54:28 -07:00
Geido
3f7907b266 chore(Dashboard): Simplify scoping logic for cross/native filters (#30719)
(cherry picked from commit d5a98e0189)
2024-10-30 10:10:06 -07:00
Vitor Avila
ba0d118fdd fix(Jinja): Extra cache keys for calculated columns and metrics using Jinja (#30735)
(cherry picked from commit 09d3f60d85)
2024-10-29 09:47:12 -07:00
Michael S. Molina
49aa74cec8 fix: Nested transaction is inactive when embedding dashboard (#30699)
(cherry picked from commit c9ff09a418)
2024-10-28 10:40:06 -07:00
Vitor Avila
7c569abaf6 fix(dashboard): Include urlParams in the screenshot generation (#30675)
(cherry picked from commit 16981d6316)
2024-10-28 10:36:59 -07:00
Geido
a70f2cee72 fix(Jinja): Extra cache keys for Jinja columns (#30715)
(cherry picked from commit a12ccf2c1d)
2024-10-28 10:36:35 -07:00
JUST.in DO IT
7b343f7fac fix(chart): Table and page entries misaligned (#30680)
(cherry picked from commit 87deb19bcb)
2024-10-23 09:53:56 -07:00
Kamil Gabryjelski
742ad92189 fix(explore): Missing markarea component broke annotations in echarts (#30348)
(cherry picked from commit 038ef32454)
2024-10-23 09:51:34 -07:00
Joe Li
03b72628fa chore: update change log for 4.1.0rc3 and linting 2024-10-16 10:01:42 -07:00
Michael S. Molina
27ca7ba7d7 fix: First item hovered on stacked bar (#30628)
(cherry picked from commit c8edd1fb25)
2024-10-16 09:11:28 -07:00
Joe Li
1074d1e618 chore: Update to Dockerfile to get creating releases to work (#29937)
(cherry picked from commit 955db48c59)
2024-10-16 09:06:20 -07:00
Sam Firke
b6edf148e2 fix(docs): address two linkinator failures (#30617)
(cherry picked from commit 53a121d9e1)
2024-10-15 16:37:46 -07:00
Beto Dealmeida
046770cf76 feat: use dialect when tokenizing (#30614)
(cherry picked from commit 4cac7feb67)
2024-10-15 16:26:56 -07:00
Geido
0f1064eab8 fix(Filters): Apply native & cross filters on common columns (#30438)
(cherry picked from commit 362948324c)
2024-10-15 16:25:10 -07:00
ObservabilityTeam
82fc8879b0 fix(filters): Adds a fix for saving time range adhoc_filters (#30581)
Co-authored-by: Muhammad Musfir <muhammad.musfir@de-cix.net>
(cherry picked from commit 2c3ba95768)
2024-10-15 16:23:57 -07:00
David Markey
159958e577 feat(embedded): add hook to allow superset admins to validate guest token parameters (#30132)
Co-authored-by: David Markey <markey@rapidraitngs.com>
(cherry picked from commit a31a4eebdd)
2024-10-15 16:19:51 -07:00
Kamil Gabryjelski
2a98780d2c perf: Implement Echarts treeshaking (#29874)
(cherry picked from commit c220245414)
2024-10-15 16:18:25 -07:00
Beto Dealmeida
a84da1c5cc fix: sqlparse fallback for formatting queries (#30578)
(cherry picked from commit 47c1e09c75)
2024-10-11 13:02:52 -07:00
Joe Li
8c329c445f fix: update html rendering to true from false (#30565) 2024-10-10 16:32:26 -07:00
Beto Dealmeida
05cccf6404 fix: adhoc metrics (#30202)
(cherry picked from commit 0db59b45b8)
2024-10-10 16:29:59 -07:00
Geido
92808ffe38 fix(Jinja): Extra cache keys to consider vars with set (#30549)
(cherry picked from commit 318eff7327)
2024-10-09 14:14:57 -07:00
Felix
0a7635fc05 fix(dashboard-export): Fixes datasetId is not replaced with datasetUuid in Dashboard export in 4.1.x (#30425)
(cherry picked from commit 211564a6da)
2024-10-09 14:12:59 -07:00
Michael S. Molina
4fe51c6db9 fix: Horizon Chart are not working any more (#30563)
(cherry picked from commit 7b47e43fd0)
2024-10-09 11:12:12 -07:00
Michael S. Molina
0eaa8c5894 fix: Incorrect type in config.py (#30564)
(cherry picked from commit 7a8e8f890f)
2024-10-09 11:11:42 -07:00
Michael S. Molina
f56dfb35b2 fix: Unable to parse escaped tables (#30560)
(cherry picked from commit fc857d987b)
2024-10-09 11:10:48 -07:00
JUST.in DO IT
597e207eff fix(explore): don't discard controls on deprecated (#30447)
(cherry picked from commit b627011463)
2024-10-07 11:17:58 -07:00
JUST.in DO IT
95ae663e88 chore(chart-controls): migrate enzyme to RTL (#26257)
(cherry picked from commit c049771a7f)
2024-10-07 11:17:30 -07:00
Ville Brofeldt
d0def80d3b fix(migration): replace unquote with double percentages (#30532)
(cherry picked from commit 163b71e019)
2024-10-07 10:41:31 -07:00
Geido
9f5f0895f6 fix(Explore): Apply RLS at column values (#30490)
Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>
(cherry picked from commit f314685a8e)
2024-10-07 09:56:02 -07:00
Jack
dce7e47399 fix(imports): Error when importing charts / dashboards with missing DB credentials (#30503)
(cherry picked from commit 95325c4673)
2024-10-07 09:53:14 -07:00
Beto Dealmeida
4b9ae07fe5 fix: don't reformat generated queries (#30350)
(cherry picked from commit 0b34197815)
2024-10-04 09:04:22 -07:00
Michael S. Molina
7519cab379 fix: Open control with Simple tab selected when there is no column selected (#30502)
(cherry picked from commit 03146b21be)
2024-10-03 16:56:36 -07:00
Beto Dealmeida
84c1ad97dc fix(embedded): sankey charts (#30491)
(cherry picked from commit e0172a24b8)
2024-10-02 14:50:44 -07:00
Michael S. Molina
f743ae36dc fix: Histogram chart not able to use decimal datatype column (#30416)
(cherry picked from commit 4834390e6a)
2024-09-30 10:37:56 -07:00
Beto Dealmeida
ca5ed8b7b0 chore: improve DML check (#30417)
(cherry picked from commit cc9fd88c0d)
2024-09-30 10:36:45 -07:00
Beto Dealmeida
f1a6aaad63 chore: organize SQL parsing files (#30258)
(cherry picked from commit bdf29cb7c2)
2024-09-30 10:36:40 -07:00
Michael S. Molina
995182270c fix: Incorrect hovered items in tooltips (#30405)
(cherry picked from commit 36f7a3f524)
2024-09-27 18:30:54 -07:00
Michael S. Molina
c864e6cd2b fix: Allows X-Axis Sort By for custom SQL (#30393)
(cherry picked from commit abf2943e4d)
2024-09-25 17:46:46 -07:00
Michael S. Molina
a3d6ef07c1 fix: Pre-query normalization with custom SQL (#30389)
(cherry picked from commit ad2998598f)
2024-09-25 17:46:16 -07:00
Michael S. Molina
072540f321 fix: KeyError 'sql' when opening a Trino virtual dataset (#30339)
(cherry picked from commit ef9e5e523d)
2024-09-19 14:03:38 -07:00
Antonio Rivero
2561b267ab fix(table): Use extras in queries (#30335)
(cherry picked from commit 6c2bd2a968)
2024-09-19 09:42:54 -07:00
Ville Brofeldt
8fc4c50050 fix(migration): 87d38ad83218 failing on upgrade (#30275)
(cherry picked from commit 78099b0d1f)
2024-09-18 18:18:45 -07:00
JUST.in DO IT
359d7baaf5 fix(dashboard): Invalid owner's name displayed after updates (#30272)
(cherry picked from commit 2f0c9947ce)
2024-09-18 10:33:10 -07:00
JUST.in DO IT
437151a95f fix: unable to disallow csv upload on header menu (#30271)
(cherry picked from commit cd8b56706b)
2024-09-18 10:32:54 -07:00
Maxime Beauchemin
2157fe3f28 chore: move SLACK_ENABLE_AVATARS from config to feature flag (#30274)
(cherry picked from commit f315a4f02c)
2024-09-16 16:13:34 -07:00
JUST.in DO IT
1f6ef6a870 chore(sqllab): Add shortcuts for switching tabs (#30173)
(cherry picked from commit f553344aa1)
2024-09-16 09:28:05 -07:00
Geido
35de980081 fix(Screenshot): Dashboard screenshot cache key to include state (#30265)
(cherry picked from commit 0679454b48)
2024-09-16 09:27:05 -07:00
Geido
90ce1b5012 fix(CrossFilters): Do not reload unrelated filters in global scope (#30252)
Co-authored-by: JUST.in DO IT <justin.park@airbnb.com>
(cherry picked from commit dbab2fb955)
2024-09-13 10:59:01 -07:00
Beto Dealmeida
4a6dd94a6c chore: remove duplicate _process_sql_expression (#30213)
(cherry picked from commit cddf1530da)
2024-09-12 11:05:05 -07:00
Geido
860c9c08a1 fix(Fave): Charts and Dashboards fave/unfave do not commit transactions (#30215)
(cherry picked from commit 23467bd7e4)
2024-09-12 10:57:15 -07:00
JUST.in DO IT
f0c42b0a01 feat(sqllab): Add timeout on fetching query results (#29959)
(cherry picked from commit ff3b86b5ff)
2024-09-12 10:55:39 -07:00
Sam Firke
889ab36dff fix(uploads): respect db engine spec's supports_multivalues_insert value for file uploads & enable multi-insert for MSSQL (#30222)
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
(cherry picked from commit f8a77537a7)
2024-09-12 09:16:14 -07:00
JUST.in DO IT
d85fdf4bf9 fix: filters panel broken due to tabs scroll (#30180)
(cherry picked from commit be0a0ced25)
2024-09-12 09:14:58 -07:00
Sam Firke
afd5379bb0 chore(docs): note that release-tagged docker images no longer ship with metadata db drivers as of 4.1.0 (#30243)
(cherry picked from commit 4385b44e86)
2024-09-11 11:15:51 -07:00
Geido
789ca738dc fix(Celery): Pass guest_token as user context is not available in Celery (#30224)
(cherry picked from commit 1b34ad65fa)
2024-09-11 09:27:07 -07:00
Vitor Avila
40568fd1ff fix(Dashboard download): Download dashboard screenshot/PDF using SupersetClient (#30212)
(cherry picked from commit d191e67e51)
2024-09-11 09:26:50 -07:00
Geido
6205fb4e48 fix(Embedded): Dashboard screenshot should use GuestUser (#30200)
(cherry picked from commit 52a03f18a1)
2024-09-11 09:26:34 -07:00
Beto Dealmeida
c3bc7de75f feat: is_mutating method (#30177)
(cherry picked from commit 1f890718a2)
2024-09-11 09:25:53 -07:00
Ross Masters
d33f1534e2 fix: Chart cache-warmup task fails on Superset 4.0 (#28706)
(cherry picked from commit 0744abe87b)
2024-09-11 09:24:12 -07:00
Maxime Beauchemin
1ccc147670 fix: set default mysql isolation level to 'READ COMMITTED' (#30174)
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
(cherry picked from commit 6baeb659a7)
2024-09-09 17:14:22 -07:00
Kamil Gabryjelski
d8b9f38609 fix: Disable cross filtering on charts with no dimensions (#30176)
(cherry picked from commit 3aafd29768)
2024-09-09 09:55:21 -07:00
Michael S. Molina
e8d5ff1264 fix: Delete modal button with lowercase text (#30060)
(cherry picked from commit cd6b8b2f6d)
2024-09-06 11:10:08 -07:00
JUST.in DO IT
3becd6b72e chore(shared components): Migrate enzyme to RTL (#26258)
(cherry picked from commit 1a1548da3b)
2024-09-06 11:09:26 -07:00
JUST.in DO IT
cea8ede3f0 fix(sqllab): Skip AceEditor in inactive tabs (#30171)
(cherry picked from commit 4d1db9e32c)
2024-09-06 10:44:32 -07:00
JUST.in DO IT
e94667820f fix(native filter): undefined layout type on filterInScope (#30164)
(cherry picked from commit e02b18c63c)
2024-09-06 10:44:03 -07:00
Jonathan Schneider
41e611b413 fix(plugins): display correct tooltip (fixes #3342) (#30023)
(cherry picked from commit c428108713)
2024-09-06 10:43:28 -07:00
Michael S. Molina
d47430ac21 fix: FacePile is requesting avatars when SLACK_ENABLE_AVATARS is false (#30156)
(cherry picked from commit de3de541e7)
2024-09-05 10:35:41 -07:00
JUST.in DO IT
f2c0d3aa48 fix(sqllab): race condition when updating cursor position (#30154)
(cherry picked from commit 2097b716f4)
2024-09-04 17:31:59 -07:00
Maxime Beauchemin
f49a426ada docs: document how docker-compose-image-tag requires -dev suffixed images (#30144)
Co-authored-by: Sam Firke <sfirke@users.noreply.github.com>
(cherry picked from commit 34e240ef0e)
2024-09-04 17:30:50 -07:00
Antonio Rivero
acf3e12230 fix(catalog): Table Schema View with no catalog (#30139)
(cherry picked from commit 6009023fad)
2024-09-04 10:21:51 -07:00
Michael S. Molina
1d90ee3517 fix: New tooltip inappropriately combines series on mixed chart (#30137)
(cherry picked from commit 9cb9e5beee)
2024-09-04 10:20:34 -07:00
Michael S. Molina
0f32116734 fix: JSON loading logs (#30138)
(cherry picked from commit 5c5b4d0f5f)
2024-09-03 16:23:39 -07:00
Michael S. Molina
8d7ceebbc3 fix: DeckGL legend layout (#30140)
(cherry picked from commit af066a4630)
2024-09-03 16:22:59 -07:00
Evan Rusackas
45da3f4519 fix(accessibility): logo outline on tab navigation, but not on click (#30077)
(cherry picked from commit 9c3eb8f51f)
2024-09-03 16:21:43 -07:00
Joe Li
122057bac5 fix: pass if table is already removed on upgrade (#30017)
(cherry picked from commit c929f5ed7a)
2024-09-03 16:20:58 -07:00
hao-zhuventures
997cd60d43 fix: use StrEnum type for GuestTokenResourceType to fix token parsing (#30042)
(cherry picked from commit e2c4435cab)
2024-09-03 10:42:07 -07:00
Michael S. Molina
c57f47ddce fix: When hovering Drill By the dashboard is scrolled to the top (#30073)
(cherry picked from commit 548d543efe)
2024-09-03 10:41:18 -07:00
Michael S. Molina
b4068f1fca fix: Retrieving Slack channels when Slack is disabled (#30074)
(cherry picked from commit 72a520fba4)
2024-09-03 10:40:39 -07:00
Antonio Rivero
e7b136b822 fix(migrations): Fix the time comparison migration (#30029)
(cherry picked from commit d80f23ed94)
2024-08-27 14:36:26 -07:00
Michael S. Molina
86ca2b3d08 fix: Partition calls from Jinja context (#30019)
(cherry picked from commit 07a90ad4fe)
2024-08-27 10:44:33 -07:00
Michael S. Molina
36b229cd18 fix: Dashboard list row height does not match other lists (#30025)
(cherry picked from commit 2afb66d68d)
2024-08-27 10:44:12 -07:00
Ville Brofeldt
fff9f874b1 fix(user-dao): return user model instances (#30020)
(cherry picked from commit fcf0450294)
2024-08-27 10:43:41 -07:00
Jack
7dc65072c0 fix(screenshots): dashboard screenshots do not capture filter state (#29989)
(cherry picked from commit 7db34b994e)
2024-08-27 10:42:09 -07:00
squalou
5411d40a7a fix: set columns numeric datatypes when exporting to excel (#27229)
Co-authored-by: Elizabeth Thompson <eschutho@gmail.com>
(cherry picked from commit ce72a0ac27)
2024-08-27 10:40:55 -07:00
Ville Brofeldt
a7eb28ddd4 fix(trino): handle missing db in migration (#29997)
(cherry picked from commit 17eecb1981)
2024-08-27 10:39:29 -07:00
Beto Dealmeida
d488c78472 chore: improve mask/unmask encrypted_extra (#29943)
(cherry picked from commit 4b59e42d3f)
2024-08-27 10:38:37 -07:00
Hugh A. Miles II
fe33689917 fix: Gamma users shouldn't be able to create roles (#29687)
(cherry picked from commit 7650c47e72)
2024-08-27 10:37:07 -07:00
Michael S. Molina
b0a2aea760 fix: Security manager incorrect calls (#29884)
(cherry picked from commit d497dcad41)
2024-08-23 10:03:16 -07:00
Joe Li
8f93ad7068 chore: Adds 4.1.0 RC2 data to CHANGELOG.md 2024-08-22 11:49:10 -07:00
Michael S. Molina
cced1c5a4e fix: Duplicated example dataset (#29993)
(cherry picked from commit eb2d69a5e6)
2024-08-22 11:03:00 -07:00
Daniel Vaz Gaspar
c332eebc37 fix: trino thread app missing full context (#29981)
(cherry picked from commit 4d821f44ae)
2024-08-22 11:01:32 -07:00
JUST.in DO IT
106d755931 fix(sqllab): flaky json explore modal due to shallow equality checks for extra data (#29978)
(cherry picked from commit 1ca5947a7d)
2024-08-22 11:01:09 -07:00
Đỗ Trọng Hải
ef31710c2b fix(ci): remove unused "type: ignore" comment to unblock precommit check in CI (#29830)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 71786dba64)
2024-08-21 10:19:17 -07:00
JUST.in DO IT
6a5c293a04 fix(sqllab): Add abort call on query refresh timeout (#29956)
(cherry picked from commit 6e1ef193dd)
2024-08-19 11:02:18 -07:00
Joe Li
86bfb2ade6 fix: try to prevent deadlocks when running upgrade (#29625) 2024-08-19 11:00:43 -07:00
Michael S. Molina
f8ed0cec74 chore: Allow auto pruning of the query table (#29936) 2024-08-19 09:59:09 -07:00
Michael S. Molina
b70c5e1d9d fix: upgrade_catalog_perms and downgrade_catalog_perms implementation (#29860)
(cherry picked from commit e8f5d7680f)
2024-08-16 09:03:25 -07:00
Vitor Avila
f4b201857e fix(embedded): Remove CSRF requirement for dashboard download API (#29953)
(cherry picked from commit 47715c39d0)
2024-08-16 09:02:59 -07:00
JUST.in DO IT
16385322db fix(explore): missing column autocomplete in custom SQL (#29672)
(cherry picked from commit 3c971455e7)
2024-08-14 18:05:26 -07:00
Beto Dealmeida
9677fa97ff fix: handle empty catalog when DB supports them (#29840)
(cherry picked from commit 39209c2b40)
2024-08-13 13:23:17 -07:00
Markus Eriksson
16295b086a fix: Add user filtering to changed_by. Fixes #27986 (#29287)
Co-authored-by: Markus Eriksson <markus.eriksson@sinch.com>
(cherry picked from commit 922128f6e0)
2024-08-12 17:04:53 -07:00
Joe Li
afe580bb8a fix: add imports back to celery file (#29921)
(cherry picked from commit 9f5eb899e8)
2024-08-12 17:04:05 -07:00
Michael S. Molina
d102b45692 fix: Error when downgrading add_catalog_perm_to_tables migration (#29906)
(cherry picked from commit fb7f50868d)
2024-08-12 17:03:49 -07:00
Geido
c0c6486e70 fix(Embedded): Deleting Embedded Dashboards does not commit the transaction (#29894)
(cherry picked from commit b323bf0fb6)
2024-08-12 17:03:32 -07:00
Michael S. Molina
a2d8590f0a chore: Logs the duration of migrations execution (#29893)
(cherry picked from commit 57a4199f52)
2024-08-12 17:03:18 -07:00
Maxime Beauchemin
bfb6ff3394 fix: update celery config imports (#29862)
(cherry picked from commit 9fed576cb4)
2024-08-12 17:02:59 -07:00
Elizabeth Thompson
8ea94916d9 fix: load slack channels earlier (#29846)
(cherry picked from commit 0c3aa7d8fe)
2024-08-12 17:02:38 -07:00
Elizabeth Thompson
642de0ad63 fix: bump packages to unblock ci (#29805)
(cherry picked from commit 2cbd945692)
2024-08-12 17:01:37 -07:00
Beto Dealmeida
6954db023c fix: create permissions on DB import (#29802)
(cherry picked from commit 61c0970968)
2024-08-12 16:55:26 -07:00
Michael S. Molina
eca7c57083 fix: Downgrade of revision 678eefb4ab44 throws error (#29799)
(cherry picked from commit 249f5ec31a)
2024-08-12 16:55:08 -07:00
Beto Dealmeida
4dca9bceed fix: catalog upgrade/downgrade (#29780)
(cherry picked from commit 525e837c5b)
2024-08-12 16:54:51 -07:00
Geido
7219310267 fix(Dashboard): Copying a Dashboard does not commit the transaction (#29776)
(cherry picked from commit 4c52ecc4d8)
2024-08-12 16:54:23 -07:00
Elizabeth Thompson
77ade18107 fix: pass slack recipients correctly (#29721)
(cherry picked from commit 57e8cd2ba2)
2024-08-12 16:53:57 -07:00
Geido
bca2366d5a fix(Database): Refresh catalogs on db update returns database error (#29681)
(cherry picked from commit 134ca38b8d)
2024-08-12 16:52:59 -07:00
Joe Li
de2eedd16f chore: Add the 4.1 release notes (#29262)
(cherry picked from commit 422aa6b657)
2024-08-12 16:50:24 -07:00
Geido
0f1663b2ec refactor(ProgressBar): Upgrade ProgressBar to Antd 5 (#29666)
(cherry picked from commit 3de2b7c989)
2024-08-12 16:47:58 -07:00
Kamil Gabryjelski
604fe27ed1 fix: Use default custom time range time without timezone (#29669)
(cherry picked from commit cd713a239e)
2024-08-12 16:47:33 -07:00
Kamil Gabryjelski
3d7f6dae90 fix: Dashboard editable title weird behavior when adding spaces (#29667)
(cherry picked from commit 453e6deb97)
2024-08-12 16:47:02 -07:00
nsivarajan
a8c6bb5b52 feat(alert/report): Added optional CC and BCC fields for email notifi… (#29088)
Co-authored-by: Sivarajan Narayanan <sivarajannarayanan@Sivarajans-MacBook-Pro.local>
Co-authored-by: Sivarajan Narayanan <narayanan_sivarajan@apple.com>
(cherry picked from commit 27dde2a811)
2024-08-12 16:46:46 -07:00
Jaswanth-Sriram-Veturi
30fbfa1b14 docs: update creating-your-first-dashboard.mdx (#29631)
(cherry picked from commit 2a9a1d3194)
2024-08-12 16:46:14 -07:00
Michael S. Molina
3e297d130e fix: Layout of native filters modal with lengthy columns (#29648)
(cherry picked from commit be833dce4f)
2024-08-12 16:45:54 -07:00
Michael S. Molina
dc754e2d26 fix: Loading of native filter column (#29647)
(cherry picked from commit 92537f1fd5)
2024-08-12 16:45:35 -07:00
Beto Dealmeida
f59fb6f780 chore: add catalog_access to OBJECT_SPEC_PERMISSIONS (#29650)
(cherry picked from commit ae0edbfdce)
2024-08-12 16:45:18 -07:00
Michael S. Molina
fea187a36a fix: Required native filter message wrongfully appearing (#29643)
(cherry picked from commit 9487d6c9d6)
2024-08-12 16:45:04 -07:00
JUST.in DO IT
a9ba3b325f fix(sqllab): prev schema/table options remained on fail (#29638)
(cherry picked from commit 5539f87912)
2024-08-12 16:44:24 -07:00
Michael S. Molina
c8008e6225 refactor: Remove dead code from the Word Cloud plugin (#29594)
(cherry picked from commit 85b66946ed)
2024-08-12 16:43:46 -07:00
Joe Li
4369967732 chore: Adds 4.1.0 RC1 data to CHANGELOG.md and UPDATING.md (#29637) 2024-07-22 18:07:56 -07:00
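
Nearly every commit above carries a "(cherry picked from commit …)" trailer. That is the line `git cherry-pick -x` appends when a fix that already landed on master is back-ported to the release branch, which is also why each SHA in this list differs from the master SHA it references. A minimal sketch of that back-port flow, using a branch name and SHA taken from the list above:

# on the release branch, back-port a commit that already landed on master;
# -x appends the "(cherry picked from commit <sha>)" trailer seen above
git checkout 4.1
git cherry-pick -x f4c36a6d05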
1564 changed files with 71734 additions and 83857 deletions

View File

@@ -53,9 +53,6 @@ github:
merge: false
rebase: false
ghp_branch: gh-pages
ghp_path: /
protected_branches:
master:
required_status_checks:
@@ -72,11 +69,9 @@ github:
- cypress-matrix (3, chrome)
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- dependency-review
- frontend-build
- pre-commit (current)
- pre-commit (next)
- pre-commit (previous)
- pre-commit
- python-lint
- test-mysql
- test-postgres (current)
- test-postgres (next)
@@ -92,10 +87,3 @@ github:
required_approving_review_count: 1
required_signatures: false
gh-pages:
required_pull_request_reviews:
dismiss_stale_reviews: false
require_code_owner_reviews: true
required_approving_review_count: 1
required_signatures: false

View File

@@ -42,8 +42,6 @@ docs/
install/
superset-frontend/cypress-base/
superset-frontend/coverage/
superset-frontend/.temp_cache/
superset/static/assets/
superset-websocket/dist/
venv
.venv

1 .gitattributes vendored
View File

@@ -1,3 +1,2 @@
docker/**/*.sh text eol=lf
*.svg binary
*.ipynb binary
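
These attributes pin shell scripts under docker/ to LF line endings and mark certain asset files as binary, so Git skips text diffs and EOL conversion for them. To check which attributes Git applies to a given path (the path below is illustrative):

git check-attr -a -- docker/entrypoint.sh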

14 .github/CODEOWNERS vendored
View File

@@ -2,7 +2,7 @@
# https://github.com/apache/superset/issues/13351
/superset/migrations/ @mistercrunch @michael-s-molina @betodealmeida @eschutho
/superset/migrations/ @apache/superset-committers
# Notify some committers of changes in the components
@@ -12,21 +12,21 @@
# Notify Helm Chart maintainers about changes in it
/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina
/helm/superset/ @craig-rueda @dpgaspar @villebro
# Notify E2E test maintainers of changes
/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida
/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida
# Notify PMC members of changes to GitHub Actions
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
# Notify PMC members of changes to required GitHub Actions
# Notify PMC members of changes to required Github Actions
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
# Maps are a finicky contribution process we care about
# Maps are a finnicky contribution process we care about
**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas

View File

@@ -15,9 +15,14 @@ body:
id: bug-description
attributes:
label: Bug description
description: A clear description of what the bug is, including reproduction steps and expected behavior.
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: repro-steps
attributes:
label: How to reproduce the bug
placeholder: |
The bug is that...
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@@ -41,8 +46,8 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.1.1"
- "4.0.2"
- "3.1.3"
validations:
required: true
- type: dropdown

View File

@@ -43,14 +43,11 @@ runs:
run: |
if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
pip install --upgrade pip setuptools wheel uv
pip install --upgrade pip setuptools wheel
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
uv pip install --system -r requirements/development.txt
pip install -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
uv pip install --system -r requirements/base.txt
pip install -r requirements/base.txt
fi
uv pip install --system -e .
fi
shell: bash
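
The two install paths in this hunk achieve the same result against the same requirement files: one side resolves with plain pip, the other with uv. A side-by-side sketch (uv's `--system` flag installs into the interpreter's environment rather than requiring a virtualenv):

pip install -r requirements/development.txt               # plain pip resolver
uv pip install --system -r requirements/development.txt   # uv equivalent, as used above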

View File

@@ -1,69 +0,0 @@
name: "Setup Docker Environment"
description: "Reusable steps for setting up QEMU, Docker Buildx, DockerHub login, Supersetbot, and optionally Docker Compose"
inputs:
build:
description: "Used for building?"
required: false
default: "false"
dockerhub-user:
description: "DockerHub username"
required: false
dockerhub-token:
description: "DockerHub token"
required: false
install-docker-compose:
description: "Flag to install Docker Compose"
required: false
default: "true"
login-to-dockerhub:
description: "Whether you want to log into dockerhub"
required: false
default: "true"
outputs: {}
runs:
using: "composite"
steps:
- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}
- name: Install Docker Compose
if: ${{ inputs.install-docker-compose == 'true' }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
# Download and save the Docker GPG key in the correct format
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
# Ensure the key file is readable
sudo chmod a+r /etc/apt/keyrings/docker.gpg
# Add the Docker repository using the correct key
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
# Update package lists and install Docker Compose plugin
sudo apt update
sudo apt install -y docker-compose-plugin
- name: Docker Version Info
shell: bash
run: docker info

View File

@@ -8,9 +8,8 @@ updates:
- package-ecosystem: "npm"
ignore:
# not until React >= 18.0.0
- dependency-name: "storybook"
- dependency-name: "@storybook*"
# not until node >= 18.12.0
- dependency-name: "css-minimizer-webpack-plugin"
directory: "/superset-frontend/"
schedule:
interval: "monthly"
@@ -22,7 +21,8 @@ updates:
# - package-ecosystem: "pip"
# NOTE: as dependabot isn't compatible with our usage of `uv pip compile` we're using
# NOTE: as dependabot isn't compatible with our python
# dependency setup (pip-compile-multi), we'll be using
# `supersetbot` instead
- package-ecosystem: "npm"

View File

@@ -165,7 +165,7 @@ cypress-run-all() {
# UNCOMMENT the next few commands to monitor memory usage
# monitor_memory & # Start memory monitoring in the background
# memoryMonitorPid=$!
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --group $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
# kill $memoryMonitorPid
# After job is done, print out Flask log for debugging

View File

@@ -14,16 +14,10 @@ on:
required: true
description: Max number of PRs to open (0 for no limit)
default: 5
extra-flags:
required: false
default: --only-base
description: Additional flags to pass to the bump-python command
#schedule:
# - cron: '0 0 * * *' # Runs daily at midnight UTC
jobs:
bump-python-package:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: write
contents: write
@@ -45,8 +39,8 @@ jobs:
with:
python-version: "3.10"
- name: Install uv
run: pip install uv
- name: Install pip-compile-multi
run: pip install pip-compile-multi
- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
env:
@@ -65,13 +59,10 @@ jobs:
GROUP_OPT="-g ${{ github.event.inputs.group }}"
fi
EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
supersetbot bump-python \
--verbose \
--use-current-repo \
--include-subpackages \
--limit ${{ github.event.inputs.limit }} \
$PACKAGE_OPT \
$GROUP_OPT \
$EXTRA_FLAGS
$GROUP_OPT

View File

@@ -9,7 +9,7 @@ on:
jobs:
cancel-duplicate-runs:
name: Cancel duplicate workflow runs
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: write
contents: read

View File

@@ -1,44 +0,0 @@
name: Check python dependencies
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
depth: 1
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Run uv
if: steps.check.outputs.python
run: ./scripts/uv-pip-compile.sh
- name: Check for uncommitted changes
run: |
if [[ -n "$(git diff)" ]]; then
echo "ERROR: The pinned dependencies are not up-to-date."
echo "Please run './scripts/uv-pip-compile.sh' and commit the changes."
exit 1
else
echo "Pinned dependencies are up-to-date."
fi
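
The check above encodes a simple invariant: re-running the pin script must leave the working tree clean. A local reproduction, assuming the same script path the workflow invokes:

# regenerate the pinned requirement files, then fail if anything changed
./scripts/uv-pip-compile.sh
git diff --exit-code || echo "ERROR: pinned dependencies are not up-to-date."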

View File

@@ -19,7 +19,7 @@ concurrency:
jobs:
check_db_migration_conflict:
name: Check DB migration conflict
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -17,7 +17,7 @@ concurrency:
jobs:
analyze:
name: Analyze
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: read
contents: read

View File

@@ -5,26 +5,14 @@
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: "Dependency Review"
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
on: [pull_request]
permissions:
contents: read
jobs:
dependency-review:
if: github.event_name == 'pull_request'
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
@@ -44,25 +32,4 @@ jobs:
# license: https://applitools.com/legal/open-source-terms-of-use/
# pkg:npm/node-forge@1.3.1
# selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1
python-dependency-liccheck:
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
requirements-type: base
- name: "Set up liccheck"
run: |
uv pip install --system liccheck
- name: "Run liccheck"
run: |
# run the checks
liccheck -R output.txt
# Print the report
cat output.txt
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor

View File

@@ -15,20 +15,20 @@ concurrency:
jobs:
setup_matrix:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
outputs:
matrix_config: ${{ steps.set_matrix.outputs.matrix_config }}
steps:
- id: set_matrix
run: |
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev", "lean"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT
echo $GITHUB_OUTPUT
docker-build:
name: docker-build
needs: setup_matrix
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}
@@ -50,13 +50,21 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Docker Environment
- name: Set up QEMU
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: ./.github/actions/setup-docker
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
continue-on-error: true
uses: docker/login-action@v3
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Setup supersetbot
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
@@ -76,30 +84,7 @@ jobs:
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false" \
$PLATFORM_ARG
- name: Docker pull
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: docker pull apache/superset:GHA-${GITHUB_RUN_ID}
- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: |
IMAGE_ID=$(docker images --filter "label=sha=${{ github.sha }}" --format "{{.ID}}" | head -n 1)
echo "SHA: ${{ github.sha }}"
echo "IMAGE: $IMAGE_ID"
docker images $IMAGE_ID
docker history $IMAGE_ID
- name: docker-compose sanity check
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
shell: bash
run: |
export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
docker compose up superset-init --exit-code-from superset-init

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -23,7 +23,7 @@ jobs:
build:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: superset-embedded-sdk
@@ -31,7 +31,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm run ci:release

View File

@@ -13,7 +13,7 @@ concurrency:
jobs:
embedded-sdk-test:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: superset-embedded-sdk
@@ -21,7 +21,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm test

View File

@@ -6,7 +6,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -22,7 +22,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Cleanup ephemeral envs
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write
steps:

View File

@@ -1,35 +1,37 @@
name: Ephemeral env workflow
# Example manual trigger: gh workflow run ephemeral-env.yml --ref fix_ephemerals --field comment_body="/testenv up" --field issue_number=666
on:
issue_comment:
types: [created]
workflow_dispatch:
inputs:
comment_body:
description: 'Comment body to simulate /testenv command'
required: true
default: '/testenv up'
issue_number:
description: 'Issue or PR number'
required: true
jobs:
config:
runs-on: "ubuntu-22.04"
if: github.event.issue.pull_request
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
ephemeral-env-comment:
concurrency:
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true
needs: config
if: needs.config.outputs.has-secrets
name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write
outputs:
slash-command: ${{ steps.eval-body.outputs.result }}
feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Debug
@@ -38,26 +40,22 @@ jobs:
- name: Eval comment body for /testenv slash command
uses: actions/github-script@v7
env:
COMMENT_BODY: ${{ github.event.inputs.comment_body || github.event.comment.body }}
id: eval-body
with:
result-encoding: string
script: |
const pattern = /^\/testenv (up|down)/;
const result = pattern.exec(process.env.COMMENT_BODY || '');
return result === null ? 'noop' : result[1];
const pattern = /^\/testenv (up|down)/
const result = pattern.exec(context.payload.comment.body)
return result === null ? 'noop' : result[1]
- name: Looking for feature flags
- name: Eval comment body for feature flags
uses: actions/github-script@v7
env:
COMMENT_BODY: ${{ github.event.inputs.comment_body || github.event.comment.body }}
id: eval-feature-flags
with:
script: |
const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = [];
[...process.env.COMMENT_BODY.matchAll(pattern)].forEach(match => {
[...context.payload.comment.body.matchAll(pattern)].forEach(match => {
const config = {
name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2],
@@ -69,53 +67,28 @@ jobs:
- name: Limit to committers
if: >
steps.eval-body.outputs.result != 'noop' &&
github.event_name == 'issue_comment' &&
github.event.comment.author_association != 'MEMBER' &&
github.event.comment.author_association != 'OWNER'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
github-token: ${{github.token}}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.';
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.'
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
});
core.setFailed(errMsg);
- name: Reply with confirmation comment
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const issueNumber = ${{ github.event.inputs.issue_number || github.event.issue.number }};
const user = '${{ github.event.comment.user.login || github.actor }}';
const action = '${{ steps.eval-body.outputs.result }}';
const runId = context.runId;
const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;
if (action !== 'noop') {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
}
else {
core.setFailed('No ephemeral environment action detected.');
}
})
core.setFailed(errMsg)
ephemeral-docker-build:
concurrency:
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-build
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build
cancel-in-progress: true
needs: ephemeral-env-comment
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: ephemeral-docker-build
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Get Info from comment
uses: actions/github-script@v7
@@ -125,9 +98,9 @@ jobs:
const request = {
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
};
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`);
pull_number: ${{ github.event.issue.number }},
}
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`)
const pr = await github.rest.pulls.get(request);
return pr.data;
@@ -142,28 +115,18 @@ jobs:
ref: ${{ steps.get-sha.outputs.sha }}
persist-credentials: false
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
install-docker-compose: "false"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
supersetbot docker \
--push \
--load \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE" \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
./scripts/build_docker.py \
"ci" \
"pull_request" \
--build_context_ref ${{ github.event.issue.number }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
@@ -183,14 +146,14 @@ jobs:
ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY
ephemeral-env-up:
needs: [ephemeral-env-comment, ephemeral-docker-build]
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: Spin up an ephemeral environment
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write
@@ -218,22 +181,22 @@ jobs:
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
--image-ids imageTag=pr-${{ github.event.issue.number }}-ci
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
github-token: ${{github.token}}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.'
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
});
core.setFailed(errMsg);
})
core.setFailed(errMsg)
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
@@ -241,7 +204,7 @@ jobs:
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci
- name: Update env vars in the Amazon ECS task definition
run: |
@@ -250,29 +213,30 @@ jobs:
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
id: create-service
if: steps.describe-services.outputs.active != 'true'
id: create-service
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
--service-name pr-${{ github.event.issue.number }}-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
--tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
service: pr-${{ github.event.issue.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10
@@ -280,15 +244,18 @@ jobs:
- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name==\"networkInterfaceId\")) | .[0] | .value')" >> $GITHUB_OUTPUT
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT
- name: Get public IP
id: get-ip
run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
@@ -296,11 +263,12 @@ jobs:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
})
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
@@ -308,8 +276,8 @@ jobs:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
})
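
Taken together, the two eval steps let a single PR comment drive both the action and any feature-flag overrides. An illustrative comment this workflow would parse (the flag name is hypothetical):

/testenv up FEATURE_SOME_FLAG=true

Per the regexes above, that resolves the slash command to `up` and injects SUPERSET_FEATURE_SOME_FLAG=true into the ephemeral environment's configuration.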

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -24,7 +24,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Generate Report
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -11,7 +11,7 @@ on:
jobs:
validate-all-ghas:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4
@@ -19,7 +19,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'
- name: Install Dependencies
run: npm install -g @action-validator/core @action-validator/cli --save-dev

View File

@@ -9,7 +9,7 @@ on:
jobs:
superbot-orglabel:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -7,7 +7,7 @@ jobs:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- uses: actions/labeler@v5
with:

View File

@@ -6,7 +6,7 @@ on:
jobs:
latest-release:
name: Add/update tag to new release
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: write

View File

@@ -12,7 +12,7 @@ concurrency:
jobs:
license_check:
name: License Check
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -11,7 +11,7 @@ concurrency:
jobs:
check-hold-label:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v7

View File

@@ -10,7 +10,7 @@ on:
jobs:
lint-check:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -15,10 +15,7 @@ concurrency:
jobs:
pre-commit:
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["current", "next", "previous"]
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -27,8 +24,6 @@ jobs:
submodules: recursive
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
- name: Enable brew and helm-docs
# Add brew to the path - see https://github.com/actions/runner-images/issues/6283
run: |
@@ -40,11 +35,8 @@ jobs:
brew install norwoodj/tap/helm-docs
- name: pre-commit
run: |
set +e # Don't exit immediately on failure
pre-commit run --all-files
if [ $? -ne 0 ] || ! git diff --quiet --exit-code; then
echo "❌ Pre-commit check failed."
echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
if ! pre-commit run --all-files; then
git status
git diff
exit 1
fi
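The message above nudges contributors toward running the same hooks locally before pushing; a minimal sketch of that setup (assuming a working Python environment):

```bash
# one-time setup: install pre-commit and register it as a git hook
pip install pre-commit
pre-commit install
# run every hook against the entire tree, exactly as CI does
pre-commit run --all-files
```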

View File

@@ -21,7 +21,7 @@ jobs:
prefer_typescript:
if: github.ref == 'refs/heads/master' && github.event_name == 'pull_request'
name: Prefer TypeScript
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -25,11 +25,11 @@ jobs:
if: needs.config.outputs.has-secrets
name: Bump version and publish package(s)
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
node-version: [20]
node-version: [18]
steps:
- uses: actions/checkout@v4

View File

@@ -6,7 +6,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -21,12 +21,12 @@ jobs:
cypress-applitools:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
browser: ["chrome"]
node: [20]
node: [18]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -12,7 +12,7 @@ env:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -27,10 +27,10 @@ jobs:
cron:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
node: [20]
node: [18]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
test-load-examples:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -12,7 +12,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -28,7 +28,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Build & Deploy
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -4,7 +4,6 @@ on:
pull_request:
paths:
- "docs/**"
- ".github/workflows/superset-docs-verify.yml"
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
@@ -14,44 +13,18 @@ concurrency:
jobs:
linkinator:
# See docs here: https://github.com/marketplace/actions/linkinator
name: Link Checking
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# Do not bump this linkinator-action version without opening
# an ASF Infra ticket to allow the new version first!
- uses: JustinBeckwith/linkinator-action@v1.11.0
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
- uses: JustinBeckwith/linkinator-action@v1.10.4
with:
paths: "**/*.md, **/*.mdx"
linksToSkip: >-
^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
http://localhost:8088/,
docker/.env-non-dev,
http://127.0.0.1:3000/,
http://localhost:9001/,
https://charts.bitnami.com/bitnami,
https://www.li.me/,
https://www.fanatics.com/,
https://tails.com/gb/,
https://www.techaudit.info/,
https://avetilearning.com/,
https://www.udemy.com/,
https://trustmedis.com/,
http://theiconic.com.au/,
https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html,
^https://img\.shields\.io/.*,
https://vkusvill.ru/
https://www.linkedin.com/in/mark-thomas-b16751158/
https://theiconic.com.au/
https://wattbewerb.de/
https://timbr.ai/
https://opensource.org/license/apache-2-0
https://www.plaidcloud.com/
linksToSkip: '^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+, http://localhost:8088/, docker/.env-non-dev, http://127.0.0.1:3000/, http://localhost:9001/, https://charts.bitnami.com/bitnami, https://www.li.me/, https://www.fanatics.com/, https://tails.com/gb/, https://www.techaudit.info/, https://avetilearning.com/, https://www.udemy.com/, https://trustmedis.com/, http://theiconic.com.au/, https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html, https://img.shields.io/librariesio/release/npm/%40superset-ui%2Fembedded-sdk?style=flat, https://img.shields.io/librariesio/release/npm/%40superset-ui%2Fplugin-chart-pivot-table?style=flat, https://vkusvill.ru/'
# verbosity: 'ERROR'
build-deploy:
name: Build & Deploy
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: docs
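The same link check can be approximated locally with the linkinator CLI; a rough sketch (paths and skip list trimmed, flags per linkinator's CLI help):

```bash
# scan markdown files for dead links, skipping local-dev URLs
npx linkinator "**/*.md" --markdown --skip "http://localhost:8088/"
```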

View File

@@ -28,7 +28,6 @@ concurrency:
jobs:
cypress-matrix:
# Somehow one test flakes on 24.04 for unknown reasons, this is the only GHA left on 22.04
runs-on: ubuntu-22.04
permissions:
contents: read
@@ -49,8 +48,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
# use the dashboard feature when running manually OR merging to master
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || (github.ref == 'refs/heads/master' && 'true') || 'false' }}
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard || (github.ref == 'refs/heads/master' && 'true') || 'false' }}
services:
postgres:
image: postgres:15-alpine
@@ -109,7 +107,7 @@ jobs:
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
- name: Install npm dependencies
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
@@ -133,7 +131,6 @@ jobs:
PARALLEL_ID: ${{ matrix.parallel_id }}
PARALLELISM: 6
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
NODE_OPTIONS: "--max-old-space-size=4096"
with:
run: cypress-run-all ${{ env.USE_DASHBOARD }}
- name: Upload Artifacts

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
frontend-build:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -33,7 +33,7 @@ jobs:
if: steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version: "20"
node-version: "18"
- name: Install dependencies
if: steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
@@ -49,6 +49,11 @@ jobs:
working-directory: ./superset-frontend
run: |
npm run type
- name: prettier
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
run: |
npm run prettier-check
- name: Build plugins packages
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
@@ -73,7 +78,7 @@ jobs:
working-directory: ./superset-frontend/packages/generator-superset
run: npm run test
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: javascript
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,4 +1,4 @@
name: "Helm: lint and test charts"
name: Lint and Test Charts
on:
pull_request:
@@ -13,7 +13,7 @@ concurrency:
jobs:
lint-test:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -1,8 +1,4 @@
# This workflow automates the release process for Helm charts.
# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
# allowing the changes to be reviewed and merged manually.
name: "Helm: release charts"
name: Release Charts
on:
push:
@@ -11,28 +7,18 @@ on:
- "[0-9].[0-9]*"
paths:
- "helm/**"
workflow_dispatch:
inputs:
ref:
description: "The branch, tag, or commit SHA to check out"
required: false
default: "master"
jobs:
release:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: write
pull-requests: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout code
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
persist-credentials: false
submodules: recursive
fetch-depth: 0
@@ -49,77 +35,11 @@ jobs:
- name: Add bitnami repo dependency
run: helm repo add bitnami https://charts.bitnami.com/bitnami
- name: Fetch/list all tags
run: |
# Debugging tags
git fetch --tags --force
git tag -d superset-helm-chart-0.13.4 || true
echo "DEBUG TAGS"
git show-ref --tags
- name: Create unique pages branch name
id: vars
run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
- name: Force recreate branch from gh-pages
run: |
# Ensure a clean working directory
git reset --hard
git clean -fdx
git checkout -b local_gha_temp
git submodule update
# Fetch the latest gh-pages branch
git fetch origin gh-pages
# Check out and reset the target branch based on gh-pages
git checkout -B ${{ env.branch_name }} origin/gh-pages
# Remove submodules from the branch
git submodule deinit -f --all
# Force push to the remote branch
git push origin ${{ env.branch_name }} --force
# Return to the original branch
git checkout local_gha_temp
- name: Fetch/list all tags
run: |
git submodule update
cat .github/actions/chart-releaser-action/action.yml
- name: Run chart-releaser
uses: ./.github/actions/chart-releaser-action
uses: helm/chart-releaser-action@v1.6.0
with:
version: v1.6.0
charts_dir: helm
mark_as_latest: false
pages_branch: ${{ env.branch_name }}
env:
CR_TOKEN: "${{ github.token }}"
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
- name: Open Pull Request
uses: actions/github-script@v7
with:
script: |
const branchName = '${{ env.branch_name }}';
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
if (!branchName) {
throw new Error("Branch name is not defined.");
}
const pr = await github.rest.pulls.create({
owner,
repo,
title: `Helm chart release for ${branchName}`,
head: branchName,
base: "gh-pages", // Adjust if the target branch is different
body: `This PR releases Helm charts to the gh-pages branch.`,
});
core.info(`Pull request created: ${pr.data.html_url}`);
env:
BRANCH_NAME: ${{ env.branch_name }}
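Where the `workflow_dispatch` trigger with its `ref` input is present, a release run can also be kicked off from the GitHub CLI; a sketch (the workflow file name is an assumption):

```bash
# manually dispatch the Helm chart release workflow against a chosen ref
gh workflow run superset-helm-release.yml -f ref=master
```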

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
test-mysql:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -68,13 +68,13 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,mysql
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-postgres:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next", "previous"]
@@ -129,14 +129,14 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,postgres
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-sqlite:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -181,7 +181,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,sqlite
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -0,0 +1,53 @@
# Python Misc unit tests
name: Python Misc
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
python-lint:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
babel-extract:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Test babel extraction
if: steps.check.outputs.python
run: scripts/translations/babel_update.sh

View File

@@ -16,7 +16,7 @@ concurrency:
jobs:
test-postgres-presto:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -77,14 +77,14 @@ jobs:
run: |
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,presto
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-postgres-hive:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -142,10 +142,9 @@ jobs:
- name: Python unit tests (PostgreSQL)
if: steps.check.outputs.python
run: |
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,hive
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -16,7 +16,7 @@ concurrency:
jobs:
unit-tests:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next"]
@@ -46,7 +46,7 @@ jobs:
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,unit
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
frontend-check-translations:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -46,7 +46,7 @@ jobs:
npm run build-translation
babel-extract:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -18,7 +18,7 @@ concurrency:
jobs:
app-checks:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -15,7 +15,7 @@ on:
jobs:
supersetbot:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
if: >
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))

View File

@@ -23,7 +23,7 @@ on:
- 'false'
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -39,34 +39,35 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: docker-release
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
fail-fast: false
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
tags: true
fetch-depth: 0
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
install-docker-compose: "false"
build: "true"
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Try to login to DockerHub
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Execute custom Node.js script
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@@ -87,45 +88,22 @@ jobs:
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--platform "linux/arm64" \
--platform "linux/amd64"
# Returning to master to support closing setup-supersetbot
git checkout master
update-prs-with-release-info:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
steps:
# Going back to the original branch to allow "post" GHA operations
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
persist-credentials: false
- name: Label the PRs with the right release-related labels
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
export GITHUB_ACTOR=""
git fetch --all --tags
git checkout master
RELEASE="${{ github.event.release.tag_name }}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -23,7 +23,7 @@ jobs:
process-and-upload:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
name: Generate Reports
steps:
- name: Checkout Repository
@@ -32,7 +32,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '18'
- name: Install Dependencies
run: npm install

View File

@@ -6,7 +6,7 @@ on:
jobs:
welcome:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write

1
.gitignore vendored
View File

@@ -121,4 +121,3 @@ docker/*local*
# Jest test report
test-report.html
superset/static/stats/statistics.html

View File

@@ -16,11 +16,11 @@
#
repos:
- repo: https://github.com/MarcoGorelli/auto-walrus
rev: 0.3.4
rev: v0.2.2
hooks:
- id: auto-walrus
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.13.0
rev: v1.3.0
hooks:
- id: mypy
args: [--check-untyped-defs]
@@ -38,26 +38,26 @@ repos:
types-paramiko,
types-Markdown,
]
- repo: https://github.com/peterdemin/pip-compile-multi
rev: v2.6.2
hooks:
- id: pip-compile-multi-verify
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v4.4.0
hooks:
- id: check-docstring-first
- id: check-added-large-files
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*
- id: check-yaml
exclude: ^helm/superset/templates/
- id: debug-statements
- id: end-of-file-fixer
exclude: .*/lerna\.json$
- id: trailing-whitespace
exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
rev: v3.1.0 # Use the sha or tag you want to point at
hooks:
- id: prettier
additional_dependencies:
- prettier@3.3.3
args: ["--ignore-path=./superset-frontend/.prettierignore"]
files: "superset-frontend"
# blacklist unsafe functions like make_url (see #19526)
@@ -67,13 +67,27 @@ repos:
- id: blacklist
args: ["--blacklisted-names=make_url", "--ignore=tests/"]
- repo: https://github.com/norwoodj/helm-docs
rev: v1.14.2
rev: v1.11.0
hooks:
- id: helm-docs
files: helm
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.0
rev: v0.4.0
hooks:
- id: ruff
args: [ --fix ]
- id: ruff-format
- repo: local
hooks:
- id: pylint
name: pylint
entry: pylint
language: system
types: [python]
exclude: ^(tests/|superset/migrations/|scripts/|RELEASING/|docker/)
args:
[
"-rn", # Only display messages
"-sn", # Don't display the score
"--rcfile=.pylintrc",
]
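This local hook is equivalent to invoking pylint directly; a rough command-line equivalent using the same flags (scope narrowed to the superset package):

```bash
# lint the superset package with the repo's pylintrc, messages only, no score
pylint -rn -sn --rcfile=.pylintrc superset
```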

380
.pylintrc Normal file
View File

@@ -0,0 +1,380 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,migrations
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=superset.extensions.pylint
# Use multiple processes to speed up Pylint.
jobs=2
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=pyarrow
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
useless-suppression,
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=
cyclic-import, # re-enable once this no longer raises false positives
missing-docstring,
duplicate-code,
line-too-long,
unspecified-encoding,
too-many-instance-attributes # re-enable once this no longer raises false positives
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x,y
# Bad variable names which should always be refused, separated by a comma
bad-names=bar,baz,db,fd,foo,sesh,session,tata,toto,tutu
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=
abc.abstractproperty,
sqlalchemy.ext.hybrid.hybrid_property
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{1,30}$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct constant names
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=10
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=5
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=contextlib.closing,optparse.Values,thread._local,_thread._local
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=10
# Maximum number of branch for function / method body
max-branches=15
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=8
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=builtins.Exception

View File

@@ -61,16 +61,12 @@ tsconfig.tsbuildinfo
generator-superset/*
temporary_superset_ui/*
# skip license checks for auto-generated test snapshots
.*snap
# docs overrides for third party logos we don't have the rights to
google-big-query.svg
google-sheets.svg
ibm-db2.svg
postgresql.svg
snowflake.svg
ydb.svg
# docs-related
erd.puml

50
CHANGELOG/4.1.1.md Normal file
View File

@@ -0,0 +1,50 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
### 4.1 (Fri Nov 15 22:13:57 2024 +0530)
**Database Migrations**
**Features**
**Fixes**
- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav)
- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido)
- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch)
- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase)
- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido)
- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe)
- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina)
- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas)
- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas)
- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. (@rusackas)
- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber)
**Others**
- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho)
- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido)
- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm)
- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm)
- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm)
- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas)
- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke)

View File

@@ -80,9 +80,9 @@ If you believe someone is violating this code of conduct, you may reply to them
Or one of our volunteers:
* [Mark Thomas](https://www.linkedin.com/in/mark-thomas-b16751158/)
* [Joan Touzet](https://www.apache.org/foundation/conduct-team/wohali.html)
* [Sharan Foga](https://www.linkedin.com/in/sfoga/)
* [Mark Thomas](http://home.apache.org/~markt/coc.html)
* [Joan Touzet](http://home.apache.org/~wohali/)
* [Sharan Foga](http://home.apache.org/~sharan/coc.html)
If the violation is in documentation or code, for example inappropriate pronoun usage or word choice within official documentation, we ask that people report these privately to the project in question at <private@project.apache.org>, and, if they have sufficient ability within the project, to resolve or remove the concerning material, being mindful of the perspective of the person originally reporting the issue.

View File

@@ -20,82 +20,50 @@
######################################################################
ARG PY_VER=3.10-slim-bookworm
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:18-bullseye-slim AS superset-node
######################################################################
# superset-node used for building frontend assets
######################################################################
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}
COPY docker/ /app/docker/
# Arguments for build configuration
ARG NPM_BUILD_CMD="build"
# Install system dependencies required for node-gyp
RUN /app/docker/apt-install.sh build-essential python3 zstd
# Somehow we need python3 + build-essential on this side of the house to install node-gyp
RUN apt-get update -qq \
&& apt-get install \
-yqq --no-install-recommends \
build-essential \
python3
# Define environment variables for frontend build
ENV BUILD_CMD=${NPM_BUILD_CMD} \
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
# Run `npm ci` first, so as NOT to invalidate previous layers except when package.json changes
# Run the frontend memory monitoring script
RUN /app/docker/frontend-mem-nag.sh
RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \
/frontend-mem-nag.sh
WORKDIR /app/superset-frontend
# Create necessary folders to avoid errors in subsequent steps
RUN mkdir -p /app/superset/static/assets \
/app/superset/translations
# Mount package files and install dependencies if not in dev mode
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
--mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
--mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/root/.npm \
if [ "$DEV_MODE" = "false" ]; then \
npm ci; \
else \
echo "Skipping 'npm ci' in dev mode"; \
fi
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
--mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
npm ci
# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
RUN npm run ${BUILD_CMD}
# Build the frontend if not in dev mode
RUN --mount=type=cache,target=/app/superset-frontend/.temp_cache \
--mount=type=cache,target=/root/.npm \
if [ "$DEV_MODE" = "false" ]; then \
echo "Running 'npm run ${BUILD_CMD}'"; \
npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi;
# Copy translation files
# This copies the .po files needed for translation
RUN mkdir -p /app/superset/translations
COPY superset/translations /app/superset/translations
# Compiles .json files from the .po files, then deletes the .po files
RUN npm run build-translation
RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
RUN rm /app/superset/translations/messages.pot
# Build the frontend if not in dev mode
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
fi; \
rm -rf /app/superset/translations/*/*/*.po; \
rm -rf /app/superset/translations/*/*/*.mo;
######################################################################
# Base python layer
######################################################################
FROM python:${PY_VER} AS python-base
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}
######################################################################
# Final lean image...
######################################################################
FROM python-base AS lean
WORKDIR /app
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8 \
SUPERSET_ENV=production \
@@ -104,144 +72,109 @@ ENV LANG=C.UTF-8 \
SUPERSET_HOME="/app/superset_home" \
SUPERSET_PORT=8088
RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& apt-get update -qq && apt-get install -yqq --no-install-recommends \
curl \
default-libmysqlclient-dev \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libpq-dev \
libecpg-dev \
libldap2-dev \
&& touch superset/static/version_info.json \
&& chown -R superset:superset ./* \
&& rm -rf /var/lib/apt/lists/*
RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset
COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
COPY --chown=superset:superset requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --upgrade setuptools pip \
&& pip install -r requirements/base.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
# Some bash scripts needed throughout the layers
COPY --chmod=755 docker/*.sh /app/docker/
# Copy the compiled frontend assets
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
RUN pip install --no-cache-dir --upgrade uv
## Lastly, let's install superset itself
COPY --chown=superset:superset superset superset
RUN --mount=type=cache,target=/root/.cache/pip \
pip install -e .
# Using uv as it's faster/simpler than pip
RUN uv venv /app/.venv
ENV PATH="/app/.venv/bin:${PATH}"
# Copy the .json translations from the frontend layer
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations
# Install Playwright and optionally setup headless browsers
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=/root/.cache/uv \
if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
uv pip install playwright && \
playwright install-deps && \
if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
else \
echo "Skipping browser installation"; \
fi
# Compile translations for the backend - this generates .mo files, then deletes the .po files
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN ./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations \
&& rm superset/translations/messages.pot \
&& rm superset/translations/*/LC_MESSAGES/*.po
######################################################################
# Python translation compiler layer
######################################################################
FROM python-base AS python-translation-compiler
# Install Python dependencies using docker/pip-install.sh
COPY requirements/translations.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
COPY superset/translations/ /app/translations_mo/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
pybabel compile -d /app/translations_mo | true; \
fi; \
rm -f /app/translations_mo/*/*/*.po; \
rm -f /app/translations_mo/*/*/*.json;
######################################################################
# Python APP common layer
######################################################################
FROM python-base AS python-common
# Copy the entrypoints, make them executable in userspace
COPY --chmod=755 docker/entrypoints /app/docker/entrypoints
WORKDIR /app
# Set up necessary directories and user
RUN mkdir -p \
${SUPERSET_HOME} \
${PYTHONPATH} \
superset/static \
requirements \
superset-frontend \
apache_superset.egg-info \
requirements \
&& touch superset/static/version_info.json
# Copy required files for Python build
COPY pyproject.toml setup.py MANIFEST.in README.md ./
COPY superset-frontend/package.json superset-frontend/
COPY scripts/check-env.py scripts/
# keeping for backward compatibility
COPY --chmod=755 ./docker/entrypoints/run-server.sh /usr/bin/
# Some debian libs
RUN /app/docker/apt-install.sh \
curl \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libpq-dev \
libecpg-dev \
libldap2-dev
# Copy compiled things from previous stages
COPY --from=superset-node /app/superset/static/assets superset/static/assets
# TODO, when the next version comes out, use --exclude superset/translations
COPY superset superset
# TODO in the meantime, remove the .po files
RUN rm superset/translations/*/*/*.po
# Merging translations from backend and frontend stages
COPY --from=superset-node /app/superset/translations superset/translations
COPY --from=python-translation-compiler /app/translations_mo superset/translations
COPY --chmod=755 ./docker/run-server.sh /usr/bin/
USER superset
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
CMD ["/app/docker/entrypoints/run-server.sh"]
EXPOSE ${SUPERSET_PORT}
######################################################################
# Final lean image...
######################################################################
FROM python-common AS lean
# Install Python dependencies using docker/pip-install.sh
COPY requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install .
RUN python -m compileall /app/superset
USER superset
CMD ["/usr/bin/run-server.sh"]
######################################################################
# Dev image...
######################################################################
FROM python-common AS dev
FROM lean AS dev
# Debian libs needed for dev
RUN /app/docker/apt-install.sh \
git \
pkg-config \
default-libmysqlclient-dev
USER root
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends \
libnss3 \
libdbus-glib-1-2 \
libgtk-3-0 \
libx11-xcb1 \
libasound2 \
libxtst6 \
git \
pkg-config \
&& rm -rf /var/lib/apt/lists/*
# Copy development requirements and install them
COPY requirements/*.txt requirements/
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install .
RUN --mount=type=cache,target=/root/.cache/pip \
pip install playwright
RUN playwright install-deps
RUN playwright install chromium
RUN python -m compileall /app/superset
# Install GeckoDriver WebDriver
ARG GECKODRIVER_VERSION=v0.34.0 \
FIREFOX_VERSION=125.0.3
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
# Install Firefox
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*
# Cache everything for dev purposes...
COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install -r requirements/development.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
USER superset
######################################################################
# CI image...
######################################################################
FROM lean AS ci
CMD ["/app/docker/entrypoints/docker-ci.sh"]
COPY --chown=superset:superset --chmod=755 ./docker/*.sh /app/docker/
CMD ["/app/docker/docker-ci.sh"]

View File

@@ -87,6 +87,9 @@ format: py-format js-format
py-format: pre-commit
pre-commit run black --all-files
py-lint: pre-commit
pylint -j 0 superset
js-format:
cd superset-frontend; npm run prettier
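With the `py-lint` target above, the lint run is one command from the repo root; a sketch (assumes pylint and the pre-commit hooks are installed):

```bash
# lint the superset package across all available cores, as the Makefile target does
make py-lint
```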

View File

@@ -19,7 +19,7 @@ under the License.
# Superset
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/license/apache-2-0)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/tree/latest)
[![Build Status](https://github.com/apache/superset/workflows/Python/badge.svg)](https://github.com/apache/superset/actions)
[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset)
@@ -134,9 +134,6 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
</p>
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).

View File

@@ -437,7 +437,7 @@ cd ${SUPERSET_RELEASE_RC}
python3 -m venv venv
source venv/bin/activate
pip install -r requirements/base.txt
pip install build twine
pip install twine
```
Create the distribution
@@ -455,7 +455,7 @@ cd ../
./scripts/translations/generate_po_files.sh
# build the python distribution
python -m build
python setup.py sdist
```
Publish to PyPI
@@ -466,7 +466,6 @@ an account first if you don't have one, and reference your username
while requesting access to push packages.
```bash
twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl
twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz
```

View File

@@ -272,14 +272,14 @@ class GitLogs:
@staticmethod
def _git_get_current_head() -> str:
output = os.popen("git status | head -1").read() # noqa: S605, S607
output = os.popen("git status | head -1").read()
match = re.match("(?:HEAD detached at|On branch) (.*)", output)
if not match:
return ""
return match.group(1)
def _git_checkout(self, git_ref: str) -> None:
os.popen(f"git checkout {git_ref}").read() # noqa: S605
os.popen(f"git checkout {git_ref}").read()
current_head = self._git_get_current_head()
if current_head != git_ref:
print(f"Could not checkout {git_ref}")
@@ -290,7 +290,7 @@ class GitLogs:
current_git_ref = self._git_get_current_head()
self._git_checkout(self._git_ref)
output = (
os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"') # noqa: S605, S607
os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')
.read()
.split("\n")
)
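The pipe-delimited pretty-format string parsed here can be previewed straight from a shell; a quick sketch:

```bash
# emit hash|author|email|date|subject| for the last three commits
git --no-pager log -3 --pretty=format:"%h|%an|%ae|%ad|%s|"
```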

View File

@@ -31,7 +31,7 @@ except ModuleNotFoundError:
RECEIVER_EMAIL = "dev@superset.apache.org"
PROJECT_NAME = "Superset"
PROJECT_MODULE = "superset"
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application." # noqa: E501
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."
def string_comma_to_list(message: str) -> list[str]:

View File

@@ -137,4 +137,4 @@ There is now a [metadata bar](https://github.com/apache/superset/pull/27857) add
## Change to Docker image builds
Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/docs/installation/docker-builds.mdx#build-presets) for more details.
Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/installation/docker-builds#build-presets) for more details.
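A common way to restore drivers is a small derived image; a sketch assuming the official 4.1.0 tag and a Postgres metadata database (`psycopg2-binary` as the driver package):

```bash
# build a derived image that layers database drivers onto the lean release
cat > Dockerfile <<'EOF'
FROM apache/superset:4.1.0
USER root
RUN pip install --no-cache-dir psycopg2-binary
USER superset
EOF
docker build -t superset-with-drivers .
```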

View File

@@ -23,12 +23,12 @@ from typing import Optional
import requests
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512` # noqa: E501
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`
def get_sha512_hash(filename: str) -> str:
"""Run the shasum command on the file and return the SHA512 hash."""
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE) # noqa: S603, S607
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)
sha512_hash = result.stdout.decode().split()[0]
return sha512_hash
@@ -43,7 +43,7 @@ def read_sha512_file(filename: str) -> str:
def verify_sha512(filename: str) -> str:
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file.""" # noqa: E501
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""
sha512_hash = get_sha512_hash(filename)
sha512_file_content = read_sha512_file(filename)
@@ -53,56 +53,47 @@ def verify_sha512(filename: str) -> str:
return "SHA failed"
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file # noqa: E501
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file
def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
"""Run the GPG verify command and extract RSA key and email address."""
asc_filename = filename + ".asc"
result = subprocess.run( # noqa: S603
["gpg", "--verify", asc_filename, filename], # noqa: S607
capture_output=True, # noqa: S607
result = subprocess.run(
["gpg", "--verify", asc_filename, filename], capture_output=True
)
output = result.stderr.decode()
rsa_key = re.search(r"RSA key ([0-9A-F]+)", output)
eddsa_key = re.search(r"EDDSA key ([0-9A-F]+)", output)
email = re.search(r'issuer "([^"]+)"', output)
rsa_key_result = rsa_key.group(1) if rsa_key else None
eddsa_key_result = eddsa_key.group(1) if eddsa_key else None
email_result = email.group(1) if email else None
key_result = rsa_key_result or eddsa_key_result
# Debugging:
if key_result:
print("RSA or EDDSA Key found")
else:
print("Warning: No RSA or EDDSA key found in GPG verification output.")
if email_result:
print("email found")
else:
# Debugging: print warnings if rsa_key or email is not found
if rsa_key_result is None:
print("Warning: No RSA key found in GPG verification output.")
if email_result is None:
print("Warning: No email address found in GPG verification output.")
return key_result, email_result
return rsa_key_result, email_result
def verify_key(key: str, email: Optional[str]) -> str:
"""Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
def verify_rsa_key(rsa_key: str, email: Optional[str]) -> str:
"""Fetch the KEYS file and verify if the RSA key and email match."""
url = "https://downloads.apache.org/superset/KEYS"
response = requests.get(url) # noqa: S113
response = requests.get(url)
if response.status_code == 200:
if key not in response.text:
return "RSA/EDDSA key not found on KEYS page"
if rsa_key not in response.text:
return "RSA key not found on KEYS page"
# Check if email is None or not in response.text
if email and email in response.text:
return "RSA/EDDSA key and email verified against Apache KEYS file"
return "RSA key and email verified against Apache KEYS file"
elif email:
return "RSA/EDDSA key verified, but Email not found on KEYS page"
return "RSA key verified, but Email not found on KEYS page"
else:
return "RSA/EDDSA key verified, but Email not available for verification"
return "RSA key verified, but Email not available for verification"
else:
return "Failed to fetch KEYS file"
@@ -112,9 +103,9 @@ def verify_sha512_and_rsa(filename: str) -> None:
sha_result = verify_sha512(filename)
print(sha_result)
key, email = get_gpg_info(filename)
if key:
rsa_result = verify_key(key, email)
rsa_key, email = get_gpg_info(filename)
if rsa_key:
rsa_result = verify_rsa_key(rsa_key, email)
print(rsa_result)
else:
print("GPG verification failed: RSA key or email not found")

View File

@@ -25,87 +25,87 @@ all you have to do is file a simple PR [like this one](https://github.com/apache
the categorization is inaccurate, please file a PR with your correction as well.
Join our growing community!
### Sharing Economy
- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
- [Faasos](http://faasos.com/) [@shashanksingh]
- [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel]
- [Lime](https://www.li.me/) [@cxmcc]
- [Lyft](https://www.lyft.com/)
- [Ontruck](https://www.ontruck.com/)
### Financial Services
- [Aktia Bank plc](https://www.aktia.com)
- [Aktia Bank plc](https://www.aktia.com) [@villebro]
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
- [Cape Crypto](https://capecrypto.com)
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
- [Clark.de](https://clark.de/)
- [Capital Service S.A.](http://capitalservice.pl) [@pkonarzewski]
- [Clark.de](http://clark.de/)
- [KarrotPay](https://www.daangnpay.com/)
- [Taveo](https://www.taveo.com) [@codek]
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
- [Wise](https://wise.com) [@koszti]
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
- [Xendit](http://xendit.co/) [@LieAlbertTriAdrian]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
### Gaming
- [Popoko VM Games Studio](https://popoko.live)
### E-Commerce
- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
- [Fanatics](https://www.fanatics.com/) [@coderfender]
- [Fordeal](https://www.fordeal.com) [@Renkai]
- [Fordeal](http://www.fordeal.com) [@Renkai]
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [HuiShouBao](http://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [Now](https://www.now.vn/) [@davidkohcw]
- [Qunar](https://www.qunar.com/) [@flametest]
- [Rakuten Viki](https://www.viki.com)
- [Shopee](https://shopee.sg) [@xiaohanyu]
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
- [THE ICONIC](http://theiconic.com.au/) [@ksaagariconic]
- [Utair](https://www.utair.ru) [@utair-digital]
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
- [Zalando](https://www.zalando.com) [@dmigo]
- [Zalora](https://www.zalora.com) [@ksaagariconic]
### Enterprise Technology
- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [CnOvit](https://www.cnovit.com/) [@xieshaohu]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
- [Dial Once](https://www.dial-once.com/)
- [Dremio](https://dremio.com) [@narendrans]
- [EFinance](https://www.efinance.com.eg) [@habeeb556]
- [Elestio](https://elest.io/) [@kaiwalyakoparkar]
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Endress+Hauser](http://www.endress.com/) [@rumbin]
- [FBK - ICT center](http://ict.fbk.eu)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [Hydrolix](https://www.hydrolix.io/)
- [Intercom](https://www.intercom.com/) [@kate-gallo]
- [jampp](https://jampp.com/)
- [Konfío](https://konfio.mx) [@uis-rodriguez]
- [Konfío](http://konfio.mx) [@uis-rodriguez]
- [Mainstrat](https://mainstrat.com/)
- [mishmash io](https://mishmash.io/) [@mishmash-io]
- [Myra Labs](https://www.myralabs.com/) [@viksit]
- [Nielsen](https://www.nielsen.com/) [@amitNielsen]
- [mishmash io](https://mishmash.io/)[@mishmash-io]
- [Myra Labs](http://www.myralabs.com/) [@viksit]
- [Nielsen](http://www.nielsen.com/) [@amitNielsen]
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)
- [Preset, Inc.](https://preset.io)
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
@@ -114,16 +114,17 @@ Join our growing community!
- [Showmax](https://showmax.com) [@bobek]
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
- [Tentacle](https://tentaclecmi.com) [@jdclarke5]
- [timbr.ai](https://timbr.ai/) [@semantiDan]
- [Tobii](https://www.tobii.com/) [@dwa]
- [Tobii](http://www.tobii.com/) [@dwa]
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
- [Unvired](https://unvired.com) [@srinisubramanian]
- [Whale](https://whale.im)
- [Unvired](https://unvired.com)[@srinisubramanian]
- [Whale](http://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [Zeta](https://www.zeta.tech/) [@shaikidris]
### Media & Entertainment
- [6play](https://www.6play.fr) [@CoryChaplin]
- [bilibili](https://www.bilibili.com) [@Moinheart]
- [BurdaForward](https://www.burda-forward.de/en/)
@@ -131,21 +132,23 @@ Join our growing community!
- [Kuaishou](https://www.kuaishou.com/) [@zhaoyu89730105]
- [Netflix](https://www.netflix.com/)
- [Prensa Iberica](https://www.prensaiberica.es/) [@zamar-roura]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/) [@shenyuanli,@marklaw]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/)[@shenyuanli,@marklaw]
- [Xite](https://xite.com/) [@shashankkoppar]
- [Zaihang](https://www.zaih.com/)
- [Zaihang](http://www.zaih.com/)
### Education
- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
- [Brilliant.org](https://brilliant.org/)
- [Platzi.com](https://platzi.com/)
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
- [The GRAPH Network](https://thegraphnetwork.org/)[@fccoelho]
- [Udemy](https://www.udemy.com/) [@sungjuly]
- [VIPKID](https://www.vipkid.com.cn/) [@illpanda]
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]
### Energy
- [Airboxlab](https://foobot.io) [@antoine-galataud]
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
@@ -153,32 +156,35 @@ Join our growing community!
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]
### Healthcare
- [Amino](https://amino.com) [@shkr]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/) [@alandao2021]
- [Care](https://www.getcare.io/)[@alandao2021]
- [Living Goods](https://www.livinggoods.org) [@chelule]
- [Maieutical Labs](https://maieuticallabs.it) [@xrmx]
- [Medic](https://medic.org) [@1yuv]
- [QPID Health](http://www.qpidhealth.com/)
- [REDCap Cloud](https://www.redcapcloud.com/)
- [TrustMedis](https://trustmedis.com/) [@famasya]
- [WeSure](https://www.wesure.cn/)
- [2070Health](https://2070health.com/)
### HR / Staffing
- [Swile](https://www.swile.co/) [@PaoloTerzi]
- [Symmetrics](https://www.symmetrics.fyi)
- [bluquist](https://bluquist.com/)
### Government / Non-Profit
### Government
- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)
### Travel
- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]
### Others
- [10Web](https://10web.io/)
- [AI inside](https://inside.ai/en/)
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
@@ -189,6 +195,6 @@ Join our growing community!
- [komoot](https://www.komoot.com/) [@christophlingg]
- [Let's Roam](https://www.letsroam.com/)
- [Onebeat](https://1beat.com/) [@GuyAttia]
- [X](https://x.com/)
- [Twitter](https://twitter.com/)
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
- [Yahoo!](https://yahoo.com/)

View File

@@ -22,18 +22,6 @@ under the License.
This file documents any backwards-incompatible changes in Superset and
assists people when migrating to a new version.
## Next
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName" (see the config sketch after this list).
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat and docker build time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. This only affects development workflows (not the release itself). `ruff` covers the most important rules and runs much faster, though some rules that were enforced before may not be enforced in exactly the same way.
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
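As a sketch of what tweaking the ClickHouse function blocklist in `superset_config.py` could look like (the `DISALLOWED_SQL_FUNCTIONS` key name is an assumption inferred from the PR; verify it against your Superset version):
```python
# Assumed config key from PR 31198 (verify before relying on it): maps an
# engine name to the set of SQL functions Superset will reject in queries.
DISALLOWED_SQL_FUNCTIONS = {
    "clickhouse": {"version", "currentDatabase", "hostName"},
}
```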
### Potential Downtime
## 4.1.0
- [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your
@@ -44,9 +32,9 @@ assists people when migrating to a new version.
`requirements/` folder. If you use these files for your builds you may want to double
check that your builds are not affected. `base.txt` should be the same as before, though
`development.txt` becomes a bigger set, incorporating the now-defunct local, testing, integration, and docker requirements files
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker compose.\*
- [27434](https://github.com/apache/superset/pull/27434/files): DO NOT USE our docker-compose.\*
files for production use cases! While we never really supported
or should have tried to support docker compose for production use cases, we now actively
or should have tried to support docker-compose for production use cases, we now actively
have taken a stance against supporting it. See the PR for details.
- [24112](https://github.com/apache/superset/pull/24112): Python 3.10 is now the recommended python version to use; 3.9 is still
supported but will be deprecated in the nearish future. CI/CD runs on py310 so you probably want to align. If you
@@ -70,7 +58,7 @@ assists people when migrating to a new version.
backend, as well as the .json files used by the frontend. If you were doing anything before
as part of your bundling to expose translation packages, it's probably not needed anymore.
- [29264](https://github.com/apache/superset/pull/29264) Slack has updated its file upload api, and we are now supporting this new api in Superset, although the Slack api is not backward compatible. The original Slack integration is deprecated and we will require a new Slack scope `channels:read` to be added to Slack workspaces in order to use this new api. In an upcoming release, we will make this new Slack scope mandatory and remove the old Slack functionality.
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs.
- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs
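A sketch of the adapted configuration, assuming the flag keeps the `SLACK_ENABLE_AVATAR` name inside the feature flag framework:
```python
# superset_config.py: the old top-level SLACK_ENABLE_AVATAR = True setting
# moves into FEATURE_FLAGS (the flag name is assumed to be unchanged).
FEATURE_FLAGS = {
    "SLACK_ENABLE_AVATAR": True,
}
```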
### Potential Downtime
@@ -128,7 +116,7 @@ assists people when migrating to a new version.
- [24911](https://github.com/apache/superset/pull/24911): Changes the column type from `TEXT` to `MediumText` in table `logs`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments.
- [24939](https://github.com/apache/superset/pull/24939): Augments the foreign key constraints for the `embedded_dashboards` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard is deleted. Scheduled downtime may be advised.
- [24938](https://github.com/apache/superset/pull/24938): Augments the foreign key constraints for the `dashboard_slices` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dashboard or slice is deleted. Scheduled downtime may be advised.
- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24628]https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_owner`, `report_schedule_owner`, and `slice_owner` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables which reference the `tables` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
- [23652](https://github.com/apache/superset/pull/23652): Enables GENERIC_CHART_AXES feature flag by default.
@@ -144,7 +132,7 @@ assists people when migrating to a new version.
### Breaking Changes
- [24686](https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
- [24686]https://github.com/apache/superset/pull/24686): All dataset's custom explore_url are handled as relative URLs on the frontend, behaviour controlled by PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET.
- [24262](https://github.com/apache/superset/pull/24262): Enabled `TALISMAN_ENABLED` flag by default and provided stricter default Content Security Policy
- [24415](https://github.com/apache/superset/pull/24415): Removed the obsolete Druid NoSQL REGEX operator.
- [24423](https://github.com/apache/superset/pull/24423): Removed deprecated APIs `/superset/slice_json/...`, `/superset/annotation_json/...`
@@ -241,7 +229,7 @@ assists people when migrating to a new version.
- [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case.
- [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case.
- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation.
html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating your Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map).
- [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default.
- [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward.
- [19107](https://github.com/apache/superset/pull/19107): The `SQLLAB_BACKEND_PERSISTENCE` feature flag is now `True` by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`.
@@ -329,7 +317,8 @@ assists people when migrating to a new version.
### Potential Downtime
- [14234](https://github.com/apache/superset/pull/14234): Adds the `limiting_factor` column to the `query` table. Given the migration includes a DDL operation on a heavily trafficked table, potential service downtime may be required.
- [16454](https://github.com/apache/superset/pull/16454): Adds the `extra` column to the `table_columns` table. Users using MySQL will either need to schedule downtime or use the percona toolkit (or similar) to perform the migration.
-[16454](https://github.com/apache/superset/pull/16454): Adds the `extra` column to the `table_columns` table. Users using MySQL will either need to schedule downtime or use the percona toolkit (or similar) to perform the migration.
## 1.2.0

View File

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.

View File

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.

View File

@@ -16,7 +16,7 @@
#
# -----------------------------------------------------------------------
# We don't support docker compose for production environments.
# We don't support docker-compose for production environments.
# If you choose to use this type of deployment make sure to
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
@@ -35,14 +35,9 @@ x-superset-volumes: &superset-volumes
x-common-build: &common-build
context: .
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
target: dev
cache_from:
- apache/superset-cache:3.10-slim-bookworm
args:
DEV_MODE: "true"
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
services:
nginx:
@@ -125,7 +120,7 @@ services:
- /home/superset-websocket/dist
# Mounting a config file that contains a dummy secret required to boot up.
# do not use this docker compose in production
# do not use this docker-compose in production
- ./docker/superset-websocket/config.json:/home/superset-websocket/config.json
environment:
- PORT=8080
@@ -152,20 +147,10 @@ services:
disable: true
superset-node:
build:
context: .
target: superset-node
args:
# This prevents building the frontend bundle since we'll mount local folder
# and build it on startup while firing docker-frontend.sh in dev mode, where
# it'll mount and watch local files and rebuild as you update them
DEV_MODE: "true"
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
image: node:18
environment:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]
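Based on the build args shown above, selecting a different build target or baking in a headless browser at compose time might look like this sketch (the variable names come from the yaml above):
```bash
# Build the lean target instead of the default dev, and include Chromium.
SUPERSET_BUILD_TARGET=lean INCLUDE_CHROMIUM=true docker compose up --build
```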

View File

@@ -17,7 +17,6 @@
COMPOSE_PROJECT_NAME=superset
DEV_MODE=true
# database configurations (do not modify)
DATABASE_DB=superset

View File

@@ -1,51 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail
# Ensure this script is run as root
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" >&2
exit 1
fi
# Check for required arguments
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <package1> [<package2> ...]" >&2
exit 1
fi
# Colors for better logging (optional)
GREEN='\033[0;32m'
RED='\033[0;31m'
RESET='\033[0m'
# Install packages with clean-up
echo -e "${GREEN}Updating package lists...${RESET}"
apt-get update -qq
echo -e "${GREEN}Installing packages: $@${RESET}"
apt-get install -yqq --no-install-recommends "$@"
echo -e "${GREEN}Autoremoving unnecessary packages...${RESET}"
apt-get autoremove -y
echo -e "${GREEN}Cleaning up package cache and metadata...${RESET}"
apt-get clean
rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* /tmp/* /var/tmp/*
echo -e "${GREEN}Installation and cleanup complete.${RESET}"

View File

@@ -18,11 +18,6 @@
set -eo pipefail
# Make python interactive
if [ "$DEV_MODE" == "true" ]; then
echo "Reinstalling the app in editable mode"
uv pip install -e .
fi
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
# If Cypress run overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
@@ -30,16 +25,12 @@ if [ "$CYPRESS_CONFIG" == "true" ]; then
export SUPERSET_TESTENV=true
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
fi
if [[ "$DATABASE_DIALECT" == postgres* ]] ; then
echo "Installing postgres requirements"
uv pip install -e .[postgres]
fi
#
# Make sure we have dev requirements installed
#
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
echo "Skipping local overrides"
fi
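The `requirements-local.txt` hook above is how extra Python packages get layered into the dev stack; a sketch, assuming a compose service named `superset`:
```bash
# Add a local-only package; the bootstrap script installs it on the next container start.
echo "mysqlclient" >> ./docker/requirements-local.txt
docker compose restart superset
```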

View File

@@ -24,21 +24,12 @@ if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = "false" ]; then
fi
if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Building Superset frontend in dev mode inside docker container"
cd /app/superset-frontend
npm install -f --no-optional --global webpack webpack-cli
npm install -f
if [ "$NPM_RUN_PRUNE" = "true" ]; then
echo "Running `npm run prune`"
npm run prune
fi
echo "Running `npm install`"
npm install
echo "Start webpack dev server"
echo "Running frontend"
npm run dev
else
echo "Skipping frontend build steps - YOU NEED TO RUN IT MANUALLY ON THE HOST!"
echo "https://superset.apache.org/docs/contributing/development/#webpack-dev-server"
echo "Skipping frontend build steps - YOU RUN IT MANUALLY ON THE HOST!"
fi

View File

@@ -22,11 +22,7 @@ set -e
#
/app/docker/docker-bootstrap.sh
if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
STEP_CNT=4
else
STEP_CNT=3
fi
STEP_CNT=4
echo_step() {
cat <<EOF

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail
# Default flag
REQUIRES_BUILD_ESSENTIAL=false
USE_CACHE=true
# Filter arguments
ARGS=()
for arg in "$@"; do
case "$arg" in
--requires-build-essential)
REQUIRES_BUILD_ESSENTIAL=true
;;
--no-cache)
USE_CACHE=false
;;
*)
ARGS+=("$arg")
;;
esac
done
# Install build-essential if required
if $REQUIRES_BUILD_ESSENTIAL; then
echo "Installing build-essential for package builds..."
apt-get update -qq \
&& apt-get install -yqq --no-install-recommends build-essential
fi
# Choose whether to use pip cache
if $USE_CACHE; then
echo "Using pip cache..."
uv pip install "${ARGS[@]}"
else
echo "Disabling pip cache..."
uv pip install --no-cache-dir "${ARGS[@]}"
fi
# Remove build-essential if it was installed
if $REQUIRES_BUILD_ESSENTIAL; then
echo "Removing build-essential to keep the image lean..."
apt-get autoremove -yqq --purge build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
fi
echo "Python packages installed successfully."

View File

@@ -99,7 +99,7 @@ CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ # noqa: E501
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True

View File

@@ -26,7 +26,6 @@ gunicorn \
--workers ${SERVER_WORKER_AMOUNT:-1} \
--worker-class ${SERVER_WORKER_CLASS:-gthread} \
--threads ${SERVER_THREADS_AMOUNT:-20} \
--log-level "${GUNICORN_LOGLEVEL:-info}" \
--timeout ${GUNICORN_TIMEOUT:-60} \
--keep-alive ${GUNICORN_KEEPALIVE:-2} \
--max-requests ${WORKER_MAX_REQUESTS:-0} \

View File

@@ -1 +1 @@
v20.16.0
v20.12.2

View File

@@ -16,7 +16,6 @@ KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
This is the public documentation site for Superset, built using
[Docusaurus 2](https://docusaurus.io/). See
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on

View File

@@ -63,7 +63,6 @@
"Fiji",
"Finland",
"France",
"France (with overseas)",
"France (regions)",
"French Polynesia",
"Gabon",
@@ -78,7 +77,6 @@
"Guyana",
"Haiti",
"Honduras",
"Hungary",
"Iceland",
"India",
"Indonesia",

View File

@@ -53,14 +53,11 @@ To send alerts and reports to Slack channels, you need to create a new Slack App
- `incoming-webhook`
- `files:write`
- `chat:write`
- `channels:read`
- `groups:read`
4. At the top of the "OAuth and Permissions" section, click "install to workspace".
5. Select a default channel for your app and continue.
(You can post to any channel by inviting your Superset app into that channel).
6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token in the `SLACK_API_TOKEN` variable of your `superset_config.py`.
7. Ensure the feature flag `ALERT_REPORT_SLACK_V2` is set to True in `superset_config.py`
8. Restart the service (or run `superset init`) to pull in the new configuration.
7. Restart the service (or run `superset init`) to pull in the new configuration.
Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#', e.g. use `alerts` instead of `#alerts`.
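Putting the values from the steps above together, a minimal `superset_config.py` sketch for the new Slack integration (the token is a placeholder):
```python
# Bot User OAuth Access Token copied from your Slack app (placeholder value).
SLACK_API_TOKEN = "xoxb-your-token-here"
FEATURE_FLAGS = {
    "ALERT_REPORTS": True,          # enables Alerts & Reports
    "ALERT_REPORT_SLACK_V2": True,  # opts into the new Slack file upload API
}
```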
@@ -254,18 +251,15 @@ FROM apache/superset:3.1.0
USER root
RUN apt-get update && \
apt-get install -y wget zip libaio1
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
wget -O google-chrome-stable_current_amd64.deb -q http://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROMEDRIVER_VERSION}-1_amd64.deb && \
wget -q https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
apt-get install -y --no-install-recommends ./google-chrome-stable_current_amd64.deb && \
rm -f google-chrome-stable_current_amd64.deb
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_116) && \
wget -q https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip && \
unzip -j chromedriver-linux64.zip -d /usr/bin && \
RUN export CHROMEDRIVER_VERSION=$(curl --silent https://chromedriver.storage.googleapis.com/LATEST_RELEASE_102) && \
wget -q https://chromedriver.storage.googleapis.com/${CHROMEDRIVER_VERSION}/chromedriver_linux64.zip && \
unzip chromedriver_linux64.zip -d /usr/bin && \
chmod 755 /usr/bin/chromedriver && \
rm -f chromedriver-linux64.zip
rm -f chromedriver_linux64.zip
RUN pip install --no-cache gevent psycopg2 redis

View File

@@ -13,8 +13,8 @@ SimpleCache (in-memory), or the local filesystem.
[Custom cache backends](https://flask-caching.readthedocs.io/en/latest/#custom-cache-backends)
are also supported.
Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
Caching can be configured by providing a dictionaries in
`superset_config.py` that comply with[the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).
The following cache configurations can be customized in this way:
- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`.
@@ -22,7 +22,7 @@ The following cache configurations can be customized in this way:
- Metadata cache (optional): `CACHE_CONFIG`
- Charting data queried from datasets (optional): `DATA_CACHE_CONFIG`
For example, to configure the filter state cache using Redis:
For example, to configure the filter state cache using redis:
```python
FILTER_STATE_CACHE_CONFIG = {

View File

@@ -37,7 +37,7 @@ ENV SUPERSET_CONFIG_PATH /app/superset_config.py
```
Docker compose deployments handle application configuration differently using specific conventions.
Refer to the [docker compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
Refer to the [docker-compose tips & configuration](/docs/installation/docker-compose#docker-compose-tips--configuration)
for details.
The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
@@ -314,9 +314,9 @@ CUSTOM_SECURITY_MANAGER = CustomSsoSecurityManager
]
```
### Keycloak-Specific Configuration using Flask-OIDC
If you are using Keycloak as OpenID Connect 1.0 Provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case using [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is a viable option.
If you are using Keycloak as OpenID Connect 1.0 Provider, the above configuration based on [`Authlib`](https://authlib.org/) might not work. In this case using [`Flask-OIDC`](https://https://pypi.org/project/flask-oidc/) is a viable option.
Make sure the pip package [`Flask-OIDC`](https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
Make sure the pip package [`Flask-OIDC`](https://https://pypi.org/project/flask-oidc/) is installed on the webserver. This was successfully tested using version 2.2.0. This package requires [`Flask-OpenID`](https://pypi.org/project/Flask-OpenID/) as a dependency.
The following code defines a new security manager. Add it to a new file named `keycloak_security_manager.py`, placed in the same directory as your `superset_config.py` file.
```python

View File

@@ -55,9 +55,7 @@ are compatible with Superset.
| [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [CrateDB](/docs/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
| [Denodo](/docs/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` |
| [Elasticsearch](/docs/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/docs/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/docs/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
@@ -81,7 +79,6 @@ are compatible with Superset.
| [TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
---
@@ -396,33 +393,21 @@ couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate
#### CrateDB
The connector library for CrateDB is [sqlalchemy-cratedb].
We recommend adding the following item to your `requirements.txt` file:
The recommended connector library for CrateDB is
[crate](https://pypi.org/project/crate/).
You need to install the extras as well for this library.
We recommend adding something like the following
text to your requirements file:
```
sqlalchemy-cratedb>=0.40.1,<1
crate[sqlalchemy]==0.26.0
```
An SQLAlchemy connection string for [CrateDB Self-Managed] on localhost,
for evaluation purposes, looks like this:
The expected connection string is formatted as follows:
```
crate://crate@127.0.0.1:4200
```
An SQLAlchemy connection string for connecting to [CrateDB Cloud] looks like
this:
```
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
```
Follow the steps [here](/docs/configuration/databases#installing-database-drivers)
to install the CrateDB connector package when setting up Superset locally using
Docker Compose.
```
echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt
```
[CrateDB Cloud]: https://cratedb.com/product/cloud
[CrateDB Self-Managed]: https://cratedb.com/product/self-managed
[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/
#### Databend
@@ -527,16 +512,6 @@ For a connection to a SQL endpoint you need to use the HTTP path from the endpoi
```
#### Denodo
The recommended connector library for Denodo is
[denodo-sqlalchemy](https://pypi.org/project/denodo-sqlalchemy/).
The expected connection string is formatted as follows (default port is 9996):
```
denodo://{username}:{password}@{hostname}:{port}/{database}
```
#### Dremio
@@ -547,7 +522,7 @@ The recommended connector library for Dremio is
The expected connection string for ODBC (Default port is 31010) is formatted as follows:
```
dremio+pyodbc://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1
dremio://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1
```
The expected connection string for Arrow Flight (Dremio 4.9.1+. Default port is 32010) is formatted as follows:
@@ -1332,10 +1307,6 @@ Here's what the connection string looks like:
starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```
:::note
StarRocks maintains their Superset documentation [here](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/).
:::
#### Teradata
The recommended connector library is
@@ -1538,78 +1509,6 @@ Other parameters:
- Load Balancer - Backup Host
#### YDB
The recommended connector library for [YDB](https://ydb.tech/) is
[ydb-sqlalchemy](https://pypi.org/project/ydb-sqlalchemy/).
##### Connection String
The connection string for YDB looks like this:
```
ydb://{host}:{port}/{database_name}
```
##### Protocol
You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:
```
{
"protocol": "grpcs"
}
```
Default is `grpc`.
##### Authentication Methods
###### Static Credentials
To use `Static Credentials` you should provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`:
```
{
"credentials": {
"username": "...",
"password": "..."
}
}
```
###### Access Token Credentials
To use `Access Token Credentials` you should provide `token` in the `Secure Extra` field at `Advanced / Security`:
```
{
"credentials": {
"token": "...",
}
}
```
##### Service Account Credentials
To use Service Account Credentials, you should provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`:
```
{
"credentials": {
"service_account_json": {
"id": "...",
"service_account_id": "...",
"created_at": "...",
"key_algorithm": "...",
"public_key": "...",
"private_key": "..."
}
}
}
```
#### YugabyteDB
[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL.

View File

@@ -24,65 +24,9 @@ The following keys in `superset_config.py` can be specified to configure CORS:
## HTTP headers
Note that Superset bundles [flask-talisman](https://pypi.org/project/talisman/)
Self-described as a small Flask extension that handles setting HTTP headers that can help
Self-descried as a small Flask extension that handles setting HTTP headers that can help
protect against a few common web application security issues.
## HTML Embedding of Dashboards and Charts
There are two ways to embed a dashboard: Using the [SDK](https://www.npmjs.com/package/@superset-ui/embedded-sdk) or embedding a direct link. Note that in the latter case everybody who knows the link is able to access the dashboard.
### Embedding a Public Direct Link to a Dashboard
This works by first changing the content security policy (CSP) of [flask-talisman](https://github.com/GoogleCloudPlatform/flask-talisman) to allow for certain domains to display Superset content. Then a dashboard can be made publicly accessible, i.e. **bypassing authentication**. Once made public, the dashboard's URL can be added to an iframe in another website's HTML code.
#### Changing flask-talisman CSP
Add to `superset_config.py` the entire `TALISMAN_CONFIG` section from `config.py` and include a `frame-ancestors` section:
```python
TALISMAN_ENABLED = True
TALISMAN_CONFIG = {
"content_security_policy": {
...
"frame-ancestors": ["*.my-domain.com", "*.another-domain.com"],
...
```
Restart Superset for this configuration change to take effect.
#### Making a Dashboard Public
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) to `superset_config.py`
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)
#### Embedding a Public Dashboard
Now anybody can directly access the dashboard's URL. You can embed it in an iframe like so:
```html
<iframe
width="600"
height="400"
seamless
frameBorder="0"
scrolling="no"
src="https://superset.my-domain.com/superset/dashboard/10/?standalone=1&height=400"
>
</iframe>
```
#### Embedding a Chart
A chart's embed code can be generated by going to a chart's edit view and then clicking at the top right on `...` > `Share` > `Embed code`
### Enabling Embedding via the SDK
Clicking on `...` next to `EDIT DASHBOARD` on the top right of the dashboard's overview page should yield a drop-down menu including the entry "Embed dashboard".
To enable this entry, add the following line to the `.env` file:
```text
SUPERSET_FEATURE_EMBEDDED_SUPERSET=true
```
## CSRF settings
Similarly, [flask-wtf](https://flask-wtf.readthedocs.io/en/0.15.x/config/) is used manage

View File

@@ -17,8 +17,8 @@ made available in the Jinja context:
- `columns`: columns which to group by in the query
- `filter`: filters applied in the query
- `from_dttm`: start `datetime` value from the selected time range (`None` if undefined) (deprecated beginning in version 5.0, use `get_time_filter` instead)
- `to_dttm`: end `datetime` value from the selected time range (`None` if undefined). (deprecated beginning in version 5.0, use `get_time_filter` instead)
- `from_dttm`: start `datetime` value from the selected time range (`None` if undefined)
- `to_dttm`: end `datetime` value from the selected time range (`None` if undefined)
- `groupby`: columns which to group by in the query (deprecated)
- `metrics`: aggregate expressions in the query
- `row_limit`: row limit of the query
@@ -48,15 +48,12 @@ WHERE (
{% if to_dttm is not none %}
dttm_col < '{{ to_dttm }}' AND
{% endif %}
1 = 1
true
)
```
The `1 = 1` at the end ensures a value is present for the `WHERE` clause even when
the time filter is not set. For many database engines, this could be replaced with `true`.
Note that the Jinja parameters are called within _double_ brackets in the query and with
_single_ brackets in the logic blocks.
Note how the Jinja parameters are called within double brackets in the query, and without in the
logic blocks.
To add custom functionality to the Jinja context, you need to overload the default Jinja
context in your environment by defining the `JINJA_CONTEXT_ADDONS` in your superset configuration
@@ -97,7 +94,7 @@ There is a special ``_filters`` parameter which can be used to test filters used
```sql
SELECT action, count(*) as times
FROM logs
WHERE action in {{ filter_values('action_type')|where_in }}
WHERE action in {{ filter_values('action_type'))|where_in }}
GROUP BY action
```
@@ -349,78 +346,6 @@ Here's a concrete example:
order by lineage, level
```
**Time Filter**
The `{{ get_time_filter() }}` macro returns the time filter applied to a specific column. This is useful if you want
to handle time filters inside the virtual dataset, as by default the time filter is placed on the outer query. This can
considerably improve performance, as many databases and query engines are able to optimize the query better
if the temporal filter is placed on the inner query, as opposed to the outer query.
The macro takes the following parameters:
- `column`: Name of the temporal column. Leave undefined to reference the time range from a Dashboard Native Time Range
filter (when present).
- `default`: The default value to fall back to if the time filter is not present, or has the value `No filter`
- `target_type`: The target temporal type as recognized by the target database (e.g. `TIMESTAMP`, `DATE` or
`DATETIME`). If `column` is defined, the format will default to the type of the column. This is used to produce
the format of the `from_expr` and `to_expr` properties of the returned `TimeFilter` object.
- `strftime`: format using the `strftime` method of `datetime` for custom time formatting.
([see docs for valid format codes](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes)).
When defined, `target_type` will be ignored.
- `remove_filter`: When set to true, mark the filter as processed, removing it from the outer query. Useful when a
filter should only apply to the inner query.
The return type has the following properties:
- `from_expr`: the start of the time filter (if any)
- `to_expr`: the end of the time filter (if any)
- `time_range`: The applied time range
Here's a concrete example using the `logs` table from the Superset metastore:
```
{% set time_filter = get_time_filter("dttm", remove_filter=True) %}
{% set from_expr = time_filter.from_expr %}
{% set to_expr = time_filter.to_expr %}
{% set time_range = time_filter.time_range %}
SELECT
*,
'{{ time_range }}' as time_range
FROM logs
{% if from_expr or to_expr %}WHERE 1 = 1
{% if from_expr %}AND dttm >= {{ from_expr }}{% endif %}
{% if to_expr %}AND dttm < {{ to_expr }}{% endif %}
{% endif %}
```
Assuming we are creating a table chart with a simple `COUNT(*)` as the metric with a time filter `Last week` on the
`dttm` column, this would render the following query on Postgres (note the formatting of the temporal filters, and
the absence of time filters on the outer query):
```
SELECT COUNT(*) AS count
FROM
(SELECT *,
'Last week' AS time_range
FROM public.logs
WHERE 1 = 1
AND dttm >= TO_TIMESTAMP('2024-08-27 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
AND dttm < TO_TIMESTAMP('2024-09-03 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')) AS virtual_table
ORDER BY count DESC
LIMIT 1000;
```
When using the `default` parameter, the templated query can be simplified, as the endpoints will always be defined
(to use a fixed time range, you can also use something like `default="2024-08-27 : 2024-09-03"`)
```
{% set time_filter = get_time_filter("dttm", default="Last week", remove_filter=True) %}
SELECT
*,
'{{ time_filter.time_range }}' as time_range
FROM logs
WHERE
dttm >= {{ time_filter.from_expr }}
AND dttm < {{ time_filter.to_expr }}
```
**Datasets**
It's possible to query physical and virtual datasets using the `dataset` macro. This is useful if you've defined computed columns and metrics on your datasets, and want to reuse the definition in adhoc SQL Lab queries.

View File

@@ -6,13 +6,13 @@ version: 1
# Setting up a Development Environment
The documentation in this section is a bit of a patchwork of knowledge representing the
multitude of ways that exist to run Superset (`docker compose`, just "docker", on "metal", using
multitude of ways that exist to run Superset (`docker-compose`, just "docker", on "metal", using
a Makefile).
:::note
Now we have evolved to recommend and support `docker compose` more actively as the main way
Now we have evolved to recommend and support `docker-compose` more actively as the main way
to run Superset for development and preserve your sanity. **Most people should stick to
the first few sections - ("Fork & Clone", "docker compose" and "Installing Dev Tools")**
the first few sections - ("Fork & Clone", "docker-compose" and "Installing Dev Tools")**
:::
## Fork and Clone
@@ -27,14 +27,12 @@ git clone git@github.com:your-username/superset.git
cd superset
```
## docker compose (recommended!)
## docker-compose (recommended!)
Setting things up to squeeze a "hello world" into any part of Superset should be as simple as
Setting things up to squeeze an "hello world" into any part of Superset should be as simple as
```bash
# getting docker compose to fire up services, and rebuilding if some docker layers have changed
# using the `--build` suffix may be slower and optional if layers like py dependencies haven't changed
docker compose up --build
docker-compose up
```
Note that:
@@ -47,7 +45,7 @@ Note that:
- **Postgres** as the metadata database and to store example datasets, charts and dashboards which
should be populated upon startup
- **Redis** as the message queue for our async backend and caching backend
- It'll load up examples into the database upon the first startup
- It'll load up examples into the database upon first startup
- all other details and pointers available in
[docker-compose.yml](https://github.com/apache/superset/blob/master/docker-compose.yml)
- The local repository is mounted within the services, meaning updating
@@ -55,61 +53,23 @@ Note that:
- Superset is served at localhost:8088/
- You can login with admin/admin
:::note
Installing and building Node modules for Apache Superset inside `superset-node` can take a
significant amount of time. This is normal due to the size of the dependencies. Please be
patient while the process completes, as long wait times do not indicate an issue with your setup.
If delays seem excessive, check your internet connection or system resources.
:::
:::caution
Since `docker compose` is primarily designed to run a set of containers on **a single host**
Since `docker-compose` is primarily designed to run a set of containers on **a single host**
and can't credibly support **high availability** as a result, we do not support nor recommend
using our `docker compose` constructs to support production-type use-cases. For single host
using our `docker-compose` constructs to support production-type use-cases. For single host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along with
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
configured to be secure.
:::
### Supported environment variables
Affecting the Docker build process:
- **SUPERSET_BUILD_TARGET (default=dev):** which --target to build, either `lean` or `dev` are commonly used
- **INCLUDE_FIREFOX (default=false):** whether to include the Firefox headless browser in the build
- **INCLUDE_CHROMIUM (default=false):** whether to include the Chromium headless browser in the build
- **BUILD_TRANSLATIONS (default=false):** whether to compile the translations from the .po files available
For more env vars that affect your configuration, see this
[superset_config.py](https://github.com/apache/superset/blob/master/docker/pythonpath_dev/superset_config.py)
used in the `docker compose` context to assign env vars to the superset configuration.
### Nuking the postgres database
At times, it's possible to end up with your development database in a bad state. This is
common when switching between branches that contain migrations, for instance, where the database
version stamp that `alembic` manages is no longer available after switching branches.
In that case, the easy solution is to nuke the postgres db and start fresh. Note that the full
state of the database will be gone after doing this, so be cautious.
```bash
# first stop docker-compose if it's running
docker-compose down
# delete the volume containing the database
docker volume rm superset_db_home
# restart docker-compose, which will init a fresh database and load examples
docker-compose up
```
## Installing Development Tools
:::note
While `docker compose` simplifies a lot of the setup, there are still
While docker-compose simplifies a lot of the setup, there are still
many things you'll want to set up locally to power your IDE, and things like
**commit hooks**, **linters**, and **test-runners**. Note that you can do these
things inside docker images with commands like `docker compose exec superset_app bash` for
things inside docker images with commands like `docker-compose exec superset_app bash` for
instance, but many people like to run that tooling from their host.
:::
@@ -132,55 +92,13 @@ To install run the following:
pre-commit install
```
This will install the hooks in your local repository. From now on, a series of checks will
automatically run whenever you make a Git commit.
A series of checks will now run when you make a git commit.
#### Running Pre-commit Manually
You can also run the pre-commit checks manually in various ways:
- **Run pre-commit on all files (same as CI):**
To run the pre-commit checks across all files in your repository, use the following command:
```bash
pre-commit run --all-files
```
This is the same set of checks that will run during CI, ensuring your changes meet the project's standards.
- **Run pre-commit on a specific file:**
If you want to check or fix a specific file, you can do so by specifying the file path:
```bash
pre-commit run --files path/to/your/file.py
```
This will only run the checks on the file(s) you specify.
- **Run a specific pre-commit check:**
To run a specific check (hook) across all files or a particular file, use the following command:
```bash
pre-commit run <hook_id> --all-files
```
Or for a specific file:
```bash
pre-commit run <hook_id> --files path/to/your/file.py
```
Replace `<hook_id>` with the ID of the specific hook you want to run. You can find the list
of available hooks in the `.pre-commit-config.yaml` file.
## Alternatives to `docker compose`
## Alternatives to docker-compose
:::caution
This part of the documentation is a patchwork of information related to setting up
development environments without `docker compose` and is documented/supported to varying
development environments without `docker-compose` and are documented/supported to varying
degrees. It's been difficult to maintain this wide array of methods and ensure they're
functioning across environments.
:::
@@ -190,7 +108,7 @@ functioning across environments.
#### OS Dependencies
Make sure your machine meets the [OS dependencies](https://superset.apache.org/docs/installation/pypi#os-dependencies) before following these steps.
You also need to install MySQL.
You also need to install MySQL or [MariaDB](https://mariadb.com/downloads).
Ensure that you are using Python version 3.9, 3.10 or 3.11, then proceed with:
@@ -220,11 +138,11 @@ superset load-examples
# Start the Flask dev web server from inside your virtualenv.
# Note that your page may not have CSS at this point.
# See instructions below on how to build the front-end assets.
# See instructions below how to build the front-end assets.
superset run -p 8088 --with-threads --reload --debugger --debug
```
Or you can install it via our Makefile
Or you can install via our Makefile
```bash
# Create a virtual environment and activate it (recommended),
# then set up the pre-commit hooks via the Makefile target
$ make pre-commit
```
**Note: the FLASK_APP env var should not need to be set, as it's currently controlled
via `.flaskenv`, however, if needed, it should be set to `superset.app:create_app()`**
If you have made changes to the FAB-managed templates, which are not built the same way as the newer, React-powered front-end assets, you need to start the app without the `--with-threads` argument like so:
`superset run -p 8088 --reload --debugger --debug`
If you add a new requirement or update an existing requirement (per the `install_requires` section in `setup.py`), you must recompile (freeze) the Python dependency files:
```bash
$ python3 -m venv venv
$ source venv/bin/activate
$ python3 -m pip install -r requirements/development.txt
$ ./scripts/uv-pip-compile.sh
```
When upgrading the version number of a single package, you should run `./scripts/uv-pip-compile.sh` with the `-P` flag:
```bash
$ ./scripts/uv-pip-compile.sh -P some-package-to-upgrade
```
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run `./scripts/uv-pip-compile.sh --upgrade`
```bash
$ ./scripts/uv-pip-compile.sh --upgrade
```
This should be done periodically, but it is recommended to do thorough manual testing of the application to ensure no breaking changes have been introduced that aren't caught by the unit and integration tests.
Frontend assets (TypeScript, JavaScript, CSS, and images) must be compiled in order to properly display the web UI.
First, be sure you are using the following versions of Node.js and npm:
- `Node.js`: Version 20
- `npm`: Version 10
We recommend using [nvm](https://github.com/nvm-sh/nvm) to manage your node environment:
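For example, a minimal sketch with `nvm` (version numbers per the list above):
```bash
# install and activate the Node version Superset targets
nvm install 20
nvm use 20
node --version   # expect a v20.x release
```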
```bash
cd superset-frontend
npm ci
```
Note that Superset uses [Scarf](https://docs.scarf.sh) to capture telemetry/analytics about versions being installed, including the `scarf-js` npm package and an analytics pixel. As noted elsewhere in this documentation, Scarf gathers aggregated stats for the sake of security/release strategy and does not capture/retain PII. [You can read here](https://docs.scarf.sh/package-analytics/) about the `scarf-js` package, and various means to opt out of it, but you can opt out of the npm package _and_ the pixel by setting the `SCARF_ANALYTICS` environment variable to `false` or opt out of the pixel by adding this setting in `superset-frontend/package.json`:
```json
// your-package/package.json
  "scarfSettings": {
    "enabled": false
  }
```
If you see the following error when running the dev server:
```
Error: ENOSPC: System limit for number of file watchers reached
```
The error is thrown because the number of files monitored by the system has reached the limit.
You can address this error by increasing the number of inotify watchers.
The current value of max watches can be checked with `cat /proc/sys/fs/inotify/max_user_watches`.
Edit the file `/etc/sysctl.conf` to increase this value.
The value needs to be decided based on the system memory [(see this StackOverflow answer for more context)](https://stackoverflow.com/questions/535768/what-is-a-reasonable-amount-of-inotify-watches-with-linux).
Open the file in an editor and add a line at the bottom specifying the max watches values.
```bash
fs.inotify.max_user_watches=524288
```
Save the file and exit the editor.
To confirm that the change succeeded, run the following command to load the updated value of max_user_watches from `sysctl.conf`:
```bash
sudo sysctl -p
```
To set up the git hooks, run:
```bash
pre-commit install
```
A series of checks will now run when you make a git commit.
Alternatively, it is possible to run pre-commit via tox:
```bash
tox -e pre-commit
```
Or by running pre-commit manually:
```bash
pre-commit run --all-files
```
## Linting
See [how tos](/docs/contributing/howtos#linting)
## GitHub Actions and `act`
:::tip
`act` compatibility of Superset's GHAs is not fully tested. Running `act` locally may or may not
work for different actions, and may require fine-tuning and local secret-handling.
For those more intricate GHAs that are tricky to run locally, we recommend iterating
directly on GHA's infrastructure, by pushing directly to a branch and monitoring GHA logs.
For more targeted iteration, see the `gh workflow run --ref {BRANCH}` subcommand of the GitHub CLI.
:::
For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
can find all of the workflows and other assets under the `.github/` folder. This includes:
- running the backend unit test suites (`tests/`)
- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
- linting the codebase, including Python, TypeScript, JavaScript, YAML, and beyond
- checking for all sorts of other rules and conventions
When you open a pull request (PR), the appropriate GitHub Actions (GHA) workflows will
automatically run depending on the changes in your branch. It's perfectly reasonable
(and required!) to rely on this automation. However, the downside is that it's mostly an
all-or-nothing approach and doesn't provide much control to target specific tests or
iterate quickly.
At times, it may be more convenient to run GHA workflows locally. For that purpose
we use [act](https://github.com/nektos/act), a tool that allows you to run GitHub Actions (GHA)
workflows locally. It simulates the GitHub Actions environment, enabling developers to
test and debug workflows on their local machines before pushing changes to the repository. More
on how to use it in the next section.
:::note
In both GHA and `act`, we can run a more complex matrix for our tests, executing against different
database engines (PostgreSQL, MySQL, SQLite) and different versions of Python.
This enables us to ensure compatibility and stability across various environments.
:::
### Using `act`
First, install `act` -> https://nektosact.com/
To list the workflows, simply:
```bash
act --list
```
To run a specific workflow:
```bash
act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
```
In the example above, notice that:
- we target a specific workflow, using `--job`
- we pass a secret using `--secret`, as many jobs require read access to the (public) repo
- we simulate a `pull_request` event by specifying it as the first arg;
  similarly, we could simulate a `push` event or something else
- we specify `--container-architecture`, which tends to emulate GHA more reliably
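Similarly, a `push` event can be simulated by swapping the first argument (a sketch; same placeholders as above):
```bash
act push --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
```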
:::note
`act` is a rich tool that offers all sorts of features, allowing you to simulate different
events (pull_request, push, ...), semantics around passing secrets where required and much
more. For more information, refer to [act's documentation](https://nektosact.com/)
:::
:::note
Some jobs require secrets to interact with external systems and accounts that you may
not have in your possession. In those cases you may have to rely on remote CI or parameterize the
job further to target a different environment/sandbox or your own alongside the related
secrets.
:::
---
If using the eslint extension with vscode, put the following in your workspace `settings.json` file:
```json
"eslint.workingDirectories": [
  "superset-frontend"
]
```
## Testing
### Python Testing
#### Unit Tests
For unit tests located in `tests/unit_tests/`, it's usually easy to simply run the script locally using:
```bash
pytest tests/unit_tests/*
```
#### Integration Tests
For more complex pytest-defined integration tests (not to be confused with our end-to-end Cypress tests), many tests will require having a working test environment. Some tests require a database, Celery, and potentially other services or libraries installed.
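As a sketch, one way to provide that environment is to reuse the backing services from the `docker compose` setup; the service names, port, and credentials below are assumptions based on the dev `docker-compose.yml` and may need adjusting:
```bash
# bring up just the metadata database and cache
docker compose up -d db redis

# point the tests at the database (URI is illustrative)
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@127.0.0.1:5432/superset

# run one integration-test module (path is illustrative)
pytest tests/integration_tests/charts/api_tests.py
```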
### Running Tests with `act`
To run integration tests locally using `act`, ensure you have followed the setup instructions from the [GitHub Actions and `act`](#github-actions-and-act) section. You can run specific workflows or jobs that include integration tests. For example:
```bash
act --job test-python-38 --secret GITHUB_TOKEN=$GITHUB_TOKEN --event pull_request --container-architecture linux/amd64
```
#### Running locally using a test script
There is also a utility script included in the Superset codebase to run Python integration tests. The [readme can be found here](https://github.com/apache/superset/tree/master/scripts/tests).
Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
commands are invoked.
To run all integration tests, for example, run this script from the root directory:
```bash
scripts/tests/run.sh
```
You can run unit tests found in `./tests/unit_tests` with pytest. It is a simple way to run an isolated test that doesn't need any database setup:
```bash
pytest ./link_to_test.py
```
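For example (module path and filter are illustrative):
```bash
# run one unit-test module
pytest tests/unit_tests/sql_parse_tests.py

# or filter to tests whose names match a substring
pytest tests/unit_tests -k "sql"
```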
### Integration Testing
We use [Cypress](https://www.cypress.io/) for integration tests. To open Cypress and explore tests, first set up and run the test server:
```bash
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
As an alternative you can use the docker compose environment for testing:
Make sure you have added the line below to your /etc/hosts file:
`127.0.0.1 db`
If you have already launched the Docker environment, please use the following command to ensure a fresh database instance:
`docker compose down -v`
Launch environment:
`CYPRESS_CONFIG=true docker compose up --build`
It will serve the backend and frontend on port 8088.
Run Cypress tests:
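The exact commands depend on the npm scripts defined in `superset-frontend/cypress-base/package.json`; roughly, and treating the script names as assumptions:
```bash
cd superset-frontend/cypress-base
npm install

# headless run in Chrome
npm run cypress-run-chrome

# or open the interactive runner
npm run cypress-debug
```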
For debugging locally using VSCode, you can configure a launch configuration file that attaches to the Flask app.
#### Raw Docker (without `docker compose`)
Follow these instructions to debug the Flask app running inside a docker container. Note that
this will run a barebones Superset web server.
First, add the following to the ./docker-compose.yaml file
```diff
superset:
```
Start Superset as usual
```bash
docker compose up --build
```
Install the required libraries and packages to the docker container
See [set capabilities for a container](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) for details.
Once the pod is running as root and has the SYS_PTRACE capability it will be able to debug the Flask app.
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.
```bash
kubectl port-forward pod/superset-<some random id> 5678:5678
```
You can now launch your VSCode debugger with the same config as above. VSCode will connect to 127.0.0.1:5678 which is forwarded by kubectl to your remote kubernetes POD.
### Storybook
Superset includes a [Storybook](https://storybook.js.org/) to preview the layout/styling of various Superset components and variations thereof. To open and view the Storybook:
```bash
cd superset-frontend
npm run storybook
```
To fix it:
```python
from alembic import op
```
Alternatively, you may also run `superset db merge` to create a migration script
just for merging the heads.
### Python Testing
`pytest`, backed by `docker-compose`, is how we recommend running tests locally.
For a more complex test matrix (against different database backends, python versions, ...) you
can rely on our GitHub Actions by simply opening a draft pull request.
Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
commands are invoked.
### e2e Integration Testing
For e2e testing, we recommend that you use a `docker-compose` backed setup.
Alternatively, you can go lower level and set things up in your
development environment by following these steps:
See [set capabilities for a container](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) for details.
Once the pod is running as root and has the `SYS_PTRACE` capability it will be able to debug the Flask app.
You can follow the same instructions as in `docker compose`. Enter the pod and install the required library and packages; gdb, netstat and debugpy.
Often in a Kubernetes environment nodes are not addressable from outside the cluster. VSCode will thus be unable to remotely connect to port 5678 on a Kubernetes node. In order to do this you need to create a tunnel that port forwards 5678 to your local machine.
### Python
We use [ruff](https://github.com/astral-sh/ruff) for linting, which can be invoked via:
```bash
# auto-reformat using ruff
ruff format
# lint check with ruff
ruff check
# lint fix with ruff
ruff check --fix
```
Ruff configuration is located in our
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml) file.
All this is configured to run in pre-commit hooks, which we encourage you to set up
with `pre-commit install`.
### TypeScript
## Is there a way to force the dashboard to use specific colors?
It is possible on a per-dashboard basis by providing a mapping of labels to colors in the JSON
Metadata attribute using the `label_colors` key. You can use either the full hex color, a named color,
like `red`, `coral` or `lightblue`, or the index in the current color palette (0 for first color, 1 for
second etc). Example:
```json
{
"label_colors": {
"foo": "#FF69B4",
"bar": "lightblue",
"baz": 0
"Girls": "#FF69B4",
"Boys": "#ADD8E6"
}
}
```
We have a set of build "presets" that each represent a combination of
parameters for the build, mostly pointing to either different target layer
for the build, and/or base image.
Here are the build presets that are exposed through the `supersetbot docker` utility:
- `lean`: The default Docker image, including both frontend and backend. Tags
without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean`
- Images are also tagged with the SHA of the commit they were built from, pointing to
  this specific SHA, which could be from a `master` merge, or release.
- `websocket-latest`: The WebSocket image for use in a Superset cluster.
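For example, pulling a preset-tagged image from Docker Hub (the tag follows the `{version}-{preset}` pattern described above; treat the exact tag as illustrative):
```bash
docker pull apache/superset:latest-dev
```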
For insights or modifications to the build matrix and tagging conventions,
check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.
## Key ARGs in Dockerfile
- `BUILD_TRANSLATIONS`: whether to build the translations into the image. For the
frontend build this tells webpack to strip out all locales other than `en` from
the `moment-timezone` library. For the backend, this skips compiling the
`*.po` translation files
- `DEV_MODE`: whether to skip the frontend build, this is used by our `docker-compose` dev setup
where we mount the local volume and build using `webpack` in `--watch` mode, meaning as you
alter the code in the local file system, webpack, from within a docker image used for this
purpose, will constantly rebuild the frontend as you go. This ARG enables the initial
`docker-compose` build to take much less time and resources
- `INCLUDE_CHROMIUM`: whether to include chromium in the backend build so that it can be
used as a headless browser for workloads related to "Alerts & Reports" and thumbnail generation
- `INCLUDE_FIREFOX`: same as above, but for firefox
- `PY_VER`: specifying the base image for the python backend, we don't recommend altering
this setting if you're not working on forwards or backwards compatibility
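As a sketch, these ARGs can be overridden at build time; the values below are illustrative:
```bash
docker build \
  --build-arg BUILD_TRANSLATIONS=true \
  --build-arg INCLUDE_CHROMIUM=false \
  -t superset-local:latest .
```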
## Caching
To accelerate builds, we follow Docker best practices and use `apache/superset-cache`.
Currently all automated builds are multi-platform, supporting both `linux/arm64`
and `linux/amd64`. This enables higher level constructs like `helm` and
`docker compose` to point to these images and effectively be multi-platform
as well.
Pull requests and master builds
:::caution
Since `docker compose` is primarily designed to run a set of containers on **a single host**
and can't support requirements for **high availability**, we do not support nor recommend
using our `docker compose` constructs to support production-type use-cases. For single-host
environments, we recommend using [minikube](https://minikube.sigs.k8s.io/docs/start/) along with
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
our [installing on k8s](https://superset.apache.org/docs/installation/running-on-kubernetes)
documentation.
The fastest way to try Superset locally is using Docker Compose on a Linux or Mac OSX
computer. Superset does not have official support for Windows. It's also the easiest
way to launch a fully functioning **development environment** quickly.
Note that there are 3 major ways we support to run `docker compose`:
1. **docker-compose.yml:** for interactive development, where we mount your local folder with the
frontend/backend files that you can edit and experience the changes you
More on these two approaches after setting up the requirements for either.
## Requirements
Note that this documentation assumes that you have [Docker](https://www.docker.com) and
[git](https://git-scm.com/) installed. Note also that we used to use `docker-compose` but that
is on the path to deprecation so we now use `docker compose` instead.
## 1. Clone Superset's GitHub repository
## 2. Launch Superset Through Docker Compose
First let's assume you're familiar with `docker compose` mechanics. Here we'll refer generally
to `docker compose up` even though in some cases you may want to force a check for newer remote
images using `docker compose pull`, force a build with `docker compose build` or force a build
on latest base images using `docker compose build --pull`. In most cases though, the simple
`up` command should be all you need.
### Option #1 - for an interactive development environment
```bash
# The --build argument ensures all the layers are up-to-date
docker compose up --build
```
:::tip
perform those operations. In this case, we recommend you set the env var that disables
the in-docker frontend build (see `docker/.env` for the relevant flag), and instead
simply trigger `npm i && npm run dev` under `superset-frontend/`; this should be MUCH faster.
:::tip
Sometimes, your npm-related state can get out of whack; running `npm run prune` from
the `superset-frontend/` folder will nuke the various packages' `node_modules/` folders
and help you start fresh. In the context of `docker compose`, setting
`export NPM_RUN_PRUNE=true` prior to running `docker compose up` will trigger that
from within docker. This will slow down the startup, but will fix various npm-related issues.
:::
### Option #2 - build a set of immutable images from the local branch
```bash
# the non-dev compose file builds immutable images from your local branch (flag is illustrative)
docker compose -f docker-compose-non-dev.yml up --build
```
### Option #3 - boot up an official release
Here various release tags, github SHA, and latest `master` can be referenced by the `TAG` env var.
Refer to the docker-related documentation to learn more about existing tags you can point to
from Docker Hub.
## `docker compose` tips & configuration
:::caution
All of the content belonging to a Superset instance - charts, dashboards, users, etc. - is stored in
You can install additional python packages and apply config overrides by following the instructions
mentioned in [docker/README.md](https://github.com/apache/superset/tree/master/docker#configuration)
Note that `docker/.env` sets the default environment variables for all the docker images
used by `docker compose`, and that `docker/.env-local` can be used to override those defaults.
Also note that `docker/.env-local` is referenced in our `.gitignore`,
preventing developers from risking committing potentially sensitive configuration to the repository.
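For example, a hypothetical `docker/.env-local` might override a couple of the defaults (variable names per `docker/.env`; treat them as assumptions):
```bash
# docker/.env-local
SUPERSET_LOAD_EXAMPLES=no
SUPERSET_LOG_LEVEL=debug
```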
may want to find the exact hostname you want to use, for that you can do `ifconfig` and find the IP of the `docker0` interface created by
Docker for you. Alternately if you don't even see the `docker0` interface try (if needed with sudo)
`docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP
address.
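A sketch of extracting that gateway address directly, assuming the default `bridge` network:
```bash
docker network inspect bridge --format '{{ (index .IPAM.Config 0).Gateway }}'
```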
## 4. To build or not to build
When running `docker compose up`, docker will build what is required behind the scenes, but
may use the docker cache if assets already exist. Running `docker compose build` prior to
`docker compose up`, or the equivalent shortcut `docker compose up --build`, ensures that your
docker images match the definition in the repository. This should only apply to the main
docker-compose.yml file (default) and not to the alternative methods defined above.
See [Install Database Drivers](/docs/configuration/databases) for more information.
The following example installs the drivers for BigQuery and Elasticsearch, allowing you to connect to these data sources within your Superset setup:
```yaml
bootstrapScript: |
  #!/bin/bash
  # install DB API drivers for BigQuery and Elasticsearch (package names per the database drivers docs)
  pip install sqlalchemy-bigquery elasticsearch-dbapi
```
**Debian and Ubuntu**
In Ubuntu **20.04 and 22.04** the following command will ensure that the required dependencies are installed:
```bash
sudo apt-get install build-essential libssl-dev libffi-dev python3-dev python3-pip libsasl2-dev libldap2-dev default-libmysqlclient-dev
```
In Ubuntu **before 20.04** the following command will ensure that the required dependencies are installed:
```bash
sudo apt-get install build-essential libssl-dev libffi-dev python-dev python-pip libsasl2-dev libldap2-dev default-libmysqlclient-dev
```
**Fedora and RHEL-derivative Linux distributions**
Install the following packages using the `yum` package manager:
versions officially supported by Superset. We'd recommend using a Python version manager
like [pyenv](https://github.com/pyenv/pyenv)
(and also [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)).
:::tip
To identify the Python version used by the official docker image, see the [Dockerfile](https://github.com/apache/superset/blob/master/Dockerfile). Additional docker images published for newer versions of Python can be found in [this file](https://github.com/apache/superset/blob/master/scripts/build_docker.py).
:::
Let's also make sure we have the latest version of `pip` and `setuptools`:
```bash
pip install --upgrade setuptools pip
```
First, start by installing `apache-superset`:
```bash
pip install apache-superset
```
Then, define mandatory configurations, SECRET_KEY and FLASK_APP:
```bash
export SUPERSET_SECRET_KEY=YOUR-SECRET-KEY
export FLASK_APP=superset
```
Then, you need to initialize the database:
```bash
superset db upgrade
```
:::tip
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of SECRET_KEY. Please see [Configuring Superset](/docs/configuration/configuring-superset).
:::
Finish installing by running through the following commands:
```bash
# Create an admin user in your metadata database (use `admin` as username to be able to load the examples)
superset fab create-admin

# Load some data to play with
superset load-examples

# Create default roles and permissions
superset init
```
Once you run the `docker compose up` command, Docker will start pulling the required
container images and will load up some examples. Once all containers
are downloaded and the output settles, you're ready to log in.
⚠️ If you get an error message like `validating superset\docker-compose-image-tag.yml: services.superset-worker-beat.env_file.0 must be a string`, you need to update your version of `docker-compose`.
Note that `docker-compose` is on the path to deprecation and you should now use `docker compose` instead.
### 3. Log into Superset
title: CVEs fixed by release
sidebar_position: 2
---
#### Version 4.1.0
| CVE | Title | Affected |
|:---------------|:-----------------------------------------------------------------------------------|---------:|
| CVE-2024-53947 | Improper SQL authorisation, parse for specific postgres functions | < 4.1.0 |
| CVE-2024-53948 | Error verbosity exposes metadata in analytics databases | < 4.1.0 |
| CVE-2024-53949 | Lower privilege users are able to create Role when FAB_ADD_SECURITY_API is enabled | < 4.1.0 |
| CVE-2024-55633 | SQLLab Improper readonly query validation allows unauthorized write access | < 4.1.0 |
#### Version 4.0.2
| CVE            | Title | Affected |
|:---------------|:------|---------:|
You may need to enable the functionality to upload a CSV or Excel file to your database. The following section
explains how to enable this functionality for the examples database.
In the top menu, select **Settings ‣ Data ‣ Database Connections**. Find the **examples** database in the list and
select the **Edit** button.
<img src={useBaseUrl("/img/tutorial/edit-record.png" )} />
In the resulting modal window, switch to the **Advanced** tab and open **Security** section.
Then, tick the checkbox for **Allow file uploads to database**. End by clicking the **Finish** button.
<img src={useBaseUrl("/img/tutorial/allow-file-uploads.png" )} />
<img src={useBaseUrl("/img/tutorial/add-data-upload.png" )} />
### Loading CSV Data
Download the CSV dataset to your computer from
[GitHub](https://raw.githubusercontent.com/apache-superset/examples-data/master/tutorial_flights.csv).
In the top menu, select **Settings ‣ Data ‣ Database Connections**. Then, **Upload file to database ‣ Upload CSV**.
<img src={useBaseUrl("/img/tutorial/upload_a_csv.png" )} />
Then, select the CSV file from your computer, select **Database** and **Schema**, and enter the **Table Name**
as _tutorial_flights_.
<img src={useBaseUrl("/img/tutorial/csv_to_database_configuration.png" )} />
Next enter the text _Travel Date_ into the **File settings ‣ Columns to be parsed as dates** field.
<img src={useBaseUrl("/img/tutorial/parse_dates_column.png" )} />
Leaving all the other options in their default settings, select **Upload** at the bottom of the page.
### Table Visualization

View File

const config = {
  ({
    docs: {
      sidebarPath: require.resolve('./sidebars.js'),
      editUrl: ({versionDocsDirPath, docPath}) => {
        if (docPath === 'intro.md') {
          return 'https://github.com/apache/superset/edit/master/README.md'
        }
        return `https://github.com/apache/superset/edit/master/docs/${versionDocsDirPath}/${docPath}`
      },
    },
    blog: {
      showReadingTime: true,
      // Please change this to your repo.
      editUrl: 'https://github.com/facebook/docusaurus/edit/main/website/blog/',
    },
theme: {
customCss: require.resolve('./src/styles/custom.css'),

View File

"typecheck": "tsc"
},
"dependencies": {
"@algolia/client-search": "^5.15.0",
"@ant-design/icons": "^5.5.2",
"@docsearch/react": "^3.6.3",
"@docusaurus/core": "^3.5.2",
"@docusaurus/plugin-client-redirects": "^3.5.2",
"@docusaurus/preset-classic": "^3.5.2",
"@algolia/client-search": "^4.24.0",
"@ant-design/icons": "^5.3.7",
"@docsearch/react": "^3.6.0",
"@docusaurus/core": "^3.4.0",
"@docusaurus/plugin-client-redirects": "^3.4.0",
"@docusaurus/preset-classic": "^3.4.0",
"@emotion/core": "^10.1.1",
"@emotion/styled": "^10.0.27",
"@mdx-js/react": "^3.1.0",
"@saucelabs/theme-github-codeblock": "^0.3.0",
"@mdx-js/react": "^3.0.0",
"@saucelabs/theme-github-codeblock": "^0.2.3",
"@superset-ui/style": "^0.14.23",
"@svgr/webpack": "^8.1.0",
"antd": "^5.22.2",
"antd": "^4.19.3",
"buffer": "^6.0.3",
"clsx": "^2.1.1",
"docusaurus-plugin-less": "^2.0.2",
"file-loader": "^6.2.0",
"less": "^4.2.1",
"less": "^4.2.0",
"less-loader": "^11.0.0",
"prism-react-renderer": "^2.4.0",
"prism-react-renderer": "^2.3.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-github-btn": "^1.4.0",
"react-svg-pan-zoom": "^3.13.1",
"react-svg-pan-zoom": "^3.12.1",
"stream": "^0.0.3",
"swagger-ui-react": "^5.18.2",
"swagger-ui-react": "^5.17.14",
"url-loader": "^4.1.1"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.6.3",
"@docusaurus/tsconfig": "^3.6.3",
"@types/react": "^18.3.12",
"typescript": "^5.7.2",
"webpack": "^5.96.1"
"@docusaurus/module-type-aliases": "^3.4.0",
"@docusaurus/tsconfig": "^3.4.0",
"@types/react": "^18.3.3",
"typescript": "^5.5.2",
"webpack": "^5.92.1"
},
"browserslist": {
"production": [