Compare commits

..

12 Commits

Author SHA1 Message Date
Maxime Beauchemin
7f14e434c8 fix: loading examples in CI returns http error "too many requests" (#33412) 2025-05-13 08:36:12 -07:00
Mehmet Salih Yavuz
21ca26acd7 fix(Row): don't unload charts while embedded to reduce rerenders (#33422) 2025-05-13 15:32:39 +02:00
Damian Pendrak
33e48146b0 chore: Add missing ECharts tags (#33397) 2025-05-12 18:10:04 +02:00
irodriguez-nebustream
73701b7295 fix(embedded): handle SUPERSET_APP_ROOT in embedded dashboard URLs (#33356)
Co-authored-by: Irving Rodriguez <irodriguez@Mac.lan>
2025-05-09 15:25:40 -07:00
amaannawab923
22475e787e feat(Table Chart): Row limit Increase , Backend Sorting , Backend Search , Excel/CSV Improvements (#33357)
Co-authored-by: Amaan Nawab <nelsondrew07@gmail.com>
2025-05-09 11:27:31 -06:00
VED PRAKASH KASHYAP
9e38a0cc29 docs: fix for role sync issues in case of custom OAuth2 configuration (#30878) 2025-05-09 11:12:23 -06:00
Rafael Benitez
a391ebecca feat: Run SQL on DataSourceEditor implementation (#33340) 2025-05-09 17:35:59 +02:00
Vitor Avila
72cd9dffa3 fix: Persist catalog change during dataset update + validation fixes (#33384) 2025-05-08 15:22:25 -03:00
Đỗ Trọng Hải
4ed05f4ff1 fix(be/utils): sync cache timeout for memoized function (#31917)
Signed-off-by: hainenber <dotronghai96@gmail.com>
2025-05-07 15:45:15 -06:00
Shao Yu-Lung (Allen)
871cfe0c78 fix(i18n): zh_TW pybabel compile error: placeholders are incompatible (#33345) 2025-05-07 15:18:05 -06:00
Fardin Mustaque
a928f8cd9e feat: add metric name for big number chart types #33013 (#33099)
Co-authored-by: Fardin Mustaque <fardinmustaque@Fardins-Mac-mini.local>
2025-05-07 16:56:02 +02:00
dependabot[bot]
afaaf64f52 chore(deps): bump antd from 5.24.5 to 5.24.9 in /docs (#33319)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-07 21:14:46 +07:00
78 changed files with 2377 additions and 596 deletions

View File

@@ -41,8 +41,6 @@ jobs:
node-version-file: './docs/.nvmrc'
- name: Setup Python
uses: ./.github/actions/setup-backend/
- name: Update openapi docs
run: superset update_api_docs
- uses: actions/setup-java@v4
with:
distribution: 'zulu'

View File

@@ -302,6 +302,15 @@ AUTH_USER_REGISTRATION = True
AUTH_USER_REGISTRATION_ROLE = "Public"
```
In case you want to assign the `Admin` role on new user registration, it can be assigned as follows:
```python
AUTH_USER_REGISTRATION_ROLE = "Admin"
```
If you encounter the [issue](https://github.com/apache/superset/issues/13243) of not being able to list users from the Superset main page settings, although a newly registered user has an `Admin` role, please re-run `superset init` to sync the required permissions. Below is the command to re-run `superset init` using docker compose.
```
docker compose exec superset superset init
```
Then, create a `CustomSsoSecurityManager` that extends `SupersetSecurityManager` and overrides
`oauth_user_info`:

View File

@@ -26,7 +26,7 @@
"@emotion/styled": "^10.0.27",
"@saucelabs/theme-github-codeblock": "^0.3.0",
"@superset-ui/style": "^0.14.23",
"antd": "^5.24.5",
"antd": "^5.24.9",
"docusaurus-plugin-less": "^2.0.2",
"less": "^4.3.0",
"less-loader": "^11.0.0",

View File

@@ -1092,20 +1092,13 @@
core-js-pure "^3.30.2"
regenerator-runtime "^0.14.0"
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.25.9", "@babel/runtime@^7.8.4":
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.3", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2", "@babel/runtime@^7.23.6", "@babel/runtime@^7.23.9", "@babel/runtime@^7.24.4", "@babel/runtime@^7.24.7", "@babel/runtime@^7.24.8", "@babel/runtime@^7.25.7", "@babel/runtime@^7.25.9", "@babel/runtime@^7.26.0", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4":
version "7.27.0"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.0.tgz#fbee7cf97c709518ecc1f590984481d5460d4762"
integrity sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==
dependencies:
regenerator-runtime "^0.14.0"
"@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2", "@babel/runtime@^7.23.6", "@babel/runtime@^7.23.9", "@babel/runtime@^7.24.4", "@babel/runtime@^7.24.7", "@babel/runtime@^7.24.8", "@babel/runtime@^7.25.7", "@babel/runtime@^7.26.0", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2":
version "7.26.10"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.10.tgz#a07b4d8fa27af131a633d7b3524db803eb4764c2"
integrity sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==
dependencies:
regenerator-runtime "^0.14.0"
"@babel/template@^7.25.9", "@babel/template@^7.26.9", "@babel/template@^7.27.0":
version "7.27.0"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.27.0.tgz#b253e5406cc1df1c57dcd18f11760c2dbf40c0b4"
@@ -4186,10 +4179,10 @@ ansi-styles@^6.1.0:
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5"
integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==
antd@^5.24.5:
version "5.24.5"
resolved "https://registry.yarnpkg.com/antd/-/antd-5.24.5.tgz#b0976a113163888d1477f9e666c3c23352b098e9"
integrity sha512-1lAv/G+9ewQanyoAo3JumQmIlVxwo5QwWGb6QCHYc40Cq0NxC/EzITcjsgq1PSaTUpLkKq8A2l7Fjtu47vqQBg==
antd@^5.24.9:
version "5.24.9"
resolved "https://registry.yarnpkg.com/antd/-/antd-5.24.9.tgz#c5862e02ed770bd95e312961f4f0b7b158a004d9"
integrity sha512-liB+Y/JwD5/KSKbK1Z1EVAbWcoWYvWJ1s97AbbT+mOdigpJQuWwH7kG8IXNEljI7onvj0DdD43TXhSRLUu9AMA==
dependencies:
"@ant-design/colors" "^7.2.0"
"@ant-design/cssinjs" "^1.23.0"
@@ -4213,13 +4206,13 @@ antd@^5.24.5:
rc-drawer "~7.2.0"
rc-dropdown "~4.2.1"
rc-field-form "~2.7.0"
rc-image "~7.11.1"
rc-input "~1.7.3"
rc-input-number "~9.4.0"
rc-mentions "~2.19.1"
rc-image "~7.12.0"
rc-input "~1.8.0"
rc-input-number "~9.5.0"
rc-mentions "~2.20.0"
rc-menu "~9.16.1"
rc-motion "^2.9.5"
rc-notification "~5.6.3"
rc-notification "~5.6.4"
rc-pagination "~5.1.0"
rc-picker "~4.11.3"
rc-progress "~4.0.0"
@@ -4231,8 +4224,8 @@ antd@^5.24.5:
rc-steps "~6.0.1"
rc-switch "~4.1.0"
rc-table "~7.50.4"
rc-tabs "~15.5.1"
rc-textarea "~1.9.0"
rc-tabs "~15.6.1"
rc-textarea "~1.10.0"
rc-tooltip "~6.4.0"
rc-tree "~5.13.1"
rc-tree-select "~5.27.0"
@@ -5674,12 +5667,7 @@ data-view-byte-offset@^1.0.1:
es-errors "^1.3.0"
is-data-view "^1.0.1"
dayjs@^1.11.11:
version "1.11.12"
resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.12.tgz"
integrity sha512-Rt2g+nTbLlDWZTwwrIXjy9MeiZmSDI375FvZs72ngxx8PDC6YXOeR3q5LAuPzjZQxhiWdRKac7RKV+YyQYfYIg==
dayjs@^1.11.13:
dayjs@^1.11.11, dayjs@^1.11.13:
version "1.11.13"
resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.13.tgz#92430b0139055c3ebb60150aa13e860a4b5a366c"
integrity sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==
@@ -10688,10 +10676,10 @@ rc-field-form@~2.7.0:
"@rc-component/async-validator" "^5.0.3"
rc-util "^5.32.2"
rc-image@~7.11.1:
version "7.11.1"
resolved "https://registry.yarnpkg.com/rc-image/-/rc-image-7.11.1.tgz#3ab290708dc053d3681de94186522e4e594f6772"
integrity sha512-XuoWx4KUXg7hNy5mRTy1i8c8p3K8boWg6UajbHpDXS5AlRVucNfTi5YxTtPBTBzegxAZpvuLfh3emXFt6ybUdA==
rc-image@~7.12.0:
version "7.12.0"
resolved "https://registry.yarnpkg.com/rc-image/-/rc-image-7.12.0.tgz#95e9314701e668217d113c1f29b4f01ac025cafe"
integrity sha512-cZ3HTyyckPnNnUb9/DRqduqzLfrQRyi+CdHjdqgsyDpI3Ln5UX1kXnAhPBSJj9pVRzwRFgqkN7p9b6HBDjmu/Q==
dependencies:
"@babel/runtime" "^7.11.2"
"@rc-component/portal" "^1.0.2"
@@ -10700,37 +10688,37 @@ rc-image@~7.11.1:
rc-motion "^2.6.2"
rc-util "^5.34.1"
rc-input-number@~9.4.0:
version "9.4.0"
resolved "https://registry.npmjs.org/rc-input-number/-/rc-input-number-9.4.0.tgz"
integrity sha512-Tiy4DcXcFXAf9wDhN8aUAyMeCLHJUHA/VA/t7Hj8ZEx5ETvxG7MArDOSE6psbiSCo+vJPm4E3fGN710ITVn6GA==
rc-input-number@~9.5.0:
version "9.5.0"
resolved "https://registry.yarnpkg.com/rc-input-number/-/rc-input-number-9.5.0.tgz#b47963d0f2cbd85ab2f1badfdc089a904c073f38"
integrity sha512-bKaEvB5tHebUURAEXw35LDcnRZLq3x1k7GxfAqBMzmpHkDGzjAtnUL8y4y5N15rIFIg5IJgwr211jInl3cipag==
dependencies:
"@babel/runtime" "^7.10.1"
"@rc-component/mini-decimal" "^1.0.1"
classnames "^2.2.5"
rc-input "~1.7.1"
rc-input "~1.8.0"
rc-util "^5.40.1"
rc-input@~1.7.1, rc-input@~1.7.3:
version "1.7.3"
resolved "https://registry.yarnpkg.com/rc-input/-/rc-input-1.7.3.tgz#cb334a17b93ce985bceb243b4c111a5ed641e0e3"
integrity sha512-A5w4egJq8+4JzlQ55FfQjDnPvOaAbzwC3VLOAdOytyek3TboSOP9qxN+Gifup+shVXfvecBLBbWBpWxmk02SWQ==
rc-input@~1.8.0:
version "1.8.0"
resolved "https://registry.yarnpkg.com/rc-input/-/rc-input-1.8.0.tgz#d2f4404befebf2fbdc28390d5494c302f74ae974"
integrity sha512-KXvaTbX+7ha8a/k+eg6SYRVERK0NddX8QX7a7AnRvUa/rEH0CNMlpcBzBkhI0wp2C8C4HlMoYl8TImSN+fuHKA==
dependencies:
"@babel/runtime" "^7.11.1"
classnames "^2.2.1"
rc-util "^5.18.1"
rc-mentions@~2.19.1:
version "2.19.1"
resolved "https://registry.npmjs.org/rc-mentions/-/rc-mentions-2.19.1.tgz"
integrity sha512-KK3bAc/bPFI993J3necmaMXD2reZTzytZdlTvkeBbp50IGH1BDPDvxLdHDUrpQx2b2TGaVJsn+86BvYa03kGqA==
rc-mentions@~2.20.0:
version "2.20.0"
resolved "https://registry.yarnpkg.com/rc-mentions/-/rc-mentions-2.20.0.tgz#3bbeac0352b02e0ce3e1244adb48701bb6903bf7"
integrity sha512-w8HCMZEh3f0nR8ZEd466ATqmXFCMGMN5UFCzEUL0bM/nGw/wOS2GgRzKBcm19K++jDyuWCOJOdgcKGXU3fXfbQ==
dependencies:
"@babel/runtime" "^7.22.5"
"@rc-component/trigger" "^2.0.0"
classnames "^2.2.6"
rc-input "~1.7.1"
rc-input "~1.8.0"
rc-menu "~9.16.0"
rc-textarea "~1.9.0"
rc-textarea "~1.10.0"
rc-util "^5.34.1"
rc-menu@~9.16.0, rc-menu@~9.16.1:
@@ -10754,10 +10742,10 @@ rc-motion@^2.0.0, rc-motion@^2.0.1, rc-motion@^2.3.0, rc-motion@^2.3.4, rc-motio
classnames "^2.2.1"
rc-util "^5.44.0"
rc-notification@~5.6.3:
version "5.6.3"
resolved "https://registry.npmjs.org/rc-notification/-/rc-notification-5.6.3.tgz"
integrity sha512-42szwnn8VYQoT6GnjO00i1iwqV9D1TTMvxObWsuLwgl0TsOokzhkYiufdtQBsJMFjJravS1hfDKVMHLKLcPE4g==
rc-notification@~5.6.4:
version "5.6.4"
resolved "https://registry.yarnpkg.com/rc-notification/-/rc-notification-5.6.4.tgz#ea89c39c13cd517fdfd97fe63f03376fabb78544"
integrity sha512-KcS4O6B4qzM3KH7lkwOB7ooLPZ4b6J+VMmQgT51VZCeEcmghdeR4IrMcFq0LG+RPdnbe/ArT086tGM8Snimgiw==
dependencies:
"@babel/runtime" "^7.10.1"
classnames "2.x"
@@ -10885,10 +10873,10 @@ rc-table@~7.50.4:
rc-util "^5.44.3"
rc-virtual-list "^3.14.2"
rc-tabs@~15.5.1:
version "15.5.1"
resolved "https://registry.npmjs.org/rc-tabs/-/rc-tabs-15.5.1.tgz"
integrity sha512-yiWivLAjEo5d1v2xlseB2dQocsOhkoVSfo1krS8v8r+02K+TBUjSjXIf7dgyVSxp6wRIPv5pMi5hanNUlQMgUA==
rc-tabs@~15.6.1:
version "15.6.1"
resolved "https://registry.yarnpkg.com/rc-tabs/-/rc-tabs-15.6.1.tgz#f0b6c65384dfa09a64eb539e86a0667c7a650708"
integrity sha512-/HzDV1VqOsUWyuC0c6AkxVYFjvx9+rFPKZ32ejxX0Uc7QCzcEjTA9/xMgv4HemPKwzBNX8KhGVbbumDjnj92aA==
dependencies:
"@babel/runtime" "^7.11.2"
classnames "2.x"
@@ -10898,14 +10886,14 @@ rc-tabs@~15.5.1:
rc-resize-observer "^1.0.0"
rc-util "^5.34.1"
rc-textarea@~1.9.0:
version "1.9.0"
resolved "https://registry.npmjs.org/rc-textarea/-/rc-textarea-1.9.0.tgz"
integrity sha512-dQW/Bc/MriPBTugj2Kx9PMS5eXCCGn2cxoIaichjbNvOiARlaHdI99j4DTxLl/V8+PIfW06uFy7kjfUIDDKyxQ==
rc-textarea@~1.10.0:
version "1.10.0"
resolved "https://registry.yarnpkg.com/rc-textarea/-/rc-textarea-1.10.0.tgz#f8f962ef83be0b8e35db97cf03dbfb86ddd9c46c"
integrity sha512-ai9IkanNuyBS4x6sOL8qu/Ld40e6cEs6pgk93R+XLYg0mDSjNBGey6/ZpDs5+gNLD7urQ14po3V6Ck2dJLt9SA==
dependencies:
"@babel/runtime" "^7.10.1"
classnames "^2.2.1"
rc-input "~1.7.1"
rc-input "~1.8.0"
rc-resize-observer "^1.0.0"
rc-util "^5.27.0"

View File

@@ -252,4 +252,215 @@ describe('Visualization > Table', () => {
});
cy.get('td').contains(/\d*%/);
});
// Verifies the row-limit / server-pagination interaction: values above the
// client-side max are only valid while server pagination is enabled, and an
// error tooltip appears (or disappears) as the toggle changes. Very large
// values are rejected outright with "Value cannot exceed".
it('Test row limit with server pagination toggle', () => {
cy.visitChartByParams({
...VIZ_DEFAULTS,
metrics: ['count'],
row_limit: 100,
});
// Enable server pagination
cy.get('[data-test="server_pagination-header"] div.pull-left').click();
// Click row limit control and select high value (200k)
cy.get('div[aria-label="Row limit"]').click();
// Type 200000 and press enter to select the option
cy.get('div[aria-label="Row limit"]')
.find('.ant-select-selection-search-input:visible')
.type('200000{enter}');
// Verify that there is no error tooltip when server pagination is enabled
cy.get('[data-test="error-tooltip"]').should('not.exist');
// Disable server pagination
cy.get('[data-test="server_pagination-header"] div.pull-left').click();
// Verify error tooltip appears
cy.get('[data-test="error-tooltip"]').should('be.visible');
// Trigger mouseover and verify tooltip text
cy.get('[data-test="error-tooltip"]').trigger('mouseover');
// Verify tooltip content
cy.get('.antd5-tooltip-inner').should('be.visible');
cy.get('.antd5-tooltip-inner').should(
'contain',
'Server pagination needs to be enabled for values over',
);
// Hide the tooltip by adding display:none style
// (it would otherwise overlap the controls clicked below)
cy.get('.antd5-tooltip').invoke('attr', 'style', 'display: none');
// Enable server pagination again
cy.get('[data-test="server_pagination-header"] div.pull-left').click();
cy.get('[data-test="error-tooltip"]').should('not.exist');
cy.get('div[aria-label="Row limit"]').click();
// Type 1000000
cy.get('div[aria-label="Row limit"]')
.find('.ant-select-selection-search-input:visible')
.type('1000000');
// Wait for 1 second
// NOTE(review): fixed-duration cy.wait is flake-prone; asserting on the
// dropdown option being rendered would be more robust — confirm.
cy.wait(1000);
// Press enter
cy.get('div[aria-label="Row limit"]')
.find('.ant-select-selection-search-input:visible')
.type('{enter}');
// Wait for error tooltip to appear and verify its content
cy.get('[data-test="error-tooltip"]')
.should('be.visible')
.trigger('mouseover');
// Wait for tooltip content and verify
cy.get('.antd5-tooltip-inner').should('exist');
cy.get('.antd5-tooltip-inner').should('be.visible');
// Verify tooltip content separately
cy.get('.antd5-tooltip-inner').should('contain', 'Value cannot exceed');
});
// Verifies backend (server-side) sorting: each header click issues a new
// @chartData request, and the returned rows reflect the requested order.
// Assumes the fixture dataset's names span the alphabet so the first row
// starts with A/a ascending and Z/z descending — TODO confirm fixture.
it('Test sorting with server pagination enabled', () => {
cy.visitChartByParams({
...VIZ_DEFAULTS,
metrics: ['count'],
groupby: ['name'],
row_limit: 100000,
server_pagination: true, // Enable server pagination
});
// Wait for the initial data load
cy.wait('@chartData');
// Get the first column header (name)
cy.get('.chart-container th').contains('name').as('nameHeader');
// Click to sort ascending
cy.get('@nameHeader').click();
cy.wait('@chartData');
// Verify first row starts with 'A'
cy.get('.chart-container td:first').invoke('text').should('match', /^[Aa]/);
// Click again to sort descending
cy.get('@nameHeader').click();
cy.wait('@chartData');
// Verify first row starts with 'Z'
cy.get('.chart-container td:first').invoke('text').should('match', /^[Zz]/);
// Test numeric sorting
cy.get('.chart-container th').contains('COUNT').as('countHeader');
// Click to sort ascending by count
cy.get('@countHeader').click();
cy.wait('@chartData');
// Get first two count values and verify ascending order
cy.get('.chart-container td:nth-child(2)').then($cells => {
const first = parseFloat($cells[0].textContent || '0');
const second = parseFloat($cells[1].textContent || '0');
expect(first).to.be.at.most(second);
});
// Click again to sort descending
cy.get('@countHeader').click();
cy.wait('@chartData');
// Get first two count values and verify descending order
cy.get('.chart-container td:nth-child(2)').then($cells => {
const first = parseFloat($cells[0].textContent || '0');
const second = parseFloat($cells[1].textContent || '0');
expect(first).to.be.at.least(second);
});
});
// Verifies backend search: every edit of the global-filter input triggers a
// new @chartData request, and results are filtered server-side. Covers
// case-insensitive matching, the empty-result message, and restricting the
// search to a single column via the column selector.
it('Test search with server pagination enabled', () => {
cy.visitChartByParams({
...VIZ_DEFAULTS,
metrics: ['count'],
groupby: ['name', 'state'],
row_limit: 100000,
server_pagination: true,
include_search: true,
});
cy.wait('@chartData');
// Basic search test
cy.get('span.dt-global-filter input.form-control.input-sm').should(
'be.visible',
);
cy.get('span.dt-global-filter input.form-control.input-sm').type('John');
cy.wait('@chartData');
cy.get('.chart-container tbody tr').each($row => {
cy.wrap($row).contains(/John/i);
});
// Clear and test case-insensitive search
cy.get('span.dt-global-filter input.form-control.input-sm').clear();
cy.wait('@chartData');
cy.get('span.dt-global-filter input.form-control.input-sm').type('mary');
cy.wait('@chartData');
cy.get('.chart-container tbody tr').each($row => {
cy.wrap($row).contains(/Mary/i);
});
// Test special characters
cy.get('span.dt-global-filter input.form-control.input-sm').clear();
cy.get('span.dt-global-filter input.form-control.input-sm').type('Nicole');
cy.wait('@chartData');
cy.get('.chart-container tbody tr').each($row => {
cy.wrap($row).contains(/Nicole/i);
});
// Test no results
cy.get('span.dt-global-filter input.form-control.input-sm').clear();
cy.get('span.dt-global-filter input.form-control.input-sm').type('XYZ123');
cy.wait('@chartData');
cy.get('.chart-container').contains('No records found');
// Test column-specific search: restrict matching to the "state" column
cy.get('.search-select').should('be.visible');
cy.get('.search-select').click();
cy.get('.ant-select-dropdown').should('be.visible');
cy.get('.ant-select-item-option').contains('state').should('be.visible');
cy.get('.ant-select-item-option').contains('state').click();
cy.get('span.dt-global-filter input.form-control.input-sm').clear();
cy.get('span.dt-global-filter input.form-control.input-sm').type('CA');
cy.wait('@chartData');
// NOTE(review): fixed-duration cy.wait after the aliased request looks
// redundant and flake-prone; the .should('be.visible') below already
// retries — confirm before removing.
cy.wait(1000);
cy.get('td[aria-labelledby="header-state"]').should('be.visible');
cy.get('td[aria-labelledby="header-state"]')
.first()
.should('contain', 'CA');
});
});

View File

@@ -26,6 +26,7 @@ import {
import { ColumnMeta, SortSeriesData, SortSeriesType } from './types';
export const DEFAULT_MAX_ROW = 100000;
export const DEFAULT_MAX_ROW_TABLE_SERVER = 500000;
// eslint-disable-next-line import/prefer-default-export
export const TIME_FILTER_LABELS = {

View File

@@ -25,3 +25,4 @@ export { default as validateNonEmpty } from './validateNonEmpty';
export { default as validateMaxValue } from './validateMaxValue';
export { default as validateMapboxStylesUrl } from './validateMapboxStylesUrl';
export { default as validateTimeComparisonRangeValues } from './validateTimeComparisonRangeValues';
export { default as validateServerPagination } from './validateServerPagination';

View File

@@ -0,0 +1,30 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { t } from '../translation';
/**
 * Control-panel validator: flags a row-limit value that exceeds `max`
 * while server-side pagination is turned off.
 *
 * @param v - candidate value; anything not coercible to a number
 *   (undefined, null, 'invalid') becomes NaN, and NaN comparisons are
 *   always false, so such inputs never produce a warning.
 * @param serverPagination - whether server pagination is enabled.
 * @param max - largest value allowed without server pagination.
 * @returns a translated warning string on failure, otherwise `false`
 *   (the falsy "no error" convention used by these validators).
 */
export default function validateServerPagination(
  v: unknown,
  serverPagination: boolean,
  max: number,
): string | false {
  // `max` is already typed as number — the original unary `+max`
  // coercion was redundant and has been dropped.
  if (!serverPagination && Number(v) > max) {
    return t('Server pagination needs to be enabled for values over %s', max);
  }
  return false;
}

View File

@@ -0,0 +1,46 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { validateServerPagination } from '@superset-ui/core';
import './setup';
// Values strictly above `max` must warn when server pagination is off.
test('validateServerPagination returns warning message when server pagination is disabled and value exceeds max', () => {
  expect(validateServerPagination(100001, false, 100000)).toBeTruthy();
  expect(validateServerPagination('150000', false, 100000)).toBeTruthy();
  expect(validateServerPagination(200000, false, 100000)).toBeTruthy();
});

// With server pagination on, any value is accepted.
test('validateServerPagination returns false when server pagination is enabled', () => {
  expect(validateServerPagination(100001, true, 100000)).toBeFalsy();
  expect(validateServerPagination(150000, true, 100000)).toBeFalsy();
  expect(validateServerPagination('200000', true, 100000)).toBeFalsy();
});

// Values at or below `max` never warn; the comparison is strictly
// greater-than, so the boundary value itself is allowed (new coverage).
test('validateServerPagination returns false when value is at or below max', () => {
  expect(validateServerPagination(50000, false, 100000)).toBeFalsy();
  expect(validateServerPagination('75000', false, 100000)).toBeFalsy();
  expect(validateServerPagination(99999, false, 100000)).toBeFalsy();
  expect(validateServerPagination(100000, false, 100000)).toBeFalsy();
});

// Non-numeric input coerces to NaN, whose comparisons are always false,
// so no warning is produced for garbage values.
test('validateServerPagination handles edge cases', () => {
  expect(validateServerPagination(undefined, false, 100000)).toBeFalsy();
  expect(validateServerPagination(null, false, 100000)).toBeFalsy();
  expect(validateServerPagination(NaN, false, 100000)).toBeFalsy();
  expect(validateServerPagination('invalid', false, 100000)).toBeFalsy();
});

View File

@@ -36,13 +36,25 @@ import {
} from './types';
import { useOverflowDetection } from './useOverflowDetection';
const MetricNameText = styled.div<{ metricNameFontSize?: number }>`
${({ theme, metricNameFontSize }) => `
font-family: ${theme.typography.families.sansSerif};
font-weight: ${theme.typography.weights.normal};
font-size: ${metricNameFontSize || theme.typography.sizes.s * 2}px;
text-align: center;
margin-bottom: ${theme.gridUnit * 3}px;
`}
`;
const NumbersContainer = styled.div`
display: flex;
justify-content: center;
align-items: center;
flex-direction: column;
width: 100%;
height: 100%;
overflow: auto;
padding: 12px;
`;
const ComparisonValue = styled.div<PopKPIComparisonValueStyleProps>`
@@ -73,6 +85,8 @@ export default function PopKPI(props: PopKPIProps) {
prevNumber,
valueDifference,
percentDifferenceFormattedString,
metricName,
metricNameFontSize,
headerFontSize,
subheaderFontSize,
comparisonColorEnabled,
@@ -84,8 +98,8 @@ export default function PopKPI(props: PopKPIProps) {
subtitle,
subtitleFontSize,
dashboardTimeRange,
showMetricName,
} = props;
const [comparisonRange, setComparisonRange] = useState<string>('');
useEffect(() => {
@@ -260,9 +274,16 @@ export default function PopKPI(props: PopKPIProps) {
width: fit-content;
margin: auto;
align-items: flex-start;
overflow: auto;
`
}
>
{showMetricName && metricName && (
<MetricNameText metricNameFontSize={metricNameFontSize}>
{metricName}
</MetricNameText>
)}
<div css={bigValueContainerStyles}>
{bigNumber}
{percentDifferenceNumber !== 0 && (

View File

@@ -28,6 +28,8 @@ import {
subheaderFontSize,
subtitleControl,
subtitleFontSize,
showMetricNameControl,
metricNameFontSizeWithVisibility,
} from '../sharedControls';
import { ColorSchemeEnum } from './types';
@@ -70,6 +72,8 @@ const config: ControlPanelConfig = {
],
[subtitleControl],
[subtitleFontSize],
[showMetricNameControl],
[metricNameFontSizeWithVisibility],
[
{
...subheaderFontSize,

View File

@@ -32,6 +32,7 @@ export default class PopKPIPlugin extends ChartPlugin {
tags: [
t('Comparison'),
t('Business'),
t('ECharts'),
t('Percentages'),
t('Report'),
t('Advanced-Analytics'),

View File

@@ -26,7 +26,13 @@ import {
SimpleAdhocFilter,
ensureIsArray,
} from '@superset-ui/core';
import { getComparisonFontSize, getHeaderFontSize } from './utils';
import {
getComparisonFontSize,
getHeaderFontSize,
getMetricNameFontSize,
} from './utils';
import { getOriginalLabel } from '../utils';
dayjs.extend(utc);
@@ -83,6 +89,7 @@ export default function transformProps(chartProps: ChartProps) {
headerFontSize,
headerText,
metric,
metricNameFontSize,
yAxisFormat,
currencyFormat,
subheaderFontSize,
@@ -91,11 +98,14 @@ export default function transformProps(chartProps: ChartProps) {
percentDifferenceFormat,
subtitle = '',
subtitleFontSize,
columnConfig,
columnConfig = {},
} = formData;
const { data: dataA = [] } = queriesData[0];
const data = dataA;
const metricName = metric ? getMetricLabel(metric) : '';
const metrics = chartProps.datasource?.metrics || [];
const originalLabel = getOriginalLabel(metric, metrics);
const showMetricName = chartProps.rawFormData?.show_metric_name ?? false;
const timeComparison = ensureIsArray(chartProps.rawFormData?.time_compare)[0];
const startDateOffset = chartProps.rawFormData?.start_date_offset;
const currentTimeRangeFilter = chartProps.rawFormData?.adhoc_filters?.filter(
@@ -179,7 +189,7 @@ export default function transformProps(chartProps: ChartProps) {
width,
height,
data,
metricName,
metricName: originalLabel,
bigNumber,
prevNumber,
valueDifference,
@@ -187,6 +197,8 @@ export default function transformProps(chartProps: ChartProps) {
boldText,
subtitle,
subtitleFontSize,
showMetricName,
metricNameFontSize: getMetricNameFontSize(metricNameFontSize),
headerFontSize: getHeaderFontSize(headerFontSize),
subheaderFontSize: getComparisonFontSize(subheaderFontSize),
headerText,

View File

@@ -61,6 +61,8 @@ export type PopKPIProps = PopKPIStylesProps &
data: TimeseriesDataRecord[];
metrics: Metric[];
metricName: string;
metricNameFontSize?: number;
showMetricName: boolean;
bigNumber: string;
prevNumber: string;
subtitle?: string;

View File

@@ -16,10 +16,19 @@
* specific language governing permissions and limitations
* under the License.
*/
import { headerFontSize, subheaderFontSize } from '../sharedControls';
import {
headerFontSize,
subheaderFontSize,
metricNameFontSize,
} from '../sharedControls';
const headerFontSizes = [16, 20, 30, 48, 60];
const comparisonFontSizes = [16, 20, 26, 32, 40];
const sharedFontSizes = [16, 20, 26, 32, 40];
const metricNameProportionValues =
metricNameFontSize.config.options.map(
(option: { label: string; value: number }) => option.value,
) ?? [];
const headerProportionValues =
headerFontSize.config.options.map(
@@ -40,6 +49,10 @@ const getFontSizeMapping = (
return acc;
}, {});
const metricNameFontSizesMapping = getFontSizeMapping(
metricNameProportionValues,
sharedFontSizes,
);
const headerFontSizesMapping = getFontSizeMapping(
headerProportionValues,
headerFontSizes,
@@ -47,13 +60,17 @@ const headerFontSizesMapping = getFontSizeMapping(
const comparisonFontSizesMapping = getFontSizeMapping(
subheaderProportionValues,
comparisonFontSizes,
sharedFontSizes,
);
export const getMetricNameFontSize = (proportionValue: number) =>
metricNameFontSizesMapping[proportionValue] ??
sharedFontSizes[sharedFontSizes.length - 1];
export const getHeaderFontSize = (proportionValue: number) =>
headerFontSizesMapping[proportionValue] ??
headerFontSizes[headerFontSizes.length - 1];
export const getComparisonFontSize = (proportionValue: number) =>
comparisonFontSizesMapping[proportionValue] ??
comparisonFontSizes[comparisonFontSizes.length - 1];
sharedFontSizes[sharedFontSizes.length - 1];

View File

@@ -28,6 +28,8 @@ import {
headerFontSize,
subtitleFontSize,
subtitleControl,
showMetricNameControl,
metricNameFontSizeWithVisibility,
} from '../sharedControls';
export default {
@@ -44,6 +46,8 @@ export default {
[headerFontSize],
[subtitleControl],
[subtitleFontSize],
[showMetricNameControl],
[metricNameFontSizeWithVisibility],
['y_axis_format'],
['currency_format'],
[

View File

@@ -39,6 +39,7 @@ const metadata = {
tags: [
t('Additive'),
t('Business'),
t('ECharts'),
t('Legacy'),
t('Percentages'),
t('Featured'),

View File

@@ -36,6 +36,7 @@ jest.mock('@superset-ui/core', () => ({
jest.mock('../utils', () => ({
getDateFormatter: jest.fn(() => (v: any) => `${v}pm`),
parseMetricValue: jest.fn(val => Number(val)),
getOriginalLabel: jest.fn((metric, metrics) => metric),
}));
describe('BigNumberTotal transformProps', () => {

View File

@@ -29,7 +29,7 @@ import {
getValueFormatter,
} from '@superset-ui/core';
import { BigNumberTotalChartProps, BigNumberVizProps } from '../types';
import { getDateFormatter, parseMetricValue } from '../utils';
import { getDateFormatter, getOriginalLabel, parseMetricValue } from '../utils';
import { Refs } from '../../types';
export default function transformProps(
@@ -45,6 +45,7 @@ export default function transformProps(
datasource: { currencyFormats = {}, columnFormats = {} },
} = chartProps;
const {
metricNameFontSize,
headerFontSize,
metric = 'value',
subtitle,
@@ -58,9 +59,12 @@ export default function transformProps(
subheaderFontSize,
} = formData;
const refs: Refs = {};
const { data = [], coltypes = [] } = queriesData[0];
const { data = [], coltypes = [] } = queriesData[0] || {};
const granularity = extractTimegrain(rawFormData as QueryFormData);
const metrics = chartProps.datasource?.metrics || [];
const originalLabel = getOriginalLabel(metric, metrics);
const metricName = getMetricLabel(metric);
const showMetricName = chartProps.rawFormData?.show_metric_name ?? false;
const formattedSubtitle = subtitle?.trim() ? subtitle : subheader || '';
const formattedSubtitleFontSize = subtitle?.trim()
? (subtitleFontSize ?? 1)
@@ -103,7 +107,6 @@ export default function transformProps(
const colorThresholdFormatters =
getColorFormatters(conditionalFormatting, data, false) ??
defaultColorFormatters;
return {
width,
height,
@@ -116,5 +119,8 @@ export default function transformProps(
onContextMenu,
refs,
colorThresholdFormatters,
metricName: originalLabel,
showMetricName,
metricNameFontSize,
};
}

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { PureComponent, MouseEvent } from 'react';
import { PureComponent, MouseEvent, createRef } from 'react';
import {
t,
getNumberFormatter,
@@ -35,6 +35,7 @@ const defaultNumberFormatter = getNumberFormatter();
const PROPORTION = {
// text size: proportion of the chart container sans trendline
METRIC_NAME: 0.125,
KICKER: 0.1,
HEADER: 0.3,
SUBHEADER: 0.125,
@@ -42,13 +43,20 @@ const PROPORTION = {
TRENDLINE: 0.3,
};
class BigNumberVis extends PureComponent<BigNumberVizProps> {
type BigNumberVisState = {
elementsRendered: boolean;
recalculateTrigger: boolean;
};
class BigNumberVis extends PureComponent<BigNumberVizProps, BigNumberVisState> {
static defaultProps = {
className: '',
headerFormatter: defaultNumberFormatter,
formatTime: getTimeFormatter(SMART_DATE_VERBOSE_ID),
headerFontSize: PROPORTION.HEADER,
kickerFontSize: PROPORTION.KICKER,
metricNameFontSize: PROPORTION.METRIC_NAME,
showMetricName: true,
mainColor: BRAND_COLOR,
showTimestamp: false,
showTrendLine: false,
@@ -58,6 +66,40 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
timeRangeFixed: false,
};
// Create refs for each component to measure heights
metricNameRef = createRef<HTMLDivElement>();
kickerRef = createRef<HTMLDivElement>();
headerRef = createRef<HTMLDivElement>();
subheaderRef = createRef<HTMLDivElement>();
subtitleRef = createRef<HTMLDivElement>();
state = {
elementsRendered: false,
recalculateTrigger: false,
};
componentDidMount() {
// Defer one tick so the text elements are committed to the DOM before
// any offsetHeight measurement (see getTotalElementsHeight) is taken.
setTimeout(() => {
this.setState({ elementsRendered: true });
}, 0);
}
componentDidUpdate(prevProps: BigNumberVizProps) {
  // Re-measure only when the available height or the trendline layout
  // changes; flipping the trigger forces an overflow re-computation.
  const { height, showTrendLine } = this.props;
  const layoutChanged =
    prevProps.height !== height || prevProps.showTrendLine !== showTrendLine;
  if (layoutChanged) {
    this.setState(({ recalculateTrigger }) => ({
      recalculateTrigger: !recalculateTrigger,
    }));
  }
}
getClassName() {
const { className, showTrendLine, bigNumberFallback } = this.props;
const names = `superset-legacy-chart-big-number ${className} ${
@@ -92,6 +134,37 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
);
}
renderMetricName(maxHeight: number) {
  const { metricName, width, showMetricName } = this.props;
  if (!showMetricName || !metricName) return null;

  const text = metricName;

  // Measure with a detached throwaway node so the computed font size
  // reflects the real rendered width of the text.
  const container = this.createTemporaryContainer();
  document.body.append(container);
  let fontSize: number;
  try {
    fontSize = computeMaxFontSize({
      text,
      maxWidth: width,
      maxHeight,
      className: 'metric-name',
      container,
    });
  } finally {
    // Always detach the measuring node — even if computeMaxFontSize
    // throws — so hidden elements are never leaked into document.body.
    container.remove();
  }

  return (
    <div
      ref={this.metricNameRef}
      className="metric-name"
      style={{
        fontSize,
        height: 'auto',
      }}
    >
      {text}
    </div>
  );
}
renderKicker(maxHeight: number) {
const { timestamp, showTimestamp, formatTime, width } = this.props;
if (
@@ -118,6 +191,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
return (
<div
ref={this.kickerRef}
className="kicker"
style={{
fontSize,
@@ -173,6 +247,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
return (
<div
ref={this.headerRef}
className="header-line"
style={{
display: 'flex',
@@ -211,6 +286,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
return (
<div
ref={this.subheaderRef}
className="subheader-line"
style={{
fontSize,
@@ -256,6 +332,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
return (
<>
<div
ref={this.subtitleRef}
className="subtitle-line subheader-line"
style={{
fontSize: `${fontSize}px`,
@@ -316,6 +393,35 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
);
}
getTotalElementsHeight() {
  // Each rendered text line carries an 8px margin-bottom
  // (theme.gridUnit = 4, so gridUnit * 2), except the last visible one.
  const marginPerElement = 8;
  const mountedRefs = [
    this.metricNameRef,
    this.kickerRef,
    this.headerRef,
    this.subheaderRef,
    this.subtitleRef,
  ].filter(ref => ref.current);

  let totalHeight = 0;
  mountedRefs.forEach((ref, idx) => {
    totalHeight += ref.current?.offsetHeight || 0;
    if (idx < mountedRefs.length - 1) {
      totalHeight += marginPerElement;
    }
  });
  return totalHeight;
}
shouldApplyOverflow(availableHeight: number) {
  // Overflow styling only makes sense once the DOM nodes exist and their
  // heights are measurable (set one tick after mount).
  if (!this.state.elementsRendered) {
    return false;
  }
  return this.getTotalElementsHeight() > availableHeight;
}
render() {
const {
showTrendLine,
@@ -323,6 +429,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
kickerFontSize,
headerFontSize,
subtitleFontSize,
metricNameFontSize,
subheaderFontSize,
} = this.props;
const className = this.getClassName();
@@ -330,11 +437,31 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
if (showTrendLine) {
const chartHeight = Math.floor(PROPORTION.TRENDLINE * height);
const allTextHeight = height - chartHeight;
const shouldApplyOverflow = this.shouldApplyOverflow(allTextHeight);
return (
<div className={className}>
<div className="text-container" style={{ height: allTextHeight }}>
<div
className="text-container"
style={{
height: allTextHeight,
...(shouldApplyOverflow
? {
display: 'block',
boxSizing: 'border-box',
overflowX: 'hidden',
overflowY: 'auto',
width: '100%',
}
: {}),
}}
>
{this.renderFallbackWarning()}
{this.renderMetricName(
Math.ceil(
(metricNameFontSize || 0) * (1 - PROPORTION.TRENDLINE) * height,
),
)}
{this.renderKicker(
Math.ceil(
(kickerFontSize || 0) * (1 - PROPORTION.TRENDLINE) * height,
@@ -356,16 +483,33 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> {
</div>
);
}
const shouldApplyOverflow = this.shouldApplyOverflow(height);
return (
<div className={className} style={{ height }}>
{this.renderFallbackWarning()}
{this.renderKicker((kickerFontSize || 0) * height)}
{this.renderHeader(Math.ceil(headerFontSize * height))}
{this.rendermetricComparisonSummary(
Math.ceil(subheaderFontSize * height),
)}
{this.renderSubtitle(Math.ceil(subtitleFontSize * height))}
<div
className={className}
style={{
height,
...(shouldApplyOverflow
? {
display: 'block',
boxSizing: 'border-box',
overflowX: 'hidden',
overflowY: 'auto',
width: '100%',
}
: {}),
}}
>
<div className="text-container">
{this.renderFallbackWarning()}
{this.renderMetricName((metricNameFontSize || 0) * height)}
{this.renderKicker((kickerFontSize || 0) * height)}
{this.renderHeader(Math.ceil(headerFontSize * height))}
{this.rendermetricComparisonSummary(
Math.ceil(subheaderFontSize * height),
)}
{this.renderSubtitle(Math.ceil(subtitleFontSize * height))}
</div>
</div>
);
}
@@ -400,7 +544,12 @@ export default styled(BigNumberVis)`
.kicker {
line-height: 1em;
padding-bottom: 2em;
margin-bottom: ${theme.gridUnit * 2}px;
}
.metric-name {
line-height: 1em;
margin-bottom: ${theme.gridUnit * 2}px;
}
.header-line {
@@ -416,12 +565,12 @@ export default styled(BigNumberVis)`
.subheader-line {
line-height: 1em;
padding-bottom: 0;
margin-bottom: ${theme.gridUnit * 2}px;
}
.subtitle-line {
line-height: 1em;
padding-bottom: 0;
margin-bottom: ${theme.gridUnit * 2}px;
}
&.is-fallback-value {

View File

@@ -31,6 +31,8 @@ import {
subheaderFontSize,
subtitleFontSize,
subtitleControl,
showMetricNameControl,
metricNameFontSizeWithVisibility,
} from '../sharedControls';
const config: ControlPanelConfig = {
@@ -141,6 +143,8 @@ const config: ControlPanelConfig = {
[subheaderFontSize],
[subtitleControl],
[subtitleFontSize],
[showMetricNameControl],
[metricNameFontSizeWithVisibility],
['y_axis_format'],
['currency_format'],
[

View File

@@ -37,6 +37,7 @@ const metadata = {
name: t('Big Number with Trendline'),
tags: [
t('Advanced-Analytics'),
t('ECharts'),
t('Line'),
t('Percentages'),
t('Featured'),

View File

@@ -39,6 +39,7 @@ jest.mock('@superset-ui/core', () => ({
jest.mock('../utils', () => ({
getDateFormatter: jest.fn(() => (v: any) => `${v}pm`),
parseMetricValue: jest.fn(val => Number(val)),
getOriginalLabel: jest.fn((metric, metrics) => metric),
}));
jest.mock('../../utils/tooltip', () => ({

View File

@@ -35,7 +35,7 @@ import {
BigNumberWithTrendlineChartProps,
TimeSeriesDatum,
} from '../types';
import { getDateFormatter, parseMetricValue } from '../utils';
import { getDateFormatter, parseMetricValue, getOriginalLabel } from '../utils';
import { getDefaultTooltip } from '../../utils/tooltip';
import { Refs } from '../../types';
@@ -62,6 +62,7 @@ export default function transformProps(
compareLag: compareLag_,
compareSuffix = '',
timeFormat,
metricNameFontSize,
headerFontSize,
metric = 'value',
showTimestamp,
@@ -96,6 +97,9 @@ export default function transformProps(
const aggregatedData = hasAggregatedData ? aggregatedQueryData.data[0] : null;
const refs: Refs = {};
const metricName = getMetricLabel(metric);
const metrics = chartProps.datasource?.metrics || [];
const originalLabel = getOriginalLabel(metric, metrics);
const showMetricName = chartProps.rawFormData?.show_metric_name ?? false;
const compareLag = Number(compareLag_) || 0;
let formattedSubheader = subheader;
@@ -303,6 +307,9 @@ export default function transformProps(
headerFormatter,
formatTime,
formData,
metricName: originalLabel,
showMetricName,
metricNameFontSize,
headerFontSize,
subtitleFontSize,
subtitle,

View File

@@ -21,106 +21,68 @@
import { t } from '@superset-ui/core';
import { CustomControlItem } from '@superset-ui/chart-controls';
export const headerFontSize: CustomControlItem = {
name: 'header_font_size',
config: {
type: 'SelectControl',
label: t('Big Number Font Size'),
renderTrigger: true,
clearable: false,
default: 0.4,
// Values represent the percentage of space a header should take
options: [
{
label: t('Tiny'),
value: 0.2,
},
{
label: t('Small'),
value: 0.3,
},
{
label: t('Normal'),
value: 0.4,
},
{
label: t('Large'),
value: 0.5,
},
{
label: t('Huge'),
value: 0.6,
},
],
},
};
// Fraction-of-container font sizes for secondary text lines
// (subtitle, subheader, metric name).
const FONT_SIZE_OPTIONS_SMALL = [
{ label: t('Tiny'), value: 0.125 },
{ label: t('Small'), value: 0.15 },
{ label: t('Normal'), value: 0.2 },
{ label: t('Large'), value: 0.3 },
{ label: t('Huge'), value: 0.4 },
];
export const subtitleFontSize: CustomControlItem = {
name: 'subtitle_font_size',
config: {
type: 'SelectControl',
label: t('Subtitle Font Size'),
renderTrigger: true,
clearable: false,
default: 0.15,
// Values represent the percentage of space a subtitle should take
options: [
{
label: t('Tiny'),
value: 0.125,
},
{
label: t('Small'),
value: 0.15,
},
{
label: t('Normal'),
value: 0.2,
},
{
label: t('Large'),
value: 0.3,
},
{
label: t('Huge'),
value: 0.4,
},
],
},
};
export const subheaderFontSize: CustomControlItem = {
name: 'subheader_font_size',
config: {
type: 'SelectControl',
label: t('Subheader Font Size'),
renderTrigger: true,
clearable: false,
default: 0.15,
// Values represent the percentage of space a subheader should take
options: [
{
label: t('Tiny'),
value: 0.125,
},
{
label: t('Small'),
value: 0.15,
},
{
label: t('Normal'),
value: 0.2,
},
{
label: t('Large'),
value: 0.3,
},
{
label: t('Huge'),
value: 0.4,
},
],
},
};
// Fraction-of-container font sizes for the primary big-number header.
const FONT_SIZE_OPTIONS_LARGE = [
{ label: t('Tiny'), value: 0.2 },
{ label: t('Small'), value: 0.3 },
{ label: t('Normal'), value: 0.4 },
{ label: t('Large'), value: 0.5 },
{ label: t('Huge'), value: 0.6 },
];
/**
 * Build a font-size SelectControl shared by the Big Number header,
 * subheader, subtitle and metric-name controls.
 *
 * `label` must already be translated by the caller (i.e. wrapped in
 * `t('…')` with a literal string at the call site). Static i18n string
 * extraction only detects literal arguments to `t(...)`, so calling
 * `t(label)` here with a variable would leave these labels out of the
 * translation catalogs.
 */
function makeFontSizeControl(
  name: string,
  label: string,
  defaultValue: number,
  options: { label: string; value: number }[],
): CustomControlItem {
  return {
    name,
    config: {
      type: 'SelectControl',
      label,
      renderTrigger: true,
      clearable: false,
      default: defaultValue,
      options,
    },
  };
}

export const headerFontSize = makeFontSizeControl(
  'header_font_size',
  t('Big Number Font Size'),
  0.4,
  FONT_SIZE_OPTIONS_LARGE,
);

export const subtitleFontSize = makeFontSizeControl(
  'subtitle_font_size',
  t('Subtitle Font Size'),
  0.15,
  FONT_SIZE_OPTIONS_SMALL,
);

export const subheaderFontSize = makeFontSizeControl(
  'subheader_font_size',
  t('Subheader Font Size'),
  0.15,
  FONT_SIZE_OPTIONS_SMALL,
);

export const metricNameFontSize = makeFontSizeControl(
  'metric_name_font_size',
  t('Metric Name Font Size'),
  0.15,
  FONT_SIZE_OPTIONS_SMALL,
);
export const subtitleControl: CustomControlItem = {
name: 'subtitle',
@@ -131,3 +93,23 @@ export const subtitleControl: CustomControlItem = {
description: t('Description text that shows up below your Big Number'),
},
};
// Checkbox that toggles rendering of the metric's name above the number.
// renderTrigger: re-renders the chart client-side without re-querying.
export const showMetricNameControl: CustomControlItem = {
name: 'show_metric_name',
config: {
type: 'CheckboxControl',
label: t('Show Metric Name'),
renderTrigger: true,
default: false,
description: t('Whether to display the metric name'),
},
};
// metricNameFontSize, shown only while the "Show Metric Name" checkbox is
// checked; the chosen value is preserved (resetOnHide: false) when hidden.
export const metricNameFontSizeWithVisibility: CustomControlItem = {
...metricNameFontSize,
config: {
...metricNameFontSize.config,
visibility: ({ controls }) => controls?.show_metric_name?.value === true,
resetOnHide: false,
},
};

View File

@@ -75,6 +75,10 @@ export type BigNumberVizProps = {
bigNumberFallback?: TimeSeriesDatum;
headerFormatter: ValueFormatter | TimeFormatter;
formatTime?: TimeFormatter;
metricName?: string;
friendlyMetricName?: string;
metricNameFontSize?: number;
showMetricName?: boolean;
headerFontSize: number;
kickerFontSize?: number;
subheader?: string;

View File

@@ -22,6 +22,10 @@ import utc from 'dayjs/plugin/utc';
import {
getTimeFormatter,
getTimeFormatterForGranularity,
isAdhocMetricSimple,
isSavedMetric,
Metric,
QueryFormMetric,
SMART_DATE_ID,
TimeGranularity,
} from '@superset-ui/core';
@@ -47,3 +51,43 @@ export const getDateFormatter = (
timeFormat === SMART_DATE_ID
? getTimeFormatterForGranularity(granularity)
: getTimeFormatter(timeFormat ?? fallbackFormat);
/**
 * Resolve a human-friendly display label for a metric.
 *
 * - Saved metric: the datasource entry's verbose_name, falling back to its
 *   metric_name, then the raw metric string.
 * - Simple adhoc metric: the custom label if set, otherwise
 *   `AGGREGATE(column verbose/name)`.
 * - SQL adhoc metric: the custom label if set, otherwise its plain label.
 * - Anything else: the metric's label, or a generic fallback.
 */
export function getOriginalLabel(
  metric: QueryFormMetric,
  metrics: Metric[] = [],
): string {
  const fallbackLabel =
    typeof metric === 'string' ? metric : metric.label || '';

  if (isSavedMetric(metric)) {
    const saved = metrics.find(entry => entry.metric_name === metric);
    return (
      saved?.verbose_name || saved?.metric_name || metric || 'Unknown Metric'
    );
  }

  if (isAdhocMetricSimple(metric)) {
    if (metric.hasCustomLabel && metric.label) {
      return metric.label;
    }
    const column = metric.column || {};
    const columnDisplay =
      column.verbose_name || column.column_name || 'unknown_column';
    return `${metric.aggregate || 'UNKNOWN'}(${columnDisplay})`;
  }

  if (
    typeof metric === 'object' &&
    'expressionType' in metric &&
    metric.expressionType === 'SQL' &&
    'sqlExpression' in metric
  ) {
    return metric.hasCustomLabel && metric.label
      ? metric.label
      : fallbackLabel || 'Custom Metric';
  }

  return fallbackLabel || 'Unknown Metric';
}

View File

@@ -57,7 +57,7 @@ export default class EchartsSankeyChartPlugin extends ChartPlugin<
),
exampleGallery: [{ url: example1 }, { url: example2 }],
name: t('Sankey Chart'),
tags: [t('Directional'), t('Distribution'), t('Flow')],
tags: [t('Directional'), t('ECharts'), t('Distribution'), t('Flow')],
thumbnail,
}),
transformProps,

View File

@@ -58,6 +58,7 @@ export default class EchartsTimeseriesChartPlugin extends EchartsChartPlugin<
name: t('Generic Chart'),
tags: [
t('Advanced-Analytics'),
t('ECharts'),
t('Line'),
t('Predictive'),
t('Time'),

View File

@@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable import/no-extraneous-dependencies */
import {
useCallback,
useRef,
@@ -24,6 +25,7 @@ import {
MutableRefObject,
CSSProperties,
DragEvent,
useEffect,
} from 'react';
import {
@@ -39,8 +41,9 @@ import {
Row,
} from 'react-table';
import { matchSorter, rankings } from 'match-sorter';
import { typedMemo, usePrevious } from '@superset-ui/core';
import { styled, typedMemo, usePrevious } from '@superset-ui/core';
import { isEqual } from 'lodash';
import { Space } from 'antd';
import GlobalFilter, { GlobalFilterProps } from './components/GlobalFilter';
import SelectPageSize, {
SelectPageSizeProps,
@@ -50,6 +53,8 @@ import SimplePagination from './components/Pagination';
import useSticky from './hooks/useSticky';
import { PAGE_SIZE_OPTIONS } from '../consts';
import { sortAlphanumericCaseInsensitive } from './utils/sortAlphanumericCaseInsensitive';
import { SearchOption, SortByItem } from '../types';
import SearchSelectDropdown from './components/SearchSelectDropdown';
export interface DataTableProps<D extends object> extends TableOptions<D> {
tableClassName?: string;
@@ -62,7 +67,12 @@ export interface DataTableProps<D extends object> extends TableOptions<D> {
height?: string | number;
serverPagination?: boolean;
onServerPaginationChange: (pageNumber: number, pageSize: number) => void;
serverPaginationData: { pageSize?: number; currentPage?: number };
serverPaginationData: {
pageSize?: number;
currentPage?: number;
sortBy?: SortByItem[];
searchColumn?: string;
};
pageSize?: number;
noResults?: string | ((filterString: string) => ReactNode);
sticky?: boolean;
@@ -71,6 +81,14 @@ export interface DataTableProps<D extends object> extends TableOptions<D> {
onColumnOrderChange: () => void;
renderGroupingHeaders?: () => JSX.Element;
renderTimeComparisonDropdown?: () => JSX.Element;
handleSortByChange: (sortBy: SortByItem[]) => void;
sortByFromParent: SortByItem[];
manualSearch?: boolean;
onSearchChange?: (searchText: string) => void;
initialSearchText?: string;
searchInputId?: string;
onSearchColChange: (searchCol: string) => void;
searchOptions: SearchOption[];
}
export interface RenderHTMLCellProps extends HTMLProps<HTMLTableCellElement> {
@@ -81,6 +99,20 @@ const sortTypes = {
alphanumeric: sortAlphanumericCaseInsensitive,
};
// Right-aligned wrapper for the search row; hosts the optional
// "Search by" column selector next to the global filter input.
const StyledSpace = styled(Space)`
display: flex;
justify-content: flex-end;

.search-select-container {
display: flex;
}

.search-by-label {
align-self: center;
margin-right: 4px;
}
`;
// Be sure to pass our updateMyData and the skipReset option
export default typedMemo(function DataTable<D extends object>({
tableClassName,
@@ -105,6 +137,14 @@ export default typedMemo(function DataTable<D extends object>({
onColumnOrderChange,
renderGroupingHeaders,
renderTimeComparisonDropdown,
handleSortByChange,
sortByFromParent = [],
manualSearch = false,
onSearchChange,
initialSearchText,
searchInputId,
onSearchColChange,
searchOptions,
...moreUseTableOptions
}: DataTableProps<D>): JSX.Element {
const tableHooks: PluginHook<D>[] = [
@@ -115,6 +155,7 @@ export default typedMemo(function DataTable<D extends object>({
doSticky ? useSticky : [],
hooks || [],
].flat();
const columnNames = Object.keys(data?.[0] || {});
const previousColumnNames = usePrevious(columnNames);
const resultsSize = serverPagination ? rowCount : data.length;
@@ -127,7 +168,8 @@ export default typedMemo(function DataTable<D extends object>({
...initialState_,
// zero length means all pages, the `usePagination` plugin does not
// understand pageSize = 0
sortBy: sortByRef.current,
// sortBy: sortByRef.current,
sortBy: serverPagination ? sortByFromParent : sortByRef.current,
pageSize: initialPageSize > 0 ? initialPageSize : resultsSize || 10,
};
const defaultWrapperRef = useRef<HTMLDivElement>(null);
@@ -188,7 +230,13 @@ export default typedMemo(function DataTable<D extends object>({
wrapStickyTable,
setColumnOrder,
allColumns,
state: { pageIndex, pageSize, globalFilter: filterValue, sticky = {} },
state: {
pageIndex,
pageSize,
globalFilter: filterValue,
sticky = {},
sortBy,
},
} = useTable<D>(
{
columns,
@@ -198,10 +246,46 @@ export default typedMemo(function DataTable<D extends object>({
globalFilter: defaultGlobalFilter,
sortTypes,
autoResetSortBy: !isEqual(columnNames, previousColumnNames),
manualSortBy: !!serverPagination,
...moreUseTableOptions,
},
...tableHooks,
);
const handleSearchChange = useCallback(
(query: string) => {
if (manualSearch && onSearchChange) {
onSearchChange(query);
} else {
setGlobalFilter(query);
}
},
[manualSearch, onSearchChange, setGlobalFilter],
);
// updating the sort by to the own State of table viz
useEffect(() => {
const serverSortBy = serverPaginationData?.sortBy || [];
if (serverPagination && !isEqual(sortBy, serverSortBy)) {
if (Array.isArray(sortBy) && sortBy.length > 0) {
const [sortByItem] = sortBy;
const matchingColumn = columns.find(col => col?.id === sortByItem?.id);
if (matchingColumn && 'columnKey' in matchingColumn) {
const sortByWithColumnKey: SortByItem = {
...sortByItem,
key: (matchingColumn as { columnKey: string }).columnKey,
};
handleSortByChange([sortByWithColumnKey]);
}
} else {
handleSortByChange([]);
}
}
}, [sortBy]);
// make setPageSize accept 0
const setPageSize = (size: number) => {
if (serverPagination) {
@@ -355,6 +439,7 @@ export default typedMemo(function DataTable<D extends object>({
resultOnPageChange = (pageNumber: number) =>
onServerPaginationChange(pageNumber, serverPageSize);
}
return (
<div
ref={wrapperRef}
@@ -381,16 +466,31 @@ export default typedMemo(function DataTable<D extends object>({
) : null}
</div>
{searchInput ? (
<div className="col-sm-6">
<StyledSpace className="col-sm-6">
{serverPagination && (
<div className="search-select-container">
<span className="search-by-label">Search by: </span>
<SearchSelectDropdown
searchOptions={searchOptions}
value={serverPaginationData?.searchColumn || ''}
onChange={onSearchColChange}
/>
</div>
)}
<GlobalFilter<D>
searchInput={
typeof searchInput === 'boolean' ? undefined : searchInput
}
preGlobalFilteredRows={preGlobalFilteredRows}
setGlobalFilter={setGlobalFilter}
filterValue={filterValue}
setGlobalFilter={
manualSearch ? handleSearchChange : setGlobalFilter
}
filterValue={manualSearch ? initialSearchText : filterValue}
id={searchInputId}
serverPagination={!!serverPagination}
rowCount={rowCount}
/>
</div>
</StyledSpace>
) : null}
{renderTimeComparisonDropdown ? (
<div

View File

@@ -16,7 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
import { memo, ComponentType, ChangeEventHandler } from 'react';
import {
memo,
ComponentType,
ChangeEventHandler,
useRef,
useEffect,
} from 'react';
import { Row, FilterValue } from 'react-table';
import useAsyncState from '../utils/useAsyncState';
@@ -24,8 +30,12 @@ export interface SearchInputProps {
count: number;
value: string;
onChange: ChangeEventHandler<HTMLInputElement>;
onBlur?: () => void;
inputRef?: React.RefObject<HTMLInputElement>;
}
const isSearchFocused = new Map();
export interface GlobalFilterProps<D extends object> {
preGlobalFilteredRows: Row<D>[];
// filter value cannot be `undefined` otherwise React will report component
@@ -33,17 +43,28 @@ export interface GlobalFilterProps<D extends object> {
filterValue: string;
setGlobalFilter: (filterValue: FilterValue) => void;
searchInput?: ComponentType<SearchInputProps>;
id?: string;
serverPagination: boolean;
rowCount: number;
}
function DefaultSearchInput({ count, value, onChange }: SearchInputProps) {
function DefaultSearchInput({
count,
value,
onChange,
onBlur,
inputRef,
}: SearchInputProps) {
return (
<span className="dt-global-filter">
Search{' '}
<input
ref={inputRef}
className="form-control input-sm"
placeholder={`${count} records...`}
value={value}
onChange={onChange}
onBlur={onBlur}
/>
</span>
);
@@ -56,8 +77,13 @@ export default (memo as <T>(fn: T) => T)(function GlobalFilter<
filterValue = '',
searchInput,
setGlobalFilter,
id = '',
serverPagination,
rowCount,
}: GlobalFilterProps<D>) {
const count = preGlobalFilteredRows.length;
const count = serverPagination ? rowCount : preGlobalFilteredRows.length;
const inputRef = useRef<HTMLInputElement>(null);
const [value, setValue] = useAsyncState(
filterValue,
(newValue: string) => {
@@ -66,17 +92,37 @@ export default (memo as <T>(fn: T) => T)(function GlobalFilter<
200,
);
// Preserve focus during server-side filtering to maintain a better user experience
useEffect(() => {
if (
serverPagination &&
isSearchFocused.get(id) &&
document.activeElement !== inputRef.current
) {
inputRef.current?.focus();
}
}, [value, serverPagination]);
const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const target = e.target as HTMLInputElement;
e.preventDefault();
isSearchFocused.set(id, true);
setValue(target.value);
};
const handleBlur = () => {
isSearchFocused.set(id, false);
};
const SearchInput = searchInput || DefaultSearchInput;
return (
<SearchInput
count={count}
value={value}
onChange={e => {
const target = e.target as HTMLInputElement;
e.preventDefault();
setValue(target.value);
}}
inputRef={inputRef}
onChange={handleChange}
onBlur={handleBlur}
/>
);
});

View File

@@ -0,0 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* eslint-disable import/no-extraneous-dependencies */
import { styled } from '@superset-ui/core';
import { Select } from 'antd';
import { SearchOption } from '../../types';
// Fixed-width column selector rendered to the left of the search input.
const StyledSelect = styled(Select)`
width: 120px;
margin-right: 8px;
`;

interface SearchSelectDropdownProps {
/** The currently selected search column value */
value?: string;
/** Callback triggered when a new search column is selected */
onChange: (searchCol: string) => void;
/** Available search column options to populate the dropdown */
searchOptions: SearchOption[];
}

/**
 * Dropdown used with server-side search to choose which column the search
 * text is matched against. Displays the first available option when no
 * value has been chosen yet.
 */
function SearchSelectDropdown({
value,
onChange,
searchOptions,
}: SearchSelectDropdownProps) {
return (
<StyledSelect
className="search-select"
value={value || (searchOptions?.[0]?.value ?? '')}
options={searchOptions}
onChange={onChange}
/>
);
}

export default SearchSelectDropdown;

View File

@@ -115,3 +115,11 @@ declare module 'react-table' {
extends UseTableHooks<D>,
UseSortByHooks<D> {}
}
// Chart-owned state persisted in the dataMask for server-side pagination,
// sorting and searching of the table chart.
// Exported because it is imported by name elsewhere
// (e.g. `import { TableOwnState } from '../types/react-table'` in
// DataTable/utils/externalAPIs) — without `export` that import fails.
export interface TableOwnState {
  currentPage?: number;
  pageSize?: number;
  sortColumn?: string;
  sortOrder?: 'asc' | 'desc';
  searchText?: string;
}

View File

@@ -18,6 +18,7 @@
*/
import { SetDataMaskHook } from '@superset-ui/core';
import { TableOwnState } from '../types/react-table';
export const updateExternalFormData = (
setDataMask: SetDataMaskHook = () => {},
@@ -30,3 +31,11 @@ export const updateExternalFormData = (
pageSize,
},
});
/**
 * Persist a new chart-owned state (pagination / sort / search) into the
 * data mask so it survives re-renders and is visible to buildQuery.
 */
export function updateTableOwnState(
  setDataMask: SetDataMaskHook = () => {},
  modifiedOwnState: TableOwnState,
) {
  setDataMask({ ownState: modifiedOwnState });
}

View File

@@ -24,6 +24,7 @@ import {
useState,
MouseEvent,
KeyboardEvent as ReactKeyboardEvent,
useEffect,
} from 'react';
import {
@@ -61,10 +62,12 @@ import {
PlusCircleOutlined,
TableOutlined,
} from '@ant-design/icons';
import { isEmpty } from 'lodash';
import { debounce, isEmpty, isEqual } from 'lodash';
import {
ColorSchemeEnum,
DataColumnMeta,
SearchOption,
SortByItem,
TableChartTransformedProps,
} from './types';
import DataTable, {
@@ -77,7 +80,7 @@ import DataTable, {
import Styles from './Styles';
import { formatColumnValue } from './utils/formatValue';
import { PAGE_SIZE_OPTIONS } from './consts';
import { updateExternalFormData } from './DataTable/utils/externalAPIs';
import { updateTableOwnState } from './DataTable/utils/externalAPIs';
import getScrollBarSize from './DataTable/utils/getScrollBarSize';
type ValueRange = [number, number];
@@ -176,20 +179,26 @@ function SortIcon<D extends object>({ column }: { column: ColumnInstance<D> }) {
return sortIcon;
}
function SearchInput({ count, value, onChange }: SearchInputProps) {
return (
<span className="dt-global-filter">
{t('Search')}{' '}
<input
aria-label={t('Search %s records', count)}
className="form-control input-sm"
placeholder={tn('search.num_records', count)}
value={value}
onChange={onChange}
/>
</span>
);
}
// Search box rendered above the table. Forwards the ref and blur handler
// supplied by the data table so the input can keep focus across
// server-side search re-renders.
const SearchInput = ({
count,
value,
onChange,
onBlur,
inputRef,
}: SearchInputProps) => (
<span className="dt-global-filter">
{t('Search')}{' '}
<input
ref={inputRef}
aria-label={t('Search %s records', count)}
className="form-control input-sm"
placeholder={tn('search.num_records', count)}
value={value}
onChange={onChange}
onBlur={onBlur}
/>
</span>
);
function SelectPageSize({
options,
@@ -267,6 +276,9 @@ export default function TableChart<D extends DataRecord = DataRecord>(
isUsingTimeComparison,
basicColorFormatters,
basicColorColumnFormatters,
hasServerPageLengthChanged,
serverPageLength,
slice_id,
} = props;
const comparisonColumns = [
{ key: 'all', label: t('Display all') },
@@ -679,7 +691,12 @@ export default function TableChart<D extends DataRecord = DataRecord>(
);
const getColumnConfigs = useCallback(
(column: DataColumnMeta, i: number): ColumnWithLooseAccessor<D> => {
(
column: DataColumnMeta,
i: number,
): ColumnWithLooseAccessor<D> & {
columnKey: string;
} => {
const {
key,
label: originalLabel,
@@ -766,6 +783,7 @@ export default function TableChart<D extends DataRecord = DataRecord>(
// must use custom accessor to allow `.` in column names
// typing is incorrect in current version of `@types/react-table`
// so we ask TS not to check.
columnKey: key,
accessor: ((datum: D) => datum[key]) as never,
Cell: ({ value, row }: { value: DataRecordValue; row: Row<D> }) => {
const [isHtml, text] = formatColumnValue(column, value);
@@ -1058,13 +1076,50 @@ export default function TableChart<D extends DataRecord = DataRecord>(
[visibleColumnsMeta, getColumnConfigs],
);
// Columns the user may search against under server pagination; derived
// from the table's alphanumeric-sortable columns.
const [searchOptions, setSearchOptions] = useState<SearchOption[]>([]);

useEffect(() => {
// NOTE(review): the cast below intersects with an *array* of object
// types, which reads oddly — confirm it matches the actual column shape.
const options = (
columns as unknown as ColumnWithLooseAccessor &
{
columnKey: string;
sortType?: string;
}[]
)
.filter(col => col?.sortType === 'alphanumeric')
.map(column => ({
value: column.columnKey,
label: column.columnKey,
}));
// Deep-compare to avoid a set-state loop when the derived options are
// structurally unchanged.
if (!isEqual(options, searchOptions)) {
setSearchOptions(options || []);
}
}, [columns]);
// Persist a page change while preserving the rest of the server-pagination
// state (sortBy / searchColumn / searchText).
const handleServerPaginationChange = useCallback(
  (pageNumber: number, pageSize: number) => {
    const modifiedOwnState = {
      ...serverPaginationData,
      currentPage: pageNumber,
      pageSize,
    };
    updateTableOwnState(setDataMask, modifiedOwnState);
  },
  // serverPaginationData is read above and must be a dependency: without
  // it the callback closes over a stale snapshot and a page change can
  // silently drop concurrent sort/search state.
  [setDataMask, serverPaginationData],
);
// On mount only: if the configured server page length differs from the
// persisted one, reset to the first page with the new page size.
useEffect(() => {
if (hasServerPageLengthChanged) {
const modifiedOwnState = {
...serverPaginationData,
currentPage: 0,
pageSize: serverPageLength,
};
updateTableOwnState(setDataMask, modifiedOwnState);
}
// NOTE(review): deps intentionally empty so this runs once per mount —
// confirm hasServerPageLengthChanged cannot change mid-lifecycle.
}, []);
const handleSizeChange = useCallback(
({ width, height }: { width: number; height: number }) => {
setTableSize({ width, height });
@@ -1100,6 +1155,42 @@ export default function TableChart<D extends DataRecord = DataRecord>(
const { width: widthFromState, height: heightFromState } = tableSize;
// Persist a sort change into the chart-owned state. Sorting is delegated
// to the backend only when server pagination is enabled.
const handleSortByChange = useCallback(
  (sortBy: SortByItem[]) => {
    if (!serverPagination) return;
    const modifiedOwnState = {
      ...serverPaginationData,
      sortBy,
    };
    updateTableOwnState(setDataMask, modifiedOwnState);
  },
  // serverPaginationData is read above and must be a dependency: omitting
  // it leaves the callback holding a stale snapshot, so a sort could wipe
  // concurrent page/search state.
  [setDataMask, serverPagination, serverPaginationData],
);
// Commit a new server-side search: defaults the search column to the
// first available option and resets to the first page.
const handleSearch = (searchText: string) => {
const modifiedOwnState = {
...(serverPaginationData || {}),
searchColumn:
serverPaginationData?.searchColumn || searchOptions[0]?.value,
searchText,
currentPage: 0, // Reset to first page when searching
};
updateTableOwnState(setDataMask, modifiedOwnState);
};

// NOTE(review): debounce(...) is re-created on every render, so rapid
// re-renders can reset the 800ms window or invoke a stale closure —
// consider memoizing (useMemo/useCallback). Confirm before changing.
const debouncedSearch = debounce(handleSearch, 800);
// Switch the column that server-side search matches against, clearing any
// existing search text; no-op when the column is unchanged.
const handleChangeSearchCol = (searchCol: string) => {
  const currentCol = serverPaginationData?.searchColumn;
  if (isEqual(searchCol, currentCol)) {
    return;
  }
  updateTableOwnState(setDataMask, {
    ...(serverPaginationData || {}),
    searchColumn: searchCol,
    searchText: '',
  });
};
return (
<Styles>
<DataTable<D>
@@ -1115,6 +1206,9 @@ export default function TableChart<D extends DataRecord = DataRecord>(
serverPagination={serverPagination}
onServerPaginationChange={handleServerPaginationChange}
onColumnOrderChange={() => setColumnOrderToggle(!columnOrderToggle)}
initialSearchText={serverPaginationData?.searchText || ''}
sortByFromParent={serverPaginationData?.sortBy || []}
searchInputId={`${slice_id}-search`}
// 9 page items in > 340px works well even for 100+ pages
maxPageItemCount={width > 340 ? 9 : 7}
noResults={getNoResultsMessage}
@@ -1128,6 +1222,11 @@ export default function TableChart<D extends DataRecord = DataRecord>(
renderTimeComparisonDropdown={
isUsingTimeComparison ? renderTimeComparisonDropdown : undefined
}
handleSortByChange={handleSortByChange}
onSearchColChange={handleChangeSearchCol}
manualSearch={serverPagination}
onSearchChange={debouncedSearch}
searchOptions={searchOptions}
/>
</Styles>
);

View File

@@ -22,6 +22,7 @@ import {
ensureIsArray,
getMetricLabel,
isPhysicalColumn,
QueryFormOrderBy,
QueryMode,
QueryObject,
removeDuplicates,
@@ -34,7 +35,7 @@ import {
} from '@superset-ui/chart-controls';
import { isEmpty } from 'lodash';
import { TableChartFormData } from './types';
import { updateExternalFormData } from './DataTable/utils/externalAPIs';
import { updateTableOwnState } from './DataTable/utils/externalAPIs';
/**
* Infer query mode from form data. If `all_columns` is set, then raw records mode,
@@ -191,18 +192,40 @@ const buildQuery: BuildQuery<TableChartFormData> = (
const moreProps: Partial<QueryObject> = {};
const ownState = options?.ownState ?? {};
if (formDataCopy.server_pagination) {
moreProps.row_limit =
ownState.pageSize ?? formDataCopy.server_page_length;
moreProps.row_offset =
(ownState.currentPage ?? 0) * (ownState.pageSize ?? 0);
// Build Query flag to check if its for either download as csv, excel or json
const isDownloadQuery =
['csv', 'xlsx'].includes(formData?.result_format || '') ||
(formData?.result_format === 'json' &&
formData?.result_type === 'results');
if (isDownloadQuery) {
moreProps.row_limit = Number(formDataCopy.row_limit) || 0;
moreProps.row_offset = 0;
}
if (!isDownloadQuery && formDataCopy.server_pagination) {
const pageSize = ownState.pageSize ?? formDataCopy.server_page_length;
const currentPage = ownState.currentPage ?? 0;
moreProps.row_limit = pageSize;
moreProps.row_offset = currentPage * pageSize;
}
// getting sort by in case of server pagination from own state
let sortByFromOwnState: QueryFormOrderBy[] | undefined;
if (Array.isArray(ownState?.sortBy) && ownState?.sortBy.length > 0) {
const sortByItem = ownState?.sortBy[0];
sortByFromOwnState = [[sortByItem?.key, !sortByItem?.desc]];
}
let queryObject = {
...baseQueryObject,
columns,
extras,
orderby,
orderby:
formData.server_pagination && sortByFromOwnState
? sortByFromOwnState
: orderby,
metrics,
post_processing: postProcessing,
time_offsets: timeOffsets,
@@ -216,11 +239,12 @@ const buildQuery: BuildQuery<TableChartFormData> = (
JSON.stringify(queryObject.filters)
) {
queryObject = { ...queryObject, row_offset: 0 };
updateExternalFormData(
options?.hooks?.setDataMask,
0,
queryObject.row_limit ?? 0,
);
const modifiedOwnState = {
...(options?.ownState || {}),
currentPage: 0,
pageSize: queryObject.row_limit ?? 0,
};
updateTableOwnState(options?.hooks?.setDataMask, modifiedOwnState);
}
// Because we use same buildQuery for all table on the page we need split them by id
options?.hooks?.setCachedChanges({
@@ -252,12 +276,32 @@ const buildQuery: BuildQuery<TableChartFormData> = (
}
if (formData.server_pagination) {
// Add search filter if search text exists
if (ownState.searchText && ownState?.searchColumn) {
queryObject = {
...queryObject,
filters: [
...(queryObject.filters || []),
{
col: ownState?.searchColumn,
op: 'ILIKE',
val: `${ownState.searchText}%`,
},
],
};
}
}
// Now since row limit control is always visible even
// in case of server pagination
// we must use row limit from form data
if (formData.server_pagination && !isDownloadQuery) {
return [
{ ...queryObject },
{
...queryObject,
time_offsets: [],
row_limit: 0,
row_limit: Number(formData?.row_limit) ?? 0,
row_offset: 0,
post_processing: [],
is_rowcount: true,

View File

@@ -28,7 +28,10 @@ import {
ControlStateMapping,
D3_TIME_FORMAT_OPTIONS,
Dataset,
DEFAULT_MAX_ROW,
DEFAULT_MAX_ROW_TABLE_SERVER,
defineSavedMetrics,
formatSelectOptions,
getStandardizedControls,
QueryModeLabel,
sections,
@@ -40,11 +43,14 @@ import {
getMetricLabel,
isAdhocColumn,
isPhysicalColumn,
legacyValidateInteger,
QueryFormColumn,
QueryFormMetric,
QueryMode,
SMART_DATE_ID,
t,
validateMaxValue,
validateServerPagination,
} from '@superset-ui/core';
import { isEmpty, last } from 'lodash';
@@ -188,6 +194,15 @@ const processComparisonColumns = (columns: any[], suffix: string) =>
})
.flat();
/*
  Options for row limit control.
  These are the selectable choices for the table chart's "Row limit"
  control; the actual upper bound is enforced separately by the control's
  validators (TABLE_VIZ_MAX_ROW_SERVER / SQL_MAX_ROW).
*/
export const ROW_LIMIT_OPTIONS_TABLE = [
  10, 50, 100, 250, 500, 1000, 5000, 10000, 50000, 100000, 150000, 200000,
  250000, 300000, 350000, 400000, 450000, 500000,
];
const config: ControlPanelConfig = {
controlPanelSections: [
{
@@ -342,14 +357,6 @@ const config: ControlPanelConfig = {
},
],
[
{
name: 'row_limit',
override: {
default: 1000,
visibility: ({ controls }: ControlPanelsContainerProps) =>
!controls?.server_pagination?.value,
},
},
{
name: 'server_page_length',
config: {
@@ -364,6 +371,47 @@ const config: ControlPanelConfig = {
},
},
],
[
{
name: 'row_limit',
config: {
type: 'SelectControl',
freeForm: true,
label: t('Row limit'),
clearable: false,
mapStateToProps: state => ({
maxValue: state?.common?.conf?.TABLE_VIZ_MAX_ROW_SERVER,
server_pagination: state?.form_data?.server_pagination,
maxValueWithoutServerPagination:
state?.common?.conf?.SQL_MAX_ROW,
}),
validators: [
legacyValidateInteger,
(v, state) =>
validateMaxValue(
v,
state?.maxValue || DEFAULT_MAX_ROW_TABLE_SERVER,
),
(v, state) =>
validateServerPagination(
v,
state?.server_pagination,
state?.maxValueWithoutServerPagination || DEFAULT_MAX_ROW,
),
],
// Re-run the validations when this control's value changes
validationDependancies: ['server_pagination'],
default: 10000,
choices: formatSelectOptions(ROW_LIMIT_OPTIONS_TABLE),
description: t(
'Limits the number of the rows that are computed in the query that is the source of the data used for this chart.',
),
},
override: {
default: 1000,
},
},
],
[
{
name: 'order_desc',

View File

@@ -90,6 +90,15 @@ const processDataRecords = memoizeOne(function processDataRecords(
return data;
});
// Create a map to store cached values per slice.
// Keyed by slice_id; holds the last seen server page length (to detect
// control-panel changes) and the last non-empty column metadata (reused
// when a later query returns no columns).
// NOTE(review): module-level and never evicted — entries persist for the
// lifetime of the page; confirm this is acceptable for dashboards with
// many table charts.
const sliceCache = new Map<
  number,
  {
    cachedServerLength: number;
    passedColumns?: DataColumnMeta[];
  }
>();
const calculateDifferences = (
originalValue: number,
comparisonValue: number,
@@ -480,6 +489,7 @@ const transformProps = (
comparison_color_enabled: comparisonColorEnabled = false,
comparison_color_scheme: comparisonColorScheme = ColorSchemeEnum.Green,
comparison_type,
slice_id,
} = formData;
const isUsingTimeComparison =
!isEmpty(time_compare) &&
@@ -675,6 +685,26 @@ const transformProps = (
conditionalFormatting,
);
// Get cached values for this slice
const cachedValues = sliceCache.get(slice_id);
let hasServerPageLengthChanged = false;
if (
cachedValues?.cachedServerLength !== undefined &&
cachedValues.cachedServerLength !== serverPageLength
) {
hasServerPageLengthChanged = true;
}
// Update cache with new values
sliceCache.set(slice_id, {
cachedServerLength: serverPageLength,
passedColumns:
Array.isArray(passedColumns) && passedColumns?.length > 0
? passedColumns
: cachedValues?.passedColumns,
});
const startDateOffset = chartProps.rawFormData?.start_date_offset;
return {
height,
@@ -682,7 +712,10 @@ const transformProps = (
isRawRecords: queryMode === QueryMode.Raw,
data: passedData,
totals,
columns: passedColumns,
columns:
Array.isArray(passedColumns) && passedColumns?.length > 0
? passedColumns
: cachedValues?.passedColumns || [],
serverPagination,
metrics,
percentMetrics,
@@ -697,7 +730,9 @@ const transformProps = (
includeSearch,
rowCount,
pageSize: serverPagination
? serverPageLength
? serverPaginationData?.pageSize
? serverPaginationData?.pageSize
: serverPageLength
: getPageSize(pageLength, data.length, columns.length),
filters: filterState.filters,
emitCrossFilters,
@@ -711,6 +746,9 @@ const transformProps = (
basicColorFormatters,
startDateOffset,
basicColorColumnFormatters,
hasServerPageLengthChanged,
serverPageLength,
slice_id,
};
};

View File

@@ -114,13 +114,32 @@ export type BasicColorFormatterType = {
mainArrow: string;
};
export type SortByItem = {
id: string;
key: string;
desc?: boolean;
};
export type SearchOption = {
value: string;
label: string;
};
export interface ServerPaginationData {
pageSize?: number;
currentPage?: number;
sortBy?: SortByItem[];
searchText?: string;
searchColumn?: string;
}
export interface TableChartTransformedProps<D extends DataRecord = DataRecord> {
timeGrain?: TimeGranularity;
height: number;
width: number;
rowCount?: number;
serverPagination: boolean;
serverPaginationData: { pageSize?: number; currentPage?: number };
serverPaginationData: ServerPaginationData;
setDataMask: SetDataMaskHook;
isRawRecords?: boolean;
data: D[];
@@ -152,6 +171,11 @@ export interface TableChartTransformedProps<D extends DataRecord = DataRecord> {
basicColorFormatters?: { [Key: string]: BasicColorFormatterType }[];
basicColorColumnFormatters?: { [Key: string]: BasicColorFormatterType }[];
startDateOffset?: string;
// For explore page to reset the server Pagination data
// if server page length is changed from control panel
hasServerPageLengthChanged: boolean;
serverPageLength: number;
slice_id: number;
}
export enum ColorSchemeEnum {

View File

@@ -327,6 +327,10 @@ class ChartRenderer extends Component {
?.behaviors.find(behavior => behavior === Behavior.DrillToDetail)
? { inContextMenu: this.state.inContextMenu }
: {};
// By pass no result component when server pagination is enabled & the table has a backend search query
const bypassNoResult = !(
formData?.server_pagination && (ownState?.searchText?.length || 0) > 0
);
return (
<>
@@ -367,6 +371,7 @@ class ChartRenderer extends Component {
postTransformProps={postTransformProps}
emitCrossFilters={emitCrossFilters}
legendState={this.state.legendState}
enableNoResults={bypassNoResult}
{...drillToDetailProps}
/>
</div>

View File

@@ -34,6 +34,7 @@ import {
t,
withTheme,
getClientErrorObject,
getExtensionsRegistry,
} from '@superset-ui/core';
import { Select, AsyncSelect, Row, Col } from 'src/components';
import { FormLabel } from 'src/components/Form';
@@ -53,10 +54,15 @@ import SpatialControl from 'src/explore/components/controls/SpatialControl';
import withToasts from 'src/components/MessageToasts/withToasts';
import { Icons } from 'src/components/Icons';
import CurrencyControl from 'src/explore/components/controls/CurrencyControl';
import { executeQuery, resetDatabaseState } from 'src/database/actions';
import { connect } from 'react-redux';
import CollectionTable from './CollectionTable';
import Fieldset from './Fieldset';
import Field from './Field';
import { fetchSyncedColumns, updateColumns } from './utils';
import FilterableTable from '../FilterableTable';
const extensionsRegistry = getExtensionsRegistry();
const DatasourceContainer = styled.div`
.change-warning {
@@ -586,6 +592,8 @@ function OwnersSelector({ datasource, onChange }) {
/>
);
}
const ResultTable =
extensionsRegistry.get('sqleditor.extension.resultTable') ?? FilterableTable;
class DatasourceEditor extends PureComponent {
constructor(props) {
@@ -698,6 +706,23 @@ class DatasourceEditor extends PureComponent {
this.validate(this.onChange);
}
async onQueryRun() {
this.props.runQuery({
client_id: this.props.clientId,
database_id: this.state.datasource.database.id,
json: true,
runAsync: false,
catalog: this.state.datasource.catalog,
schema: this.state.datasource.schema,
sql: this.state.datasource.sql,
tmp_table_name: '',
select_as_cta: false,
ctas_method: 'TABLE',
queryLimit: 25,
expand_data: true,
});
}
tableChangeAndSyncMetadata() {
this.validate(() => {
this.syncMetadata();
@@ -1078,14 +1103,62 @@ class DatasourceEditor extends PureComponent {
<TextAreaControl
language="sql"
offerEditInModal={false}
minLines={20}
minLines={10}
maxLines={Infinity}
readOnly={!this.state.isEditMode}
resize="both"
tooltipOptions={sqlTooltipOptions}
/>
}
additionalControl={
<div
css={css`
position: absolute;
right: 0;
top: 0;
z-index: 2;
`}
>
<Button
css={css`
align-self: flex-end;
height: 24px;
padding-left: 6px;
padding-right: 6px;
`}
size="small"
buttonStyle="primary"
onClick={() => {
this.onQueryRun();
}}
>
<Icons.CaretRightFilled
iconSize="s"
css={theme => ({
color: theme.colors.grayscale.light5,
})}
/>
</Button>
</div>
}
errorMessage={
this.props.database?.error && t('Error executing query.')
}
/>
{this.props.database?.queryResult && (
<ResultTable
data={this.props.database.queryResult.data}
queryId={this.props.database.queryResult.query.id}
orderedColumnKeys={this.props.database.queryResult.columns.map(
col => col.column_name,
)}
height={100}
expandedColumns={
this.props.database.queryResult.expandedColumns
}
allowHTML
/>
)}
</>
)}
</div>
@@ -1466,6 +1539,10 @@ class DatasourceEditor extends PureComponent {
</DatasourceContainer>
);
}
componentWillUnmount() {
this.props.resetQuery();
}
}
DatasourceEditor.defaultProps = defaultProps;
@@ -1473,4 +1550,14 @@ DatasourceEditor.propTypes = propTypes;
const DataSourceComponent = withTheme(DatasourceEditor);
export default withToasts(DataSourceComponent);
const mapDispatchToProps = dispatch => ({
runQuery: payload => dispatch(executeQuery(payload)),
resetQuery: () => dispatch(resetDatabaseState()),
});
const mapStateToProps = state => ({
test: state.queryApi,
database: state.database,
});
export default withToasts(
connect(mapStateToProps, mapDispatchToProps)(DataSourceComponent),
);

View File

@@ -120,71 +120,83 @@ const DatasourceModal: FunctionComponent<DatasourceModalProps> = ({
const [isEditing, setIsEditing] = useState<boolean>(false);
const dialog = useRef<any>(null);
const [modal, contextHolder] = Modal.useModal();
const buildPayload = (datasource: Record<string, any>) => ({
table_name: datasource.table_name,
database_id: datasource.database?.id,
sql: datasource.sql,
filter_select_enabled: datasource.filter_select_enabled,
fetch_values_predicate: datasource.fetch_values_predicate,
schema:
datasource.tableSelector?.schema ||
datasource.databaseSelector?.schema ||
datasource.schema,
description: datasource.description,
main_dttm_col: datasource.main_dttm_col,
normalize_columns: datasource.normalize_columns,
always_filter_main_dttm: datasource.always_filter_main_dttm,
offset: datasource.offset,
default_endpoint: datasource.default_endpoint,
cache_timeout:
datasource.cache_timeout === '' ? null : datasource.cache_timeout,
is_sqllab_view: datasource.is_sqllab_view,
template_params: datasource.template_params,
extra: datasource.extra,
is_managed_externally: datasource.is_managed_externally,
external_url: datasource.external_url,
metrics: datasource?.metrics?.map((metric: DatasetObject['metrics'][0]) => {
const metricBody: any = {
expression: metric.expression,
description: metric.description,
metric_name: metric.metric_name,
metric_type: metric.metric_type,
d3format: metric.d3format || null,
currency: !isDefined(metric.currency)
? null
: JSON.stringify(metric.currency),
verbose_name: metric.verbose_name,
warning_text: metric.warning_text,
uuid: metric.uuid,
extra: buildExtraJsonObject(metric),
};
if (!Number.isNaN(Number(metric.id))) {
metricBody.id = metric.id;
}
return metricBody;
}),
columns: datasource?.columns?.map(
(column: DatasetObject['columns'][0]) => ({
id: typeof column.id === 'number' ? column.id : undefined,
column_name: column.column_name,
type: column.type,
advanced_data_type: column.advanced_data_type,
verbose_name: column.verbose_name,
description: column.description,
expression: column.expression,
filterable: column.filterable,
groupby: column.groupby,
is_active: column.is_active,
is_dttm: column.is_dttm,
python_date_format: column.python_date_format || null,
uuid: column.uuid,
extra: buildExtraJsonObject(column),
}),
),
owners: datasource.owners.map(
(o: Record<string, number>) => o.value || o.id,
),
});
/**
 * Build the dataset-update request body from the editor's datasource state.
 *
 * Normalizations applied:
 * - schema is resolved from the table/database selectors before falling back
 *   to the datasource's own schema;
 * - an empty-string cache_timeout is sent as null;
 * - metric currency is JSON-stringified (or null when undefined);
 * - metric/column ids are included only when numeric, so rows added in the
 *   editor (non-numeric temp ids) are treated as new by the backend;
 * - owners are flattened to bare ids;
 * - catalog is included only when the database allows multiple catalogs.
 */
const buildPayload = (datasource: Record<string, any>) => {
  const payload: Record<string, any> = {
    table_name: datasource.table_name,
    database_id: datasource.database?.id,
    sql: datasource.sql,
    filter_select_enabled: datasource.filter_select_enabled,
    fetch_values_predicate: datasource.fetch_values_predicate,
    schema:
      datasource.tableSelector?.schema ||
      datasource.databaseSelector?.schema ||
      datasource.schema,
    description: datasource.description,
    main_dttm_col: datasource.main_dttm_col,
    normalize_columns: datasource.normalize_columns,
    always_filter_main_dttm: datasource.always_filter_main_dttm,
    offset: datasource.offset,
    default_endpoint: datasource.default_endpoint,
    cache_timeout:
      datasource.cache_timeout === '' ? null : datasource.cache_timeout,
    is_sqllab_view: datasource.is_sqllab_view,
    template_params: datasource.template_params,
    extra: datasource.extra,
    is_managed_externally: datasource.is_managed_externally,
    external_url: datasource.external_url,
    metrics: datasource?.metrics?.map(
      (metric: DatasetObject['metrics'][0]) => {
        const metricBody: any = {
          expression: metric.expression,
          description: metric.description,
          metric_name: metric.metric_name,
          metric_type: metric.metric_type,
          d3format: metric.d3format || null,
          currency: !isDefined(metric.currency)
            ? null
            : JSON.stringify(metric.currency),
          verbose_name: metric.verbose_name,
          warning_text: metric.warning_text,
          uuid: metric.uuid,
          extra: buildExtraJsonObject(metric),
        };
        // Only persist the id when it is numeric (an existing DB row);
        // editor-generated temp ids are dropped so the backend inserts.
        if (!Number.isNaN(Number(metric.id))) {
          metricBody.id = metric.id;
        }
        return metricBody;
      },
    ),
    columns: datasource?.columns?.map(
      (column: DatasetObject['columns'][0]) => ({
        id: typeof column.id === 'number' ? column.id : undefined,
        column_name: column.column_name,
        type: column.type,
        advanced_data_type: column.advanced_data_type,
        verbose_name: column.verbose_name,
        description: column.description,
        expression: column.expression,
        filterable: column.filterable,
        groupby: column.groupby,
        is_active: column.is_active,
        is_dttm: column.is_dttm,
        python_date_format: column.python_date_format || null,
        uuid: column.uuid,
        extra: buildExtraJsonObject(column),
      }),
    ),
    owners: datasource.owners.map(
      (o: Record<string, number>) => o.value || o.id,
    ),
  };
  // Handle catalog based on database's allow_multi_catalog setting
  // If multi-catalog is disabled, don't include catalog in payload
  // The backend will use the default catalog
  // If multi-catalog is enabled, include the selected catalog
  if (datasource.database?.allow_multi_catalog) {
    payload.catalog = datasource.catalog;
  }
  return payload;
};
const onConfirmSave = async () => {
// Pull out extra fields into the extra object
setIsSaving(true);

View File

@@ -29,13 +29,20 @@ const defaultProps = {
onChange: jest.fn(),
compact: false,
inline: false,
additionalControl: (
<input type="button" data-test="mock-text-aditional-control" />
),
};
test('should render', () => {
const { container } = render(<Field {...defaultProps} />);
expect(container).toBeInTheDocument();
});
// Fixed typo in test title ("aditional" -> "additional").
test('should render with additional control', () => {
  const { getByTestId } = render(<Field {...defaultProps} />);
  // NOTE: the test id keeps the original "aditional" spelling because it
  // must match the data-test attribute declared in defaultProps above.
  const additionalControl = getByTestId('mock-text-aditional-control');
  expect(additionalControl).toBeInTheDocument();
});
test('should call onChange', () => {
const { getByTestId } = render(<Field {...defaultProps} />);
const textArea = getByTestId('mock-text-control');
@@ -47,3 +54,9 @@ test('should render compact', () => {
render(<Field {...defaultProps} compact />);
expect(screen.queryByText(defaultProps.description)).not.toBeInTheDocument();
});
// Fixed typo in test title ("shiuld" -> "should").
test('should render error message', () => {
  const { getByText } = render(
    <Field {...defaultProps} errorMessage="error message" />,
  );
  expect(getByText('error message')).toBeInTheDocument();
});

View File

@@ -21,6 +21,7 @@ import { useCallback, ReactNode, ReactElement, cloneElement } from 'react';
import { css, SupersetTheme } from '@superset-ui/core';
import { Tooltip } from 'src/components/Tooltip';
import { FormItem, FormLabel } from 'src/components/Form';
import { Icons } from 'src/components/Icons';
const formItemInlineCss = css`
.ant-form-item-control-input-content {
@@ -28,16 +29,17 @@ const formItemInlineCss = css`
flex-direction: row;
}
`;
interface FieldProps<V> {
fieldKey: string;
value?: V;
label: string;
description?: ReactNode;
control: ReactElement;
additionalControl?: ReactElement;
onChange: (fieldKey: string, newValue: V) => void;
compact: boolean;
inline: boolean;
errorMessage?: string;
}
export default function Field<V>({
@@ -46,9 +48,11 @@ export default function Field<V>({
label,
description = null,
control,
additionalControl,
onChange = () => {},
compact = false,
inline,
errorMessage,
}: FieldProps<V>) {
const onControlChange = useCallback(
newValue => {
@@ -62,32 +66,51 @@ export default function Field<V>({
onChange: onControlChange,
});
return (
<FormItem
label={
<FormLabel className="m-r-5">
{label || fieldKey}
{compact && description && (
<Tooltip id="field-descr" placement="right" title={description}>
{/* TODO: Remove fa-icon */}
{/* eslint-disable-next-line icons/no-fa-icons-usage */}
<i className="fa fa-info-circle m-l-5" />
</Tooltip>
)}
</FormLabel>
<div
css={
additionalControl &&
css`
position: relative;
`
}
css={inline && formItemInlineCss}
>
{hookedControl}
{!compact && description && (
{additionalControl}
<FormItem
label={
<FormLabel className="m-r-5">
{label || fieldKey}
{compact && description && (
<Tooltip id="field-descr" placement="right" title={description}>
<Icons.InfoCircleFilled iconSize="s" className="m-l-5" />
</Tooltip>
)}
</FormLabel>
}
css={inline && formItemInlineCss}
>
{hookedControl}
{!compact && description && (
<div
css={(theme: SupersetTheme) => ({
color: theme.colors.grayscale.base,
[inline ? 'marginLeft' : 'marginTop']: theme.gridUnit,
})}
>
{description}
</div>
)}
</FormItem>
{errorMessage && (
<div
css={(theme: SupersetTheme) => ({
color: theme.colors.grayscale.base,
[inline ? 'marginLeft' : 'marginTop']: theme.gridUnit,
color: theme.colors.error.base,
marginTop: -16,
fontSize: theme.typography.sizes.s,
})}
>
{description}
{errorMessage}
</div>
)}
</FormItem>
</div>
);
}

View File

@@ -34,6 +34,7 @@ import {
CaretDownOutlined,
CaretLeftOutlined,
CaretRightOutlined,
CaretRightFilled,
CalendarOutlined,
CheckOutlined,
CheckCircleOutlined,
@@ -134,6 +135,7 @@ const AntdIcons = {
CaretDownOutlined,
CaretLeftOutlined,
CaretRightOutlined,
CaretRightFilled,
CalendarOutlined,
CheckOutlined,
CheckCircleOutlined,

View File

@@ -51,6 +51,7 @@ import WithPopoverMenu from 'src/dashboard/components/menu/WithPopoverMenu';
import { componentShape } from 'src/dashboard/util/propShapes';
import backgroundStyleOptions from 'src/dashboard/util/backgroundStyleOptions';
import { BACKGROUND_TRANSPARENT } from 'src/dashboard/util/constants';
import { isEmbedded } from 'src/dashboard/util/isEmbedded';
import { EMPTY_CONTAINER_Z_INDEX } from 'src/dashboard/constants';
import { isCurrentUserBot } from 'src/utils/isBot';
import { useDebouncedEffect } from '../../../explore/exploreUtils';
@@ -188,7 +189,10 @@ const Row = props => {
observerDisabler = new IntersectionObserver(
([entry]) => {
if (!entry.isIntersecting && isComponentVisibleRef.current) {
setIsInView(false);
// Reference: https://www.w3.org/TR/intersection-observer/#dom-intersectionobserver-rootmargin
if (!isEmbedded()) {
setIsInView(false);
}
}
},
{

View File

@@ -0,0 +1,26 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Detect whether the app is running inside another page (iframe/embed).
 * Cross-origin frames throw on `window.top` access, which itself proves
 * we are framed, so the catch branch reports embedded as well.
 */
export const isEmbedded = () => {
  try {
    const framed = window.self !== window.top;
    const hasFrameElement = window.frameElement !== null;
    return framed || hasFrameElement;
  } catch (e) {
    return true;
  }
};

View File

@@ -0,0 +1,68 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { makeApi } from '@superset-ui/core';
import { ThunkDispatch } from 'redux-thunk';
import { AnyAction } from 'redux';
import { QueryExecutePayload, QueryExecuteResponse } from './types';
// Typed API client for the SQL Lab execution endpoint; used by the
// `executeQuery` thunk below to run ad-hoc SQL from the dataset editor.
export const executeQueryApi = makeApi<
  QueryExecutePayload,
  QueryExecuteResponse
>({
  method: 'POST',
  endpoint: '/api/v1/sqllab/execute',
});
/** Action creator: toggle the ad-hoc query loading indicator. */
export function setQueryIsLoading(isLoading: boolean) {
  const action = { type: 'SET_QUERY_IS_LOADING', payload: isLoading };
  return action;
}
/** Action creator: store a successful query-execution response. */
export function setQueryResult(queryResult: QueryExecuteResponse) {
  return { payload: queryResult, type: 'SET_QUERY_RESULT' };
}
/** Action creator: reset the ad-hoc query slice to its initial state. */
export function resetDatabaseState() {
  const type = 'RESET_DATABASE_STATE';
  return { type };
}
/** Action creator: record a query-execution error message. */
export function setQueryError(error: string) {
  return { payload: error, type: 'SET_QUERY_ERROR' };
}
/**
 * Thunk: execute `payload` against the SQL Lab endpoint and store the
 * outcome in the `database` slice. The loading flag is always cleared in
 * `finally`, whether the request resolves or rejects.
 */
export function executeQuery(payload: QueryExecutePayload) {
  return async function (dispatch: ThunkDispatch<any, undefined, AnyAction>) {
    try {
      dispatch(setQueryIsLoading(true));
      const result = await executeQueryApi(payload);
      dispatch(setQueryResult(result));
    } catch (error) {
      // Rejections are not guaranteed to be Error instances (the API client
      // can reject with a Response-like object), so don't assume `.message`
      // exists — fall back to a stringified form so the reducer always
      // receives a string.
      const message =
        error instanceof Error
          ? error.message
          : String(error?.message ?? error);
      dispatch(setQueryError(message));
    } finally {
      dispatch(setQueryIsLoading(false));
    }
  };
}

View File

@@ -0,0 +1,56 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import type { QueryAdhocState } from './types';
// Empty slice: nothing loading, no SQL, no result, no error.
const initialState: QueryAdhocState = {
  isLoading: null,
  sql: null,
  queryResult: null,
  error: null,
};

/**
 * Reducer for the dataset editor's ad-hoc "Run SQL" state: tracks the
 * loading flag, the SQL of the last successful run, the result payload,
 * and the last error.
 */
export default function databaseReducer(
  state: QueryAdhocState = initialState,
  action: any,
): QueryAdhocState {
  if (action.type === 'SET_QUERY_IS_LOADING') {
    return { ...state, isLoading: action.payload };
  }
  if (action.type === 'SET_QUERY_RESULT') {
    const queryResult = action.payload;
    return {
      ...state,
      sql: queryResult.query.sql ?? '',
      queryResult,
      error: null,
    };
  }
  if (action.type === 'SET_QUERY_ERROR') {
    // Starts from initialState on purpose: a failed run clears any
    // previously displayed result and SQL.
    return { ...initialState, error: action.payload };
  }
  if (action.type === 'RESET_DATABASE_STATE') {
    return initialState;
  }
  return state;
}

View File

@@ -0,0 +1,57 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Request body for the SQL Lab execute endpoint, as dispatched by the
 * dataset editor's "Run SQL" action.
 */
export interface QueryExecutePayload {
  client_id: string;
  database_id: number;
  json: boolean;
  // Whether to run asynchronously; the dataset editor passes false and
  // waits for the result inline.
  runAsync: boolean;
  catalog: string | null;
  schema: string;
  sql: string;
  tmp_table_name: string;
  select_as_cta: boolean;
  ctas_method: string;
  queryLimit: number;
  expand_data: boolean;
}

/** Column metadata as returned by the SQL Lab execute endpoint. */
export interface Column {
  name: string;
  type: string;
  is_dttm: boolean;
  // NOTE(review): numeric generic-type code from the backend; exact enum
  // semantics are not visible here — confirm against the API schema.
  type_generic: number;
  is_hidden: boolean;
  column_name: string;
}

/** Response payload of the SQL Lab execute endpoint. */
export interface QueryExecuteResponse {
  status: string;
  query_id: string;
  data: any[];
  columns: Column[];
  selected_columns: Column[];
  expanded_columns: Column[];
  // Raw query descriptor; the reducer reads `query.sql` from it.
  query: any;
}

/** Redux state shape for the ad-hoc "Run SQL" slice (`state.database`). */
export interface QueryAdhocState {
  isLoading: boolean | null;
  sql: string | null;
  queryResult: QueryExecuteResponse | null;
  error: string | null;
}

View File

@@ -23,7 +23,7 @@ import ReactDOM from 'react-dom';
import { BrowserRouter as Router, Route } from 'react-router-dom';
import { makeApi, t, logging } from '@superset-ui/core';
import Switchboard from '@superset-ui/switchboard';
import getBootstrapData from 'src/utils/getBootstrapData';
import getBootstrapData, { applicationRoot } from 'src/utils/getBootstrapData';
import setupClient from 'src/setup/setupClient';
import setupPlugins from 'src/setup/setupPlugins';
import { useUiConfig } from 'src/components/UiConfigContext';
@@ -94,7 +94,7 @@ const EmbeddedRoute = () => (
);
const EmbeddedApp = () => (
<Router>
<Router basename={applicationRoot()}>
{/* todo (embedded) remove this line after uuids are deployed */}
<Route path="/dashboard/:idOrSlug/embedded/" component={EmbeddedRoute} />
<Route path="/embedded/:uuid/" component={EmbeddedRoute} />
@@ -187,6 +187,7 @@ function start() {
*/
function setupGuestClient(guestToken: string) {
setupClient({
appRoot: applicationRoot(),
guestToken,
guestTokenHeaderName: bootstrapData.config?.GUEST_TOKEN_HEADER_NAME,
unauthorizedHandler: guestUnauthorizedHandler,

View File

@@ -214,6 +214,52 @@ export default function exploreReducer(state = {}, action) {
currentControlsState = transformed.controlsState;
}
const dependantControls = Object.entries(state.controls)
.filter(
([, item]) =>
Array.isArray(item?.validationDependancies) &&
item.validationDependancies.includes(controlName),
)
.map(([key, item]) => ({
controlState: item,
dependantControlName: key,
}));
let updatedControlStates = {};
if (dependantControls.length > 0) {
const updatedControls = dependantControls.map(
({ controlState, dependantControlName }) => {
// overwrite state form data with current control value as the redux state will not
// have latest action value
const overWrittenState = {
...state,
form_data: {
...state.form_data,
[controlName]: action.value,
},
};
return {
// Re run validation for dependant controls
controlState: getControlStateFromControlConfig(
controlState,
overWrittenState,
controlState?.value,
),
dependantControlName,
};
},
);
updatedControlStates = updatedControls.reduce(
(acc, { controlState, dependantControlName }) => {
acc[dependantControlName] = { ...controlState };
return acc;
},
{},
);
}
return {
...state,
form_data: new_form_data,
@@ -227,6 +273,7 @@ export default function exploreReducer(state = {}, action) {
},
}),
...rerenderedControls,
...updatedControlStates,
},
};
},

View File

@@ -62,6 +62,7 @@ export type DatasetObject = {
filter_select_enabled?: boolean;
fetch_values_predicate?: string;
schema?: string;
catalog?: string;
description: string | null;
main_dttm_col: string;
offset?: number;

View File

@@ -38,7 +38,6 @@ import logger from 'src/middleware/loggerMiddleware';
import saveModal from 'src/explore/reducers/saveModalReducer';
import explore from 'src/explore/reducers/exploreReducer';
import exploreDatasources from 'src/explore/reducers/datasourcesReducer';
import { persistSqlLabStateEnhancer } from 'src/SqlLab/middlewares/persistSqlLabStateEnhancer';
import sqlLabReducer from 'src/SqlLab/reducers/sqlLab';
import getInitialState from 'src/SqlLab/reducers/getInitialState';
@@ -57,6 +56,7 @@ import { AnyDatasourcesAction } from 'src/explore/actions/datasourcesActions';
import { HydrateExplore } from 'src/explore/actions/hydrateExplore';
import getBootstrapData from 'src/utils/getBootstrapData';
import { Dataset } from '@superset-ui/chart-controls';
import databaseReducer from 'src/database/reducers';
// Some reducers don't do anything, and redux is just used to reference the initial "state".
// This may change later, as the client application takes on more responsibilities.
@@ -139,6 +139,7 @@ const reducers = {
reports,
saveModal,
explore,
database: databaseReducer,
};
/* In some cases the jinja template injects two separate React apps into basic.html

View File

@@ -33,6 +33,18 @@ def get_dataset_exist_error_msg(table: Table) -> str:
return _("Dataset %(table)s already exists", table=table)
class MultiCatalogDisabledValidationError(ValidationError):
    """
    Validation error for using a non-default catalog when multi-catalog is disabled
    """

    def __init__(self) -> None:
        # Attach the message to the `catalog` field so API consumers can
        # surface the error next to the offending input.
        super().__init__(
            [_("Only the default catalog is supported for this connection")],
            field_name="catalog",
        )
class DatabaseNotFoundValidationError(ValidationError):
"""
Marshmallow validation error for database does not exist
@@ -42,15 +54,6 @@ class DatabaseNotFoundValidationError(ValidationError):
super().__init__([_("Database does not exist")], field_name="database")
class DatabaseChangeValidationError(ValidationError):
"""
Marshmallow validation error database changes are not allowed on update
"""
def __init__(self) -> None:
super().__init__([_("Database not allowed to change")], field_name="database")
class DatasetExistsValidationError(ValidationError):
"""
Marshmallow validation error for dataset already exists

View File

@@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from collections import Counter
from functools import partial
@@ -26,7 +28,7 @@ from sqlalchemy.exc import SQLAlchemyError
from superset import is_feature_enabled, security_manager
from superset.commands.base import BaseCommand, UpdateMixin
from superset.commands.dataset.exceptions import (
DatabaseChangeValidationError,
DatabaseNotFoundValidationError,
DatasetColumnNotFoundValidationError,
DatasetColumnsDuplicateValidationError,
DatasetColumnsExistsValidationError,
@@ -38,11 +40,13 @@ from superset.commands.dataset.exceptions import (
DatasetMetricsNotFoundValidationError,
DatasetNotFoundError,
DatasetUpdateFailedError,
MultiCatalogDisabledValidationError,
)
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.daos.dataset import DatasetDAO
from superset.datasets.schemas import FolderSchema
from superset.exceptions import SupersetSecurityException
from superset.models.core import Database
from superset.sql_parse import Table
from superset.utils.decorators import on_error, transaction
@@ -86,38 +90,12 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
if not self._model:
raise DatasetNotFoundError()
# Check ownership
# Check permission to update the dataset
try:
security_manager.raise_for_ownership(self._model)
except SupersetSecurityException as ex:
raise DatasetForbiddenError() from ex
database_id = self._properties.get("database")
catalog = self._properties.get("catalog")
if not catalog:
catalog = self._properties["catalog"] = (
self._model.database.get_default_catalog()
)
table = Table(
self._properties.get("table_name"), # type: ignore
self._properties.get("schema"),
catalog,
)
# Validate uniqueness
if not DatasetDAO.validate_update_uniqueness(
self._model.database,
table,
self._model_id,
):
exceptions.append(DatasetExistsValidationError(table))
# Validate/Populate database not allowed to change
if database_id and database_id != self._model:
exceptions.append(DatabaseChangeValidationError())
# Validate/Populate owner
try:
owners = self.compute_owners(
@@ -128,15 +106,68 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
except ValidationError as ex:
exceptions.append(ex)
self._validate_dataset_source(exceptions)
self._validate_semantics(exceptions)
if exceptions:
raise DatasetInvalidError(exceptions=exceptions)
def _validate_dataset_source(self, exceptions: list[ValidationError]) -> None:
    """Validate the dataset's physical source: database, catalog and table.

    Any problem found is appended to ``exceptions`` rather than raised, so
    the caller can report all validation failures at once.
    """
    # we know we have a valid model
    self._model = cast(SqlaTable, self._model)

    # ``database_id`` is popped so the raw id is never applied directly on
    # update; when valid, the resolved ``Database`` object is stored under
    # the ``database`` key instead.
    database_id = self._properties.pop("database_id", None)
    catalog = self._properties.get("catalog")
    new_db_connection: Database | None = None
    # Resolve a requested database-connection change; a missing target
    # connection is a validation error, not a hard failure.
    if database_id and database_id != self._model.database.id:
        if new_db_connection := DatasetDAO.get_database_by_id(database_id):
            self._properties["database"] = new_db_connection
        else:
            exceptions.append(DatabaseNotFoundValidationError())
    # All further checks run against the target connection (the new one if
    # the database is being changed, otherwise the current one).
    db = new_db_connection or self._model.database
    default_catalog = db.get_default_catalog()
    # If multi-catalog is disabled, and catalog provided is not
    # the default one, fail
    if (
        "catalog" in self._properties
        and catalog != default_catalog
        and not db.allow_multi_catalog
    ):
        exceptions.append(MultiCatalogDisabledValidationError())
    # If the DB connection does not support multi-catalog,
    # use the default catalog
    elif not db.allow_multi_catalog:
        catalog = self._properties["catalog"] = default_catalog
    # Fallback to using the previous value if not provided
    elif "catalog" not in self._properties:
        catalog = self._model.catalog
    # Use the incoming schema when provided, otherwise keep the model's
    # current schema ("in" check so an explicit None is honored).
    schema = (
        self._properties["schema"]
        if "schema" in self._properties
        else self._model.schema
    )
    table = Table(
        self._properties.get("table_name", self._model.table_name),
        schema,
        catalog,
    )
    # Validate uniqueness
    if not DatasetDAO.validate_update_uniqueness(
        db,
        table,
        self._model_id,
    ):
        exceptions.append(DatasetExistsValidationError(table))
def _validate_semantics(self, exceptions: list[ValidationError]) -> None:
# we know we have a valid model
self._model = cast(SqlaTable, self._model)
if columns := self._properties.get("columns"):
self._validate_columns(columns, exceptions)

View File

@@ -65,12 +65,23 @@ class QueryContextFactory: # pylint: disable=too-few-public-methods
result_type = result_type or ChartDataResultType.FULL
result_format = result_format or ChartDataResultFormat.JSON
# The server_pagination flag is extracted from form data because the
# row limit is computed differently when server-side pagination is enabled.
# This particular flag server_pagination only exists for table viz type
server_pagination = (
bool(form_data.get("server_pagination")) if form_data else False
)
queries_ = [
self._process_query_object(
datasource_model_instance,
form_data,
self._query_object_factory.create(
result_type, datasource=datasource, **query_obj
result_type,
datasource=datasource,
server_pagination=server_pagination,
**query_obj,
),
)
for query_obj in queries

View File

@@ -57,6 +57,7 @@ class QueryObjectFactory: # pylint: disable=too-few-public-methods
row_limit: int | None = None,
time_range: str | None = None,
time_shift: str | None = None,
server_pagination: bool | None = None,
**kwargs: Any,
) -> QueryObject:
datasource_model_instance = None
@@ -64,7 +65,12 @@ class QueryObjectFactory: # pylint: disable=too-few-public-methods
datasource_model_instance = self._convert_to_model(datasource)
processed_extras = self._process_extras(extras)
result_type = kwargs.setdefault("result_type", parent_result_type)
row_limit = self._process_row_limit(row_limit, result_type)
# Process row limit taking server pagination into account
row_limit = self._process_row_limit(
row_limit, result_type, server_pagination=server_pagination
)
processed_time_range = self._process_time_range(
time_range, kwargs.get("filters"), kwargs.get("columns")
)
@@ -96,14 +102,27 @@ class QueryObjectFactory: # pylint: disable=too-few-public-methods
return extras
def _process_row_limit(
self, row_limit: int | None, result_type: ChartDataResultType
self,
row_limit: int | None,
result_type: ChartDataResultType,
server_pagination: bool | None = None,
) -> int:
"""Process row limit taking into account server pagination.
:param row_limit: The requested row limit
:param result_type: The type of result being processed
:param server_pagination: Whether server-side pagination is enabled
:return: The processed row limit
"""
default_row_limit = (
self._config["SAMPLES_ROW_LIMIT"]
if result_type == ChartDataResultType.SAMPLES
else self._config["ROW_LIMIT"]
)
return apply_max_row_limit(row_limit or default_row_limit)
return apply_max_row_limit(
row_limit or default_row_limit,
server_pagination=server_pagination,
)
@staticmethod
def _process_time_range(

View File

@@ -969,6 +969,10 @@ MAPBOX_API_KEY = os.environ.get("MAPBOX_API_KEY", "")
# Maximum number of rows returned for any analytical database query
SQL_MAX_ROW = 100000
# Maximum number of rows for any query with Server Pagination in Table Viz type
TABLE_VIZ_MAX_ROW_SERVER = 500000
# Maximum number of rows displayed in SQL Lab UI
# Is set to avoid out of memory/localstorage issues in browsers. Does not affect
# exported CSVs

View File

@@ -16,7 +16,6 @@
# under the License.
import logging
import pandas as pd
import polyline
from sqlalchemy import inspect, String, Text
@@ -25,7 +24,7 @@ from superset.sql_parse import Table
from superset.utils import json
from ..utils.database import get_example_database
from .helpers import get_example_url, get_table_connector_registry
from .helpers import get_table_connector_registry, read_example_data
logger = logging.getLogger(__name__)
@@ -38,8 +37,9 @@ def load_bart_lines(only_metadata: bool = False, force: bool = False) -> None:
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("bart-lines.json.gz")
df = pd.read_json(url, encoding="latin-1", compression="gzip")
df = read_example_data(
"bart-lines.json.gz", encoding="latin-1", compression="gzip"
)
df["path_json"] = df.path.map(json.dumps)
df["polyline"] = df.path.map(polyline.encode)
del df["path"]

View File

@@ -33,11 +33,11 @@ from superset.utils.core import DatasourceType
from ..utils.database import get_example_database
from .helpers import (
get_example_url,
get_slice_json,
get_table_connector_registry,
merge_slice,
misc_dash_slices,
read_example_data,
update_slice_ids,
)
@@ -57,8 +57,8 @@ def gen_filter(
def load_data(tbl_name: str, database: Database, sample: bool = False) -> None:
url = get_example_url("birth_names2.json.gz")
pdf = pd.read_json(url, compression="gzip")
pdf = read_example_data("birth_names2.json.gz", compression="gzip")
# TODO(bkyryliuk): move load examples data into the pytest fixture
if database.backend == "presto":
pdf.ds = pd.to_datetime(pdf.ds, unit="ms")

View File

@@ -17,7 +17,6 @@
import datetime
import logging
import pandas as pd
from sqlalchemy import BigInteger, Date, inspect, String
from sqlalchemy.sql import column
@@ -29,11 +28,11 @@ from superset.sql_parse import Table
from superset.utils.core import DatasourceType
from .helpers import (
get_example_url,
get_slice_json,
get_table_connector_registry,
merge_slice,
misc_dash_slices,
read_example_data,
)
logger = logging.getLogger(__name__)
@@ -49,8 +48,9 @@ def load_country_map_data(only_metadata: bool = False, force: bool = False) -> N
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("birth_france_data_for_country_map.csv")
data = pd.read_csv(url, encoding="utf-8")
data = read_example_data(
"birth_france_data_for_country_map.csv", encoding="utf-8"
)
data["dttm"] = datetime.datetime.now().date()
data.to_sql(
tbl_name,

View File

@@ -17,7 +17,6 @@
import logging
import textwrap
import pandas as pd
from sqlalchemy import Float, inspect, String
from sqlalchemy.sql import column
@@ -29,11 +28,11 @@ from superset.sql_parse import Table
from superset.utils.core import DatasourceType
from .helpers import (
get_example_url,
get_slice_json,
get_table_connector_registry,
merge_slice,
misc_dash_slices,
read_example_data,
)
logger = logging.getLogger(__name__)
@@ -51,8 +50,7 @@ def load_energy(
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("energy.json.gz")
pdf = pd.read_json(url, compression="gzip")
pdf = read_example_data("energy.json.gz", compression="gzip")
pdf = pdf.head(100) if sample else pdf
pdf.to_sql(
tbl_name,

View File

@@ -23,7 +23,7 @@ import superset.utils.database as database_utils
from superset import db
from superset.sql_parse import Table
from .helpers import get_example_url, get_table_connector_registry
from .helpers import get_table_connector_registry, read_example_data
logger = logging.getLogger(__name__)
@@ -37,12 +37,14 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
flight_data_url = get_example_url("flight_data.csv.gz")
pdf = pd.read_csv(flight_data_url, encoding="latin-1", compression="gzip")
pdf = read_example_data(
"flight_data.csv.gz", encoding="latin-1", compression="gzip"
)
# Loading airports info to join and get lat/long
airports_url = get_example_url("airports.csv.gz")
airports = pd.read_csv(airports_url, encoding="latin-1", compression="gzip")
airports = read_example_data(
"airports.csv.gz", encoding="latin-1", compression="gzip"
)
airports = airports.set_index("IATA_CODE")
pdf[ # pylint: disable=unsupported-assignment-operation,useless-suppression

View File

@@ -43,7 +43,11 @@ Environment knobs
from __future__ import annotations
import os
import time
from typing import Any
from urllib.error import HTTPError
import pandas as pd
from superset import app, db
from superset.connectors.sqla.models import SqlaTable
@@ -119,3 +123,33 @@ def get_example_url(filepath: str) -> str:
paths like ``datasets/examples/slack/messages.csv``.
"""
return f"{BASE_URL}{filepath}"
def read_example_data(
    filepath: str,
    max_attempts: int = 5,
    wait_seconds: float = 60,
    **kwargs: Any,
) -> pd.DataFrame:
    """Load CSV or JSON example data from the example-data mirror.

    Retries with exponential backoff when the mirror answers HTTP 429
    ("too many requests"), which happens when CI loads many examples in
    quick succession.

    :param filepath: path relative to the mirror base URL, e.g.
        ``bart-lines.json.gz``; ``.json``/``.json.gz`` files are read with
        ``pd.read_json``, everything else with ``pd.read_csv``
    :param max_attempts: total number of attempts before giving up
    :param wait_seconds: base wait; attempt *n* sleeps
        ``wait_seconds * 2 ** (n - 1)`` seconds
    :param kwargs: forwarded verbatim to the pandas reader
    :raises ValueError: if ``max_attempts`` is less than 1
    :raises HTTPError: on any non-429 error, or if 429 persists after
        the final attempt
    """
    if max_attempts < 1:
        # Guard: the original silently returned None here, breaking the
        # declared ``pd.DataFrame`` return type.
        raise ValueError("max_attempts must be at least 1")
    # get_example_url is defined in this module; no self-import needed.
    url = get_example_url(filepath)
    is_json = filepath.endswith((".json", ".json.gz"))
    for attempt in range(1, max_attempts + 1):
        try:
            if is_json:
                return pd.read_json(url, **kwargs)
            return pd.read_csv(url, **kwargs)
        except HTTPError as ex:
            # Only rate-limiting is retried; anything else (and a 429 on
            # the final attempt) propagates so callers see the real error.
            if ex.code != 429 or attempt >= max_attempts:
                raise
            sleep_time = wait_seconds * (2 ** (attempt - 1))
            # Fix: the original passed three f-strings to print(), which
            # joins arguments with a space on top of the trailing spaces
            # already in each piece; emit one concatenated message.
            print(
                f"HTTP 429 received from {url}. "
                f"Retrying in {sleep_time:.1f}s "
                f"(attempt {attempt}/{max_attempts})..."
            )
            time.sleep(sleep_time)
    raise AssertionError("unreachable")  # loop always returns or raises

View File

@@ -19,7 +19,6 @@ import logging
import random
import geohash
import pandas as pd
from sqlalchemy import DateTime, Float, inspect, String
import superset.utils.database as database_utils
@@ -29,11 +28,11 @@ from superset.sql_parse import Table
from superset.utils.core import DatasourceType
from .helpers import (
get_example_url,
get_slice_json,
get_table_connector_registry,
merge_slice,
misc_dash_slices,
read_example_data,
)
logger = logging.getLogger(__name__)
@@ -48,8 +47,9 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("san_francisco.csv.gz")
pdf = pd.read_csv(url, encoding="utf-8", compression="gzip")
pdf = read_example_data(
"san_francisco.csv.gz", encoding="utf-8", compression="gzip"
)
start = datetime.datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0
)

View File

@@ -27,11 +27,11 @@ from superset.utils.core import DatasourceType
from ..utils.database import get_example_database
from .helpers import (
get_example_url,
get_slice_json,
get_table_connector_registry,
merge_slice,
misc_dash_slices,
read_example_data,
)
logger = logging.getLogger(__name__)
@@ -48,8 +48,10 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("multiformat_time_series.json.gz")
pdf = pd.read_json(url, compression="gzip")
pdf = read_example_data(
"multiformat_time_series.json.gz", compression="gzip"
)
# TODO(bkyryliuk): move load examples data into the pytest fixture
if database.backend == "presto":
pdf.ds = pd.to_datetime(pdf.ds, unit="s")

View File

@@ -17,7 +17,6 @@
import logging
import pandas as pd
from sqlalchemy import inspect, String, Text
import superset.utils.database as database_utils
@@ -25,7 +24,7 @@ from superset import db
from superset.sql_parse import Table
from superset.utils import json
from .helpers import get_example_url, get_table_connector_registry
from .helpers import get_table_connector_registry, read_example_data
logger = logging.getLogger(__name__)
@@ -38,8 +37,7 @@ def load_paris_iris_geojson(only_metadata: bool = False, force: bool = False) ->
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("paris_iris.json.gz")
df = pd.read_json(url, compression="gzip")
df = read_example_data("paris_iris.json.gz", compression="gzip")
df["features"] = df.features.map(json.dumps)
df.to_sql(

View File

@@ -26,10 +26,10 @@ from superset.sql_parse import Table
from superset.utils.core import DatasourceType
from .helpers import (
get_example_url,
get_slice_json,
get_table_connector_registry,
merge_slice,
read_example_data,
)
logger = logging.getLogger(__name__)
@@ -46,8 +46,7 @@ def load_random_time_series_data(
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("random_time_series.json.gz")
pdf = pd.read_json(url, compression="gzip")
pdf = read_example_data("random_time_series.json.gz", compression="gzip")
if database.backend == "presto":
pdf.ds = pd.to_datetime(pdf.ds, unit="s")
pdf.ds = pdf.ds.dt.strftime("%Y-%m-%d %H:%M%:%S")

View File

@@ -17,7 +17,6 @@
import logging
import pandas as pd
from sqlalchemy import BigInteger, Float, inspect, Text
import superset.utils.database as database_utils
@@ -25,7 +24,7 @@ from superset import db
from superset.sql_parse import Table
from superset.utils import json
from .helpers import get_example_url, get_table_connector_registry
from .helpers import get_table_connector_registry, read_example_data
logger = logging.getLogger(__name__)
@@ -40,8 +39,7 @@ def load_sf_population_polygons(
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("sf_population.json.gz")
df = pd.read_json(url, compression="gzip")
df = read_example_data("sf_population.json.gz", compression="gzip")
df["contour"] = df.contour.map(json.dumps)
df.to_sql(

View File

@@ -25,12 +25,12 @@ import superset.utils.database
from superset import app, db
from superset.connectors.sqla.models import BaseDatasource, SqlMetric
from superset.examples.helpers import (
get_example_url,
get_examples_folder,
get_slice_json,
get_table_connector_registry,
merge_slice,
misc_dash_slices,
read_example_data,
update_slice_ids,
)
from superset.models.dashboard import Dashboard
@@ -55,8 +55,7 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals
table_exists = database.has_table(Table(tbl_name, schema))
if not only_metadata and (not table_exists or force):
url = get_example_url("countries.json.gz")
pdf = pd.read_json(url, compression="gzip")
pdf = read_example_data("countries.json.gz", compression="gzip")
pdf.columns = [col.replace(".", "_") for col in pdf.columns]
if database.backend == "presto":
pdf.year = pd.to_datetime(pdf.year)

View File

@@ -123,7 +123,7 @@ msgid ""
msgstr ""
"來確保字符的表達順序與時間順序一致的標準。如果時間戳格式不符合 ISO 8601 "
"標準,则需要定義表達式和類型,以便將字符串轉换為日期或時間戳。注意:當前不支持時區。如果時間以 epoch 格式儲存,請输入 `epoch_s` "
"or `epoch_ms` 。如果没有指定任何模式,我們可以通過額外的参數在每個資料庫/列名級别上使用可選的預設值。"
"or `epoch_ms` 。如果没有指定任何模式,我們可以通過額外的参數在每個 資料庫/欄位 名稱級別上使用可選的預設值。"
#, fuzzy
msgid " to add calculated columns"
@@ -155,11 +155,11 @@ msgstr "% 計算"
#, fuzzy, python-format
msgid "% of parent"
msgstr "父類"
msgstr "% 父類"
#, fuzzy, python-format
msgid "% of total"
msgstr "顯示總計"
msgstr "% 顯示總計"
#, python-format
msgid "%(dialect)s cannot be used as a data source for security reasons."
@@ -278,7 +278,7 @@ msgstr "%s 個選項"
#, fuzzy, python-format
msgid "%s recipients"
msgstr "最近"
msgstr "%s 最近"
#, fuzzy, python-format
msgid "%s row"
@@ -1174,7 +1174,7 @@ msgstr "所有"
#, fuzzy, python-format
msgid "All %s hidden columns"
msgstr "表的列"
msgstr "所有 %s 隱藏的欄位"
msgid "All Text"
msgstr "所有文本"
@@ -1625,7 +1625,7 @@ msgstr "應用的過濾器 (%d)"
#, fuzzy, python-format
msgid "Applied filters (%s)"
msgstr "應用的過濾器 (%d)"
msgstr "應用的過濾器 (%s)"
#, fuzzy, python-format
msgid "Applied filters: %s"
@@ -3615,7 +3615,7 @@ msgstr "無法從结果後端檢索數據。您需要重新運行原始查詢。
#, fuzzy, python-format
msgid "Data for %s"
msgstr "給 JS 的額外數據"
msgstr "給 %s 的數據"
#, fuzzy
msgid "Data imported"
@@ -3729,7 +3729,7 @@ msgstr "數據集"
#, fuzzy, python-format
msgid "Dataset %(table)s already exists"
msgstr "數據集 %(name)s 已存在"
msgstr "數據集 %(table)s 已存在"
#, fuzzy
msgid "Dataset Name"
@@ -4471,7 +4471,7 @@ msgstr "複製"
#, fuzzy, python-format
msgid "Duplicate role %(name)s"
msgstr "重复的列名%(columns)s"
msgstr "重复的角色 %(name)s"
msgid "Duplicate tab"
msgstr "複製選項卡"
@@ -5948,7 +5948,7 @@ msgstr "描述要發送給你的報告"
#, fuzzy, python-format
msgid "Include description to be sent with %s"
msgstr "描述要發送給你的報告"
msgstr ""
msgid "Include series name as an axis"
msgstr "包括系列名稱作為軸"
@@ -9393,7 +9393,7 @@ msgstr "搜索"
#, fuzzy, python-format
msgid "Search %s records"
msgstr "搜索 / 過濾"
msgstr "搜索 %s 紀錄"
msgid "Search / Filter"
msgstr "搜索 / 過濾"
@@ -9913,7 +9913,7 @@ msgstr "顯示"
#, fuzzy, python-format
msgid "Show %s entries"
msgstr "顯示指標"
msgstr "顯示 %s 指標"
msgid "Show Bubbles"
msgstr "顯示氣泡"
@@ -10659,7 +10659,7 @@ msgstr "表名"
msgid ""
"Table [%(table)s] could not be found, please double check your database "
"connection, schema, and table name"
msgstr "找不到 [%(table_name)s] 表請仔細檢查您的資料庫連接、Schema 和 表名"
msgstr "找不到 [%(table)s] 資料請仔細檢查您的資料庫連接、Schema 和 資料表名"
#, fuzzy
msgid "Table actions"
@@ -12968,7 +12968,7 @@ msgstr ""
#, fuzzy, python-format
msgid "Waiting on %s"
msgstr "顯示 %s 個 總計 %s "
msgstr "等待 %s "
#, fuzzy
msgid "Waiting on database..."
@@ -14252,14 +14252,14 @@ msgid ""
" Lab tabs using this database open. Are you sure you want to continue? "
"Deleting the database will break those objects."
msgstr ""
"資料庫 %s 已經關聯了 %s 圖表和 %s 看板,並且用戶在該資料庫上打開了 %s 個 SQL "
"編輯器選項卡。確定要繼續嗎?删除資料庫將破這些對象。"
"已經關聯了 %s 圖表和 %s 看板,並且用戶在該資料庫上打開了 %s 個 SQL "
"編輯器選項卡。確定要繼續嗎?删除資料庫將破這些對象。"
#, fuzzy, python-format
msgid ""
"is linked to %s charts that appear on %s dashboards. Are you sure you "
"want to continue? Deleting the dataset will break those objects."
msgstr "數據集 %s 已經链接到 %s 圖表和 %s 看板内。確定要繼續嗎?删除數據集將破這些對象。"
msgstr "已經關聯到 %s 圖表和 %s 看板内。確定要繼續嗎?删除數據集將破這些對象。"
msgid "is not"
msgstr ""

View File

@@ -109,7 +109,7 @@ def memoized_func(key: str, cache: Cache = cache_manager.cache) -> Callable[...,
force means whether to force refresh the cache and is treated as False by default,
except force = True is passed to the decorated function.
timeout of cache is set to 600 seconds by default,
timeout of cache is set to CACHE_DEFAULT_TIMEOUT seconds by default,
except cache_timeout = {timeout in seconds} is passed to the decorated function.
:param key: a callable function that takes function arguments and returns
@@ -121,7 +121,9 @@ def memoized_func(key: str, cache: Cache = cache_manager.cache) -> Callable[...,
def wrapped_f(*args: Any, **kwargs: Any) -> Any:
should_cache = kwargs.pop("cache", True)
force = kwargs.pop("force", False)
cache_timeout = kwargs.pop("cache_timeout", 0)
cache_timeout = kwargs.pop(
"cache_timeout", app.config["CACHE_DEFAULT_TIMEOUT"]
)
if not should_cache:
return f(*args, **kwargs)

View File

@@ -1741,24 +1741,30 @@ def parse_boolean_string(bool_str: str | None) -> bool:
def apply_max_row_limit(
limit: int,
max_limit: int | None = None,
server_pagination: bool | None = None,
) -> int:
"""
Override row limit if max global limit is defined
Override row limit based on server pagination setting
:param limit: requested row limit
:param max_limit: Maximum allowed row limit
:param server_pagination: whether server-side pagination
is enabled, defaults to None
:return: Capped row limit
>>> apply_max_row_limit(100000, 10)
10
>>> apply_max_row_limit(10, 100000)
10
>>> apply_max_row_limit(0, 10000)
10000
>>> apply_max_row_limit(600000, server_pagination=True) # Server pagination
500000
>>> apply_max_row_limit(600000, server_pagination=False) # No pagination
100000
>>> apply_max_row_limit(5000) # No server_pagination specified
5000
>>> apply_max_row_limit(0) # Zero returns default max limit
100000
"""
if max_limit is None:
max_limit = current_app.config["SQL_MAX_ROW"]
max_limit = (
current_app.config["TABLE_VIZ_MAX_ROW_SERVER"]
if server_pagination
else current_app.config["SQL_MAX_ROW"]
)
if limit != 0:
return min(max_limit, limit)
return max_limit

View File

@@ -109,6 +109,7 @@ FRONTEND_CONF_KEYS = (
"JWT_ACCESS_CSRF_COOKIE_NAME",
"SQLLAB_QUERY_RESULT_TIMEOUT",
"SYNC_DB_PERMISSIONS_IN_ASYNC_MODE",
"TABLE_VIZ_MAX_ROW_SERVER",
)
logger = logging.getLogger(__name__)

View File

@@ -14,11 +14,10 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
from __future__ import annotations
import unittest
from io import BytesIO
from typing import Optional
from unittest.mock import ANY, patch
from zipfile import is_zipfile, ZipFile
@@ -70,14 +69,26 @@ from tests.integration_tests.fixtures.importexport import (
class TestDatasetApi(SupersetTestCase):
fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")
items_to_delete: list[SqlaTable | Database | TableColumn] = []
def setUp(self):
self.items_to_delete = []
def tearDown(self):
for item in self.items_to_delete:
db.session.delete(item)
db.session.commit()
super().tearDown()
@staticmethod
def insert_dataset(
table_name: str,
owners: list[int],
database: Database,
sql: Optional[str] = None,
schema: Optional[str] = None,
sql: str | None = None,
schema: str | None = None,
catalog: str | None = None,
fetch_metadata: bool = True,
) -> SqlaTable:
obj_owners = list() # noqa: C408
for owner in owners:
@@ -89,10 +100,12 @@ class TestDatasetApi(SupersetTestCase):
owners=obj_owners,
database=database,
sql=sql,
catalog=catalog,
)
db.session.add(table)
db.session.commit()
table.fetch_metadata()
if fetch_metadata:
table.fetch_metadata()
return table
def insert_default_dataset(self):
@@ -100,6 +113,16 @@ class TestDatasetApi(SupersetTestCase):
"ab_permission", [self.get_user("admin").id], get_main_database()
)
def insert_database(self, name: str, allow_multi_catalog: bool = False) -> Database:
db_connection = Database(
database_name=name,
sqlalchemy_uri=get_example_database().sqlalchemy_uri,
extra=('{"allow_multi_catalog": true}' if allow_multi_catalog else "{}"),
)
db.session.add(db_connection)
db.session.commit()
return db_connection
def get_fixture_datasets(self) -> list[SqlaTable]:
return (
db.session.query(SqlaTable)
@@ -315,8 +338,7 @@ class TestDatasetApi(SupersetTestCase):
# revert gamma permission
gamma_role.permissions.remove(main_db_pvm)
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_get_dataset_related_database_gamma(self):
"""
@@ -480,8 +502,7 @@ class TestDatasetApi(SupersetTestCase):
],
}
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_get_dataset_render_jinja_exceptions(self):
"""
@@ -547,8 +568,7 @@ class TestDatasetApi(SupersetTestCase):
== "Unable to render expression from dataset calculated column."
)
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_get_dataset_distinct_schema(self):
"""
@@ -618,9 +638,7 @@ class TestDatasetApi(SupersetTestCase):
},
)
for dataset in datasets:
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = datasets
def test_get_dataset_distinct_not_allowed(self):
"""
@@ -647,8 +665,7 @@ class TestDatasetApi(SupersetTestCase):
assert response["count"] == 0
assert response["result"] == []
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_get_dataset_info(self):
"""
@@ -722,8 +739,7 @@ class TestDatasetApi(SupersetTestCase):
)
assert columns[0].expression == "COUNT(*)"
db.session.delete(model)
db.session.commit()
self.items_to_delete = [model]
def test_create_dataset_item_normalize(self):
"""
@@ -749,8 +765,7 @@ class TestDatasetApi(SupersetTestCase):
assert model.database_id == table_data["database"]
assert model.normalize_columns is True
db.session.delete(model)
db.session.commit()
self.items_to_delete = [model]
def test_create_dataset_item_gamma(self):
"""
@@ -791,8 +806,7 @@ class TestDatasetApi(SupersetTestCase):
model = db.session.query(SqlaTable).get(data.get("id"))
assert admin in model.owners
assert alpha in model.owners
db.session.delete(model)
db.session.commit()
self.items_to_delete = [model]
def test_create_dataset_item_owners_invalid(self):
"""
@@ -839,8 +853,7 @@ class TestDatasetApi(SupersetTestCase):
model = db.session.query(SqlaTable).get(data.get("id"))
assert admin in model.owners
assert alpha in model.owners
db.session.delete(model)
db.session.commit()
self.items_to_delete = [model]
@unittest.skip("test is failing stochastically")
def test_create_dataset_same_name_different_schema(self):
@@ -991,8 +1004,7 @@ class TestDatasetApi(SupersetTestCase):
model = db.session.query(SqlaTable).get(dataset.id)
assert model.owners == current_owners
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_clear_owner_list(self):
"""
@@ -1008,8 +1020,7 @@ class TestDatasetApi(SupersetTestCase):
model = db.session.query(SqlaTable).get(dataset.id)
assert model.owners == []
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_populate_owner(self):
"""
@@ -1026,8 +1037,7 @@ class TestDatasetApi(SupersetTestCase):
model = db.session.query(SqlaTable).get(dataset.id)
assert model.owners == [gamma]
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_item(self):
"""
@@ -1045,8 +1055,7 @@ class TestDatasetApi(SupersetTestCase):
assert model.description == dataset_data["description"]
assert model.owners == current_owners
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_item_w_override_columns(self):
"""
@@ -1082,8 +1091,7 @@ class TestDatasetApi(SupersetTestCase):
col.advanced_data_type for col in columns
]
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_item_w_override_columns_same_columns(self):
"""
@@ -1130,8 +1138,7 @@ class TestDatasetApi(SupersetTestCase):
columns = db.session.query(TableColumn).filter_by(table_id=dataset.id).all()
assert len(columns) != prev_col_len
assert len(columns) == 3
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_create_column_and_metric(self):
"""
@@ -1226,8 +1233,7 @@ class TestDatasetApi(SupersetTestCase):
assert metrics[1].warning_text == new_metric_data["warning_text"]
assert str(metrics[1].uuid) == new_metric_data["uuid"]
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_delete_column(self):
"""
@@ -1276,8 +1282,7 @@ class TestDatasetApi(SupersetTestCase):
assert columns[1].column_name == "name"
assert len(columns) == 2
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_update_column(self):
"""
@@ -1313,8 +1318,7 @@ class TestDatasetApi(SupersetTestCase):
assert columns[0].groupby is False
assert columns[0].filterable is False
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_delete_metric(self):
"""
@@ -1357,8 +1361,7 @@ class TestDatasetApi(SupersetTestCase):
metrics = metrics_query.all()
assert len(metrics) == 1
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_update_column_uniqueness(self):
"""
@@ -1378,8 +1381,7 @@ class TestDatasetApi(SupersetTestCase):
"message": {"columns": ["One or more columns already exist"]}
}
assert data == expected_result
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_update_metric_uniqueness(self):
"""
@@ -1399,8 +1401,7 @@ class TestDatasetApi(SupersetTestCase):
"message": {"metrics": ["One or more metrics already exist"]}
}
assert data == expected_result
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_update_column_duplicate(self):
"""
@@ -1425,8 +1426,7 @@ class TestDatasetApi(SupersetTestCase):
"message": {"columns": ["One or more columns are duplicated"]}
}
assert data == expected_result
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_update_metric_duplicate(self):
"""
@@ -1451,8 +1451,7 @@ class TestDatasetApi(SupersetTestCase):
"message": {"metrics": ["One or more metrics are duplicated"]}
}
assert data == expected_result
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_item_gamma(self):
"""
@@ -1465,8 +1464,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}"
rv = self.client.put(uri, json=table_data)
assert rv.status_code == 403
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_dataset_get_list_no_username(self):
"""
@@ -1491,8 +1489,7 @@ class TestDatasetApi(SupersetTestCase):
assert current_dataset["description"] == "changed_description"
assert "username" not in current_dataset["changed_by"].keys()
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_dataset_get_no_username(self):
"""
@@ -1512,8 +1509,7 @@ class TestDatasetApi(SupersetTestCase):
assert res["description"] == "changed_description"
assert "username" not in res["changed_by"].keys()
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_item_not_owned(self):
"""
@@ -1526,8 +1522,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, table_data, "put")
assert rv.status_code == 403
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_item_owners_invalid(self):
"""
@@ -1540,8 +1535,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, table_data, "put")
assert rv.status_code == 422
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
@patch("superset.daos.dataset.DatasetDAO.update")
def test_update_dataset_sqlalchemy_error(self, mock_dao_update):
@@ -1560,8 +1554,7 @@ class TestDatasetApi(SupersetTestCase):
assert rv.status_code == 422
assert data == {"message": "Dataset could not be updated."}
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
@with_feature_flags(DATASET_FOLDERS=True)
def test_update_dataset_add_folders(self):
@@ -1607,7 +1600,6 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, dataset_data, "put")
print(rv.data.decode("utf-8"))
assert rv.status_code == 200
model = db.session.query(SqlaTable).get(dataset.id)
@@ -1643,8 +1635,229 @@ class TestDatasetApi(SupersetTestCase):
},
]
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_update_dataset_change_db_connection_multi_catalog_disabled(self) -> None:
"""
Dataset API: Test changing the DB connection powering the dataset
to a connection with multi-catalog disabled: the dataset's catalog
must be reset to the new connection's default catalog.
"""
self.login(ADMIN_USERNAME)
# Two throwaway DB connections; the dataset starts on the first one.
db_connection = self.insert_database("db_connection")
new_db_connection = self.insert_database("new_db_connection")
dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=db_connection,
sql="select 1 as one",
schema="test_schema",
catalog="old_default_catalog",
# Skip metadata fetch: the table does not physically exist.
fetch_metadata=False,
)
# Patch the new connection so it reports a known default catalog.
with patch.object(
new_db_connection, "get_default_catalog", return_value="new_default_catalog"
):
payload = {"database_id": new_db_connection.id}
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
assert rv.status_code == 200
model = db.session.query(SqlaTable).get(dataset.id)
assert model.database == new_db_connection
# Catalog should have been updated to new connection's default catalog
assert model.catalog == "new_default_catalog"
# Register for teardown cleanup instead of a manual delete/commit.
self.items_to_delete = [dataset, db_connection, new_db_connection]
def test_update_dataset_change_db_connection_multi_catalog_enabled(self) -> None:
"""
Dataset API: Test changing the DB connection powering the dataset
to a connection with multi-catalog enabled: the dataset keeps its
current catalog, since no catalog was provided in the payload.
"""
self.login(ADMIN_USERNAME)
db_connection = self.insert_database("db_connection")
# The target connection supports multiple catalogs.
new_db_connection = self.insert_database(
"new_db_connection", allow_multi_catalog=True
)
dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=db_connection,
sql="select 1 as one",
schema="test_schema",
catalog="old_default_catalog",
# Skip metadata fetch: the table does not physically exist.
fetch_metadata=False,
)
# Patch the new connection so it reports a known default catalog.
with patch.object(
new_db_connection, "get_default_catalog", return_value="default"
):
payload = {"database_id": new_db_connection.id}
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
assert rv.status_code == 200
model = db.session.query(SqlaTable).get(dataset.id)
assert model.database == new_db_connection
# Catalog was not changed as not provided and multi-catalog is enabled
assert model.catalog == "old_default_catalog"
# Register for teardown cleanup instead of a manual delete/commit.
self.items_to_delete = [dataset, db_connection, new_db_connection]
def test_update_dataset_change_db_connection_not_found(self) -> None:
"""
Dataset API: Test changing the DB connection powering the dataset
to an invalid DB connection.
"""
self.login(ADMIN_USERNAME)
dataset = self.insert_default_dataset()
# 1500 is an ID that is not expected to exist in the test DB.
payload = {"database_id": 1500}
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
response = json.loads(rv.data.decode("utf-8"))
# An unknown database id is reported as a validation error (422),
# not as a 404 on the dataset endpoint.
assert rv.status_code == 422
assert response["message"] == {"database": ["Database does not exist"]}
# Register for teardown cleanup instead of a manual delete/commit.
self.items_to_delete = [dataset]
def test_update_dataset_change_catalog(self) -> None:
"""
Dataset API: Test changing the catalog associated with the dataset.
Succeeds because the DB connection allows multiple catalogs.
"""
self.login(ADMIN_USERNAME)
# Multi-catalog must be enabled for a non-default catalog to be accepted.
db_connection = self.insert_database("db_connection", allow_multi_catalog=True)
dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=db_connection,
sql="select 1 as one",
schema="test_schema",
catalog="test_catalog",
# Skip metadata fetch: the table does not physically exist.
fetch_metadata=False,
)
# The connection's default catalog differs from the one being set.
with patch.object(db_connection, "get_default_catalog", return_value="default"):
payload = {"catalog": "other_catalog"}
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
assert rv.status_code == 200
model = db.session.query(SqlaTable).get(dataset.id)
assert model.catalog == "other_catalog"
# Register for teardown cleanup instead of a manual delete/commit.
self.items_to_delete = [dataset, db_connection]
def test_update_dataset_change_catalog_not_allowed(self) -> None:
"""
Dataset API: Test changing the catalog associated with the dataset fails
when multi-catalog is disabled on the DB connection.
"""
self.login(ADMIN_USERNAME)
# Multi-catalog is not enabled on this connection.
db_connection = self.insert_database("db_connection")
dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=db_connection,
sql="select 1 as one",
schema="test_schema",
catalog="test_catalog",
# Skip metadata fetch: the table does not physically exist.
fetch_metadata=False,
)
# "other_catalog" is not the default, so the update must be rejected.
with patch.object(db_connection, "get_default_catalog", return_value="default"):
payload = {"catalog": "other_catalog"}
uri = f"api/v1/dataset/{dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
response = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 422
assert response["message"] == {
"catalog": ["Only the default catalog is supported for this connection"]
}
# Register for teardown cleanup instead of a manual delete/commit.
self.items_to_delete = [dataset, db_connection]
def test_update_dataset_validate_uniqueness(self) -> None:
"""
Dataset API: Test the dataset uniqueness validation takes into
consideration the new database connection: the same (schema, table)
pair is only a conflict within the same connection.
"""
test_db = get_main_database()
if test_db.backend == "sqlite":
# Skip this test for SQLite as it doesn't support multiple
# schemas.
return
self.login(ADMIN_USERNAME)
db_connection = self.insert_database("db_connection")
new_db_connection = self.insert_database("new_db_connection")
# Same table name in two schemas of the first connection...
first_schema_dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=db_connection,
sql="select 1 as one",
schema="first_schema",
fetch_metadata=False,
)
second_schema_dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=db_connection,
sql="select 1 as one",
schema="second_schema",
fetch_metadata=False,
)
# ...and the same name/schema on the second connection.
new_db_conn_dataset = self.insert_dataset(
table_name="test_dataset",
owners=[],
database=new_db_connection,
sql="select 1 as one",
schema="first_schema",
fetch_metadata=False,
)
# Moving the first dataset to second_schema collides with an
# existing dataset on the same connection -> validation error.
with patch.object(
db_connection,
"get_default_catalog",
return_value=None,
):
payload = {"schema": "second_schema"}
uri = f"api/v1/dataset/{first_schema_dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
response = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 422
assert response["message"] == {
"table": ["Dataset second_schema.test_dataset already exists"]
}
# Also moving it to the second connection resolves the collision:
# uniqueness is scoped to the (new) database connection.
with patch.object(
new_db_connection,
"get_default_catalog",
return_value=None,
):
payload["database_id"] = new_db_connection.id
uri = f"api/v1/dataset/{first_schema_dataset.id}"
rv = self.put_assert_metric(uri, payload, "put")
assert rv.status_code == 200
model = db.session.query(SqlaTable).get(first_schema_dataset.id)
assert model.database == new_db_connection
assert model.schema == "second_schema"
# Register for teardown cleanup instead of manual delete/commits.
self.items_to_delete = [
first_schema_dataset,
second_schema_dataset,
new_db_conn_dataset,
new_db_connection,
db_connection,
]
def test_delete_dataset_item(self):
"""
@@ -1674,8 +1887,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}"
rv = self.delete_assert_metric(uri, "delete")
assert rv.status_code == 403
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_delete_dataset_item_not_authorized(self):
"""
@@ -1687,8 +1899,7 @@ class TestDatasetApi(SupersetTestCase):
uri = f"api/v1/dataset/{dataset.id}"
rv = self.client.delete(uri)
assert rv.status_code == 403
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
@patch("superset.daos.dataset.DatasetDAO.delete")
def test_delete_dataset_sqlalchemy_error(self, mock_dao_delete):
@@ -1705,8 +1916,7 @@ class TestDatasetApi(SupersetTestCase):
data = json.loads(rv.data.decode("utf-8"))
assert rv.status_code == 422
assert data == {"message": "Datasets could not be deleted."}
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_column(self):
@@ -1947,8 +2157,7 @@ class TestDatasetApi(SupersetTestCase):
.filter_by(table_id=dataset.id, column_name="id")
.one()
)
db.session.delete(id_column)
db.session.commit()
self.items_to_delete = [id_column]
self.login(ADMIN_USERNAME)
uri = f"api/v1/dataset/{dataset.id}/refresh"
@@ -1961,8 +2170,7 @@ class TestDatasetApi(SupersetTestCase):
.one()
)
assert id_column is not None
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
def test_dataset_item_refresh_not_found(self):
"""
@@ -1987,8 +2195,7 @@ class TestDatasetApi(SupersetTestCase):
rv = self.put_assert_metric(uri, {}, "refresh")
assert rv.status_code == 403
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
@unittest.skip("test is failing stochastically")
def test_export_dataset(self):
@@ -2250,8 +2457,7 @@ class TestDatasetApi(SupersetTestCase):
dataset = (
db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one()
)
db.session.delete(dataset)
db.session.commit()
self.items_to_delete = [dataset]
@patch("superset.commands.database.importers.v1.utils.add_permissions")
def test_import_dataset_overwrite(self, mock_add_permissions):
@@ -2447,8 +2653,7 @@ class TestDatasetApi(SupersetTestCase):
response = json.loads(rv.data.decode("utf-8"))
assert response.get("count") == 1
db.session.delete(table_w_certification)
db.session.commit()
self.items_to_delete = [table_w_certification]
@pytest.mark.usefixtures("create_virtual_datasets")
def test_duplicate_virtual_dataset(self):
@@ -2473,8 +2678,7 @@ class TestDatasetApi(SupersetTestCase):
assert len(new_dataset.columns) == 2
assert new_dataset.columns[0].column_name == "id"
assert new_dataset.columns[1].column_name == "name"
db.session.delete(new_dataset)
db.session.commit()
self.items_to_delete = [new_dataset]
@pytest.mark.usefixtures("create_datasets")
def test_duplicate_physical_dataset(self):
@@ -2604,8 +2808,7 @@ class TestDatasetApi(SupersetTestCase):
assert table.template_params == '{"param": 1}'
assert table.normalize_columns is False
db.session.delete(table)
db.session.commit()
self.items_to_delete = [table]
with examples_db.get_sqla_engine() as engine:
engine.execute("DROP TABLE test_create_sqla_table_api")

View File

@@ -15,78 +15,125 @@
# specific language governing permissions and limitations
# under the License.
from typing import cast
from unittest.mock import MagicMock
from typing import Any, cast
import pytest
from marshmallow import ValidationError
from pytest_mock import MockerFixture
from superset import db
from superset.commands.dataset.exceptions import DatasetInvalidError
from superset.commands.dataset.exceptions import (
DatabaseNotFoundValidationError,
DatasetExistsValidationError,
DatasetForbiddenError,
DatasetInvalidError,
DatasetNotFoundError,
MultiCatalogDisabledValidationError,
)
from superset.commands.dataset.update import UpdateDatasetCommand, validate_folders
from superset.connectors.sqla.models import SqlaTable
from superset.commands.exceptions import OwnersNotFoundValidationError
from superset.datasets.schemas import FolderSchema
from superset.models.core import Database
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetSecurityException
from tests.unit_tests.conftest import with_feature_flags
@pytest.mark.usefixture("session")
def test_update_uniqueness_error(mocker: MockerFixture) -> None:
def test_update_dataset_not_found(mocker: MockerFixture) -> None:
"""
Test uniqueness validation in dataset update command.
Test updating an unexisting ID raises a `DatasetNotFoundError`.
"""
SqlaTable.metadata.create_all(db.session.get_bind())
mock_dataset_dao = mocker.patch("superset.commands.dataset.update.DatasetDAO")
mock_dataset_dao.find_by_id.return_value = None
# First, make sure session is clean
db.session.rollback()
with pytest.raises(DatasetNotFoundError):
UpdateDatasetCommand(1, {"name": "test"}).run()
try:
# Set up test data
database = Database(database_name="my_db", sqlalchemy_uri="sqlite://")
bar = SqlaTable(table_name="bar", schema="foo", database=database)
baz = SqlaTable(table_name="baz", schema="qux", database=database)
db.session.add_all([database, bar, baz])
db.session.commit()
# Set up mocks
mock_g = mocker.patch("superset.security.manager.g")
mock_g.user = MagicMock()
mocker.patch(
"superset.views.base.security_manager.can_access_all_datasources",
return_value=True,
)
mocker.patch(
"superset.commands.dataset.update.security_manager.raise_for_ownership",
return_value=None,
)
mocker.patch.object(UpdateDatasetCommand, "compute_owners", return_value=[])
def test_update_dataset_forbidden(mocker: MockerFixture) -> None:
"""
Test try updating a dataset without permission raises a `DatasetForbiddenError`.
"""
mock_dataset_dao = mocker.patch("superset.commands.dataset.update.DatasetDAO")
mock_dataset_dao.find_by_id.return_value = mocker.MagicMock()
# Run the test that should fail
with pytest.raises(DatasetInvalidError):
UpdateDatasetCommand(
bar.id,
{
"table_name": "baz",
"schema": "qux",
},
).run()
except Exception:
db.session.rollback()
raise
finally:
# Clean up - this will run even if the test fails
try:
db.session.query(SqlaTable).filter(
SqlaTable.table_name.in_(["bar", "baz"]),
SqlaTable.schema.in_(["foo", "qux"]),
).delete(synchronize_session=False)
db.session.query(Database).filter(Database.database_name == "my_db").delete(
synchronize_session=False
mocker.patch(
"superset.commands.dataset.update.security_manager.raise_for_ownership",
side_effect=SupersetSecurityException(
SupersetError(
error_type=SupersetErrorType.MISSING_OWNERSHIP_ERROR,
message="Sample message",
level=ErrorLevel.ERROR,
)
db.session.commit()
except Exception:
db.session.rollback()
),
)
with pytest.raises(DatasetForbiddenError):
UpdateDatasetCommand(1, {"name": "test"}).run()
@pytest.mark.parametrize(
("payload, exception, error_msg"),
[
# Target database does not exist.
(
{"database_id": 2},
DatabaseNotFoundValidationError,
"Database does not exist",
),
# Custom catalog on a connection with multi-catalog disabled.
(
{"catalog": "test"},
MultiCatalogDisabledValidationError,
"Only the default catalog is supported for this connection",
),
# New table/schema pair collides with an existing dataset.
(
{"table_name": "table", "schema": "schema"},
DatasetExistsValidationError,
"Dataset catalog.schema.table already exists",
),
# Owner id that cannot be resolved to a user.
(
{"owners": [1]},
OwnersNotFoundValidationError,
"Owners are invalid",
),
],
)
def test_update_validation_errors(
payload: dict[str, Any],
exception: type[Exception],
error_msg: str,
mocker: MockerFixture,
) -> None:
"""
Test validation errors for the `UpdateDatasetCommand`.

Each case patches the DAO and security layers so that only the
targeted validation fails, then asserts the expected message is
among the exceptions aggregated into the raised `DatasetInvalidError`.
"""
mock_dataset_dao = mocker.patch("superset.commands.dataset.update.DatasetDAO")
# Bypass ownership enforcement; it is exercised by a dedicated test.
mocker.patch(
"superset.commands.dataset.update.security_manager.raise_for_ownership",
)
mocker.patch("superset.commands.utils.security_manager.is_admin", return_value=True)
# get_user_by_id -> None makes any owner id invalid for the owners case.
mocker.patch(
"superset.commands.utils.security_manager.get_user_by_id", return_value=None
)
# The dataset's current connection: single-catalog, default "catalog".
mock_database = mocker.MagicMock()
mock_database.id = 1
mock_database.get_default_catalog.return_value = "catalog"
mock_database.allow_multi_catalog = False
mock_dataset = mocker.MagicMock()
mock_dataset.database = mock_database
mock_dataset.catalog = "catalog"
mock_dataset_dao.find_by_id.return_value = mock_dataset
# Only the database-not-found case should fail the DB lookup.
if exception == DatabaseNotFoundValidationError:
mock_dataset_dao.get_database_by_id.return_value = None
else:
mock_dataset_dao.get_database_by_id.return_value = mock_database
# Only the already-exists case should fail the uniqueness check.
if exception == DatasetExistsValidationError:
mock_dataset_dao.validate_update_uniqueness.return_value = False
else:
mock_dataset_dao.validate_update_uniqueness.return_value = True
with pytest.raises(DatasetInvalidError) as excinfo:
UpdateDatasetCommand(1, payload).run()
# The command aggregates validation errors; check ours is present.
assert any(error_msg in str(exc) for exc in excinfo.value._exceptions)
@with_feature_flags(DATASET_FOLDERS=True)

View File

@@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Optional
from typing import Any
from unittest.mock import Mock
from pytest import fixture # noqa: PT013
@@ -45,9 +45,15 @@ def connector_registry() -> Mock:
return mock
def apply_max_row_limit(limit: int, max_limit: Optional[int] = None) -> int:
if max_limit is None:
max_limit = create_app_config()["SQL_MAX_ROW"]
def apply_max_row_limit(
    limit: int,
    server_pagination: bool | None = None,
) -> int:
    """Clamp *limit* to the configured row-limit ceiling.

    The ceiling comes from ``TABLE_VIZ_MAX_ROW_SERVER`` when server-side
    pagination is in effect, otherwise from ``SQL_MAX_ROW``. A ``limit``
    of ``0`` means "no explicit limit" and yields the ceiling itself.
    """
    config = create_app_config()
    if server_pagination:
        ceiling = config["TABLE_VIZ_MAX_ROW_SERVER"]
    else:
        ceiling = config["SQL_MAX_ROW"]
    return min(ceiling, limit) if limit != 0 else ceiling