Compare commits

..

5 Commits

Author SHA1 Message Date
Evan Rusackas
ac864cc94b Merge branch 'master' into fix/postgresql-interval-chart-rendering 2026-04-25 02:51:48 -04:00
Evan Rusackas
0a2b837c89 fix: address review feedback for INTERVAL type handling
- Extract lambda to named `_normalize_interval` method for testability
- Return None for NULL values to preserve NULL semantics (not 0)
- Exclude bool from numeric branch (bool is subclass of int in Python)
- Return None for unconvertible types to avoid mixed-type columns
- Add tests for zero duration, negative intervals, and bool handling
- Add INTERVAL to column spec test (NUMERIC type)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-04-22 08:53:15 -07:00
Evan Rusackas
d3c562657a feat(postgres): convert INTERVAL to milliseconds for DURATION formatter
Changed INTERVAL values to be converted to milliseconds instead of
seconds, enabling users to use Superset's built-in "DURATION" number
format for human-readable display (e.g., "1d 2h 30m 45s" instead of
raw numeric values like "95445000").

This addresses the review feedback about making interval values more
user-friendly in charts while maintaining numeric operations.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-04-22 08:53:14 -07:00
Evan Rusackas
be90e08f83 feat(postgres): improve INTERVAL type handling for robust chart rendering
- Enhanced mutator to handle multiple PostgreSQL INTERVAL formats
- Added support for timedelta, numeric, None, and string values
- Improved test coverage with comprehensive test cases
- Added documentation explaining the mutator's purpose

Addresses review comments from @korbit-ai and @giftig
2026-04-22 08:53:14 -07:00
Evan Rusackas
4e74dc0250 fix(charts): handle PostgreSQL INTERVAL type in bar and pie charts
PostgreSQL INTERVAL types were causing bar and pie charts to fail rendering when used as metrics. This fix converts INTERVAL values (timedelta objects) to numeric seconds so they can be properly displayed in charts.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2026-04-22 08:53:09 -07:00
25 changed files with 6034 additions and 6059 deletions

View File

@@ -52,7 +52,7 @@ attrs==25.3.0
# referencing
# requests-cache
# trio
authlib==1.6.9
authlib==1.6.7
# via fastmcp
babel==2.17.0
# via

View File

@@ -68,19 +68,21 @@ describe('Add database', () => {
cy.get('input[name="username"]').type('testusername', { force: true });
cy.get('input[name="database"]').type('testdb', { force: true });
cy.get('input[name="password"]').type('testpass', { force: true });
cy.get('body').click(0, 0);
// Wait for all intermediate validation calls to settle, then check the button
cy.getBySel('btn-submit-connection').should('not.be.disabled', {
timeout: 60000,
});
cy.wait('@validateParams', { timeout: 30000 });
cy.getBySel('btn-submit-connection').should('not.be.disabled');
cy.getBySel('btn-submit-connection').click({ force: true });
cy.wait('@createDb', { timeout: 60000 }).then(() => {
cy.contains(
'.ant-form-item-explain-error',
"The hostname provided can't be resolved",
).should('exist');
cy.wait('@validateParams', { timeout: 30000 }).then(() => {
cy.wait('@createDb', { timeout: 60000 }).then(() => {
cy.contains(
'.ant-form-item-explain-error',
"The hostname provided can't be resolved",
).should('exist');
});
});
});
@@ -88,22 +90,29 @@ describe('Add database', () => {
cy.get('.preferred > :nth-child(1)').click();
cy.get('input[name="host"]').type('localhost', { force: true });
cy.get('body').click(0, 0);
cy.wait('@validateParams', { timeout: 30000 });
cy.get('input[name="port"]').type('5430', { force: true });
cy.get('input[name="database"]').type('testdb', { force: true });
cy.get('input[name="username"]').type('testusername', { force: true });
cy.wait('@validateParams', { timeout: 30000 });
cy.get('input[name="password"]').type('testpass', { force: true });
cy.get('body').click(0, 0);
cy.wait('@validateParams');
// Wait for all intermediate validation calls to settle, then check the button
cy.getBySel('btn-submit-connection').should('not.be.disabled', {
timeout: 60000,
});
cy.getBySel('btn-submit-connection').should('not.be.disabled');
cy.getBySel('btn-submit-connection').click({ force: true });
cy.wait('@createDb', { timeout: 60000 }).then(() => {
cy.contains('.ant-form-item-explain-error', 'The port is closed').should(
'exist',
);
cy.wait('@validateParams', { timeout: 30000 }).then(() => {
cy.get('body').click(0, 0);
cy.getBySel('btn-submit-connection').click({ force: true });
cy.wait('@createDb', { timeout: 60000 }).then(() => {
cy.contains(
'.ant-form-item-explain-error',
'The port is closed',
).should('exist');
});
});
});
});

View File

@@ -151,7 +151,7 @@
"use-query-params": "^2.2.2",
"uuid": "^14.0.0",
"xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz",
"yargs": "^18.0.0"
"yargs": "^17.7.2"
},
"devDependencies": {
"@babel/cli": "^7.28.6",
@@ -12769,25 +12769,6 @@
"dev": true,
"license": "ISC"
},
"node_modules/@storybook/test-runner/node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@storybook/test-runner/node_modules/yargs-parser": {
"version": "18.1.3",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
@@ -12812,59 +12793,6 @@
"node": ">=6"
}
},
"node_modules/@storybook/test-runner/node_modules/yargs/node_modules/cliui": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
"dev": true,
"license": "ISC",
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.1",
"wrap-ansi": "^7.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@storybook/test-runner/node_modules/yargs/node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/@storybook/test-runner/node_modules/yargs/node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/@storybook/test-runner/node_modules/yargs/node_modules/yargs-parser": {
"version": "21.1.1",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=12"
}
},
"node_modules/@storybook/test/node_modules/@storybook/instrumenter": {
"version": "8.6.18",
"resolved": "https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.6.18.tgz",
@@ -20468,25 +20396,6 @@
"url": "https://github.com/chalk/supports-color?sponsor=1"
}
},
"node_modules/concurrently/node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/config-chain": {
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
@@ -27102,6 +27011,7 @@
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz",
"integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=18"
},
@@ -30799,25 +30709,6 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/jest-cli/node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/jest-config": {
"version": "30.3.0",
"resolved": "https://registry.npmjs.org/jest-config/-/jest-config-30.3.0.tgz",
@@ -35069,25 +34960,6 @@
"dev": true,
"license": "ISC"
},
"node_modules/lerna/node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/leven": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz",
@@ -38029,25 +37901,6 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/nx/node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/nyc": {
"version": "17.1.0",
"resolved": "https://registry.npmjs.org/nyc/-/nyc-17.1.0.tgz",
@@ -48094,24 +47947,6 @@
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"license": "MIT"
},
"node_modules/typescript-json-schema/node_modules/yargs": {
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"license": "MIT",
"dependencies": {
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
}
},
"node_modules/typewise": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/typewise/-/typewise-1.0.3.tgz",
@@ -50529,20 +50364,21 @@
}
},
"node_modules/yargs": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz",
"integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==",
"version": "17.7.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
"license": "MIT",
"dependencies": {
"cliui": "^9.0.1",
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"string-width": "^7.2.0",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^22.0.0"
"yargs-parser": "^21.1.1"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=23"
"node": ">=12"
}
},
"node_modules/yargs-parser": {
@@ -50554,108 +50390,6 @@
"node": ">=12"
}
},
"node_modules/yargs/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/yargs/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/yargs/node_modules/cliui": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz",
"integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==",
"license": "ISC",
"dependencies": {
"string-width": "^7.2.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=20"
}
},
"node_modules/yargs/node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"license": "MIT"
},
"node_modules/yargs/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/yargs/node_modules/strip-ansi": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz",
"integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==",
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.2.2"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/yargs/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/yargs/node_modules/yargs-parser": {
"version": "22.0.0",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz",
"integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==",
"license": "ISC",
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=23"
}
},
"node_modules/yauzl": {
"version": "2.10.0",
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
@@ -53351,7 +53085,7 @@
"license": "Apache-2.0",
"dependencies": {
"@types/d3-scale": "^4.0.9",
"d3-cloud": "^1.2.9",
"d3-cloud": "^1.2.8",
"d3-scale": "^4.0.2"
},
"devDependencies": {

View File

@@ -232,7 +232,7 @@
"use-query-params": "^2.2.2",
"uuid": "^14.0.0",
"xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz",
"yargs": "^18.0.0"
"yargs": "^17.7.2"
},
"devDependencies": {
"@babel/cli": "^7.28.6",

View File

@@ -79,7 +79,7 @@ export const LabeledErrorBoundInput = ({
isValidating ? 'validating' : hasError ? 'error' : 'success'
}
help={errorMessage || helpText}
hasFeedback={isValidating || !!hasError}
hasFeedback={!!hasError}
>
{visibilityToggle || props.name === 'password' ? (
<StyledInputPassword

View File

@@ -27,11 +27,10 @@ process.env.PATH = `./node_modules/.bin:${process.env.PATH}`;
const { spawnSync } = require('child_process');
const fastGlob = require('fast-glob');
const yargs = require('yargs');
const { hideBin } = require('yargs/helpers');
const { argv } = require('yargs');
const { globs } = yargs(hideBin(process.argv)).parse();
const glob = globs?.length > 1 ? `{${globs.join(',')}}` : globs?.[0] || '*';
const { _: globs } = argv;
const glob = globs.length > 1 ? `{${globs.join(',')}}` : globs[0] || '*';
const BABEL_CONFIG = '--config-file=../../babel.config.js';

View File

@@ -34,8 +34,6 @@ import Chart from 'src/types/Chart';
import { FacePile } from 'src/components';
import { handleChartDelete, CardStyles } from 'src/views/CRUD/utils';
import { assetUrl } from 'src/utils/assetUrl';
import type { ListViewFetchDataConfig as FetchDataConfig } from 'src/components';
import { TableTab } from 'src/views/CRUD/types';
interface ChartCardProps {
chart: Chart;
@@ -44,7 +42,7 @@ interface ChartCardProps {
bulkSelectEnabled: boolean;
addDangerToast: (msg: string) => void;
addSuccessToast: (msg: string) => void;
refreshData: (config?: FetchDataConfig | null) => void;
refreshData: () => void;
loading?: boolean;
saveFavoriteStatus: (id: number, isStarred: boolean) => void;
favoriteStatus: boolean;
@@ -52,7 +50,6 @@ interface ChartCardProps {
userId?: string | number;
showThumbnails?: boolean;
handleBulkChartExport: (chartsToExport: Chart[]) => void;
getData?: (tab: TableTab) => void;
}
export default function ChartCard({
@@ -70,7 +67,6 @@ export default function ChartCard({
chartFilter,
userId,
handleBulkChartExport,
getData,
}: ChartCardProps) {
const history = useHistory();
const canEdit = hasPerm('can_write');
@@ -140,7 +136,6 @@ export default function ChartCard({
refreshData,
chartFilter,
userId,
getData,
)
}
>

View File

@@ -243,7 +243,6 @@ export const accessTokenField = ({
validationErrors,
db,
isEditMode,
isValidating,
default_value,
description,
}: FieldPropTypes) => (
@@ -251,7 +250,6 @@ export const accessTokenField = ({
id="access_token"
name="access_token"
required={required}
isValidating={isValidating}
visibilityToggle={!isEditMode}
value={db?.parameters?.access_token}
validationMethods={{ onBlur: getValidation }}

View File

@@ -33,7 +33,6 @@ export const TableCatalog = ({
getValidation,
validationErrors,
db,
isValidating,
}: FieldPropTypes) => {
const tableCatalog = db?.catalog || [];
const catalogError = validationErrors || {};
@@ -52,7 +51,6 @@ export const TableCatalog = ({
<ValidatedInput
className="catalog-name-input"
required={required}
isValidating={isValidating}
validationMethods={{ onBlur: getValidation }}
errorMessage={catalogError[idx]?.name}
placeholder={t('Enter a name for this sheet')}
@@ -86,7 +84,6 @@ export const TableCatalog = ({
<ValidatedInput
className="catalog-name-url"
required={required}
isValidating={isValidating}
validationMethods={{ onBlur: getValidation }}
errorMessage={catalogError[idx]?.url}
placeholder={t('Paste the shareable Google Sheet URL here')}

View File

@@ -49,13 +49,11 @@ export const validatedInputField = ({
validationErrors,
db,
field,
isValidating,
}: FieldPropTypes) => (
<ValidatedInput
id={field}
name={field}
required={required}
isValidating={isValidating}
value={db?.parameters?.[field as keyof DatabaseParameters]}
validationMethods={{ onBlur: getValidation }}
errorMessage={validationErrors?.[field]}

View File

@@ -18,20 +18,18 @@
*/
import { useState } from 'react';
import { t } from '@apache-superset/core/translation';
import { JsonObject } from '@superset-ui/core';
import { styled } from '@apache-superset/core/theme';
import {
Form,
FormLabel,
Col,
Row,
LabeledErrorBoundInput,
Icons,
Tooltip,
} from '@superset-ui/core/components';
import { Input } from '@superset-ui/core/components/Input';
import { Radio } from '@superset-ui/core/components/Radio';
import { DatabaseObject, CustomEventHandlerType } from '../types';
import { Icons } from '@superset-ui/core/components/Icons';
import { DatabaseObject, FieldPropTypes } from '../types';
import { AuthType } from '.';
const StyledDiv = styled.div`
@@ -50,60 +48,50 @@ const StyledFormItem = styled(Form.Item)`
margin-bottom: 0 !important;
`;
interface SSHTunnelFormProps {
db: DatabaseObject | null;
onSSHTunnelParametersChange: CustomEventHandlerType;
setSSHTunnelLoginMethod: (method: AuthType) => void;
isValidating?: boolean;
validationErrors?: JsonObject | null;
getValidation: () => void;
}
const StyledInputPassword = styled(Input.Password)`
margin: ${({ theme }) => `${theme.sizeUnit}px 0 ${theme.sizeUnit * 2}px`};
`;
const SSHTunnelForm = ({
db,
onSSHTunnelParametersChange,
setSSHTunnelLoginMethod,
isValidating = false,
validationErrors,
getValidation,
}: SSHTunnelFormProps) => {
}: {
db: DatabaseObject | null;
onSSHTunnelParametersChange: FieldPropTypes['changeMethods']['onSSHTunnelParametersChange'];
setSSHTunnelLoginMethod: (method: AuthType) => void;
}) => {
const [usePassword, setUsePassword] = useState<AuthType>(AuthType.Password);
const sshErrors = validationErrors?.ssh_tunnel || {};
return (
<Form>
<StyledRow gutter={16}>
<Col xs={24} md={12}>
<StyledDiv>
<LabeledErrorBoundInput
id="server_address"
<FormLabel htmlFor="server_address" required>
{t('SSH Host')}
</FormLabel>
<Input
name="server_address"
label={t('SSH Host')}
required
type="text"
placeholder={t('e.g. 127.0.0.1')}
value={db?.ssh_tunnel?.server_address || ''}
onChange={onSSHTunnelParametersChange}
validationMethods={{ onBlur: getValidation }}
errorMessage={sshErrors?.server_address}
isValidating={isValidating}
data-test="ssh-tunnel-server_address-input"
/>
</StyledDiv>
</Col>
<Col xs={24} md={12}>
<StyledDiv>
<LabeledErrorBoundInput
id="server_port"
<FormLabel htmlFor="server_port" required>
{t('SSH Port')}
</FormLabel>
<Input
name="server_port"
label={t('SSH Port')}
required
placeholder={t('22')}
type="number"
value={db?.ssh_tunnel?.server_port}
onChange={onSSHTunnelParametersChange}
validationMethods={{ onBlur: getValidation }}
errorMessage={sshErrors?.server_port}
isValidating={isValidating}
data-test="ssh-tunnel-server_port-input"
/>
</StyledDiv>
@@ -112,17 +100,15 @@ const SSHTunnelForm = ({
<StyledRow gutter={16}>
<Col xs={24}>
<StyledDiv>
<LabeledErrorBoundInput
id="username"
<FormLabel htmlFor="username" required>
{t('Username')}
</FormLabel>
<Input
name="username"
label={t('Username')}
required
type="text"
placeholder={t('e.g. Analytics')}
value={db?.ssh_tunnel?.username || ''}
onChange={onSSHTunnelParametersChange}
validationMethods={{ onBlur: getValidation }}
errorMessage={sshErrors?.username}
isValidating={isValidating}
data-test="ssh-tunnel-username-input"
/>
</StyledDiv>
@@ -162,20 +148,16 @@ const SSHTunnelForm = ({
<StyledRow gutter={16}>
<Col xs={24}>
<StyledDiv>
<LabeledErrorBoundInput
id="password"
<FormLabel htmlFor="password" required>
{t('SSH Password')}
</FormLabel>
<StyledInputPassword
name="password"
label={t('SSH Password')}
required
visibilityToggle
placeholder={t('e.g. ********')}
value={db?.ssh_tunnel?.password || ''}
onChange={onSSHTunnelParametersChange}
validationMethods={{ onBlur: getValidation }}
errorMessage={sshErrors?.password}
isValidating={isValidating}
data-test="ssh-tunnel-password-input"
iconRender={(visible: boolean) =>
iconRender={visible =>
visible ? (
<Tooltip title={t('Hide password.')}>
<Icons.EyeInvisibleOutlined />
@@ -200,47 +182,30 @@ const SSHTunnelForm = ({
<FormLabel htmlFor="private_key" required>
{t('Private Key')}
</FormLabel>
<StyledFormItem
validateStatus={
isValidating
? 'validating'
: sshErrors?.private_key
? 'error'
: 'success'
}
help={sshErrors?.private_key}
hasFeedback={isValidating || !!sshErrors?.private_key}
>
<Input.TextArea
name="private_key"
placeholder={t('Paste Private Key here')}
value={db?.ssh_tunnel?.private_key || ''}
onChange={onSSHTunnelParametersChange}
onBlur={getValidation}
data-test="ssh-tunnel-private_key-input"
rows={4}
/>
</StyledFormItem>
<Input.TextArea
name="private_key"
placeholder={t('Paste Private Key here')}
value={db?.ssh_tunnel?.private_key || ''}
onChange={onSSHTunnelParametersChange}
data-test="ssh-tunnel-private_key-input"
rows={4}
/>
</StyledDiv>
</Col>
</StyledRow>
<StyledRow gutter={16}>
<Col xs={24}>
<StyledDiv>
<LabeledErrorBoundInput
id="private_key_password"
<FormLabel htmlFor="private_key_password" required>
{t('Private Key Password')}
</FormLabel>
<StyledInputPassword
name="private_key_password"
label={t('Private Key Password')}
required
visibilityToggle
placeholder={t('e.g. ********')}
value={db?.ssh_tunnel?.private_key_password || ''}
onChange={onSSHTunnelParametersChange}
validationMethods={{ onBlur: getValidation }}
errorMessage={sshErrors?.private_key_password}
isValidating={isValidating}
data-test="ssh-tunnel-private_key_password-input"
iconRender={(visible: boolean) =>
iconRender={visible =>
visible ? (
<Tooltip title={t('Hide password.')}>
<Icons.EyeInvisibleOutlined />

View File

@@ -26,6 +26,7 @@ import {
userEvent,
within,
waitFor,
fireEvent,
} from 'spec/helpers/testing-library';
import { getExtensionsRegistry } from '@superset-ui/core';
import setupCodeOverrides from 'src/setup/setupCodeOverrides';
@@ -435,7 +436,11 @@ describe('DatabaseModal', () => {
userEvent.click(selectInput);
// Simulate pasting text into the input
expect(() => userEvent.paste(selectInput, 'post')).not.toThrow();
expect(() =>
fireEvent.paste(selectInput, {
clipboardData: { getData: () => 'post' },
}),
).not.toThrow();
});
test('renders the "Basic" tab of SQL Alchemy form (step 2 of 2) correctly', async () => {
@@ -1210,31 +1215,25 @@ describe('DatabaseModal', () => {
);
expect(SSHTunnelServerAddressInput).toHaveValue('');
userEvent.type(SSHTunnelServerAddressInput, 'localhost');
await waitFor(() =>
expect(SSHTunnelServerAddressInput).toHaveValue('localhost'),
);
expect(SSHTunnelServerAddressInput).toHaveValue('localhost');
const SSHTunnelServerPortInput = screen.getByTestId(
'ssh-tunnel-server_port-input',
);
expect(SSHTunnelServerPortInput).toHaveValue(null);
userEvent.type(SSHTunnelServerPortInput, '22');
await waitFor(() => expect(SSHTunnelServerPortInput).toHaveValue(22));
expect(SSHTunnelServerPortInput).toHaveValue(22);
const SSHTunnelUsernameInput = screen.getByTestId(
'ssh-tunnel-username-input',
);
expect(SSHTunnelUsernameInput).toHaveValue('');
userEvent.type(SSHTunnelUsernameInput, 'test');
await waitFor(() =>
expect(SSHTunnelUsernameInput).toHaveValue('test'),
);
expect(SSHTunnelUsernameInput).toHaveValue('test');
const SSHTunnelPasswordInput = screen.getByTestId(
'ssh-tunnel-password-input',
);
expect(SSHTunnelPasswordInput).toHaveValue('');
userEvent.type(SSHTunnelPasswordInput, 'pass');
await waitFor(() =>
expect(SSHTunnelPasswordInput).toHaveValue('pass'),
);
expect(SSHTunnelPasswordInput).toHaveValue('pass');
});
test('properly interacts with SSH Tunnel form textboxes', async () => {
@@ -1254,31 +1253,25 @@ describe('DatabaseModal', () => {
);
expect(SSHTunnelServerAddressInput).toHaveValue('');
userEvent.type(SSHTunnelServerAddressInput, 'localhost');
await waitFor(() =>
expect(SSHTunnelServerAddressInput).toHaveValue('localhost'),
);
expect(SSHTunnelServerAddressInput).toHaveValue('localhost');
const SSHTunnelServerPortInput = screen.getByTestId(
'ssh-tunnel-server_port-input',
);
expect(SSHTunnelServerPortInput).toHaveValue(null);
userEvent.type(SSHTunnelServerPortInput, '22');
await waitFor(() => expect(SSHTunnelServerPortInput).toHaveValue(22));
expect(SSHTunnelServerPortInput).toHaveValue(22);
const SSHTunnelUsernameInput = screen.getByTestId(
'ssh-tunnel-username-input',
);
expect(SSHTunnelUsernameInput).toHaveValue('');
userEvent.type(SSHTunnelUsernameInput, 'test');
await waitFor(() =>
expect(SSHTunnelUsernameInput).toHaveValue('test'),
);
expect(SSHTunnelUsernameInput).toHaveValue('test');
const SSHTunnelPasswordInput = screen.getByTestId(
'ssh-tunnel-password-input',
);
expect(SSHTunnelPasswordInput).toHaveValue('');
userEvent.type(SSHTunnelPasswordInput, 'pass');
await waitFor(() =>
expect(SSHTunnelPasswordInput).toHaveValue('pass'),
);
expect(SSHTunnelPasswordInput).toHaveValue('pass');
});
test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => {
@@ -1373,10 +1366,7 @@ describe('DatabaseModal', () => {
}),
);
// Wait for step 2 to render
expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument();
const textboxes = await screen.findAllByRole('textbox');
const textboxes = screen.getAllByRole('textbox');
const hostField = textboxes[0];
const portField = screen.getByRole('spinbutton');
const databaseNameField = textboxes[1];
@@ -1393,19 +1383,14 @@ describe('DatabaseModal', () => {
expect(connectButton).toBeDisabled();
userEvent.type(hostField, 'localhost');
userEvent.tab();
userEvent.type(portField, '5432');
userEvent.tab();
userEvent.type(databaseNameField, 'postgres');
userEvent.tab();
userEvent.type(usernameField, 'testdb');
userEvent.tab();
userEvent.type(passwordField, 'demoPassword');
userEvent.tab();
await waitFor(() => expect(connectButton).toBeEnabled());
await waitFor(() => expect(portField).toHaveValue(5432));
expect(await screen.findByDisplayValue(/5432/i)).toBeInTheDocument();
expect(hostField).toHaveValue('localhost');
expect(portField).toHaveValue(5432);
expect(databaseNameField).toHaveValue('postgres');
@@ -1414,48 +1399,10 @@ describe('DatabaseModal', () => {
expect(connectButton).toBeEnabled();
userEvent.click(connectButton);
// Verify that validation was called during the form interaction
// Note: With the optimized validation, redundant calls on the same db state are skipped
await waitFor(() => {
expect(
fetchMock.callHistory.calls(VALIDATE_PARAMS_ENDPOINT).length,
).toBeGreaterThan(0);
});
});
test('does not fire redundant validation on blur when db has not changed', async () => {
setup();
userEvent.click(
await screen.findByRole('button', {
name: /postgresql/i,
}),
);
expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument();
const textboxes = await screen.findAllByRole('textbox');
const hostField = textboxes[0];
// Type a value and blur - should trigger validation
userEvent.type(hostField, 'localhost');
userEvent.tab();
await waitFor(() => {
expect(
fetchMock.callHistory.calls(VALIDATE_PARAMS_ENDPOINT).length,
).toEqual(1);
});
// Blur again without changing the value - should NOT trigger another validation
userEvent.click(hostField);
userEvent.tab();
// Wait a tick to ensure no additional calls are made
await waitFor(() => {
expect(
fetchMock.callHistory.calls(VALIDATE_PARAMS_ENDPOINT).length,
).toEqual(1);
).toEqual(5);
});
});
});

View File

@@ -617,7 +617,6 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
hasValidated,
setHasValidated,
] = useDatabaseValidation();
const lastValidatedDbSnapshotRef = useRef<string | null>(null);
const [hasConnectedDb, setHasConnectedDb] = useState<boolean>(false);
const [showCTAbtns, setShowCTAbtns] = useState(false);
const [dbName, setDbName] = useState('');
@@ -725,7 +724,6 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
const handleClearValidationErrors = useCallback(() => {
setValidationErrors(null);
setHasValidated(false);
lastValidatedDbSnapshotRef.current = null;
clearError();
}, [setValidationErrors, setHasValidated, clearError]);
@@ -802,16 +800,6 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
[onChange],
);
const handleTextChange = useCallback(
({ target }: { target: HTMLInputElement }) => {
onChange(ActionType.TextChange, {
name: target.name,
value: target.value,
});
},
[onChange],
);
const handleChangeWithValidation = useCallback(
(
actionType: ActionType,
@@ -823,15 +811,6 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
[onChange, handleClearValidationErrors],
);
// Validate the connection form on blur, but skip the round-trip when the
// form state is byte-identical to the last payload we already validated.
const getBlurValidation = useCallback(() => {
  const snapshot = JSON.stringify(db);
  if (lastValidatedDbSnapshotRef.current === snapshot) {
    // Nothing changed since the last validation — report "no errors".
    return Promise.resolve([]);
  }
  lastValidatedDbSnapshotRef.current = snapshot;
  return getValidation(db);
}, [db, getValidation]);
const onClose = () => {
setDB({ type: ActionType.Reset });
setHasConnectedDb(false);
@@ -1817,6 +1796,7 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
name: target.name,
value: target.value,
});
handleClearValidationErrors();
}}
setSSHTunnelLoginMethod={(method: AuthType) =>
setDB({
@@ -1824,9 +1804,6 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
payload: { login_method: method },
})
}
isValidating={isValidating}
validationErrors={validationErrors}
getValidation={getBlurValidation}
/>
);
@@ -1895,8 +1872,13 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
});
}}
onParametersChange={handleParametersChange}
onChange={handleTextChange}
getValidation={getBlurValidation}
onChange={({ target }: { target: HTMLInputElement }) =>
handleChangeWithValidation(ActionType.TextChange, {
name: target.name,
value: target.value,
})
}
getValidation={() => getValidation(db)}
validationErrors={validationErrors}
getPlaceholder={getPlaceholder}
clearValidationErrors={handleClearValidationErrors}

View File

@@ -26,7 +26,6 @@ import { VizType } from '@superset-ui/core';
import fetchMock from 'fetch-mock';
import { act } from 'react-dom/test-utils';
import handleResourceExport from 'src/utils/export';
import { LocalStorageKeys } from 'src/utils/localStorageHelpers';
import ChartTable from './ChartTable';
// Mock the export module
@@ -54,16 +53,12 @@ const mockCharts = Array.from({ length: 3 }).map((_, i) => ({
thumbnail_url: '',
}));
fetchMock.get(
chartsEndpoint,
{
result: mockCharts,
},
{ name: chartsEndpoint },
);
fetchMock.get(chartsEndpoint, {
result: mockCharts,
});
fetchMock.get(chartsInfoEndpoint, {
permissions: ['can_add', 'can_write', 'can_delete', 'can_export'],
permissions: ['can_add', 'can_edit', 'can_delete', 'can_export'],
});
fetchMock.get(chartFavoriteStatusEndpoint, {
@@ -104,10 +99,6 @@ const renderChartTable = (props: any) =>
render(<ChartTable {...props} />, renderOptions);
});
beforeEach(() => {
window.localStorage.removeItem(LocalStorageKeys.HomepageChartFilter);
});
test('renders with EmptyState if no data present', async () => {
await renderChartTable(mockedProps);
expect(screen.getAllByRole('tab')).toHaveLength(3);
@@ -187,58 +178,3 @@ test('handles chart export with correct ID and shows spinner', async () => {
{ timeout: 3000 },
);
});
// After deleting a chart from an "other" tab (e.g. "All"), the tab's listing
// should be re-fetched exactly once so the deleted chart disappears.
test('refreshes other tab data after deleting a chart', async () => {
  // Re-register the charts endpoint so the post-delete refresh returns the
  // list without chart 0.
  fetchMock.removeRoute(chartsEndpoint);
  fetchMock.get(
    chartsEndpoint,
    {
      result: mockCharts.slice(1),
      count: mockCharts.length - 1,
    },
    { name: chartsEndpoint },
  );
  fetchMock.delete('glob:*/api/v1/chart/0', {
    message: 'Chart deleted',
  });
  await renderChartTable({
    ...otherTabProps,
    otherTabTitle: 'All',
  });
  expect(screen.getByText('cool chart 0')).toBeInTheDocument();
  // Snapshot the listing-fetch count so we can assert exactly one refresh.
  const refreshCallsBeforeDelete =
    fetchMock.callHistory.calls(chartsEndpoint).length;
  // Open the card's "more" menu and walk through the delete confirmation.
  const moreButtons = screen.getAllByRole('img', { name: /more/i });
  await userEvent.click(moreButtons[0]);
  await userEvent.click(await screen.findByText('Delete'));
  const deleteInput = screen.getByTestId('delete-modal-input');
  await userEvent.type(deleteInput, 'DELETE');
  await userEvent.click(screen.getByTestId('modal-confirm-button'));
  // The DELETE request fires exactly once...
  await waitFor(() => {
    expect(
      fetchMock.callHistory.calls(/api\/v1\/chart\/0/, {
        method: 'DELETE',
      }),
    ).toHaveLength(1);
  });
  // ...and triggers exactly one additional listing fetch.
  await waitFor(() => {
    expect(fetchMock.callHistory.calls(chartsEndpoint).length).toBe(
      refreshCallsBeforeDelete + 1,
    );
  });
  // The UI reflects the refreshed data: chart 0 gone, the others intact.
  await waitFor(() => {
    expect(screen.queryByText('cool chart 0')).not.toBeInTheDocument();
  });
  expect(screen.getByText('cool chart 1')).toBeInTheDocument();
  expect(screen.getByText('cool chart 2')).toBeInTheDocument();
});

View File

@@ -112,19 +112,18 @@ function ChartTable({
const [preparingExport, setPreparingExport] = useState<boolean>(false);
const [loaded, setLoaded] = useState<boolean>(false);
// Build the pagination/sort/filter config for a chart-listing fetch,
// newest-changed first.
const getChartFetchDataConfig = (tab: TableTab) => {
  const sortBy = [
    {
      id: 'changed_on_delta_humanized',
      desc: true,
    },
  ];
  return {
    pageIndex: 0,
    pageSize: PAGE_SIZE,
    sortBy,
    filters: getFilterValues(tab, WelcomeTable.Charts, user, otherTabFilters),
  };
};
// Fetch the chart listing for the given tab.
const getData = (tab: TableTab) => fetchData(getChartFetchDataConfig(tab));
// Fetch the first page of charts for the given tab, newest-changed first.
const getData = (tab: TableTab) => {
  const sortBy = [{ id: 'changed_on_delta_humanized', desc: true }];
  return fetchData({
    pageIndex: 0,
    pageSize: PAGE_SIZE,
    sortBy,
    filters: getFilterValues(tab, WelcomeTable.Charts, user, otherTabFilters),
  });
};
useEffect(() => {
if (loaded || activeTab === TableTab.Favorite) {
@@ -235,7 +234,6 @@ function ChartTable({
refreshData={refreshData}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
getData={getData}
favoriteStatus={favoriteStatus[e.id]}
saveFavoriteStatus={saveFavoriteStatus}
handleBulkChartExport={handleBulkChartExport}

View File

@@ -822,6 +822,13 @@ export function useDatabaseValidation() {
const getValidation = useCallback(
async (database: Partial<DatabaseObject> | null, onCreate = false) => {
if (database?.parameters?.ssh) {
setValidationErrors(null);
setIsValidating(false);
setHasValidated(true);
return Promise.resolve([]);
}
setIsValidating(true);
try {
@@ -859,19 +866,6 @@ export function useDatabaseValidation() {
return acc;
}
if (extra?.ssh_tunnel) {
acc.ssh_tunnel = {
...acc.ssh_tunnel,
...Object.fromEntries(
(extra.missing ?? []).map((field: string) => [
field,
'This is a required field',
]),
),
};
return acc;
}
if (extra?.invalid) {
extra.invalid.forEach((field: string) => {
acc[field] = message;

View File

@@ -327,7 +327,6 @@ export function handleChartDelete(
refreshData: (arg0?: FetchDataConfig | null) => void,
chartFilter?: string,
userId?: string | number,
getData?: (tab: TableTab) => void,
) {
const filters = {
pageIndex: 0,
@@ -351,7 +350,6 @@ export function handleChartDelete(
}).then(
() => {
if (chartFilter === 'Mine') refreshData(filters);
else if (chartFilter && getData) getData(chartFilter as TableTab);
else refreshData();
addSuccessToast(t('Deleted: %s', sliceName));
},

View File

@@ -36,14 +36,11 @@ const {
} = require('webpack-manifest-plugin');
const ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin');
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
const yargs = require('yargs');
const { hideBin } = require('yargs/helpers');
const parsedArgs = require('yargs').argv;
const Visualizer = require('webpack-visualizer-plugin2');
const getProxyConfig = require('./webpack.proxy-config');
const packageConfig = require('./package.json');
const parsedArgs = yargs(hideBin(process.argv)).parse();
// input dir
const APP_DIR = path.resolve(__dirname, './');
// output dir

View File

@@ -20,8 +20,8 @@ const zlib = require('zlib');
const { ZSTDDecompress } = require('simple-zstd');
const yargs = require('yargs');
const { hideBin } = require('yargs/helpers');
const parsedArgs = yargs(hideBin(process.argv)).parse();
// eslint-disable-next-line import/no-extraneous-dependencies
const parsedArgs = yargs.argv;
const parsedEnvArg = () => {
let envArgs = {};

View File

@@ -19,7 +19,6 @@ from typing import Any, Optional
from flask_babel import gettext as __
from superset import is_feature_enabled
from superset.commands.base import BaseCommand
from superset.commands.database.exceptions import (
DatabaseOfflineError,
@@ -27,10 +26,6 @@ from superset.commands.database.exceptions import (
InvalidEngineError,
InvalidParametersError,
)
from superset.commands.database.ssh_tunnel.exceptions import (
SSHTunnelDatabasePortError,
SSHTunnelingNotEnabledError,
)
from superset.daos.database import DatabaseDAO
from superset.databases.utils import make_url_safe
from superset.db_engine_specs import get_engine_spec
@@ -47,7 +42,7 @@ class ValidateDatabaseParametersCommand(BaseCommand):
self._properties = properties.copy()
self._model: Optional[Database] = None
def run(self) -> None: # noqa: C901
def run(self) -> None:
self.validate()
engine = self._properties["engine"]
@@ -55,8 +50,6 @@ class ValidateDatabaseParametersCommand(BaseCommand):
if engine in BYPASS_VALIDATION_ENGINES:
# Skip engines that are only validated onCreate
# But still validate database_name uniqueness
self._validate_database_name()
return
engine_spec = get_engine_spec(engine, driver)
@@ -72,17 +65,8 @@ class ValidateDatabaseParametersCommand(BaseCommand):
),
)
# perform initial validation (host, port, database, username)
# perform initial validation
errors = engine_spec.validate_parameters(self._properties) # type: ignore
# Collect database_name errors along with parameter errors
if database_name_error := self._get_database_name_error():
errors.append(database_name_error)
# Collect SSH tunnel errors
ssh_tunnel_errors = self._get_ssh_tunnel_errors()
errors.extend(ssh_tunnel_errors)
if errors:
event_logger.log_with_context(action="validation_error", engine=engine)
raise InvalidParametersError(errors)
@@ -154,101 +138,6 @@ class ValidateDatabaseParametersCommand(BaseCommand):
),
)
def _load_model(self) -> None:
"""Load the existing database model if updating."""
def validate(self) -> None:
if (database_id := self._properties.get("id")) is not None:
self._model = DatabaseDAO.find_by_id(database_id)
def _get_database_name_error(self) -> Optional[SupersetError]:
    """Return a SupersetError when the database name is already taken, else None."""
    database_name = self._properties.get("database_name")
    if not database_name:
        # No name submitted — nothing to check.
        return None
    database_id = self._properties.get("id")
    # Updates must exclude their own row from the uniqueness check.
    if database_id is not None:
        is_unique = DatabaseDAO.validate_update_uniqueness(database_id, database_name)
    else:
        is_unique = DatabaseDAO.validate_uniqueness(database_name)
    if is_unique:
        return None
    return SupersetError(
        message=__("A database with the same name already exists."),
        error_type=SupersetErrorType.INVALID_PAYLOAD_SCHEMA_ERROR,
        level=ErrorLevel.ERROR,
        extra={"invalid": ["database_name"]},
    )
def _validate_database_name(self) -> None:
    """Raise InvalidParametersError when the database name is not unique."""
    error = self._get_database_name_error()
    if error is not None:
        raise InvalidParametersError([error])
def validate(self) -> None:
    """Load the model and validate SSH tunnel if enabled."""
    # Resolve the existing Database row first (update flows), then raise on
    # SSH tunnel misconfiguration before run() performs parameter validation.
    self._load_model()
    self._validate_ssh_tunnel()
def _validate_ssh_tunnel(self) -> None:
    """Validate SSH tunnel configuration if enabled; raises on misconfiguration."""
    if not self._properties.get("ssh_tunnel"):
        # No tunnel payload — nothing to validate.
        return
    # Tunnelling is only allowed when the feature flag is on.
    if not is_feature_enabled("SSH_TUNNELING"):
        raise SSHTunnelingNotEnabledError()
    # A database port is required for SSH tunneling.
    parameters = self._properties.get("parameters", {})
    if not parameters.get("port"):
        raise SSHTunnelDatabasePortError()
def _get_ssh_tunnel_errors(self) -> list[SupersetError]:
    """Collect (rather than raise) SSH tunnel field-validation errors."""

    def missing_fields_error(missing: list[str]) -> SupersetError:
        # Shared shape for every "missing SSH tunnel field" warning.
        return SupersetError(
            message=__("One or more parameters are missing: %(missing)s"),
            error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
            level=ErrorLevel.WARNING,
            extra={"missing": missing, "ssh_tunnel": True},
        )

    ssh_tunnel = self._properties.get("ssh_tunnel") or {}
    parameters = self._properties.get("parameters", {})
    # Only validate when SSH is toggled on or tunnel data was actually sent.
    if not parameters.get("ssh", False) and not ssh_tunnel:
        return []

    errors: list[SupersetError] = []

    # Connection coordinates are mandatory.
    missing = [
        field
        for field in ["server_address", "server_port", "username"]
        if not ssh_tunnel.get(field)
    ]
    if missing:
        errors.append(missing_fields_error(missing))

    has_password = bool(ssh_tunnel.get("password"))
    has_private_key = bool(ssh_tunnel.get("private_key"))
    # Either a password or a private key must be supplied.
    if not has_password and not has_private_key:
        errors.append(
            SupersetError(
                message=__("Must provide credentials for the SSH Tunnel"),
                error_type=SupersetErrorType.CONNECTION_MISSING_PARAMETERS_ERROR,
                level=ErrorLevel.WARNING,
                extra={"missing": ["password"], "ssh_tunnel": True},
            )
        )
    # A private key is only usable together with its password.
    if has_private_key and not ssh_tunnel.get("private_key_password"):
        errors.append(missing_fields_error(["private_key_password"]))
    return errors

View File

@@ -443,24 +443,6 @@ class DatabaseValidateParametersSchema(Schema):
required=True,
metadata={"description": configuration_method_description},
)
ssh_tunnel = fields.Nested("DatabaseSSHTunnelValidation", allow_none=True)
class DatabaseSSHTunnelValidation(Schema):
    """SSH Tunnel schema for validation.

    Allows partial data without strict authentication requirements.
    """

    # Present only when validating an update to an existing tunnel.
    id = fields.Integer(
        allow_none=True, metadata={"description": "SSH Tunnel ID (for updates)"}
    )
    # Connection coordinates — all optional so partially filled forms validate.
    server_address = fields.String(allow_none=True)
    server_port = fields.Integer(allow_none=True)
    username = fields.String(allow_none=True)
    # Credentials (password, or private key plus its password) — intentionally
    # not enforced at the schema level.
    password = fields.String(required=False, allow_none=True)
    private_key = fields.String(required=False, allow_none=True)
    private_key_password = fields.String(required=False, allow_none=True)
class DatabaseSSHTunnel(Schema):

View File

@@ -21,10 +21,11 @@ import logging
import re
from datetime import datetime
from re import Pattern
from typing import Any, Optional, TYPE_CHECKING
from typing import Any, Callable, Optional, TYPE_CHECKING
from flask_babel import gettext as __
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, ENUM, JSON
from sqlalchemy import types
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, ENUM, INTERVAL, JSON
from sqlalchemy.dialects.postgresql.base import PGInspector
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.engine.url import URL
@@ -526,8 +527,37 @@ class PostgresEngineSpec(BasicParametersMixin, PostgresBaseEngineSpec):
ENUM(),
GenericDataType.STRING,
),
(
re.compile(r"^interval", re.IGNORECASE),
INTERVAL(),
GenericDataType.NUMERIC,
),
)
@staticmethod
def _normalize_interval(v: Any) -> Any:
"""Convert PostgreSQL INTERVAL values to milliseconds.
psycopg2 returns timedelta objects which we convert to milliseconds for
numeric operations in bar/pie charts. Using milliseconds allows users to
apply the built-in "DURATION" number format for human-readable display
(e.g., "1d 2h 30m 45s").
Returns None for values that cannot be converted to preserve NULL semantics
and avoid mixed-type columns.
"""
if v is None:
return None
if hasattr(v, "total_seconds"):
return v.total_seconds() * 1000
if isinstance(v, (int, float)) and not isinstance(v, bool):
return float(v) * 1000
return None # Can't convert to numeric — treat as missing
# Route INTERVAL column values through _normalize_interval so chart data is
# numeric (milliseconds). `.__func__` unwraps the staticmethod descriptor to a
# plain callable, since attribute access inside the class body would otherwise
# yield the descriptor object itself.
column_type_mutators: dict[types.TypeEngine, Callable[[Any], Any]] = {
    INTERVAL: _normalize_interval.__func__,  # type: ignore[attr-defined]
}
@classmethod
def get_schema_from_engine_params(
cls,

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -15,14 +15,14 @@
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from datetime import datetime, timedelta
from typing import Any, Optional
from unittest.mock import MagicMock
import pytest
from pytest_mock import MockerFixture
from sqlalchemy import column, types
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, ENUM, JSON
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, ENUM, INTERVAL, JSON
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.engine.url import make_url
@@ -87,6 +87,8 @@ def test_convert_dttm(
("TIME", types.Time, None, GenericDataType.TEMPORAL, True),
# Boolean
("BOOLEAN", types.Boolean, None, GenericDataType.BOOLEAN, False),
# Interval (mapped to NUMERIC for chart rendering)
("INTERVAL", INTERVAL, None, GenericDataType.NUMERIC, False),
],
)
def test_get_column_spec(
@@ -363,3 +365,41 @@ class TestRedshiftDetection:
spec.update_params_from_encrypted_extra(database, params)
assert "pool_events" not in params
def test_interval_type_mutator() -> None:
    """
    DB Eng Specs (postgres): Test INTERVAL type mutator

    INTERVAL values are converted to milliseconds so users can apply
    the built-in "DURATION" number format for human-readable display.
    """
    normalize = spec.column_type_mutators[INTERVAL]

    # timedelta (the usual psycopg2 value) -> total milliseconds,
    # including the zero and negative cases.
    assert normalize(timedelta(days=1, hours=2, minutes=30, seconds=45)) == 95445000.0
    assert normalize(timedelta(0)) == 0.0
    assert normalize(timedelta(days=-1)) == -86400000.0

    # Bare numbers are treated as seconds and scaled to milliseconds.
    assert normalize(12345) == 12345000.0
    assert normalize(123.45) == 123450.0

    # NULL stays NULL rather than collapsing to zero.
    assert normalize(None) is None

    # Everything else — including bool, an int subclass — becomes None so
    # the resulting column never mixes numeric and non-numeric values.
    unsupported_values = (
        True,
        False,
        "1 day 02:30:45",
        "P1DT2H30M45S",
        [1, 2, 3],
        {"days": 1},
    )
    for value in unsupported_values:
        assert normalize(value) is None