mirror of
https://github.com/apache/superset.git
synced 2026-05-03 15:04:28 +00:00
Compare commits
7 Commits
docs/testi
...
fire-alert
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
58a2eb465a | ||
|
|
3c51194bb2 | ||
|
|
19be7020c7 | ||
|
|
7c9794cc2f | ||
|
|
7a6b084ff7 | ||
|
|
17eeeaccac | ||
|
|
4dd1e80f4c |
2
.github/copilot-instructions.md
vendored
2
.github/copilot-instructions.md
vendored
@@ -1 +1 @@
|
||||
../AGENTS.md
|
||||
../LLMS.md
|
||||
@@ -82,7 +82,6 @@ intro_header.txt
|
||||
|
||||
# for LLMs
|
||||
llm-context.md
|
||||
AGENTS.md
|
||||
LLMS.md
|
||||
CLAUDE.md
|
||||
CURSOR.md
|
||||
|
||||
@@ -68,11 +68,7 @@ superset/
|
||||
|
||||
### Apache License Headers
|
||||
- **New files require ASF license headers** - When creating new code files, include the standard Apache Software Foundation license header
|
||||
- **LLM instruction files are excluded** - Files like AGENTS.md, CLAUDE.md, etc. are in `.rat-excludes` to avoid header token overhead
|
||||
|
||||
### Code Comments
|
||||
- **Avoid time-specific language** - Don't use words like "now", "currently", "today" in code comments as they become outdated
|
||||
- **Write timeless comments** - Comments should remain accurate regardless of when they're read
|
||||
- **LLM instruction files are excluded** - Files like LLMS.md, CLAUDE.md, etc. are in `.rat-excludes` to avoid header token overhead
|
||||
|
||||
## Documentation Requirements
|
||||
|
||||
@@ -102,17 +98,6 @@ superset/
|
||||
- **`selectOption()`** - Select component helper
|
||||
- **React Testing Library** - NO Enzyme (removed)
|
||||
|
||||
### Test Structure Guidelines
|
||||
- **Use `test()` instead of `describe()` and `it()`** - Follow the [avoid nesting when testing](https://kentcdodds.com/blog/avoid-nesting-when-youre-testing) principle
|
||||
- **Why**: Reduces unnecessary nesting, improves test isolation, and makes tests more readable
|
||||
- **Pattern**: Write flat test files with descriptive test names that fully describe what's being tested
|
||||
- **Example**: Instead of nested `describe('Component', () => { it('should render', ...) })`, use `test('Component renders correctly', ...)`
|
||||
- **Benefits**:
|
||||
- Each test stands alone with a clear, searchable name
|
||||
- Easier to run individual tests
|
||||
- Forces you to write more descriptive test names
|
||||
- Reduces cognitive overhead from nested context switching
|
||||
|
||||
### Test Database Patterns
|
||||
- **Mock patterns**: Use `MagicMock()` for config objects, avoid `AsyncMock` for synchronous code
|
||||
- **API tests**: Update expected columns when adding new model fields
|
||||
@@ -28,7 +28,6 @@ x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset
|
||||
x-superset-volumes:
|
||||
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
||||
- ./docker:/app/docker
|
||||
- ./superset-core:/app/superset-core
|
||||
- superset_home:/app/superset_home
|
||||
|
||||
services:
|
||||
|
||||
@@ -29,11 +29,9 @@ x-superset-volumes: &superset-volumes
|
||||
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
|
||||
- ./docker:/app/docker
|
||||
- ./superset:/app/superset
|
||||
- ./superset-core:/app/superset-core
|
||||
- ./superset-frontend:/app/superset-frontend
|
||||
- superset_home:/app/superset_home
|
||||
- ./tests:/app/tests
|
||||
- superset_data:/app/data
|
||||
x-common-build: &common-build
|
||||
context: .
|
||||
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
|
||||
@@ -186,8 +184,10 @@ services:
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
superset: "http://superset:8088"
|
||||
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-0.0.0.0}"
|
||||
WEBPACK_DEVSERVER_PORT: "${WEBPACK_DEVSERVER_PORT:-9000}"
|
||||
ports:
|
||||
- "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
|
||||
- "9000:9000" # exposing the dynamic webpack dev server
|
||||
container_name: superset_node
|
||||
command: ["/app/docker/docker-frontend.sh"]
|
||||
env_file:
|
||||
@@ -276,5 +276,3 @@ volumes:
|
||||
external: false
|
||||
redis:
|
||||
external: false
|
||||
superset_data:
|
||||
external: false
|
||||
|
||||
@@ -21,15 +21,8 @@ set -eo pipefail
|
||||
# Make python interactive
|
||||
if [ "$DEV_MODE" == "true" ]; then
|
||||
if [ "$(whoami)" = "root" ] && command -v uv > /dev/null 2>&1; then
|
||||
# Always ensure superset-core is available
|
||||
echo "Installing superset-core in editable mode"
|
||||
uv pip install --no-deps -e /app/superset-core
|
||||
|
||||
# Only reinstall the main app for non-worker processes
|
||||
if [ "$1" != "worker" ] && [ "$1" != "beat" ]; then
|
||||
echo "Reinstalling the app in editable mode"
|
||||
uv pip install -e .
|
||||
fi
|
||||
echo "Reinstalling the app in editable mode"
|
||||
uv pip install -e .
|
||||
fi
|
||||
fi
|
||||
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
|
||||
@@ -41,8 +34,7 @@ if [ "$CYPRESS_CONFIG" == "true" ]; then
|
||||
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset_cypress
|
||||
PORT=8081
|
||||
fi
|
||||
# Skip postgres requirements installation for workers to avoid conflicts
|
||||
if [[ "$DATABASE_DIALECT" == postgres* ]] && [ "$(whoami)" = "root" ] && [ "$1" != "worker" ] && [ "$1" != "beat" ]; then
|
||||
if [[ "$DATABASE_DIALECT" == postgres* ]] && [ "$(whoami)" = "root" ]; then
|
||||
# older images may not have the postgres dev requirements installed
|
||||
echo "Installing postgres requirements"
|
||||
if command -v uv > /dev/null 2>&1; then
|
||||
|
||||
@@ -36,11 +36,11 @@ Screenshots will be taken but no messages actually sent as long as `ALERT_REPORT
|
||||
#### In your `Dockerfile`
|
||||
|
||||
You'll need to extend the Superset image to include a headless browser. Your options include:
|
||||
- Use Playwright with Chrome: this is the recommended approach as of version 4.1.x or greater. A working example of a Dockerfile that installs these tools is provided under "Building your own production Docker image" on the [Docker Builds](/docs/installation/docker-builds#building-your-own-production-docker-image) page. Read the code comments there as you'll also need to change a feature flag in your config.
|
||||
- Use Playwright with Chrome: this is the recommended approach as of version >=4.1.x. A working example of a Dockerfile that installs these tools is provided under “Building your own production Docker image” on the [Docker Builds](/docs/installation/docker-builds#building-your-own-production-docker-image) page. Read the code comments there as you'll also need to change a feature flag in your config.
|
||||
- Use Firefox: you'll need to install geckodriver and Firefox.
|
||||
- Use Chrome without Playwright: you'll need to install Chrome and set the value of `WEBDRIVER_TYPE` to `"chrome"` in your `superset_config.py`.
|
||||
|
||||
In Superset versions prior to 4.1, users installed Firefox or Chrome and that was documented here.
|
||||
In Superset versions <=4.0x, users installed Firefox or Chrome and that was documented here.
|
||||
|
||||
Only the worker container needs the browser.
|
||||
|
||||
|
||||
@@ -67,7 +67,7 @@ are compatible with Superset.
|
||||
| [IBM Netezza Performance Server](/docs/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [MySQL](/docs/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [OceanBase](/docs/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Oracle](/docs/configuration/databases#oracle) | `pip install oracledb` | `oracle://<username>:<password>@<hostname>:<port>` |
|
||||
| [Oracle](/docs/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
|
||||
| [Parseable](/docs/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
|
||||
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
|
||||
@@ -7,7 +7,7 @@ version: 1
|
||||
# Theming Superset
|
||||
|
||||
:::note
|
||||
`apache-superset>=6.0`
|
||||
apache-superset>=6.0
|
||||
:::
|
||||
|
||||
Superset now rides on **Ant Design v5's token-based theming**.
|
||||
|
||||
@@ -130,7 +130,7 @@ Committers may also update title to reflect the issue/PR content if the author-p
|
||||
|
||||
If the PR passes CI tests and does not have any `need:` labels, it is ready for review, add label `review` and/or `design-review`.
|
||||
|
||||
If an issue/PR has been inactive for at least 30 days, it will be closed. If it does not have any status label, add `inactive`.
|
||||
If an issue/PR has been inactive for >=30 days, it will be closed. If it does not have any status label, add `inactive`.
|
||||
|
||||
When creating a PR, if you're aiming to have it included in a specific release, please tag it with the version label. For example, to have a PR considered for inclusion in Superset 1.1 use the label `v1.1`.
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ maintainers:
|
||||
- name: craig-rueda
|
||||
email: craig@craigrueda.com
|
||||
url: https://github.com/craig-rueda
|
||||
version: 0.15.1 # See [README](https://github.com/apache/superset/blob/master/helm/superset/README.md#versioning) for version details.
|
||||
version: 0.15.0 # See [README](https://github.com/apache/superset/blob/master/helm/superset/README.md#versioning) for version details.
|
||||
dependencies:
|
||||
- name: postgresql
|
||||
version: 13.4.4
|
||||
|
||||
@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
|
||||
|
||||
# superset
|
||||
|
||||

|
||||

|
||||
|
||||
Apache Superset is a modern, enterprise-ready business intelligence web application
|
||||
|
||||
@@ -203,7 +203,6 @@ On helm this can be set on `extraSecretEnv.SUPERSET_SECRET_KEY` or `configOverri
|
||||
| supersetNode.connections.db_name | string | `"superset"` | |
|
||||
| supersetNode.connections.db_pass | string | `"superset"` | |
|
||||
| supersetNode.connections.db_port | string | `"5432"` | |
|
||||
| supersetNode.connections.db_type | string | `"postgresql"` | Database type for Superset metadata (Supported types: "postgresql", "mysql") |
|
||||
| supersetNode.connections.db_user | string | `"superset"` | |
|
||||
| supersetNode.connections.redis_cache_db | string | `"1"` | |
|
||||
| supersetNode.connections.redis_celery_db | string | `"0"` | |
|
||||
|
||||
@@ -96,18 +96,7 @@ CACHE_CONFIG = {
|
||||
}
|
||||
DATA_CACHE_CONFIG = CACHE_CONFIG
|
||||
|
||||
|
||||
if os.getenv("SQLALCHEMY_DATABASE_URI"):
|
||||
SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI")
|
||||
else:
|
||||
{{- if eq .Values.supersetNode.connections.db_type "postgresql" }}
|
||||
SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{os.getenv('DB_USER')}:{os.getenv('DB_PASS')}@{os.getenv('DB_HOST')}:{os.getenv('DB_PORT')}/{os.getenv('DB_NAME')}"
|
||||
{{- else if eq .Values.supersetNode.connections.db_type "mysql" }}
|
||||
SQLALCHEMY_DATABASE_URI = f"mysql+mysqldb://{os.getenv('DB_USER')}:{os.getenv('DB_PASS')}@{os.getenv('DB_HOST')}:{os.getenv('DB_PORT')}/{os.getenv('DB_NAME')}"
|
||||
{{- else }}
|
||||
{{ fail (printf "Unsupported database type: %s. Please use 'postgresql' or 'mysql'." .Values.supersetNode.connections.db_type) }}
|
||||
{{- end }}
|
||||
|
||||
SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{env('DB_USER')}:{env('DB_PASS')}@{env('DB_HOST')}:{env('DB_PORT')}/{env('DB_NAME')}"
|
||||
SQLALCHEMY_TRACK_MODIFICATIONS = True
|
||||
|
||||
class CeleryConfig:
|
||||
|
||||
@@ -289,8 +289,6 @@ supersetNode:
|
||||
enabled: false
|
||||
ssl_cert_reqs: CERT_NONE
|
||||
# You need to change below configuration incase bringing own PostgresSQL instance and also set postgresql.enabled:false
|
||||
# -- Database type for Superset metadata (Supported types: "postgresql", "mysql")
|
||||
db_type: "postgresql"
|
||||
db_host: "{{ .Release.Name }}-postgresql"
|
||||
db_port: "5432"
|
||||
db_user: superset
|
||||
|
||||
@@ -100,7 +100,7 @@ dependencies = [
|
||||
"slack_sdk>=3.19.0, <4",
|
||||
"sqlalchemy>=1.4, <2",
|
||||
"sqlalchemy-utils>=0.38.3, <0.39",
|
||||
"sqlglot>=27.15.2, <28",
|
||||
"sqlglot>=27.3.0, <28",
|
||||
# newer pandas needs 0.9+
|
||||
"tabulate>=0.9.0, <1.0",
|
||||
"typing-extensions>=4, <5",
|
||||
|
||||
@@ -395,7 +395,7 @@ sqlalchemy-utils==0.38.3
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# flask-appbuilder
|
||||
sqlglot==27.15.2
|
||||
sqlglot==27.3.0
|
||||
# via apache-superset (pyproject.toml)
|
||||
sshtunnel==0.4.0
|
||||
# via apache-superset (pyproject.toml)
|
||||
|
||||
@@ -848,7 +848,7 @@ sqlalchemy-utils==0.38.3
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
sqlglot==27.15.2
|
||||
sqlglot==27.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
|
||||
@@ -83,7 +83,6 @@ module.exports = {
|
||||
'plugin:react-hooks/recommended',
|
||||
'plugin:react-prefer-function-component/recommended',
|
||||
'plugin:storybook/recommended',
|
||||
'plugin:react-you-might-not-need-an-effect/legacy-recommended',
|
||||
],
|
||||
parser: '@babel/eslint-parser',
|
||||
parserOptions: {
|
||||
@@ -413,6 +412,13 @@ module.exports = {
|
||||
'icons/no-fa-icons-usage': 'error',
|
||||
'i18n-strings/no-template-vars': ['error', true],
|
||||
'i18n-strings/sentence-case-buttons': 'error',
|
||||
camelcase: [
|
||||
'error',
|
||||
{
|
||||
allow: ['^UNSAFE_'],
|
||||
properties: 'never',
|
||||
},
|
||||
],
|
||||
'class-methods-use-this': 0,
|
||||
curly: 2,
|
||||
'func-names': 0,
|
||||
|
||||
1
superset-frontend/.gitignore
vendored
1
superset-frontend/.gitignore
vendored
@@ -3,4 +3,3 @@ cypress/screenshots
|
||||
cypress/videos
|
||||
src/temp
|
||||
.temp_cache/
|
||||
.tsbuildinfo
|
||||
|
||||
58
superset-frontend/package-lock.json
generated
58
superset-frontend/package-lock.json
generated
@@ -232,7 +232,6 @@
|
||||
"eslint-plugin-react": "^7.37.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.2",
|
||||
"eslint-plugin-react-prefer-function-component": "^3.3.0",
|
||||
"eslint-plugin-react-you-might-not-need-an-effect": "^0.5.1",
|
||||
"eslint-plugin-storybook": "^0.8.0",
|
||||
"eslint-plugin-testing-library": "^6.4.0",
|
||||
"eslint-plugin-theme-colors": "file:eslint-rules/eslint-plugin-theme-colors",
|
||||
@@ -8886,9 +8885,9 @@
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/@ndelangen/get-tarball/node_modules/tar-fs": {
|
||||
"version": "2.1.4",
|
||||
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz",
|
||||
"integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==",
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz",
|
||||
"integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -25930,36 +25929,6 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/eslint-plugin-react-you-might-not-need-an-effect": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-react-you-might-not-need-an-effect/-/eslint-plugin-react-you-might-not-need-an-effect-0.5.1.tgz",
|
||||
"integrity": "sha512-Gi2kfHLkXUT3j+IAwgb8TEhY10iMwsdwSsgbIxk98zPpuPW7M52ey9fU1oPZrWUlyekr5eXwUCjeTHekS6Isrw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"eslint-utils": "^3.0.0",
|
||||
"globals": "^16.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": ">=8.40.0"
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-react-you-might-not-need-an-effect/node_modules/globals": {
|
||||
"version": "16.4.0",
|
||||
"resolved": "https://registry.npmjs.org/globals/-/globals-16.4.0.tgz",
|
||||
"integrity": "sha512-ob/2LcVVaVGCYN+r14cnwnoDPUufjiYgSqRhiFD0Q1iI4Odora5RE8Iv1D24hAz5oMophRGkGz+yuvQmmUMnMw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-react/node_modules/doctrine": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
|
||||
@@ -26075,25 +26044,6 @@
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-utils": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz",
|
||||
"integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"eslint-visitor-keys": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10.0.0 || ^12.0.0 || >= 14.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/mysticatea"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": ">=5"
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-visitor-keys": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz",
|
||||
@@ -60740,7 +60690,7 @@
|
||||
},
|
||||
"packages/superset-core": {
|
||||
"name": "@apache-superset/core",
|
||||
"version": "0.0.1-rc5",
|
||||
"version": "0.0.1-rc4",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.26.4",
|
||||
|
||||
@@ -305,7 +305,6 @@
|
||||
"eslint-plugin-react": "^7.37.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.2",
|
||||
"eslint-plugin-react-prefer-function-component": "^3.3.0",
|
||||
"eslint-plugin-react-you-might-not-need-an-effect": "^0.5.1",
|
||||
"eslint-plugin-storybook": "^0.8.0",
|
||||
"eslint-plugin-testing-library": "^6.4.0",
|
||||
"eslint-plugin-theme-colors": "file:eslint-rules/eslint-plugin-theme-colors",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@apache-superset/core",
|
||||
"version": "0.0.1-rc5",
|
||||
"version": "0.0.1-rc4",
|
||||
"description": "This package contains UI elements, APIs, and utility functions used by Superset.",
|
||||
"sideEffects": false,
|
||||
"main": "lib/index.js",
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*", "types/**/*"],
|
||||
"exclude": ["src/**/*.test.*", "src/**/*.stories.*"]
|
||||
|
||||
@@ -18,8 +18,7 @@
|
||||
* under the License.
|
||||
*/
|
||||
import { ReactNode } from 'react';
|
||||
import { css, styled, t } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { css, GenericDataType, styled, t } from '@superset-ui/core';
|
||||
import {
|
||||
ClockCircleOutlined,
|
||||
QuestionOutlined,
|
||||
|
||||
@@ -16,8 +16,13 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { DTTM_ALIAS, QueryColumn, QueryMode, t } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import {
|
||||
DTTM_ALIAS,
|
||||
GenericDataType,
|
||||
QueryColumn,
|
||||
QueryMode,
|
||||
t,
|
||||
} from '@superset-ui/core';
|
||||
import { ColumnMeta, SortSeriesData, SortSeriesType } from './types';
|
||||
|
||||
export const DEFAULT_MAX_ROW = 100000;
|
||||
|
||||
@@ -16,8 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { DatasourceType } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { DatasourceType, GenericDataType } from '@superset-ui/core';
|
||||
import { Dataset } from './types';
|
||||
|
||||
export const TestDataset: Dataset = {
|
||||
|
||||
@@ -20,13 +20,13 @@
|
||||
import {
|
||||
ContributionType,
|
||||
ensureIsArray,
|
||||
GenericDataType,
|
||||
getColumnLabel,
|
||||
getMetricLabel,
|
||||
QueryFormColumn,
|
||||
QueryFormMetric,
|
||||
t,
|
||||
} from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import {
|
||||
ControlPanelState,
|
||||
ControlState,
|
||||
|
||||
@@ -17,8 +17,12 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { QueryColumn, t, validateNonEmpty } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import {
|
||||
GenericDataType,
|
||||
QueryColumn,
|
||||
t,
|
||||
validateNonEmpty,
|
||||
} from '@superset-ui/core';
|
||||
import {
|
||||
ExtraControlProps,
|
||||
SharedControlConfig,
|
||||
|
||||
@@ -16,8 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ensureIsArray, ValueOf } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { ensureIsArray, GenericDataType, ValueOf } from '@superset-ui/core';
|
||||
import { ControlPanelState, isDataset, isQueryResponse } from '../types';
|
||||
|
||||
export function checkColumnType(
|
||||
|
||||
@@ -16,8 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { QueryColumn, QueryResponse } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { GenericDataType, QueryColumn, QueryResponse } from '@superset-ui/core';
|
||||
import { ColumnMeta, Dataset, isDataset, isQueryResponse } from '../types';
|
||||
|
||||
export function columnsByType(
|
||||
|
||||
@@ -17,11 +17,11 @@
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
GenericDataType,
|
||||
getColumnLabel,
|
||||
isPhysicalColumn,
|
||||
QueryFormColumn,
|
||||
} from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { checkColumnType, ControlStateMapping } from '..';
|
||||
|
||||
export function isSortable(controls: ControlStateMapping): boolean {
|
||||
|
||||
@@ -18,7 +18,8 @@
|
||||
*/
|
||||
import '@testing-library/jest-dom';
|
||||
import { render } from '@superset-ui/core/spec';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { GenericDataType } from '@superset-ui/core';
|
||||
|
||||
import { ColumnOption, ColumnOptionProps } from '../../src';
|
||||
|
||||
jest.mock('@superset-ui/chart-controls/components/SQLPopover', () => ({
|
||||
|
||||
@@ -19,7 +19,8 @@
|
||||
import { isValidElement } from 'react';
|
||||
import { render, screen } from '@superset-ui/core/spec';
|
||||
import '@testing-library/jest-dom';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { GenericDataType } from '@superset-ui/core';
|
||||
|
||||
import { ColumnTypeLabel, ColumnTypeLabelProps } from '../../src';
|
||||
|
||||
describe('ColumnOption', () => {
|
||||
|
||||
@@ -16,8 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { testQueryResponse } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { GenericDataType, testQueryResponse } from '@superset-ui/core';
|
||||
import { checkColumnType, TestDataset } from '../../src';
|
||||
|
||||
test('checkColumnType columns from a Dataset', () => {
|
||||
|
||||
@@ -16,8 +16,11 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { DatasourceType, testQueryResponse } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import {
|
||||
DatasourceType,
|
||||
GenericDataType,
|
||||
testQueryResponse,
|
||||
} from '@superset-ui/core';
|
||||
import { columnChoices } from '../../src';
|
||||
|
||||
describe('columnChoices()', () => {
|
||||
|
||||
@@ -16,8 +16,11 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { testQueryResponse, testQueryResults } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import {
|
||||
GenericDataType,
|
||||
testQueryResponse,
|
||||
testQueryResults,
|
||||
} from '@superset-ui/core';
|
||||
import {
|
||||
Dataset,
|
||||
getTemporalColumns,
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
* under the License.
|
||||
*/
|
||||
import { ControlStateMapping } from '@superset-ui/chart-controls';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { GenericDataType } from '@superset-ui/core';
|
||||
import { isSortable } from '../../src/utils/isSortable';
|
||||
|
||||
const controls: ControlStateMapping = {
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*", "types/**/*"],
|
||||
"exclude": ["src/**/*.test.*", "src/**/*.stories.*"],
|
||||
|
||||
@@ -127,9 +127,13 @@ const Select = forwardRef(
|
||||
const shouldShowSearch = allowNewOptions ? true : showSearch;
|
||||
const [selectValue, setSelectValue] = useState(value);
|
||||
const [inputValue, setInputValue] = useState('');
|
||||
const [isLoading, setIsLoading] = useState(loading);
|
||||
const [isDropdownVisible, setIsDropdownVisible] = useState(false);
|
||||
const [isSearching, setIsSearching] = useState(false);
|
||||
const [visibleOptions, setVisibleOptions] = useState<SelectOptionsType>([]);
|
||||
const [maxTagCount, setMaxTagCount] = useState(
|
||||
propsMaxTagCount ?? MAX_TAG_COUNT,
|
||||
);
|
||||
const [onChangeCount, setOnChangeCount] = useState(0);
|
||||
const previousChangeCount = usePrevious(onChangeCount, 0);
|
||||
const fireOnChange = useCallback(
|
||||
@@ -137,11 +141,11 @@ const Select = forwardRef(
|
||||
[onChangeCount],
|
||||
);
|
||||
|
||||
const maxTagCount = oneLine
|
||||
? isDropdownVisible
|
||||
? 0
|
||||
: 1
|
||||
: (propsMaxTagCount ?? MAX_TAG_COUNT);
|
||||
useEffect(() => {
|
||||
if (oneLine) {
|
||||
setMaxTagCount(isDropdownVisible ? 0 : 1);
|
||||
}
|
||||
}, [isDropdownVisible, oneLine]);
|
||||
|
||||
const mappedMode = isSingleMode ? undefined : 'multiple';
|
||||
|
||||
@@ -506,8 +510,6 @@ const Select = forwardRef(
|
||||
],
|
||||
);
|
||||
|
||||
const isLoading = loading ?? false;
|
||||
|
||||
const popupRender = (
|
||||
originNode: ReactElement & { ref?: RefObject<HTMLElement> },
|
||||
) =>
|
||||
@@ -534,6 +536,12 @@ const Select = forwardRef(
|
||||
setVisibleOptions(initialOptions);
|
||||
}, [initialOptions]);
|
||||
|
||||
useEffect(() => {
|
||||
if (loading !== undefined && loading !== isLoading) {
|
||||
setIsLoading(loading);
|
||||
}
|
||||
}, [isLoading, loading]);
|
||||
|
||||
useEffect(() => {
|
||||
setSelectValue(value);
|
||||
}, [value]);
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { GenericDataType } from './QueryResponse';
|
||||
import { QueryFormColumn } from './QueryFormData';
|
||||
|
||||
export interface AdhocColumn {
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { DatasourceType } from './Datasource';
|
||||
import { BinaryOperator, SetOperator, UnaryOperator } from './Operator';
|
||||
import { AppliedTimeExtras, TimeRange } from './Time';
|
||||
@@ -32,7 +31,7 @@ import { Maybe } from '../../types';
|
||||
import { PostProcessingRule } from './PostProcessing';
|
||||
import { JsonObject } from '../../connection';
|
||||
import { TimeGranularity } from '../../time-format';
|
||||
import { DataRecordValue } from './QueryResponse';
|
||||
import { GenericDataType, DataRecordValue } from './QueryResponse';
|
||||
|
||||
export type BaseQueryObjectFilterClause = {
|
||||
col: QueryFormColumn;
|
||||
|
||||
@@ -17,10 +17,19 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { TimeseriesDataRecord } from '../../chart';
|
||||
import { AnnotationData } from './AnnotationLayer';
|
||||
|
||||
/**
|
||||
* Generic data types, see enum of the same name in superset/utils/core.py.
|
||||
*/
|
||||
export enum GenericDataType {
|
||||
Numeric = 0,
|
||||
String = 1,
|
||||
Temporal = 2,
|
||||
Boolean = 3,
|
||||
}
|
||||
|
||||
/**
|
||||
* Primitive types for data field values.
|
||||
*/
|
||||
|
||||
@@ -16,8 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { AdhocMetric } from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { AdhocMetric, GenericDataType } from '@superset-ui/core';
|
||||
|
||||
export const NUM_METRIC: AdhocMetric = {
|
||||
expressionType: 'SIMPLE',
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*", "types/**/*"],
|
||||
"exclude": ["src/**/*.test.*", "src/**/*.stories.*"],
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*", "types/**/*"],
|
||||
"exclude": ["src/**/*.test.*", "src/**/*.stories.*"]
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "types/**/*"],
|
||||
"exclude": [
|
||||
|
||||
@@ -169,12 +169,12 @@ const CategoricalDeckGLContainer = (props: CategoricalDeckGLContainerProps) => {
|
||||
}));
|
||||
}
|
||||
case COLOR_SCHEME_TYPES.color_breakpoints: {
|
||||
const defaultBreakpointColor = fd.default_breakpoint_color
|
||||
const defaultBreakpointColor = fd.deafult_breakpoint_color
|
||||
? [
|
||||
fd.default_breakpoint_color.r,
|
||||
fd.default_breakpoint_color.g,
|
||||
fd.default_breakpoint_color.b,
|
||||
fd.default_breakpoint_color.a * 255,
|
||||
fd.deafult_breakpoint_color.r,
|
||||
fd.deafult_breakpoint_color.g,
|
||||
fd.deafult_breakpoint_color.b,
|
||||
fd.deafult_breakpoint_color.a * 255,
|
||||
]
|
||||
: [
|
||||
DEFAULT_DECKGL_COLOR.r,
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
buildQueryContext,
|
||||
ensureIsArray,
|
||||
SqlaFormData,
|
||||
} from '@superset-ui/core';
|
||||
import {
|
||||
getSpatialColumns,
|
||||
addSpatialNullFilters,
|
||||
SpatialFormData,
|
||||
} from '../spatialUtils';
|
||||
import { addTooltipColumnsToQuery } from '../buildQueryUtils';
|
||||
|
||||
export interface DeckArcFormData extends SqlaFormData {
|
||||
start_spatial: SpatialFormData['spatial'];
|
||||
end_spatial: SpatialFormData['spatial'];
|
||||
dimension?: string;
|
||||
js_columns?: string[];
|
||||
tooltip_contents?: unknown[];
|
||||
tooltip_template?: string;
|
||||
}
|
||||
|
||||
export default function buildQuery(formData: DeckArcFormData) {
|
||||
const {
|
||||
start_spatial,
|
||||
end_spatial,
|
||||
dimension,
|
||||
js_columns,
|
||||
tooltip_contents,
|
||||
} = formData;
|
||||
|
||||
if (!start_spatial || !end_spatial) {
|
||||
throw new Error(
|
||||
'Start and end spatial configurations are required for Arc charts',
|
||||
);
|
||||
}
|
||||
|
||||
return buildQueryContext(formData, baseQueryObject => {
|
||||
const startSpatialColumns = getSpatialColumns(start_spatial);
|
||||
const endSpatialColumns = getSpatialColumns(end_spatial);
|
||||
|
||||
let columns = [
|
||||
...(baseQueryObject.columns || []),
|
||||
...startSpatialColumns,
|
||||
...endSpatialColumns,
|
||||
];
|
||||
|
||||
if (dimension) {
|
||||
columns = [...columns, dimension];
|
||||
}
|
||||
|
||||
const jsColumns = ensureIsArray(js_columns || []);
|
||||
jsColumns.forEach(col => {
|
||||
if (!columns.includes(col)) {
|
||||
columns.push(col);
|
||||
}
|
||||
});
|
||||
|
||||
columns = addTooltipColumnsToQuery(columns, tooltip_contents);
|
||||
|
||||
let filters = addSpatialNullFilters(
|
||||
start_spatial,
|
||||
ensureIsArray(baseQueryObject.filters || []),
|
||||
);
|
||||
filters = addSpatialNullFilters(end_spatial, filters);
|
||||
|
||||
const isTimeseries = !!formData.time_grain_sqla;
|
||||
|
||||
return [
|
||||
{
|
||||
...baseQueryObject,
|
||||
columns,
|
||||
filters,
|
||||
is_timeseries: isTimeseries,
|
||||
row_limit: baseQueryObject.row_limit,
|
||||
},
|
||||
];
|
||||
});
|
||||
}
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import transformProps from './transformProps';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -40,13 +39,13 @@ const metadata = new ChartMetadata({
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
exampleGallery: [{ url: example, urlDark: exampleDark }],
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('Geo'), t('3D'), t('Relational'), t('Web')],
|
||||
});
|
||||
|
||||
export default class ArcChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Arc'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import {
|
||||
processSpatialData,
|
||||
addJsColumnsToExtraProps,
|
||||
DataRecord,
|
||||
} from '../spatialUtils';
|
||||
import {
|
||||
createBaseTransformResult,
|
||||
getRecordsFromQuery,
|
||||
addPropertiesToFeature,
|
||||
} from '../transformUtils';
|
||||
import { DeckArcFormData } from './buildQuery';
|
||||
|
||||
interface ArcPoint {
|
||||
sourcePosition: [number, number];
|
||||
targetPosition: [number, number];
|
||||
cat_color?: string;
|
||||
__timestamp?: number;
|
||||
extraProps?: Record<string, unknown>;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
function processArcData(
|
||||
records: DataRecord[],
|
||||
startSpatial: DeckArcFormData['start_spatial'],
|
||||
endSpatial: DeckArcFormData['end_spatial'],
|
||||
dimension?: string,
|
||||
jsColumns?: string[],
|
||||
): ArcPoint[] {
|
||||
if (!startSpatial || !endSpatial || !records.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const startFeatures = processSpatialData(records, startSpatial);
|
||||
const endFeatures = processSpatialData(records, endSpatial);
|
||||
const excludeKeys = new Set(
|
||||
['__timestamp', dimension, ...(jsColumns || [])].filter(
|
||||
(key): key is string => key != null,
|
||||
),
|
||||
);
|
||||
|
||||
return records
|
||||
.map((record, index) => {
|
||||
const startFeature = startFeatures[index];
|
||||
const endFeature = endFeatures[index];
|
||||
|
||||
if (!startFeature || !endFeature) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let arcPoint: ArcPoint = {
|
||||
sourcePosition: startFeature.position,
|
||||
targetPosition: endFeature.position,
|
||||
extraProps: {},
|
||||
};
|
||||
|
||||
arcPoint = addJsColumnsToExtraProps(arcPoint, record, jsColumns);
|
||||
|
||||
if (dimension && record[dimension] != null) {
|
||||
arcPoint.cat_color = String(record[dimension]);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
if (record.__timestamp != null) {
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
arcPoint.__timestamp = Number(record.__timestamp);
|
||||
}
|
||||
|
||||
arcPoint = addPropertiesToFeature(arcPoint, record, excludeKeys);
|
||||
return arcPoint;
|
||||
})
|
||||
.filter((point): point is ArcPoint => point !== null);
|
||||
}
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
const { rawFormData: formData } = chartProps;
|
||||
const { start_spatial, end_spatial, dimension, js_columns } =
|
||||
formData as DeckArcFormData;
|
||||
|
||||
const records = getRecordsFromQuery(chartProps.queriesData);
|
||||
const features = processArcData(
|
||||
records,
|
||||
start_spatial,
|
||||
end_spatial,
|
||||
dimension,
|
||||
js_columns,
|
||||
);
|
||||
|
||||
return createBaseTransformResult(chartProps, features);
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { SpatialFormData, buildSpatialQuery } from '../spatialUtils';
|
||||
|
||||
export interface DeckContourFormData extends SpatialFormData {
|
||||
cellSize?: string;
|
||||
aggregation?: string;
|
||||
contours?: Array<{
|
||||
color: { r: number; g: number; b: number };
|
||||
lowerThreshold: number;
|
||||
upperThreshold?: number;
|
||||
strokeWidth?: number;
|
||||
}>;
|
||||
}
|
||||
|
||||
export default function buildQuery(formData: DeckContourFormData) {
|
||||
return buildSpatialQuery(formData);
|
||||
}
|
||||
@@ -17,13 +17,12 @@
|
||||
* under the License.
|
||||
*/
|
||||
import { t, ChartMetadata, ChartPlugin, Behavior } from '@superset-ui/core';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
category: t('Map'),
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
name: t('deck.gl Contour'),
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('Spatial'), t('Comparison')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -42,7 +42,6 @@ const metadata = new ChartMetadata({
|
||||
export default class ContourChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Contour'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -76,7 +76,7 @@ export const getLayer: GetLayerType<GridLayer> = function ({
|
||||
|
||||
const colorSchemeType = fd.color_scheme_type;
|
||||
const colorRange = getColorRange({
|
||||
defaultBreakpointsColor: fd.default_breakpoint_color,
|
||||
defaultBreakpointsColor: fd.deafult_breakpoint_color,
|
||||
colorSchemeType,
|
||||
colorScale,
|
||||
colorBreakpoints,
|
||||
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
exampleGallery: [{ url: example, urlDark: exampleDark }],
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('3D'), t('Comparison')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -42,7 +42,6 @@ const metadata = new ChartMetadata({
|
||||
export default class GridChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Grid'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import { transformSpatialProps } from '../spatialUtils';
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
return transformSpatialProps(chartProps);
|
||||
}
|
||||
@@ -126,7 +126,7 @@ export const getLayer: GetLayerType<HeatmapLayer> = ({
|
||||
|
||||
const colorSchemeType = fd.color_scheme_type;
|
||||
const colorRange = getColorRange({
|
||||
defaultBreakpointsColor: fd.default_breakpoint_color,
|
||||
defaultBreakpointsColor: fd.deafult_breakpoint_color,
|
||||
colorBreakpoints: fd.color_breakpoints,
|
||||
fixedColor: fd.color_picker,
|
||||
colorSchemeType,
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { SpatialFormData, buildSpatialQuery } from '../spatialUtils';
|
||||
|
||||
export default function buildQuery(formData: SpatialFormData) {
|
||||
return buildSpatialQuery(formData);
|
||||
}
|
||||
@@ -17,13 +17,12 @@
|
||||
* under the License.
|
||||
*/
|
||||
import { t, ChartMetadata, ChartPlugin, Behavior } from '@superset-ui/core';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
category: t('Map'),
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
name: t('deck.gl Heatmap'),
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('Spatial'), t('Comparison')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -42,7 +42,6 @@ const metadata = new ChartMetadata({
|
||||
export default class HeatmapChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Heatmap'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import { transformSpatialProps } from '../spatialUtils';
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
return transformSpatialProps(chartProps);
|
||||
}
|
||||
@@ -75,7 +75,7 @@ export const getLayer: GetLayerType<HexagonLayer> = function ({
|
||||
|
||||
const colorSchemeType = fd.color_scheme_type;
|
||||
const colorRange = getColorRange({
|
||||
defaultBreakpointsColor: fd.default_breakpoint_color,
|
||||
defaultBreakpointsColor: fd.deafult_breakpoint_color,
|
||||
colorBreakpoints: fd.color_breakpoints,
|
||||
fixedColor: fd.color_picker,
|
||||
colorSchemeType,
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { SpatialFormData, buildSpatialQuery } from '../spatialUtils';
|
||||
|
||||
export interface DeckHexFormData extends SpatialFormData {
|
||||
extruded?: boolean;
|
||||
js_agg_function?: string;
|
||||
grid_size?: number;
|
||||
}
|
||||
|
||||
export default function buildQuery(formData: DeckHexFormData) {
|
||||
return buildSpatialQuery(formData);
|
||||
}
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
name: t('deck.gl 3D Hexagon'),
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('3D'), t('Geo'), t('Comparison')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -42,7 +42,6 @@ const metadata = new ChartMetadata({
|
||||
export default class HexChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Hex'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import { transformSpatialProps } from '../spatialUtils';
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
return transformSpatialProps(chartProps);
|
||||
}
|
||||
@@ -1,95 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
buildQueryContext,
|
||||
ensureIsArray,
|
||||
SqlaFormData,
|
||||
QueryFormColumn,
|
||||
} from '@superset-ui/core';
|
||||
import { addNullFilters, addTooltipColumnsToQuery } from '../buildQueryUtils';
|
||||
|
||||
export interface DeckPathFormData extends SqlaFormData {
|
||||
line_column?: string;
|
||||
line_type?: 'polyline' | 'json' | 'geohash';
|
||||
metric?: string;
|
||||
reverse_long_lat?: boolean;
|
||||
js_columns?: string[];
|
||||
tooltip_contents?: unknown[];
|
||||
tooltip_template?: string;
|
||||
}
|
||||
|
||||
export default function buildQuery(formData: DeckPathFormData) {
|
||||
const { line_column, metric, js_columns, tooltip_contents } = formData;
|
||||
|
||||
if (!line_column) {
|
||||
throw new Error('Line column is required for Path charts');
|
||||
}
|
||||
|
||||
return buildQueryContext(formData, {
|
||||
buildQuery: baseQueryObject => {
|
||||
const columns = ensureIsArray(
|
||||
baseQueryObject.columns || [],
|
||||
) as QueryFormColumn[];
|
||||
const metrics = ensureIsArray(baseQueryObject.metrics || []);
|
||||
const groupby = ensureIsArray(
|
||||
baseQueryObject.groupby || [],
|
||||
) as QueryFormColumn[];
|
||||
const jsColumns = ensureIsArray(js_columns || []);
|
||||
|
||||
if (baseQueryObject.metrics?.length || metric) {
|
||||
if (metric && !metrics.includes(metric)) {
|
||||
metrics.push(metric);
|
||||
}
|
||||
if (!groupby.includes(line_column)) {
|
||||
groupby.push(line_column);
|
||||
}
|
||||
} else if (!columns.includes(line_column)) {
|
||||
columns.push(line_column);
|
||||
}
|
||||
|
||||
jsColumns.forEach(col => {
|
||||
if (!columns.includes(col) && !groupby.includes(col)) {
|
||||
columns.push(col);
|
||||
}
|
||||
});
|
||||
|
||||
const finalColumns = addTooltipColumnsToQuery(columns, tooltip_contents);
|
||||
const finalGroupby = addTooltipColumnsToQuery(groupby, tooltip_contents);
|
||||
|
||||
const filters = addNullFilters(
|
||||
ensureIsArray(baseQueryObject.filters || []),
|
||||
[line_column],
|
||||
);
|
||||
|
||||
const isTimeseries = Boolean(formData.time_grain_sqla);
|
||||
|
||||
return [
|
||||
{
|
||||
...baseQueryObject,
|
||||
columns: finalColumns,
|
||||
metrics,
|
||||
groupby: finalGroupby,
|
||||
filters,
|
||||
is_timeseries: isTimeseries,
|
||||
row_limit: baseQueryObject.row_limit,
|
||||
},
|
||||
];
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -33,6 +32,7 @@ const metadata = new ChartMetadata({
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
exampleGallery: [{ url: example, urlDark: exampleDark }],
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('Web')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -40,7 +40,6 @@ const metadata = new ChartMetadata({
|
||||
export default class PathChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Path'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,166 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps, DTTM_ALIAS } from '@superset-ui/core';
|
||||
import { addJsColumnsToExtraProps, DataRecord } from '../spatialUtils';
|
||||
import {
|
||||
createBaseTransformResult,
|
||||
getRecordsFromQuery,
|
||||
getMetricLabelFromFormData,
|
||||
parseMetricValue,
|
||||
addPropertiesToFeature,
|
||||
} from '../transformUtils';
|
||||
import { DeckPathFormData } from './buildQuery';
|
||||
|
||||
// Optional third-party decoders that may be attached to `window` at runtime;
// consumed by the polyline/geohash branches of the `decoders` map in this file.
declare global {
  interface Window {
    // Encoded-polyline decoder returning coordinate pairs.
    polyline?: {
      decode: (data: string) => [number, number][];
    };
    // Geohash decoder returning a single longitude/latitude point.
    geohash?: {
      decode: (data: string) => { longitude: number; latitude: number };
    };
  }
}
|
||||
|
||||
// Path form data extended with the client-side JavaScript hook controls
// (data mutator, tooltip generator, click handler) used at render time.
export interface DeckPathTransformPropsFormData extends DeckPathFormData {
  js_data_mutator?: string;
  js_tooltip?: string;
  js_onclick_href?: string;
}
|
||||
|
||||
// A single decoded path ready for the deck.gl PathLayer.
interface PathFeature {
  // Decoded coordinate pairs; empty when decoding fails or data is missing.
  path: [number, number][];
  // Aggregated metric value, set only when a metric column is present.
  metric?: number;
  // Temporal value taken from the DTTM_ALIAS column, if present.
  timestamp?: unknown;
  // Values of js_columns, kept separate for JavaScript hooks.
  extraProps?: Record<string, unknown>;
  // Any remaining record fields are copied onto the feature.
  [key: string]: unknown;
}
|
||||
|
||||
const decoders = {
|
||||
json: (data: string): [number, number][] => {
|
||||
try {
|
||||
const parsed = JSON.parse(data);
|
||||
return Array.isArray(parsed) ? parsed : [];
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
},
|
||||
polyline: (data: string): [number, number][] => {
|
||||
try {
|
||||
if (typeof window !== 'undefined' && window.polyline) {
|
||||
return window.polyline.decode(data);
|
||||
}
|
||||
return [];
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
},
|
||||
geohash: (data: string): [number, number][] => {
|
||||
try {
|
||||
if (typeof window !== 'undefined' && window.geohash) {
|
||||
const decoded = window.geohash.decode(data);
|
||||
return [[decoded.longitude, decoded.latitude]];
|
||||
}
|
||||
return [];
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
/**
 * Decodes each record's line column into a PathFeature.
 *
 * @param records - query result rows
 * @param lineColumn - name of the column containing encoded path data
 * @param lineType - encoding of the path data (defaults to JSON)
 * @param reverseLongLat - when true, swap each coordinate pair's order
 * @param metricLabel - label of the aggregated metric column, if any
 * @param jsColumns - columns reserved for JavaScript hooks
 * @returns one feature per record; records that fail to decode yield an
 *   empty `path` rather than being dropped
 */
function processPathData(
  records: DataRecord[],
  lineColumn: string,
  lineType: 'polyline' | 'json' | 'geohash' = 'json',
  reverseLongLat: boolean = false,
  metricLabel?: string,
  jsColumns?: string[],
): PathFeature[] {
  if (!records.length || !lineColumn) {
    return [];
  }

  const decoder = decoders[lineType] || decoders.json;
  // Keys that must NOT be copied verbatim onto the feature: the encoded
  // line data (kept when lineType is geohash — TODO confirm intentional),
  // time/metric columns handled explicitly below, and JS-hook columns
  // handled by addJsColumnsToExtraProps.
  const excludeKeys = new Set(
    [
      lineType !== 'geohash' ? lineColumn : undefined,
      'timestamp',
      DTTM_ALIAS,
      metricLabel,
      ...(jsColumns || []),
    ].filter(Boolean) as string[],
  );

  return records.map(record => {
    const lineData = record[lineColumn];
    let path: [number, number][] = [];

    if (lineData) {
      path = decoder(String(lineData));
      if (reverseLongLat && path.length > 0) {
        // Swap each pair's coordinate order per the form setting.
        path = path.map(([lng, lat]) => [lat, lng]);
      }
    }

    let feature: PathFeature = {
      path,
      timestamp: record[DTTM_ALIAS],
      extraProps: {},
    };

    if (metricLabel && record[metricLabel] != null) {
      const metricValue = parseMetricValue(record[metricLabel]);
      if (metricValue !== undefined) {
        feature.metric = metricValue;
      }
    }

    // js_columns go into extraProps; remaining record fields (minus
    // excludeKeys) are copied directly onto the feature.
    feature = addJsColumnsToExtraProps(feature, record, jsColumns);
    feature = addPropertiesToFeature(feature, record, excludeKeys);
    return feature;
  });
}
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
const { rawFormData: formData } = chartProps;
|
||||
const {
|
||||
line_column,
|
||||
line_type = 'json',
|
||||
metric,
|
||||
reverse_long_lat = false,
|
||||
js_columns,
|
||||
} = formData as DeckPathTransformPropsFormData;
|
||||
|
||||
const metricLabel = getMetricLabelFromFormData(metric);
|
||||
const records = getRecordsFromQuery(chartProps.queriesData);
|
||||
const features = processPathData(
|
||||
records,
|
||||
line_column || '',
|
||||
line_type,
|
||||
reverse_long_lat,
|
||||
metricLabel,
|
||||
js_columns,
|
||||
).reverse();
|
||||
|
||||
return createBaseTransformResult(
|
||||
chartProps,
|
||||
features,
|
||||
metricLabel ? [metricLabel] : [],
|
||||
);
|
||||
}
|
||||
@@ -118,7 +118,7 @@ export const getLayer: GetLayerType<PolygonLayer> = function ({
|
||||
fd.fill_color_picker;
|
||||
const sc: { r: number; g: number; b: number; a: number } =
|
||||
fd.stroke_color_picker;
|
||||
const defaultBreakpointColor = fd.default_breakpoint_color;
|
||||
const defaultBreakpointColor = fd.deafult_breakpoint_color;
|
||||
let data = [...payload.data.features];
|
||||
|
||||
if (fd.js_data_mutator) {
|
||||
|
||||
@@ -1,111 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
buildQueryContext,
|
||||
ensureIsArray,
|
||||
SqlaFormData,
|
||||
getMetricLabel,
|
||||
QueryObjectFilterClause,
|
||||
QueryObject,
|
||||
QueryFormColumn,
|
||||
} from '@superset-ui/core';
|
||||
import { addTooltipColumnsToQuery } from '../buildQueryUtils';
|
||||
|
||||
// Form data for the deck.gl Polygon chart.
export interface DeckPolygonFormData extends SqlaFormData {
  // Column holding the encoded polygon data; required at query time.
  line_column?: string;
  // Encoding of line_column (e.g. 'json', 'geohash', 'zipcode').
  line_type?: string;
  // Optional metric aggregated per polygon.
  metric?: string;
  // Elevation metric control; its `value` is queried as a second metric.
  point_radius_fixed?: {
    value?: string;
  };
  // When true, each coordinate pair's order is swapped.
  reverse_long_lat?: boolean;
  // When true (the default), NULL polygon/metric rows are filtered out.
  filter_nulls?: boolean;
  // Extra columns fetched solely for client-side JavaScript hooks.
  js_columns?: string[];
  // Tooltip configuration entries; column entries get added to the query.
  tooltip_contents?: unknown[];
  // Tooltip rendering template (not read in this file).
  tooltip_template?: string;
}
|
||||
|
||||
export default function buildQuery(formData: DeckPolygonFormData) {
|
||||
const {
|
||||
line_column,
|
||||
metric,
|
||||
point_radius_fixed,
|
||||
filter_nulls = true,
|
||||
js_columns,
|
||||
tooltip_contents,
|
||||
} = formData;
|
||||
|
||||
if (!line_column) {
|
||||
throw new Error('Polygon column is required for Polygon charts');
|
||||
}
|
||||
|
||||
return buildQueryContext(formData, (baseQueryObject: QueryObject) => {
|
||||
let columns: QueryFormColumn[] = [
|
||||
...ensureIsArray(baseQueryObject.columns || []),
|
||||
line_column,
|
||||
];
|
||||
|
||||
const jsColumns = ensureIsArray(js_columns || []);
|
||||
jsColumns.forEach((col: string) => {
|
||||
if (!columns.includes(col)) {
|
||||
columns.push(col);
|
||||
}
|
||||
});
|
||||
|
||||
columns = addTooltipColumnsToQuery(columns, tooltip_contents);
|
||||
|
||||
const metrics = [];
|
||||
if (metric) {
|
||||
metrics.push(metric);
|
||||
}
|
||||
if (point_radius_fixed?.value) {
|
||||
metrics.push(point_radius_fixed.value);
|
||||
}
|
||||
|
||||
const filters = ensureIsArray(baseQueryObject.filters || []);
|
||||
if (filter_nulls) {
|
||||
const nullFilters: QueryObjectFilterClause[] = [
|
||||
{
|
||||
col: line_column,
|
||||
op: 'IS NOT NULL',
|
||||
},
|
||||
];
|
||||
|
||||
if (metric) {
|
||||
nullFilters.push({
|
||||
col: getMetricLabel(metric),
|
||||
op: 'IS NOT NULL',
|
||||
});
|
||||
}
|
||||
|
||||
filters.push(...nullFilters);
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
...baseQueryObject,
|
||||
columns,
|
||||
metrics,
|
||||
filters,
|
||||
is_timeseries: false,
|
||||
row_limit: baseQueryObject.row_limit,
|
||||
},
|
||||
];
|
||||
});
|
||||
}
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import transformProps from './transformProps';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
exampleGallery: [{ url: example, urlDark: exampleDark }],
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('3D'), t('Multi-Dimensions'), t('Geo')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -42,7 +42,6 @@ const metadata = new ChartMetadata({
|
||||
export default class PolygonChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Polygon'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,143 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import { addJsColumnsToExtraProps, DataRecord } from '../spatialUtils';
|
||||
import {
|
||||
createBaseTransformResult,
|
||||
getRecordsFromQuery,
|
||||
getMetricLabelFromFormData,
|
||||
parseMetricValue,
|
||||
addPropertiesToFeature,
|
||||
} from '../transformUtils';
|
||||
import { DeckPolygonFormData } from './buildQuery';
|
||||
|
||||
// A single polygon ready for the deck.gl PolygonLayer.
interface PolygonFeature {
  // Parsed polygon coordinates.
  polygon?: number[][];
  name?: string;
  // Elevation value taken from the elevation metric, when configured.
  elevation?: number;
  // Values of js_columns, kept separate for JavaScript hooks.
  extraProps?: Record<string, unknown>;
  // Metric values keyed by metric label.
  metrics?: Record<string, number | string>;
}
|
||||
|
||||
/**
 * Converts query records into deck.gl PolygonLayer features.
 *
 * Records whose polygon column is missing, unparsable, or of an unexpected
 * shape are dropped (mapped to null, then filtered out).
 *
 * @param records - query result rows
 * @param formData - Polygon chart form data
 * @returns one feature per successfully parsed record
 */
function processPolygonData(
  records: DataRecord[],
  formData: DeckPolygonFormData,
): PolygonFeature[] {
  const {
    line_column,
    line_type,
    metric,
    point_radius_fixed,
    reverse_long_lat,
    js_columns,
  } = formData;

  if (!line_column || !records.length) {
    return [];
  }

  const metricLabel = getMetricLabelFromFormData(metric);
  const elevationLabel = getMetricLabelFromFormData(point_radius_fixed);
  // Keys handled explicitly below; every other record field is copied
  // directly onto the feature by addPropertiesToFeature.
  const excludeKeys = new Set([line_column, ...(js_columns || [])]);

  return records
    .map(record => {
      let feature: PolygonFeature = {
        extraProps: {},
        metrics: {},
      };

      // js_columns go into extraProps; remaining fields are copied onto
      // the feature (minus excludeKeys).
      feature = addJsColumnsToExtraProps(feature, record, js_columns);
      const updatedFeature = addPropertiesToFeature(
        feature as unknown as Record<string, unknown>,
        record,
        excludeKeys,
      );
      feature = updatedFeature as unknown as PolygonFeature;

      const rawPolygonData = record[line_column];
      if (!rawPolygonData) {
        return null;
      }

      try {
        let polygonCoords: number[][];

        switch (line_type) {
          case 'json': {
            // Accept either a GeoJSON-like object with `coordinates` or a
            // bare coordinate array (as a string or already parsed).
            const parsed =
              typeof rawPolygonData === 'string'
                ? JSON.parse(rawPolygonData)
                : rawPolygonData;

            if (parsed.coordinates) {
              polygonCoords = parsed.coordinates[0] || parsed.coordinates;
            } else if (Array.isArray(parsed)) {
              polygonCoords = parsed;
            } else {
              return null;
            }
            break;
          }
          case 'geohash':
          case 'zipcode':
          default: {
            // Assumes these types arrive as already-decoded coordinate
            // arrays — TODO confirm upstream decoding.
            polygonCoords = Array.isArray(rawPolygonData) ? rawPolygonData : [];
            break;
          }
        }

        if (reverse_long_lat && polygonCoords.length > 0) {
          // Swap each coordinate pair's order per the form setting.
          polygonCoords = polygonCoords.map(coord => [coord[1], coord[0]]);
        }

        feature.polygon = polygonCoords;

        if (elevationLabel && record[elevationLabel] != null) {
          const elevationValue = parseMetricValue(record[elevationLabel]);
          if (elevationValue !== undefined) {
            feature.elevation = elevationValue;
          }
        }

        if (metricLabel && record[metricLabel] != null) {
          const metricValue = record[metricLabel];
          if (
            typeof metricValue === 'string' ||
            typeof metricValue === 'number'
          ) {
            feature.metrics![metricLabel] = metricValue;
          }
        }
      } catch {
        // Malformed polygon payloads drop the record rather than crash.
        return null;
      }

      return feature;
    })
    .filter((feature): feature is PolygonFeature => feature !== null);
}
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
const { rawFormData: formData } = chartProps;
|
||||
const records = getRecordsFromQuery(chartProps.queriesData);
|
||||
const features = processPolygonData(records, formData as DeckPolygonFormData);
|
||||
|
||||
return createBaseTransformResult(chartProps, features);
|
||||
}
|
||||
@@ -1,105 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
buildQueryContext,
|
||||
ensureIsArray,
|
||||
QueryFormOrderBy,
|
||||
SqlaFormData,
|
||||
QueryFormColumn,
|
||||
QueryObject,
|
||||
} from '@superset-ui/core';
|
||||
import {
|
||||
getSpatialColumns,
|
||||
addSpatialNullFilters,
|
||||
SpatialFormData,
|
||||
} from '../spatialUtils';
|
||||
import {
|
||||
addJsColumnsToColumns,
|
||||
processMetricsArray,
|
||||
addTooltipColumnsToQuery,
|
||||
} from '../buildQueryUtils';
|
||||
|
||||
// Form data for the deck.gl Scatter chart: the shared spatial form data
// (with its color_picker replaced by a looser RGBA shape) plus SqlaFormData.
export interface DeckScatterFormData
  extends Omit<SpatialFormData, 'color_picker'>,
    SqlaFormData {
  // Radius metric control; its `value` is queried and used for ordering.
  point_radius_fixed?: {
    value?: string;
  };
  // Radius multiplier (not read in this file).
  multiplier?: number;
  // Unit of the point radius (not read in this file).
  point_unit?: string;
  min_radius?: number;
  max_radius?: number;
  // Fixed RGBA point color.
  color_picker?: { r: number; g: number; b: number; a: number };
  // Column used to color points by category.
  category_name?: string;
}
|
||||
|
||||
export default function buildQuery(formData: DeckScatterFormData) {
|
||||
const {
|
||||
spatial,
|
||||
point_radius_fixed,
|
||||
category_name,
|
||||
js_columns,
|
||||
tooltip_contents,
|
||||
} = formData;
|
||||
|
||||
if (!spatial) {
|
||||
throw new Error('Spatial configuration is required for Scatter charts');
|
||||
}
|
||||
|
||||
return buildQueryContext(formData, {
|
||||
buildQuery: (baseQueryObject: QueryObject) => {
|
||||
const spatialColumns = getSpatialColumns(spatial);
|
||||
let columns = [...(baseQueryObject.columns || []), ...spatialColumns];
|
||||
|
||||
if (category_name) {
|
||||
columns.push(category_name);
|
||||
}
|
||||
|
||||
const columnStrings = columns.map(col =>
|
||||
typeof col === 'string' ? col : col.label || col.sqlExpression || '',
|
||||
);
|
||||
const withJsColumns = addJsColumnsToColumns(columnStrings, js_columns);
|
||||
|
||||
columns = withJsColumns as QueryFormColumn[];
|
||||
columns = addTooltipColumnsToQuery(columns, tooltip_contents);
|
||||
|
||||
const metrics = processMetricsArray([point_radius_fixed?.value]);
|
||||
const filters = addSpatialNullFilters(
|
||||
spatial,
|
||||
ensureIsArray(baseQueryObject.filters || []),
|
||||
);
|
||||
|
||||
const orderby = point_radius_fixed?.value
|
||||
? ([[point_radius_fixed.value, false]] as QueryFormOrderBy[])
|
||||
: (baseQueryObject.orderby as QueryFormOrderBy[]) || [];
|
||||
|
||||
return [
|
||||
{
|
||||
...baseQueryObject,
|
||||
columns,
|
||||
metrics,
|
||||
filters,
|
||||
orderby,
|
||||
is_timeseries: false,
|
||||
row_limit: baseQueryObject.row_limit,
|
||||
},
|
||||
];
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
exampleGallery: [{ url: example, urlDark: exampleDark }],
|
||||
useLegacyApi: true,
|
||||
tags: [
|
||||
t('deckGL'),
|
||||
t('Comparison'),
|
||||
@@ -50,7 +50,6 @@ const metadata = new ChartMetadata({
|
||||
export default class ScatterChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Scatter'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import { processSpatialData, DataRecord } from '../spatialUtils';
|
||||
import {
|
||||
createBaseTransformResult,
|
||||
getRecordsFromQuery,
|
||||
getMetricLabelFromFormData,
|
||||
parseMetricValue,
|
||||
addPropertiesToFeature,
|
||||
} from '../transformUtils';
|
||||
import { DeckScatterFormData } from './buildQuery';
|
||||
|
||||
// A single point ready for the deck.gl ScatterplotLayer.
interface ScatterPoint {
  // Coordinate pair produced by processSpatialData — presumably
  // [longitude, latitude]; confirm against spatialUtils.
  position: [number, number];
  // Radius taken from the radius metric, when configured.
  radius?: number;
  color?: [number, number, number, number];
  // Stringified category value used for color coding.
  cat_color?: string;
  // Mirror of `radius`, exposed as the point's metric value.
  metric?: number;
  // Values reserved for JavaScript hooks.
  extraProps?: Record<string, unknown>;
  // Any remaining feature fields are copied onto the point.
  [key: string]: unknown;
}
|
||||
|
||||
/**
 * Converts spatially-processed records into deck.gl ScatterplotLayer points.
 *
 * @param records - query result rows
 * @param spatial - spatial control configuration (lon/lat, lonlat or geohash)
 * @param radiusMetricLabel - label of the radius metric column, if any
 * @param categoryColumn - column whose value colors points by category
 * @param jsColumns - columns reserved for JavaScript hooks
 * @returns one point per spatial feature
 */
function processScatterData(
  records: DataRecord[],
  spatial: DeckScatterFormData['spatial'],
  radiusMetricLabel?: string,
  categoryColumn?: string,
  jsColumns?: string[],
): ScatterPoint[] {
  if (!spatial || !records.length) {
    return [];
  }

  const spatialFeatures = processSpatialData(records, spatial);
  // Keys handled explicitly below (position/radius/category/js hooks) or
  // consumed by the spatial processing; everything else is copied onto the
  // point by addPropertiesToFeature.
  const excludeKeys = new Set([
    'position',
    'weight',
    'extraProps',
    ...(spatial
      ? [
          spatial.lonCol,
          spatial.latCol,
          spatial.lonlatCol,
          spatial.geohashCol,
        ].filter(Boolean)
      : []),
    radiusMetricLabel,
    categoryColumn,
    ...(jsColumns || []),
  ]);

  return spatialFeatures.map(feature => {
    let scatterPoint: ScatterPoint = {
      position: feature.position,
      extraProps: feature.extraProps || {},
    };

    if (radiusMetricLabel && feature[radiusMetricLabel] != null) {
      const radiusValue = parseMetricValue(feature[radiusMetricLabel]);
      if (radiusValue !== undefined) {
        // The radius metric is also exposed as the point's `metric` value.
        scatterPoint.radius = radiusValue;
        scatterPoint.metric = radiusValue;
      }
    }

    if (categoryColumn && feature[categoryColumn] != null) {
      scatterPoint.cat_color = String(feature[categoryColumn]);
    }

    scatterPoint = addPropertiesToFeature(
      scatterPoint,
      feature as DataRecord,
      excludeKeys,
    );
    return scatterPoint;
  });
}
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
const { rawFormData: formData } = chartProps;
|
||||
const { spatial, point_radius_fixed, category_name, js_columns } =
|
||||
formData as DeckScatterFormData;
|
||||
|
||||
const radiusMetricLabel = getMetricLabelFromFormData(point_radius_fixed);
|
||||
const records = getRecordsFromQuery(chartProps.queriesData);
|
||||
const features = processScatterData(
|
||||
records,
|
||||
spatial,
|
||||
radiusMetricLabel,
|
||||
category_name,
|
||||
js_columns,
|
||||
);
|
||||
|
||||
return createBaseTransformResult(
|
||||
chartProps,
|
||||
features,
|
||||
radiusMetricLabel ? [radiusMetricLabel] : [],
|
||||
);
|
||||
}
|
||||
@@ -123,7 +123,7 @@ export const getLayer: GetLayerType<ScreenGridLayer> = function ({
|
||||
|
||||
const colorSchemeType = fd.color_scheme_type as ColorSchemeType & 'default';
|
||||
const colorRange = getColorRange({
|
||||
defaultBreakpointsColor: fd.default_breakpoint_color,
|
||||
defaultBreakpointsColor: fd.deafult_breakpoint_color,
|
||||
colorBreakpoints: fd.color_breakpoints,
|
||||
fixedColor: fd.color_picker,
|
||||
colorSchemeType,
|
||||
|
||||
@@ -21,8 +21,7 @@ import thumbnail from './images/thumbnail.png';
|
||||
import thumbnailDark from './images/thumbnail-dark.png';
|
||||
import example from './images/example.png';
|
||||
import exampleDark from './images/example-dark.png';
|
||||
import buildQuery from './buildQuery';
|
||||
import transformProps from './transformProps';
|
||||
import transformProps from '../../transformProps';
|
||||
import controlPanel from './controlPanel';
|
||||
|
||||
const metadata = new ChartMetadata({
|
||||
@@ -35,6 +34,7 @@ const metadata = new ChartMetadata({
|
||||
thumbnail,
|
||||
thumbnailDark,
|
||||
exampleGallery: [{ url: example, urlDark: exampleDark }],
|
||||
useLegacyApi: true,
|
||||
tags: [t('deckGL'), t('Comparison'), t('Intensity'), t('Density')],
|
||||
behaviors: [Behavior.InteractiveChart],
|
||||
});
|
||||
@@ -42,7 +42,6 @@ const metadata = new ChartMetadata({
|
||||
export default class ScreengridChartPlugin extends ChartPlugin {
|
||||
constructor() {
|
||||
super({
|
||||
buildQuery,
|
||||
loadChart: () => import('./Screengrid'),
|
||||
controlPanel,
|
||||
metadata,
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps } from '@superset-ui/core';
|
||||
import { transformSpatialProps } from '../spatialUtils';
|
||||
|
||||
export default function transformProps(chartProps: ChartProps) {
|
||||
return transformSpatialProps(chartProps);
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
getMetricLabel,
|
||||
QueryObjectFilterClause,
|
||||
QueryFormColumn,
|
||||
getColumnLabel,
|
||||
} from '@superset-ui/core';
|
||||
|
||||
export function addJsColumnsToColumns(
|
||||
columns: string[],
|
||||
jsColumns?: string[],
|
||||
existingColumns?: string[],
|
||||
): string[] {
|
||||
if (!jsColumns?.length) return columns;
|
||||
|
||||
const allExisting = new Set([...columns, ...(existingColumns || [])]);
|
||||
const result = [...columns];
|
||||
|
||||
jsColumns.forEach(col => {
|
||||
if (!allExisting.has(col)) {
|
||||
result.push(col);
|
||||
allExisting.add(col);
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function addNullFilters(
|
||||
filters: QueryObjectFilterClause[],
|
||||
columnNames: string[],
|
||||
): QueryObjectFilterClause[] {
|
||||
const existingFilters = new Set(
|
||||
filters
|
||||
.filter(filter => filter.op === 'IS NOT NULL')
|
||||
.map(filter => filter.col),
|
||||
);
|
||||
|
||||
const nullFilters: QueryObjectFilterClause[] = columnNames
|
||||
.filter(col => !existingFilters.has(col))
|
||||
.map(col => ({
|
||||
col,
|
||||
op: 'IS NOT NULL' as const,
|
||||
}));
|
||||
|
||||
return [...filters, ...nullFilters];
|
||||
}
|
||||
|
||||
export function addMetricNullFilter(
|
||||
filters: QueryObjectFilterClause[],
|
||||
metric?: string,
|
||||
): QueryObjectFilterClause[] {
|
||||
if (!metric) return filters;
|
||||
return addNullFilters(filters, [getMetricLabel(metric)]);
|
||||
}
|
||||
|
||||
export function ensureColumnsUnique(columns: string[]): string[] {
|
||||
return [...new Set(columns)];
|
||||
}
|
||||
|
||||
export function addColumnsIfNotExists(
|
||||
baseColumns: string[],
|
||||
newColumns: string[],
|
||||
): string[] {
|
||||
const existing = new Set(baseColumns);
|
||||
const result = [...baseColumns];
|
||||
|
||||
newColumns.forEach(col => {
|
||||
if (!existing.has(col)) {
|
||||
result.push(col);
|
||||
existing.add(col);
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function processMetricsArray(metrics: (string | undefined)[]): string[] {
|
||||
return metrics.filter((metric): metric is string => Boolean(metric));
|
||||
}
|
||||
|
||||
export function extractTooltipColumns(tooltipContents?: unknown[]): string[] {
|
||||
if (!Array.isArray(tooltipContents) || !tooltipContents.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const columns: string[] = [];
|
||||
|
||||
tooltipContents.forEach(item => {
|
||||
if (typeof item === 'string') {
|
||||
columns.push(item);
|
||||
} else if (item && typeof item === 'object') {
|
||||
const objItem = item as Record<string, unknown>;
|
||||
if (
|
||||
objItem.item_type === 'column' &&
|
||||
typeof objItem.column_name === 'string'
|
||||
) {
|
||||
columns.push(objItem.column_name);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return columns;
|
||||
}
|
||||
|
||||
export function addTooltipColumnsToQuery(
|
||||
baseColumns: QueryFormColumn[],
|
||||
tooltipContents?: unknown[],
|
||||
): QueryFormColumn[] {
|
||||
const tooltipColumns = extractTooltipColumns(tooltipContents);
|
||||
|
||||
const baseColumnLabels = baseColumns.map(getColumnLabel);
|
||||
const existingLabels = new Set(baseColumnLabels);
|
||||
|
||||
const result: QueryFormColumn[] = [...baseColumns];
|
||||
|
||||
tooltipColumns.forEach(col => {
|
||||
if (!existingLabels.has(col)) {
|
||||
result.push(col);
|
||||
existingLabels.add(col);
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -1,604 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
ChartProps,
|
||||
DatasourceType,
|
||||
QueryObjectFilterClause,
|
||||
SupersetTheme,
|
||||
} from '@superset-ui/core';
|
||||
import { decode } from 'ngeohash';
|
||||
|
||||
import {
|
||||
getSpatialColumns,
|
||||
addSpatialNullFilters,
|
||||
buildSpatialQuery,
|
||||
processSpatialData,
|
||||
transformSpatialProps,
|
||||
SpatialFormData,
|
||||
} from './spatialUtils';
|
||||
|
||||
jest.mock('ngeohash', () => ({
|
||||
decode: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('@superset-ui/core', () => ({
|
||||
...jest.requireActual('@superset-ui/core'),
|
||||
buildQueryContext: jest.fn(),
|
||||
getMetricLabel: jest.fn(),
|
||||
ensureIsArray: jest.fn(arr => arr || []),
|
||||
normalizeOrderBy: jest.fn(({ orderby }) => ({ orderby })),
|
||||
}));
|
||||
|
||||
// Mock DOM element for bootstrap data
|
||||
const mockBootstrapData = {
|
||||
common: {
|
||||
conf: {
|
||||
MAPBOX_API_KEY: 'test_api_key',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
Object.defineProperty(document, 'getElementById', {
|
||||
value: jest.fn().mockReturnValue({
|
||||
getAttribute: jest.fn().mockReturnValue(JSON.stringify(mockBootstrapData)),
|
||||
}),
|
||||
writable: true,
|
||||
});
|
||||
|
||||
const mockDecode = decode as jest.MockedFunction<typeof decode>;
|
||||
|
||||
describe('spatialUtils', () => {
|
||||
test('getSpatialColumns returns correct columns for latlong type', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
|
||||
const result = getSpatialColumns(spatial);
|
||||
expect(result).toEqual(['longitude', 'latitude']);
|
||||
});
|
||||
|
||||
test('getSpatialColumns returns correct columns for delimited type', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'delimited',
|
||||
lonlatCol: 'coordinates',
|
||||
};
|
||||
|
||||
const result = getSpatialColumns(spatial);
|
||||
expect(result).toEqual(['coordinates']);
|
||||
});
|
||||
|
||||
test('getSpatialColumns returns correct columns for geohash type', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'geohash',
|
||||
geohashCol: 'geohash_code',
|
||||
};
|
||||
|
||||
const result = getSpatialColumns(spatial);
|
||||
expect(result).toEqual(['geohash_code']);
|
||||
});
|
||||
|
||||
test('getSpatialColumns throws error when spatial is null', () => {
|
||||
expect(() => getSpatialColumns(null as any)).toThrow('Bad spatial key');
|
||||
});
|
||||
|
||||
test('getSpatialColumns throws error when spatial type is missing', () => {
|
||||
const spatial = {} as SpatialFormData['spatial'];
|
||||
expect(() => getSpatialColumns(spatial)).toThrow('Bad spatial key');
|
||||
});
|
||||
|
||||
test('getSpatialColumns throws error when latlong columns are missing', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
};
|
||||
expect(() => getSpatialColumns(spatial)).toThrow(
|
||||
'Longitude and latitude columns are required for latlong type',
|
||||
);
|
||||
});
|
||||
|
||||
test('getSpatialColumns throws error when delimited column is missing', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'delimited',
|
||||
};
|
||||
expect(() => getSpatialColumns(spatial)).toThrow(
|
||||
'Longitude/latitude column is required for delimited type',
|
||||
);
|
||||
});
|
||||
|
||||
test('getSpatialColumns throws error when geohash column is missing', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'geohash',
|
||||
};
|
||||
expect(() => getSpatialColumns(spatial)).toThrow(
|
||||
'Geohash column is required for geohash type',
|
||||
);
|
||||
});
|
||||
|
||||
test('getSpatialColumns throws error for unknown spatial type', () => {
|
||||
const spatial = {
|
||||
type: 'unknown',
|
||||
} as any;
|
||||
expect(() => getSpatialColumns(spatial)).toThrow(
|
||||
'Unknown spatial type: unknown',
|
||||
);
|
||||
});
|
||||
|
||||
test('addSpatialNullFilters adds null filters for spatial columns', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
const existingFilters: QueryObjectFilterClause[] = [
|
||||
{ col: 'other_col', op: '==', val: 'test' },
|
||||
];
|
||||
|
||||
const result = addSpatialNullFilters(spatial, existingFilters);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ col: 'other_col', op: '==', val: 'test' },
|
||||
{ col: 'longitude', op: 'IS NOT NULL', val: null },
|
||||
{ col: 'latitude', op: 'IS NOT NULL', val: null },
|
||||
]);
|
||||
});
|
||||
|
||||
test('addSpatialNullFilters returns original filters when spatial is null', () => {
|
||||
const existingFilters: QueryObjectFilterClause[] = [
|
||||
{ col: 'test_col', op: '==', val: 'test' },
|
||||
];
|
||||
|
||||
const result = addSpatialNullFilters(null as any, existingFilters);
|
||||
expect(result).toBe(existingFilters);
|
||||
});
|
||||
|
||||
test('addSpatialNullFilters works with empty filters array', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'delimited',
|
||||
lonlatCol: 'coordinates',
|
||||
};
|
||||
|
||||
const result = addSpatialNullFilters(spatial, []);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ col: 'coordinates', op: 'IS NOT NULL', val: null },
|
||||
]);
|
||||
});
|
||||
|
||||
test('buildSpatialQuery throws error when spatial is missing', () => {
|
||||
const formData = {} as SpatialFormData;
|
||||
|
||||
expect(() => buildSpatialQuery(formData)).toThrow(
|
||||
'Spatial configuration is required for this chart',
|
||||
);
|
||||
});
|
||||
|
||||
test('buildSpatialQuery calls buildQueryContext with correct parameters', () => {
|
||||
const mockBuildQueryContext =
|
||||
jest.requireMock('@superset-ui/core').buildQueryContext;
|
||||
const formData: SpatialFormData = {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
size: 'count',
|
||||
js_columns: ['extra_col'],
|
||||
} as SpatialFormData;
|
||||
|
||||
buildSpatialQuery(formData);
|
||||
|
||||
expect(mockBuildQueryContext).toHaveBeenCalledWith(formData, {
|
||||
buildQuery: expect.any(Function),
|
||||
});
|
||||
});
|
||||
|
||||
test('processSpatialData processes latlong data correctly', () => {
|
||||
const records = [
|
||||
{ longitude: -122.4, latitude: 37.8, count: 10, extra: 'test1' },
|
||||
{ longitude: -122.5, latitude: 37.9, count: 20, extra: 'test2' },
|
||||
];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
const metricLabel = 'count';
|
||||
const jsColumns = ['extra'];
|
||||
|
||||
const result = processSpatialData(records, spatial, metricLabel, jsColumns);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0]).toEqual({
|
||||
position: [-122.4, 37.8],
|
||||
weight: 10,
|
||||
extraProps: { extra: 'test1' },
|
||||
});
|
||||
expect(result[1]).toEqual({
|
||||
position: [-122.5, 37.9],
|
||||
weight: 20,
|
||||
extraProps: { extra: 'test2' },
|
||||
});
|
||||
});
|
||||
|
||||
test('processSpatialData processes delimited data correctly', () => {
|
||||
const records = [
|
||||
{ coordinates: '-122.4,37.8', count: 15 },
|
||||
{ coordinates: '-122.5,37.9', count: 25 },
|
||||
];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'delimited',
|
||||
lonlatCol: 'coordinates',
|
||||
};
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count');
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0]).toEqual({
|
||||
position: [-122.4, 37.8],
|
||||
weight: 15,
|
||||
extraProps: {},
|
||||
});
|
||||
});
|
||||
|
||||
test('processSpatialData processes geohash data correctly', () => {
|
||||
mockDecode.mockReturnValue({
|
||||
latitude: 37.8,
|
||||
longitude: -122.4,
|
||||
error: {
|
||||
latitude: 0,
|
||||
longitude: 0,
|
||||
},
|
||||
});
|
||||
|
||||
const records = [{ geohash: 'dr5regw3p', count: 30 }];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'geohash',
|
||||
geohashCol: 'geohash',
|
||||
};
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count');
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toEqual({
|
||||
position: [-122.4, 37.8],
|
||||
weight: 30,
|
||||
extraProps: {},
|
||||
});
|
||||
expect(mockDecode).toHaveBeenCalledWith('dr5regw3p');
|
||||
});
|
||||
|
||||
test('processSpatialData reverses coordinates when reverseCheckbox is true', () => {
|
||||
const records = [{ longitude: -122.4, latitude: 37.8, count: 10 }];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
reverseCheckbox: true,
|
||||
};
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count');
|
||||
|
||||
expect(result[0].position).toEqual([37.8, -122.4]);
|
||||
});
|
||||
|
||||
test('processSpatialData handles invalid coordinates', () => {
|
||||
const records = [
|
||||
{ longitude: 'invalid', latitude: 37.8, count: 10 },
|
||||
{ longitude: -122.4, latitude: NaN, count: 20 },
|
||||
// 'latlong' spatial type expects longitude/latitude fields
|
||||
// so records with 'coordinates' should be filtered out
|
||||
{ coordinates: 'invalid,coords', count: 30 },
|
||||
{ coordinates: '-122.4,invalid', count: 40 },
|
||||
];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count');
|
||||
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('processSpatialData handles missing metric values', () => {
|
||||
const records = [
|
||||
{ longitude: -122.4, latitude: 37.8, count: null },
|
||||
{ longitude: -122.5, latitude: 37.9 },
|
||||
{ longitude: -122.6, latitude: 38.0, count: 'invalid' },
|
||||
];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count');
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].weight).toBe(1);
|
||||
expect(result[1].weight).toBe(1);
|
||||
expect(result[2].weight).toBe(1);
|
||||
});
|
||||
|
||||
test('processSpatialData returns empty array for empty records', () => {
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
|
||||
const result = processSpatialData([], spatial);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
test('processSpatialData returns empty array when spatial is null', () => {
|
||||
const records = [{ longitude: -122.4, latitude: 37.8 }];
|
||||
|
||||
const result = processSpatialData(records, null as any);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
test('processSpatialData handles delimited coordinate edge cases', () => {
|
||||
const records = [
|
||||
{ coordinates: '', count: 10 },
|
||||
{ coordinates: null, count: 20 },
|
||||
{ coordinates: undefined, count: 30 },
|
||||
{ coordinates: '-122.4', count: 40 }, // only one coordinate
|
||||
{ coordinates: 'a,b', count: 50 }, // non-numeric
|
||||
{ coordinates: ' -122.4 , 37.8 ', count: 60 }, // with spaces
|
||||
];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'delimited',
|
||||
lonlatCol: 'coordinates',
|
||||
};
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count');
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toEqual({
|
||||
position: [-122.4, 37.8],
|
||||
weight: 60,
|
||||
extraProps: {},
|
||||
});
|
||||
});
|
||||
|
||||
test('processSpatialData copies additional properties correctly', () => {
|
||||
const records = [
|
||||
{
|
||||
longitude: -122.4,
|
||||
latitude: 37.8,
|
||||
count: 10,
|
||||
category: 'A',
|
||||
description: 'Test location',
|
||||
extra_col: 'extra_value',
|
||||
},
|
||||
];
|
||||
const spatial: SpatialFormData['spatial'] = {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
};
|
||||
const jsColumns = ['extra_col'];
|
||||
|
||||
const result = processSpatialData(records, spatial, 'count', jsColumns);
|
||||
|
||||
expect(result[0]).toEqual({
|
||||
position: [-122.4, 37.8],
|
||||
weight: 10,
|
||||
extraProps: { extra_col: 'extra_value' },
|
||||
category: 'A',
|
||||
description: 'Test location',
|
||||
});
|
||||
|
||||
expect(result[0]).not.toHaveProperty('longitude');
|
||||
expect(result[0]).not.toHaveProperty('latitude');
|
||||
expect(result[0]).not.toHaveProperty('count');
|
||||
expect(result[0]).not.toHaveProperty('extra_col');
|
||||
});
|
||||
|
||||
test('transformSpatialProps transforms chart props correctly', () => {
|
||||
const mockGetMetricLabel =
|
||||
jest.requireMock('@superset-ui/core').getMetricLabel;
|
||||
mockGetMetricLabel.mockReturnValue('count_label');
|
||||
|
||||
const chartProps: ChartProps = {
|
||||
datasource: {
|
||||
id: 1,
|
||||
type: DatasourceType.Table,
|
||||
columns: [],
|
||||
name: '',
|
||||
metrics: [],
|
||||
},
|
||||
height: 400,
|
||||
width: 600,
|
||||
hooks: {
|
||||
onAddFilter: jest.fn(),
|
||||
onContextMenu: jest.fn(),
|
||||
setControlValue: jest.fn(),
|
||||
setDataMask: jest.fn(),
|
||||
},
|
||||
queriesData: [
|
||||
{
|
||||
data: [
|
||||
{ longitude: -122.4, latitude: 37.8, count: 10 },
|
||||
{ longitude: -122.5, latitude: 37.9, count: 20 },
|
||||
],
|
||||
},
|
||||
],
|
||||
rawFormData: {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
size: 'count',
|
||||
js_columns: [],
|
||||
viewport: {
|
||||
zoom: 10,
|
||||
latitude: 37.8,
|
||||
longitude: -122.4,
|
||||
},
|
||||
} as unknown as SpatialFormData,
|
||||
filterState: {},
|
||||
emitCrossFilters: true,
|
||||
annotationData: {},
|
||||
rawDatasource: {},
|
||||
initialValues: {},
|
||||
formData: {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
size: 'count',
|
||||
js_columns: [],
|
||||
viewport: {
|
||||
zoom: 10,
|
||||
latitude: 37.8,
|
||||
longitude: -122.4,
|
||||
},
|
||||
},
|
||||
ownState: {},
|
||||
behaviors: [],
|
||||
theme: {} as unknown as SupersetTheme,
|
||||
};
|
||||
|
||||
const result = transformSpatialProps(chartProps);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
datasource: chartProps.datasource,
|
||||
emitCrossFilters: chartProps.emitCrossFilters,
|
||||
formData: chartProps.rawFormData,
|
||||
height: 400,
|
||||
width: 600,
|
||||
filterState: {},
|
||||
onAddFilter: chartProps.hooks.onAddFilter,
|
||||
onContextMenu: chartProps.hooks.onContextMenu,
|
||||
setControlValue: chartProps.hooks.setControlValue,
|
||||
setDataMask: chartProps.hooks.setDataMask,
|
||||
viewport: {
|
||||
zoom: 10,
|
||||
latitude: 37.8,
|
||||
longitude: -122.4,
|
||||
height: 400,
|
||||
width: 600,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.payload.data.features).toHaveLength(2);
|
||||
expect(result.payload.data.mapboxApiKey).toBe('test_api_key');
|
||||
expect(result.payload.data.metricLabels).toEqual(['count_label']);
|
||||
});
|
||||
|
||||
test('transformSpatialProps handles missing hooks gracefully', () => {
|
||||
const chartProps: ChartProps = {
|
||||
datasource: {
|
||||
id: 1,
|
||||
type: DatasourceType.Table,
|
||||
columns: [],
|
||||
name: '',
|
||||
metrics: [],
|
||||
},
|
||||
height: 400,
|
||||
width: 600,
|
||||
hooks: {},
|
||||
queriesData: [{ data: [] }],
|
||||
rawFormData: {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
} as SpatialFormData,
|
||||
filterState: {},
|
||||
emitCrossFilters: true,
|
||||
annotationData: {},
|
||||
rawDatasource: {},
|
||||
initialValues: {},
|
||||
formData: {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
},
|
||||
ownState: {},
|
||||
behaviors: [],
|
||||
theme: {} as unknown as SupersetTheme,
|
||||
};
|
||||
|
||||
const result = transformSpatialProps(chartProps);
|
||||
|
||||
expect(typeof result.onAddFilter).toBe('function');
|
||||
expect(typeof result.onContextMenu).toBe('function');
|
||||
expect(typeof result.setControlValue).toBe('function');
|
||||
expect(typeof result.setDataMask).toBe('function');
|
||||
expect(typeof result.setTooltip).toBe('function');
|
||||
});
|
||||
|
||||
test('transformSpatialProps handles missing metric', () => {
|
||||
const mockGetMetricLabel =
|
||||
jest.requireMock('@superset-ui/core').getMetricLabel;
|
||||
mockGetMetricLabel.mockReturnValue(undefined);
|
||||
|
||||
const chartProps: ChartProps = {
|
||||
datasource: {
|
||||
id: 1,
|
||||
type: DatasourceType.Table,
|
||||
columns: [],
|
||||
name: '',
|
||||
metrics: [],
|
||||
},
|
||||
height: 400,
|
||||
width: 600,
|
||||
hooks: {},
|
||||
queriesData: [{ data: [] }],
|
||||
rawFormData: {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
} as SpatialFormData,
|
||||
filterState: {},
|
||||
emitCrossFilters: true,
|
||||
annotationData: {},
|
||||
rawDatasource: {},
|
||||
initialValues: {},
|
||||
formData: {
|
||||
spatial: {
|
||||
type: 'latlong',
|
||||
lonCol: 'longitude',
|
||||
latCol: 'latitude',
|
||||
},
|
||||
},
|
||||
ownState: {},
|
||||
behaviors: [],
|
||||
theme: {} as unknown as SupersetTheme,
|
||||
};
|
||||
|
||||
const result = transformSpatialProps(chartProps);
|
||||
|
||||
expect(result.payload.data.metricLabels).toEqual([]);
|
||||
});
|
||||
});
|
||||
@@ -1,400 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
buildQueryContext,
|
||||
getMetricLabel,
|
||||
QueryFormData,
|
||||
QueryObjectFilterClause,
|
||||
ensureIsArray,
|
||||
ChartProps,
|
||||
normalizeOrderBy,
|
||||
} from '@superset-ui/core';
|
||||
import { decode } from 'ngeohash';
|
||||
import { addTooltipColumnsToQuery } from './buildQueryUtils';
|
||||
|
||||
/**
 * Location encoding chosen in the chart controls. Exactly one of the
 * column groups below is relevant, depending on `type`.
 */
export interface SpatialConfiguration {
  /** How location is encoded in the source data. */
  type: 'latlong' | 'delimited' | 'geohash';
  /** Longitude column — required for the 'latlong' type. */
  lonCol?: string;
  /** Latitude column — required for the 'latlong' type. */
  latCol?: string;
  /** Single "lon,lat" text column — required for the 'delimited' type. */
  lonlatCol?: string;
  /** Geohash-encoded column — required for the 'geohash' type. */
  geohashCol?: string;
  /** When true, decoded coordinates are swapped (lat/lon order reversed). */
  reverseCheckbox?: boolean;
}
|
||||
|
||||
/** A single result row: arbitrary column names mapped to scalar values. */
export interface DataRecord {
  [key: string]: string | number | null | undefined;
}
|
||||
|
||||
/**
 * Shape of the JSON blob embedded in the page's `#app[data-bootstrap]`
 * attribute; only the Mapbox key is read here.
 */
export interface BootstrapData {
  common?: {
    conf?: {
      MAPBOX_API_KEY?: string;
    };
  };
}
|
||||
|
||||
/**
 * Form data shared by spatial (deck.gl-style) charts: the base query form
 * data plus location encoding, sizing, color, JS-hook, and tooltip controls.
 */
export interface SpatialFormData extends QueryFormData {
  /** How location is encoded; required for spatial charts. */
  spatial: SpatialConfiguration;
  /** Metric used to weight each point (falls back to weight 1 when unset). */
  size?: string;
  grid_size?: number;
  /** Optional JavaScript snippet applied to the fetched data. */
  js_data_mutator?: string;
  js_agg_function?: string;
  /** Extra columns to fetch and expose to JS hooks via `extraProps`. */
  js_columns?: string[];
  color_scheme?: string;
  color_scheme_type?: string;
  color_breakpoints?: number[];
  default_breakpoint_color?: string;
  /** Raw tooltip control entries (strings or column descriptors). */
  tooltip_contents?: unknown[];
  tooltip_template?: string;
  color_picker?: string;
}
|
||||
|
||||
/**
 * A processed data point ready for rendering: `[longitude, latitude]`
 * position, a metric-derived weight, js-column extras, and any remaining
 * record fields copied through as top-level properties.
 */
export interface SpatialPoint {
  /** [lon, lat] (swapped when `reverseCheckbox` is set). */
  position: [number, number];
  /** Metric value for the point; defaults to 1 when no metric applies. */
  weight: number;
  /** Values of `js_columns` copied from the source record. */
  extraProps?: Record<string, unknown>;
  [key: string]: unknown;
}
|
||||
|
||||
export function getSpatialColumns(spatial: SpatialConfiguration): string[] {
|
||||
if (!spatial || !spatial.type) {
|
||||
throw new Error('Bad spatial key');
|
||||
}
|
||||
|
||||
switch (spatial.type) {
|
||||
case 'latlong':
|
||||
if (!spatial.lonCol || !spatial.latCol) {
|
||||
throw new Error(
|
||||
'Longitude and latitude columns are required for latlong type',
|
||||
);
|
||||
}
|
||||
return [spatial.lonCol, spatial.latCol];
|
||||
case 'delimited':
|
||||
if (!spatial.lonlatCol) {
|
||||
throw new Error(
|
||||
'Longitude/latitude column is required for delimited type',
|
||||
);
|
||||
}
|
||||
return [spatial.lonlatCol];
|
||||
case 'geohash':
|
||||
if (!spatial.geohashCol) {
|
||||
throw new Error('Geohash column is required for geohash type');
|
||||
}
|
||||
return [spatial.geohashCol];
|
||||
default:
|
||||
throw new Error(`Unknown spatial type: ${spatial.type}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function addSpatialNullFilters(
|
||||
spatial: SpatialConfiguration,
|
||||
filters: QueryObjectFilterClause[],
|
||||
): QueryObjectFilterClause[] {
|
||||
if (!spatial) return filters;
|
||||
|
||||
const spatialColumns = getSpatialColumns(spatial);
|
||||
const nullFilters: QueryObjectFilterClause[] = spatialColumns.map(column => ({
|
||||
col: column,
|
||||
op: 'IS NOT NULL',
|
||||
val: null,
|
||||
}));
|
||||
|
||||
return [...filters, ...nullFilters];
|
||||
}
|
||||
|
||||
export function buildSpatialQuery(formData: SpatialFormData) {
|
||||
const { spatial, size: metric, js_columns, tooltip_contents } = formData;
|
||||
|
||||
if (!spatial) {
|
||||
throw new Error(`Spatial configuration is required for this chart`);
|
||||
}
|
||||
return buildQueryContext(formData, {
|
||||
buildQuery: baseQueryObject => {
|
||||
const spatialColumns = getSpatialColumns(spatial);
|
||||
let columns = [...(baseQueryObject.columns || []), ...spatialColumns];
|
||||
const metrics = metric ? [metric] : [];
|
||||
|
||||
if (js_columns?.length) {
|
||||
js_columns.forEach(col => {
|
||||
if (!columns.includes(col)) {
|
||||
columns.push(col);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
columns = addTooltipColumnsToQuery(columns, tooltip_contents);
|
||||
|
||||
const filters = addSpatialNullFilters(
|
||||
spatial,
|
||||
ensureIsArray(baseQueryObject.filters || []),
|
||||
);
|
||||
|
||||
const orderby = metric
|
||||
? normalizeOrderBy({ orderby: [[metric, false]] }).orderby
|
||||
: baseQueryObject.orderby;
|
||||
|
||||
return [
|
||||
{
|
||||
...baseQueryObject,
|
||||
columns,
|
||||
metrics,
|
||||
filters,
|
||||
orderby,
|
||||
is_timeseries: false,
|
||||
row_limit: baseQueryObject.row_limit,
|
||||
},
|
||||
];
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function parseCoordinates(latlong: string): [number, number] | null {
|
||||
if (!latlong || typeof latlong !== 'string') {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const coords = latlong.split(',').map(coord => parseFloat(coord.trim()));
|
||||
if (
|
||||
coords.length === 2 &&
|
||||
!Number.isNaN(coords[0]) &&
|
||||
!Number.isNaN(coords[1])
|
||||
) {
|
||||
return [coords[0], coords[1]];
|
||||
}
|
||||
return null;
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function reverseGeohashDecode(geohashCode: string): [number, number] | null {
|
||||
if (!geohashCode || typeof geohashCode !== 'string') {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const { latitude: lat, longitude: lng } = decode(geohashCode);
|
||||
if (
|
||||
Number.isNaN(lat) ||
|
||||
Number.isNaN(lng) ||
|
||||
lat < -90 ||
|
||||
lat > 90 ||
|
||||
lng < -180 ||
|
||||
lng > 180
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
return [lng, lat];
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function addJsColumnsToExtraProps<
|
||||
T extends { extraProps?: Record<string, unknown> },
|
||||
>(feature: T, record: DataRecord, jsColumns?: string[]): T {
|
||||
if (!jsColumns?.length) {
|
||||
return feature;
|
||||
}
|
||||
|
||||
const extraProps: Record<string, unknown> = { ...(feature.extraProps ?? {}) };
|
||||
|
||||
jsColumns.forEach(col => {
|
||||
if (record[col] !== undefined) {
|
||||
extraProps[col] = record[col];
|
||||
}
|
||||
});
|
||||
|
||||
return { ...feature, extraProps };
|
||||
}
|
||||
|
||||
export function processSpatialData(
|
||||
records: DataRecord[],
|
||||
spatial: SpatialConfiguration,
|
||||
metricLabel?: string,
|
||||
jsColumns?: string[],
|
||||
): SpatialPoint[] {
|
||||
if (!spatial || !records.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const features: SpatialPoint[] = [];
|
||||
const spatialColumns = getSpatialColumns(spatial);
|
||||
const jsColumnsSet = jsColumns ? new Set(jsColumns) : null;
|
||||
const spatialColumnsSet = new Set(spatialColumns);
|
||||
|
||||
for (const record of records) {
|
||||
let position: [number, number] | null = null;
|
||||
|
||||
switch (spatial.type) {
|
||||
case 'latlong':
|
||||
if (spatial.lonCol && spatial.latCol) {
|
||||
const lon = parseFloat(String(record[spatial.lonCol] ?? ''));
|
||||
const lat = parseFloat(String(record[spatial.latCol] ?? ''));
|
||||
if (!Number.isNaN(lon) && !Number.isNaN(lat)) {
|
||||
position = [lon, lat];
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 'delimited':
|
||||
if (spatial.lonlatCol) {
|
||||
position = parseCoordinates(String(record[spatial.lonlatCol] ?? ''));
|
||||
}
|
||||
break;
|
||||
case 'geohash':
|
||||
if (spatial.geohashCol) {
|
||||
const geohashValue = record[spatial.geohashCol];
|
||||
if (geohashValue) {
|
||||
position = reverseGeohashDecode(String(geohashValue));
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!position) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (spatial.reverseCheckbox) {
|
||||
position = [position[1], position[0]];
|
||||
}
|
||||
|
||||
let weight = 1;
|
||||
if (metricLabel && record[metricLabel] != null) {
|
||||
const metricValue = parseFloat(String(record[metricLabel]));
|
||||
if (!Number.isNaN(metricValue)) {
|
||||
weight = metricValue;
|
||||
}
|
||||
}
|
||||
|
||||
let spatialPoint: SpatialPoint = {
|
||||
position,
|
||||
weight,
|
||||
extraProps: {},
|
||||
};
|
||||
|
||||
spatialPoint = addJsColumnsToExtraProps(spatialPoint, record, jsColumns);
|
||||
Object.keys(record).forEach(key => {
|
||||
if (spatialColumnsSet.has(key)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (key === metricLabel) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (jsColumnsSet?.has(key)) {
|
||||
return;
|
||||
}
|
||||
|
||||
spatialPoint[key] = record[key];
|
||||
});
|
||||
|
||||
features.push(spatialPoint);
|
||||
}
|
||||
|
||||
return features;
|
||||
}
|
||||
|
||||
const NOOP = () => {};
|
||||
|
||||
export function getMapboxApiKey(mapboxApiKey?: string): string {
|
||||
if (mapboxApiKey) {
|
||||
return mapboxApiKey;
|
||||
}
|
||||
|
||||
if (typeof document !== 'undefined') {
|
||||
try {
|
||||
const appContainer = document.getElementById('app');
|
||||
const dataBootstrap = appContainer?.getAttribute('data-bootstrap');
|
||||
if (dataBootstrap) {
|
||||
const bootstrapData: BootstrapData = JSON.parse(dataBootstrap);
|
||||
return bootstrapData?.common?.conf?.MAPBOX_API_KEY || '';
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to read MAPBOX_API_KEY from bootstrap data: ${error}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
export function transformSpatialProps(chartProps: ChartProps) {
|
||||
const {
|
||||
datasource,
|
||||
height,
|
||||
hooks,
|
||||
queriesData,
|
||||
rawFormData: formData,
|
||||
width,
|
||||
filterState,
|
||||
emitCrossFilters,
|
||||
} = chartProps;
|
||||
|
||||
const {
|
||||
onAddFilter = NOOP,
|
||||
onContextMenu = NOOP,
|
||||
setControlValue = NOOP,
|
||||
setDataMask = NOOP,
|
||||
} = hooks;
|
||||
|
||||
const { spatial, size: metric, js_columns } = formData as SpatialFormData;
|
||||
const metricLabel = metric ? getMetricLabel(metric) : undefined;
|
||||
|
||||
const queryData = queriesData[0];
|
||||
const records = queryData?.data || [];
|
||||
const features = processSpatialData(
|
||||
records,
|
||||
spatial,
|
||||
metricLabel,
|
||||
js_columns,
|
||||
);
|
||||
|
||||
return {
|
||||
datasource,
|
||||
emitCrossFilters,
|
||||
formData,
|
||||
height,
|
||||
onAddFilter,
|
||||
onContextMenu,
|
||||
payload: {
|
||||
...queryData,
|
||||
data: {
|
||||
features,
|
||||
mapboxApiKey: getMapboxApiKey(),
|
||||
metricLabels: metricLabel ? [metricLabel] : [],
|
||||
},
|
||||
},
|
||||
setControlValue,
|
||||
filterState,
|
||||
viewport: {
|
||||
...formData.viewport,
|
||||
height,
|
||||
width,
|
||||
},
|
||||
width,
|
||||
setDataMask,
|
||||
setTooltip: () => {},
|
||||
};
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { ChartProps, getMetricLabel } from '@superset-ui/core';
|
||||
import { getMapboxApiKey, DataRecord } from './spatialUtils';
|
||||
|
||||
const NOOP = () => {};
|
||||
|
||||
/**
 * The subset of chart hook callbacks shared by spatial chart transforms;
 * each is guaranteed non-undefined after `extractHooks` (no-op fallback).
 */
export interface BaseHooks {
  onAddFilter: ChartProps['hooks']['onAddFilter'];
  onContextMenu: ChartProps['hooks']['onContextMenu'];
  setControlValue: ChartProps['hooks']['setControlValue'];
  setDataMask: ChartProps['hooks']['setDataMask'];
}
|
||||
|
||||
export interface BaseTransformPropsResult {
|
||||
datasource: ChartProps['datasource'];
|
||||
emitCrossFilters: ChartProps['emitCrossFilters'];
|
||||
formData: ChartProps['rawFormData'];
|
||||
height: ChartProps['height'];
|
||||
onAddFilter: ChartProps['hooks']['onAddFilter'];
|
||||
onContextMenu: ChartProps['hooks']['onContextMenu'];
|
||||
payload: {
|
||||
data: {
|
||||
features: unknown[];
|
||||
mapboxApiKey: string;
|
||||
metricLabels?: string[];
|
||||
};
|
||||
[key: string]: unknown;
|
||||
};
|
||||
setControlValue: ChartProps['hooks']['setControlValue'];
|
||||
filterState: ChartProps['filterState'];
|
||||
viewport: {
|
||||
height: number;
|
||||
width: number;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
width: ChartProps['width'];
|
||||
setDataMask: ChartProps['hooks']['setDataMask'];
|
||||
setTooltip: () => void;
|
||||
}
|
||||
|
||||
export function extractHooks(hooks: ChartProps['hooks']): BaseHooks {
|
||||
return {
|
||||
onAddFilter: hooks?.onAddFilter || NOOP,
|
||||
onContextMenu: hooks?.onContextMenu || NOOP,
|
||||
setControlValue: hooks?.setControlValue || NOOP,
|
||||
setDataMask: hooks?.setDataMask || NOOP,
|
||||
};
|
||||
}
|
||||
|
||||
export function createBaseTransformResult(
|
||||
chartProps: ChartProps,
|
||||
features: unknown[],
|
||||
metricLabels?: string[],
|
||||
): BaseTransformPropsResult {
|
||||
const {
|
||||
datasource,
|
||||
height,
|
||||
queriesData,
|
||||
rawFormData: formData,
|
||||
width,
|
||||
filterState,
|
||||
emitCrossFilters,
|
||||
} = chartProps;
|
||||
|
||||
const hooks = extractHooks(chartProps.hooks);
|
||||
const queryData = queriesData[0];
|
||||
|
||||
return {
|
||||
datasource,
|
||||
emitCrossFilters,
|
||||
formData,
|
||||
height,
|
||||
...hooks,
|
||||
payload: {
|
||||
...queryData,
|
||||
data: {
|
||||
features,
|
||||
mapboxApiKey: getMapboxApiKey(),
|
||||
metricLabels: metricLabels || [],
|
||||
},
|
||||
},
|
||||
filterState,
|
||||
viewport: {
|
||||
...formData.viewport,
|
||||
height,
|
||||
width,
|
||||
},
|
||||
width,
|
||||
setTooltip: NOOP,
|
||||
};
|
||||
}
|
||||
|
||||
export function getRecordsFromQuery(
|
||||
queriesData: ChartProps['queriesData'],
|
||||
): DataRecord[] {
|
||||
return queriesData[0]?.data || [];
|
||||
}
|
||||
|
||||
export function parseMetricValue(value: unknown): number | undefined {
|
||||
if (value == null) return undefined;
|
||||
const parsed = parseFloat(String(value));
|
||||
return Number.isNaN(parsed) ? undefined : parsed;
|
||||
}
|
||||
|
||||
export function addPropertiesToFeature<T extends Record<string, unknown>>(
|
||||
feature: T,
|
||||
record: DataRecord,
|
||||
excludeKeys: Set<string>,
|
||||
): T {
|
||||
const result = { ...feature } as Record<string, unknown>;
|
||||
Object.keys(record).forEach(key => {
|
||||
if (!excludeKeys.has(key)) {
|
||||
result[key] = record[key];
|
||||
}
|
||||
});
|
||||
return result as T;
|
||||
}
|
||||
|
||||
export function getMetricLabelFromFormData(
|
||||
metric: string | { value?: string } | undefined,
|
||||
): string | undefined {
|
||||
if (!metric) return undefined;
|
||||
if (typeof metric === 'string') return getMetricLabel(metric);
|
||||
return metric.value ? getMetricLabel(metric.value) : undefined;
|
||||
}
|
||||
@@ -615,7 +615,7 @@ export const deckGLColorBreakpointsSelect: CustomControlItem = {
|
||||
};
|
||||
|
||||
export const breakpointsDefaultColor: CustomControlItem = {
|
||||
name: 'default_breakpoint_color',
|
||||
name: 'deafult_breakpoint_color',
|
||||
config: {
|
||||
label: t('Default color'),
|
||||
type: 'ColorPickerControl',
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { getColorBreakpointsBuckets, getBreakPoints } from './utils';
|
||||
import { getColorBreakpointsBuckets } from './utils';
|
||||
import { ColorBreakpointType } from './types';
|
||||
|
||||
describe('getColorBreakpointsBuckets', () => {
|
||||
@@ -44,447 +44,3 @@ describe('getColorBreakpointsBuckets', () => {
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getBreakPoints', () => {
|
||||
const accessor = (d: any) => d.value;
|
||||
|
||||
describe('automatic breakpoint generation', () => {
|
||||
it('generates correct number of breakpoints for given buckets', () => {
|
||||
const features = [{ value: 0 }, { value: 50 }, { value: 100 }];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toHaveLength(6); // n buckets = n+1 breakpoints
|
||||
expect(breakPoints.every(bp => typeof bp === 'string')).toBe(true);
|
||||
});
|
||||
|
||||
it('ensures data range is fully covered', () => {
|
||||
// Test various data ranges to ensure min/max are always included
|
||||
const testCases = [
|
||||
{ data: [0, 100], buckets: 5 },
|
||||
{ data: [0.1, 99.9], buckets: 4 },
|
||||
{ data: [-50, 50], buckets: 10 },
|
||||
{ data: [3.2, 38.7], buckets: 5 }, // Original max bug case
|
||||
{ data: [3.14, 100], buckets: 5 }, // Min rounding bug case (3.14 -> 3)
|
||||
{ data: [2.345, 10], buckets: 4 }, // Min rounding bug case (2.345 -> 2.35)
|
||||
{ data: [0.0001, 0.0009], buckets: 3 }, // Very small numbers
|
||||
{ data: [1000000, 9000000], buckets: 8 }, // Large numbers
|
||||
];
|
||||
|
||||
testCases.forEach(({ data, buckets }) => {
|
||||
const [min, max] = data;
|
||||
const features = [{ value: min }, { value: max }];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: String(buckets) },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
// Critical: min and max must be within the breakpoint range
|
||||
expect(firstBp).toBeLessThanOrEqual(min);
|
||||
expect(lastBp).toBeGreaterThanOrEqual(max);
|
||||
expect(breakPoints).toHaveLength(buckets + 1);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles uniform distribution correctly', () => {
|
||||
const features = [
|
||||
{ value: 0 },
|
||||
{ value: 25 },
|
||||
{ value: 50 },
|
||||
{ value: 75 },
|
||||
{ value: 100 },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '4' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
// Check that breakpoints are evenly spaced
|
||||
const numericBreakPoints = breakPoints.map(parseFloat);
|
||||
const deltas = [];
|
||||
for (let i = 1; i < numericBreakPoints.length; i += 1) {
|
||||
deltas.push(numericBreakPoints[i] - numericBreakPoints[i - 1]);
|
||||
}
|
||||
|
||||
// All deltas should be approximately equal
|
||||
const avgDelta = deltas.reduce((a, b) => a + b, 0) / deltas.length;
|
||||
deltas.forEach(delta => {
|
||||
expect(delta).toBeCloseTo(avgDelta, 1);
|
||||
});
|
||||
});
|
||||
|
||||
it('handles single value datasets', () => {
|
||||
const features = [{ value: 42 }, { value: 42 }, { value: 42 }];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
expect(firstBp).toBeLessThanOrEqual(42);
|
||||
expect(lastBp).toBeGreaterThanOrEqual(42);
|
||||
});
|
||||
|
||||
it('preserves appropriate precision for different scales', () => {
|
||||
const testCases = [
|
||||
{ data: [0, 1], expectedMaxPrecision: 1 }, // 0.0, 0.2, 0.4...
|
||||
{ data: [0, 0.1], expectedMaxPrecision: 2 }, // 0.00, 0.02...
|
||||
{ data: [0, 0.01], expectedMaxPrecision: 3 }, // 0.000, 0.002...
|
||||
{ data: [0, 1000], expectedMaxPrecision: 0 }, // 0, 200, 400...
|
||||
];
|
||||
|
||||
testCases.forEach(({ data, expectedMaxPrecision }) => {
|
||||
const [min, max] = data;
|
||||
const features = [{ value: min }, { value: max }];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
breakPoints.forEach(bp => {
|
||||
const decimalPlaces = (bp.split('.')[1] || '').length;
|
||||
expect(decimalPlaces).toBeLessThanOrEqual(expectedMaxPrecision);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('handles negative values correctly', () => {
|
||||
const features = [
|
||||
{ value: -100 },
|
||||
{ value: -50 },
|
||||
{ value: 0 },
|
||||
{ value: 50 },
|
||||
{ value: 100 },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const numericBreakPoints = breakPoints.map(parseFloat);
|
||||
expect(numericBreakPoints[0]).toBeLessThanOrEqual(-100);
|
||||
expect(
|
||||
numericBreakPoints[numericBreakPoints.length - 1],
|
||||
).toBeGreaterThanOrEqual(100);
|
||||
|
||||
// Verify ascending order
|
||||
for (let i = 1; i < numericBreakPoints.length; i += 1) {
|
||||
expect(numericBreakPoints[i]).toBeGreaterThan(
|
||||
numericBreakPoints[i - 1],
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it('handles mixed integer and decimal values', () => {
|
||||
const features = [
|
||||
{ value: 1 },
|
||||
{ value: 2.5 },
|
||||
{ value: 3.7 },
|
||||
{ value: 5 },
|
||||
{ value: 8.2 },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '4' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
expect(firstBp).toBeLessThanOrEqual(1);
|
||||
expect(lastBp).toBeGreaterThanOrEqual(8.2);
|
||||
});
|
||||
|
||||
it('uses floor/ceil for boundary breakpoints to ensure inclusion', () => {
|
||||
// Test that Math.floor and Math.ceil are used for boundaries
|
||||
// This ensures all data points fall within the breakpoint range
|
||||
|
||||
const testCases = [
|
||||
{ minValue: 3.14, maxValue: 100, buckets: 5 },
|
||||
{ minValue: 2.345, maxValue: 10.678, buckets: 4 },
|
||||
{ minValue: 1.67, maxValue: 5.33, buckets: 3 },
|
||||
{ minValue: 0.123, maxValue: 0.987, buckets: 5 },
|
||||
];
|
||||
|
||||
testCases.forEach(({ minValue, maxValue, buckets }) => {
|
||||
const features = [{ value: minValue }, { value: maxValue }];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: String(buckets) },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
// First breakpoint should be floored (always <= minValue)
|
||||
expect(firstBp).toBeLessThanOrEqual(minValue);
|
||||
|
||||
// Last breakpoint should be ceiled (always >= maxValue)
|
||||
expect(lastBp).toBeGreaterThanOrEqual(maxValue);
|
||||
|
||||
// All values should be within range
|
||||
expect(minValue).toBeGreaterThanOrEqual(firstBp);
|
||||
expect(maxValue).toBeLessThanOrEqual(lastBp);
|
||||
});
|
||||
});
|
||||
|
||||
it('prevents minimum value exclusion edge case', () => {
|
||||
// Specific edge case test for minimum value exclusion
|
||||
// Tests the exact scenario where rounding would exclude the min value
|
||||
|
||||
const features = [
|
||||
{ value: 3.14 }, // This would round to 3 at precision 0
|
||||
{ value: 50 },
|
||||
{ value: 100 },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
|
||||
// The first breakpoint must be <= 3.14 (floor behavior)
|
||||
expect(firstBp).toBeLessThanOrEqual(3.14);
|
||||
|
||||
// Verify that 3.14 is not excluded
|
||||
expect(3.14).toBeGreaterThanOrEqual(firstBp);
|
||||
|
||||
// The first breakpoint should be a clean floor value
|
||||
expect(breakPoints[0]).toMatch(/^3(\.0*)?$/);
|
||||
});
|
||||
|
||||
it('prevents maximum value exclusion edge case', () => {
|
||||
// Specific edge case test for maximum value exclusion
|
||||
// Tests the exact scenario where rounding would exclude the max value
|
||||
|
||||
const features = [
|
||||
{ value: 0 },
|
||||
{ value: 20 },
|
||||
{ value: 38.7 }, // Original bug case
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
// The last breakpoint must be >= 38.7 (ceil behavior)
|
||||
expect(lastBp).toBeGreaterThanOrEqual(38.7);
|
||||
|
||||
// Verify that 38.7 is not excluded
|
||||
expect(38.7).toBeLessThanOrEqual(lastBp);
|
||||
|
||||
// The last breakpoint should be a clean ceil value
|
||||
expect(breakPoints[breakPoints.length - 1]).toMatch(/^39(\.0*)?$/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('custom breakpoints', () => {
|
||||
it('uses custom breakpoints when provided', () => {
|
||||
const features = [{ value: 5 }, { value: 15 }, { value: 25 }];
|
||||
const customBreakPoints = ['0', '10', '20', '30', '40'];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: customBreakPoints, num_buckets: '' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual(['0', '10', '20', '30', '40']);
|
||||
});
|
||||
|
||||
it('sorts custom breakpoints in ascending order', () => {
|
||||
const features = [{ value: 5 }];
|
||||
const customBreakPoints = ['30', '10', '0', '20'];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: customBreakPoints, num_buckets: '' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual(['0', '10', '20', '30']);
|
||||
});
|
||||
|
||||
it('ignores num_buckets when custom breakpoints are provided', () => {
|
||||
const features = [{ value: 5 }];
|
||||
const customBreakPoints = ['0', '50', '100'];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: customBreakPoints, num_buckets: '10' }, // num_buckets should be ignored
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual(['0', '50', '100']);
|
||||
expect(breakPoints).toHaveLength(3); // not 11
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases and error handling', () => {
|
||||
it('returns empty array when features are undefined', () => {
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
undefined as any,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns empty array when features is null', () => {
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
null as any,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns empty array when all values are undefined', () => {
|
||||
const features = [
|
||||
{ value: undefined },
|
||||
{ value: undefined },
|
||||
{ value: undefined },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles empty features array', () => {
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
[],
|
||||
accessor,
|
||||
);
|
||||
|
||||
expect(breakPoints).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles string values that can be parsed as numbers', () => {
|
||||
const features = [
|
||||
{ value: '10.5' },
|
||||
{ value: '20.3' },
|
||||
{ value: '30.7' },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '3' },
|
||||
features,
|
||||
(d: any) =>
|
||||
typeof d.value === 'string' ? parseFloat(d.value) : d.value,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
expect(firstBp).toBeLessThanOrEqual(10.5);
|
||||
expect(lastBp).toBeGreaterThanOrEqual(30.7);
|
||||
});
|
||||
|
||||
it('uses default number of buckets when not specified', () => {
|
||||
const features = [{ value: 0 }, { value: 100 }];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
// Should use DEFAULT_NUM_BUCKETS (10)
|
||||
expect(breakPoints).toHaveLength(11); // 10 buckets = 11 breakpoints
|
||||
});
|
||||
|
||||
it('handles Infinity and -Infinity values', () => {
|
||||
const features = [
|
||||
{ value: -Infinity },
|
||||
{ value: 0 },
|
||||
{ value: Infinity },
|
||||
];
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
// Should return empty array when Infinity values are present
|
||||
expect(breakPoints).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('breakpoint boundaries validation', () => {
|
||||
it('ensures no data points fall outside breakpoint range', () => {
|
||||
// Generate random test data
|
||||
const generateRandomData = (count: number, min: number, max: number) => {
|
||||
const data = [];
|
||||
for (let i = 0; i < count; i += 1) {
|
||||
data.push({ value: Math.random() * (max - min) + min });
|
||||
}
|
||||
return data;
|
||||
};
|
||||
|
||||
// Test with various random datasets
|
||||
for (let i = 0; i < 10; i += 1) {
|
||||
const features = generateRandomData(20, -1000, 1000);
|
||||
const minValue = Math.min(...features.map(f => f.value));
|
||||
const maxValue = Math.max(...features.map(f => f.value));
|
||||
|
||||
const breakPoints = getBreakPoints(
|
||||
{ break_points: [], num_buckets: '5' },
|
||||
features,
|
||||
accessor,
|
||||
);
|
||||
|
||||
const firstBp = parseFloat(breakPoints[0]);
|
||||
const lastBp = parseFloat(breakPoints[breakPoints.length - 1]);
|
||||
|
||||
// Every data point should fall within the breakpoint range
|
||||
features.forEach(feature => {
|
||||
expect(feature.value).toBeGreaterThanOrEqual(firstBp);
|
||||
expect(feature.value).toBeLessThanOrEqual(lastBp);
|
||||
});
|
||||
|
||||
// The range should be as tight as possible while including all data
|
||||
expect(firstBp).toBeLessThanOrEqual(minValue);
|
||||
expect(lastBp).toBeGreaterThanOrEqual(maxValue);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -75,35 +75,19 @@ export function getBreakPoints(
|
||||
if (minValue === undefined || maxValue === undefined) {
|
||||
return [];
|
||||
}
|
||||
// Handle Infinity values
|
||||
if (!Number.isFinite(minValue) || !Number.isFinite(maxValue)) {
|
||||
return [];
|
||||
}
|
||||
const delta = (maxValue - minValue) / numBuckets;
|
||||
const precision =
|
||||
delta === 0 ? 0 : Math.max(0, Math.ceil(Math.log10(1 / delta)));
|
||||
const extraBucket =
|
||||
maxValue > parseFloat(maxValue.toFixed(precision)) ? 1 : 0;
|
||||
const startValue =
|
||||
minValue < parseFloat(minValue.toFixed(precision))
|
||||
? minValue - 1
|
||||
: minValue;
|
||||
|
||||
// Generate breakpoints
|
||||
const breakPoints = new Array(numBuckets + 1).fill(0).map((_, i) => {
|
||||
const value = minValue + i * delta;
|
||||
|
||||
// For the first breakpoint, floor to ensure minimum is included
|
||||
if (i === 0) {
|
||||
const scale = Math.pow(10, precision);
|
||||
return (Math.floor(minValue * scale) / scale).toFixed(precision);
|
||||
}
|
||||
|
||||
// For the last breakpoint, ceil to ensure maximum is included
|
||||
if (i === numBuckets) {
|
||||
const scale = Math.pow(10, precision);
|
||||
return (Math.ceil(maxValue * scale) / scale).toFixed(precision);
|
||||
}
|
||||
|
||||
// For middle breakpoints, use standard rounding
|
||||
return value.toFixed(precision);
|
||||
});
|
||||
|
||||
return breakPoints;
|
||||
return new Array(numBuckets + 1 + extraBucket)
|
||||
.fill(0)
|
||||
.map((_, i) => (startValue + i * delta).toFixed(precision));
|
||||
}
|
||||
|
||||
return formDataBreakPoints.sort(
|
||||
@@ -162,10 +146,7 @@ export function getBreakPointColorScaler(
|
||||
scaler = scaleThreshold<number, string>()
|
||||
.domain(points)
|
||||
.range(bucketedColors);
|
||||
// Only mask values that are strictly outside the min/max bounds
|
||||
// Include values equal to the max breakpoint
|
||||
maskPoint = value =>
|
||||
!!value && (value > points[points.length - 1] || value < points[0]);
|
||||
maskPoint = value => !!value && (value > points[n] || value < points[0]);
|
||||
} else {
|
||||
// interpolate colors linearly
|
||||
const linearScaleDomain = extent(features, accessor);
|
||||
|
||||
@@ -73,10 +73,7 @@ export interface ValidatedPickingData {
|
||||
sourcePosition?: [number, number];
|
||||
targetPosition?: [number, number];
|
||||
path?: string;
|
||||
geometry?: {
|
||||
type: string;
|
||||
coordinates: number[] | number[][] | number[][][];
|
||||
};
|
||||
geometry?: any;
|
||||
}
|
||||
|
||||
const getFiltersBySpatialType = ({
|
||||
@@ -99,7 +96,7 @@ const getFiltersBySpatialType = ({
|
||||
type,
|
||||
delimiter,
|
||||
} = spatialData;
|
||||
let values: (string | number | [number, number] | [number, number][])[] = [];
|
||||
let values: any[] = [];
|
||||
let filters: QueryObjectFilterClause[] = [];
|
||||
let customColumnLabel;
|
||||
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*", "types/**/*"],
|
||||
"exclude": ["src/**/*.test.*", "src/**/*.stories.*"],
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
// Path Resolution: Override baseUrl to maintain correct path mappings from parent config
|
||||
// (e.g., "@apache-superset/core" -> "./packages/superset-core/src")
|
||||
"baseUrl": "../..",
|
||||
|
||||
// Directory Overrides: Parent config paths are relative to frontend root,
|
||||
// but packages need paths relative to their own directory
|
||||
"outDir": "lib",
|
||||
"rootDir": "src",
|
||||
"declarationDir": "lib"
|
||||
"outDir": "lib"
|
||||
},
|
||||
"include": ["src/**/*", "types/**/*"],
|
||||
"exclude": ["src/**/*.test.*", "src/**/*.stories.*"],
|
||||
|
||||
@@ -19,10 +19,10 @@
|
||||
import {
|
||||
DataRecord,
|
||||
DataRecordValue,
|
||||
GenericDataType,
|
||||
getTimeFormatterForGranularity,
|
||||
t,
|
||||
} from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
import { useCallback, useEffect, useState, useMemo } from 'react';
|
||||
import { isEqual } from 'lodash';
|
||||
|
||||
|
||||
@@ -43,6 +43,7 @@ import {
|
||||
import {
|
||||
ensureIsArray,
|
||||
FeatureFlag,
|
||||
GenericDataType,
|
||||
isAdhocColumn,
|
||||
isFeatureEnabled,
|
||||
isPhysicalColumn,
|
||||
@@ -54,7 +55,7 @@ import {
|
||||
validateMaxValue,
|
||||
validateServerPagination,
|
||||
} from '@superset-ui/core';
|
||||
import { GenericDataType } from '@apache-superset/core/api/core';
|
||||
|
||||
import { isEmpty, last } from 'lodash';
|
||||
import { PAGE_SIZE_OPTIONS, SERVER_PAGE_SIZE_OPTIONS } from './consts';
|
||||
import { ColorSchemeEnum } from './types';
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user