Mirror of https://github.com/apache/superset.git (synced 2026-05-15 04:45:10 +00:00)

Compare commits: fix/dashbo...fix-oauth- (7 commits)

Commits:
- 9af82715d2
- bb840a7720
- e518c293d1
- 4e09889607
- 672e9a1477
- 8fa5a75c70
- 144dae7c43
.github/workflows/superset-docs-verify.yml (vendored, 7 changes)

@@ -78,6 +78,13 @@ jobs:
       - name: yarn install
         run: |
           yarn install --check-cache
+      - name: Lint docs links
+        # Fast source-level check for bare relative internal links
+        # like `[Foo](../foo)` that Docusaurus's onBrokenLinks
+        # setting can't catch. Runs in seconds; fails fast before
+        # the expensive build step.
+        run: |
+          yarn lint:docs-links
       - name: yarn typecheck
         run: |
           yarn typecheck
@@ -29,10 +29,10 @@ sidebar_position: 1

 ## Components

-- [DropdownContainer](./dropdowncontainer)
-- [Flex](./flex)
-- [Grid](./grid)
-- [Layout](./layout)
-- [MetadataBar](./metadatabar)
-- [Space](./space)
-- [Table](./table)
+- [DropdownContainer](./dropdowncontainer.mdx)
+- [Flex](./flex.mdx)
+- [Grid](./grid.mdx)
+- [Layout](./layout.mdx)
+- [MetadataBar](./metadatabar.mdx)
+- [Space](./space.mdx)
+- [Table](./table.mdx)

@@ -62,7 +62,7 @@ This documentation is auto-generated from Storybook stories. To add or update co
 4. Run `yarn generate:superset-components` in the `docs/` directory

 :::info Work in Progress
-This component library is actively being documented. See the [Components TODO](./TODO) page for a list of components awaiting documentation.
+This component library is actively being documented. See the [Components TODO](./TODO.md) page for a list of components awaiting documentation.
 :::

 ---
@@ -29,49 +29,49 @@ sidebar_position: 1

 ## Components

-- [AutoComplete](./autocomplete)
-- [Avatar](./avatar)
-- [Badge](./badge)
-- [Breadcrumb](./breadcrumb)
-- [Button](./button)
-- [ButtonGroup](./buttongroup)
-- [CachedLabel](./cachedlabel)
-- [Card](./card)
-- [Checkbox](./checkbox)
-- [Collapse](./collapse)
-- [DatePicker](./datepicker)
-- [Divider](./divider)
-- [EditableTitle](./editabletitle)
-- [EmptyState](./emptystate)
-- [FaveStar](./favestar)
-- [IconButton](./iconbutton)
-- [Icons](./icons)
-- [IconTooltip](./icontooltip)
-- [InfoTooltip](./infotooltip)
-- [Input](./input)
-- [Label](./label)
-- [List](./list)
-- [ListViewCard](./listviewcard)
-- [Loading](./loading)
-- [Menu](./menu)
-- [Modal](./modal)
-- [ModalTrigger](./modaltrigger)
-- [Popover](./popover)
-- [ProgressBar](./progressbar)
-- [Radio](./radio)
-- [SafeMarkdown](./safemarkdown)
-- [Select](./select)
-- [Skeleton](./skeleton)
-- [Slider](./slider)
-- [Steps](./steps)
-- [Switch](./switch)
-- [TableCollection](./tablecollection)
-- [TableView](./tableview)
-- [Tabs](./tabs)
-- [Timer](./timer)
-- [Tooltip](./tooltip)
-- [Tree](./tree)
-- [TreeSelect](./treeselect)
-- [Typography](./typography)
-- [UnsavedChangesModal](./unsavedchangesmodal)
-- [Upload](./upload)
+- [AutoComplete](./autocomplete.mdx)
+- [Avatar](./avatar.mdx)
+- [Badge](./badge.mdx)
+- [Breadcrumb](./breadcrumb.mdx)
+- [Button](./button.mdx)
+- [ButtonGroup](./buttongroup.mdx)
+- [CachedLabel](./cachedlabel.mdx)
+- [Card](./card.mdx)
+- [Checkbox](./checkbox.mdx)
+- [Collapse](./collapse.mdx)
+- [DatePicker](./datepicker.mdx)
+- [Divider](./divider.mdx)
+- [EditableTitle](./editabletitle.mdx)
+- [EmptyState](./emptystate.mdx)
+- [FaveStar](./favestar.mdx)
+- [IconButton](./iconbutton.mdx)
+- [Icons](./icons.mdx)
+- [IconTooltip](./icontooltip.mdx)
+- [InfoTooltip](./infotooltip.mdx)
+- [Input](./input.mdx)
+- [Label](./label.mdx)
+- [List](./list.mdx)
+- [ListViewCard](./listviewcard.mdx)
+- [Loading](./loading.mdx)
+- [Menu](./menu.mdx)
+- [Modal](./modal.mdx)
+- [ModalTrigger](./modaltrigger.mdx)
+- [Popover](./popover.mdx)
+- [ProgressBar](./progressbar.mdx)
+- [Radio](./radio.mdx)
+- [SafeMarkdown](./safemarkdown.mdx)
+- [Select](./select.mdx)
+- [Skeleton](./skeleton.mdx)
+- [Slider](./slider.mdx)
+- [Steps](./steps.mdx)
+- [Switch](./switch.mdx)
+- [TableCollection](./tablecollection.mdx)
+- [TableView](./tableview.mdx)
+- [Tabs](./tabs.mdx)
+- [Timer](./timer.mdx)
+- [Tooltip](./tooltip.mdx)
+- [Tree](./tree.mdx)
+- [TreeSelect](./treeselect.mdx)
+- [Typography](./typography.mdx)
+- [UnsavedChangesModal](./unsavedchangesmodal.mdx)
+- [Upload](./upload.mdx)
@@ -327,13 +327,13 @@ stats.sort_stats('cumulative').print_stats(10)
 ## Resources

 ### Internal
-- [Coding Guidelines](../guidelines/design-guidelines)
-- [Testing Guide](../testing/overview)
-- [Extension Architecture](../extensions/architecture)
+- [Coding Guidelines](../guidelines/design-guidelines.md)
+- [Testing Guide](../testing/overview.md)
+- [Extension Architecture](../extensions/architecture.md)

 ### External
 - [Google's Code Review Guide](https://google.github.io/eng-practices/review/)
 - [Best Practices for Code Review](https://smartbear.com/learn/code-review/best-practices-for-peer-code-review/)
 - [The Art of Readable Code](https://www.oreilly.com/library/view/the-art-of/9781449318482/)

-Next: [Reporting issues effectively](./issue-reporting)
+Next: [Reporting issues effectively](./issue-reporting.md)
@@ -668,7 +668,7 @@ A series of checks will now run when you make a git commit.

 ## Linting

-See [how tos](./howtos#linting)
+See [how tos](./howtos.md#linting)

 ## GitHub Actions and `act`
@@ -77,7 +77,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P
   in `requirements.txt` pinned to a specific version which ensures that the application
   build is deterministic.
 - For TypeScript/JavaScript, include new libraries in `package.json`
-- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](./howtos#testing) for how to run tests.
+- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](./howtos.md#testing) for how to run tests.
 - **Documentation:** If the pull request adds functionality, the docs should be updated as part of the same PR.
 - **CI:** Reviewers will not review the code until all CI tests are passed. Sometimes there can be flaky tests. You can close and open PR to re-run CI test. Please report if the issue persists. After the CI fix has been deployed to `master`, please rebase your PR.
 - **Code coverage:** Please ensure that code coverage does not decrease.
@@ -282,7 +282,7 @@ You can now launch your VSCode debugger with the same config as above. VSCode wi

 ### Storybook

-See the dedicated [Storybook documentation](../testing/storybook) for information on running Storybook locally and adding new stories.
+See the dedicated [Storybook documentation](../testing/storybook.md) for information on running Storybook locally and adding new stories.

 ## Contributing Translations
@@ -413,6 +413,6 @@ Consider:
 - **Feature Request**: Use feature request template
 - **Question**: Use GitHub Discussions
 - **Configuration Help**: Ask in Slack
-- **Development Help**: See [Contributing Guide](./overview)
+- **Development Help**: See [Contributing Guide](./overview.md)

-Next: [Understanding the release process](./release-process)
+Next: [Understanding the release process](./release-process.md)
@@ -94,7 +94,7 @@ Look through the GitHub issues. Issues tagged with
 Superset could always use better documentation,
 whether as part of the official Superset docs,
 in docstrings, `docs/*.rst` or even on the web as blog posts or
-articles. See [Documentation](./howtos#contributing-to-documentation) for more details.
+articles. See [Documentation](./howtos.md#contributing-to-documentation) for more details.

 ### Add Translations
@@ -103,7 +103,7 @@ text strings from Superset's UI. You can jump into the existing
 language dictionaries at
 `superset/translations/<language_code>/LC_MESSAGES/messages.po`, or
 even create a dictionary for a new language altogether.
-See [Translating](./howtos#contributing-translations) for more details.
+See [Translating](./howtos.md#contributing-translations) for more details.

 ### Ask Questions
@@ -158,9 +158,9 @@ Security team members should also follow these general expectations:

 Ready to contribute? Here's how to get started:

-1. **[Set up your environment](./development-setup)** - Get Superset running locally
+1. **[Set up your environment](./development-setup.md)** - Get Superset running locally
 2. **[Find something to work on](#types-of-contributions)** - Pick an issue or feature
-3. **[Submit your contribution](./submitting-pr)** - Create a pull request
-4. **[Follow guidelines](./guidelines)** - Ensure code quality
+3. **[Submit your contribution](./submitting-pr.md)** - Create a pull request
+4. **[Follow guidelines](./guidelines.md)** - Ensure code quality

 Welcome to the Apache Superset community! 🚀
@@ -466,4 +466,4 @@ Credit:
 - [Release Scripts](https://github.com/apache/superset/tree/master/scripts/release)
 - [Superset Repository Scripts](https://github.com/apache/superset/tree/master/scripts)

-Next: Return to [Contributing Overview](./overview)
+Next: Return to [Contributing Overview](./overview.md)
@@ -31,11 +31,11 @@ Learn how to create and submit high-quality pull requests to Apache Superset.
 ### Prerequisites
 - [ ] Development environment is set up
 - [ ] You've forked and cloned the repository
-- [ ] You've read the [contributing overview](./overview)
+- [ ] You've read the [contributing overview](./overview.md)
 - [ ] You've found or created an issue to work on

 ### PR Readiness Checklist
-- [ ] Code follows [coding guidelines](../guidelines/design-guidelines)
+- [ ] Code follows [coding guidelines](../guidelines/design-guidelines.md)
 - [ ] Tests are passing locally
 - [ ] Linting passes (`pre-commit run --all-files`)
 - [ ] Documentation is updated if needed
@@ -318,4 +318,4 @@ git push origin master
 - **GitHub**: Tag @apache/superset-committers for attention
 - **Mailing List**: dev@superset.apache.org

-Next: [Understanding code review process](./code-review)
+Next: [Understanding code review process](./code-review.md)
@@ -233,7 +233,7 @@ This architecture provides several key benefits:

 Now that you understand the architecture, explore:

-- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
-- **[Quick Start](./quick-start)** - Build your first extension
-- **[Contribution Types](./contribution-types)** - What kinds of extensions you can build
-- **[Development](./development)** - Project structure, APIs, and development workflow
+- **[Dependencies](./dependencies.md)** - Managing dependencies and understanding API stability
+- **[Quick Start](./quick-start.md)** - Build your first extension
+- **[Contribution Types](./contribution-types.md)** - What kinds of extensions you can build
+- **[Development](./development.md)** - Project structure, APIs, and development workflow
@@ -29,7 +29,7 @@ These UI components are available to Superset extension developers through the `

 ## Available Components

-- [Alert](./alert)
+- [Alert](./alert.mdx)

 ## Usage

@@ -90,4 +90,4 @@ InteractiveMyComponent.argTypes = {

 ## Interactive Documentation

-For interactive examples with controls, visit the [Storybook](/storybook/?path=/docs/extension-components--docs).
+For interactive examples with controls, run Storybook locally — see the [Storybook documentation](/developer-docs/testing/storybook).
@@ -110,7 +110,7 @@ editors.registerEditor(
 );
 ```

-See [Editors Extension Point](./extension-points/editors) for implementation details.
+See [Editors Extension Point](./extension-points/editors.md) for implementation details.

 ## Backend
@@ -146,7 +146,7 @@ class MyExtensionAPI(RestApi):
 from .api import MyExtensionAPI
 ```

-**Note**: The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:
+**Note**: The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:

 - **Extension context**: `/extensions/{publisher}/{name}/` with ID prefixed as `extensions.{publisher}.{name}.{id}`
 - **Host context**: `/api/v1/` with original ID
@@ -193,7 +193,7 @@ def get_summary() -> dict:
     return {"status": "success", "result": {"queries_today": 42}}
 ```

-See [MCP Integration](./mcp) for implementation details.
+See [MCP Integration](./mcp.md) for implementation details.

 ### MCP Prompts

@@ -223,7 +223,7 @@ async def analysis_guide(ctx: Context) -> str:
     """
 ```

-See [MCP Integration](./mcp) for implementation details.
+See [MCP Integration](./mcp.md) for implementation details.

 ### Semantic Layers
@@ -161,6 +161,6 @@ Until then, monitor the Superset release notes and test your extensions with eac

 ## Next Steps

-- **[Architecture](./architecture)** - Understand the extension system design
-- **[Development](./development)** - Learn about APIs and development workflow
-- **[Quick Start](./quick-start)** - Build your first extension
+- **[Architecture](./architecture.md)** - Understand the extension system design
+- **[Development](./development.md)** - Learn about APIs and development workflow
+- **[Quick Start](./quick-start.md)** - Build your first extension
@@ -252,7 +252,7 @@ class DatasetReferencesAPI(RestApi):

 ### Automatic Context Detection

-The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:
+The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:

 - **Extension APIs**: Registered under `/extensions/{publisher}/{name}/` with IDs prefixed as `extensions.{publisher}.{name}.{id}`
 - **Host APIs**: Registered under `/api/v1/` with original IDs
@@ -217,6 +217,6 @@ const disposable = handle.registerCompletionProvider(provider);

 ## Next Steps

-- **[SQL Lab Extension Points](./sqllab)** - Learn about other SQL Lab customizations
-- **[Contribution Types](../contribution-types)** - Explore other contribution types
-- **[Development](../development)** - Set up your development environment
+- **[SQL Lab Extension Points](./sqllab.md)** - Learn about other SQL Lab customizations
+- **[Contribution Types](../contribution-types.md)** - Explore other contribution types
+- **[Development](../development.md)** - Set up your development environment
@@ -51,7 +51,7 @@ SQL Lab provides 4 extension points where extensions can contribute custom UI co
 | **Right Sidebar** | `sqllab.rightSidebar` | ✓ | — | Custom panels (AI assistants, query analysis) |
 | **Panels** | `sqllab.panels` | ✓ | ✓ | Custom tabs + toolbar actions (data profiling) |

-\*Editor views are contributed via [Editor Contributions](./editors), not standard view contributions.
+\*Editor views are contributed via [Editor Contributions](./editors.md), not standard view contributions.

 ## Customization Types

@@ -78,7 +78,7 @@ Extensions can add toolbar actions to **Left Sidebar**, **Editor**, and **Panels

 ### Custom Editors

-Extensions can replace the default SQL editor with custom implementations (Monaco, CodeMirror, etc.). See [Editor Contributions](./editors) for details.
+Extensions can replace the default SQL editor with custom implementations (Monaco, CodeMirror, etc.). See [Editor Contributions](./editors.md) for details.

 ## Examples
@@ -157,6 +157,6 @@ menus.registerMenuItem(

 ## Next Steps

-- **[Contribution Types](../contribution-types)** - Learn about other contribution types (commands, menus)
-- **[Development](../development)** - Set up your development environment
-- **[Quick Start](../quick-start)** - Build a complete extension
+- **[Contribution Types](../contribution-types.md)** - Learn about other contribution types (commands, menus)
+- **[Development](../development.md)** - Set up your development environment
+- **[Quick Start](../quick-start.md)** - Build a complete extension
@@ -455,5 +455,5 @@ async def metrics_guide(ctx: Context) -> str:

 ## Next Steps

-- **[Development](./development)** - Project structure, APIs, and dev workflow
-- **[Security](./security)** - Security best practices for extensions
+- **[Development](./development.md)** - Project structure, APIs, and dev workflow
+- **[Security](./security.md)** - Security best practices for extensions
@@ -47,13 +47,13 @@ Extension developers have access to pre-built UI components via `@apache-superse

 ## Next Steps

-- **[Quick Start](./quick-start)** - Build your first extension with a complete walkthrough
-- **[Architecture](./architecture)** - Design principles and system overview
-- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
-- **[Contribution Types](./contribution-types)** - Available extension points
-- **[Development](./development)** - Project structure, APIs, and development workflow
-- **[Deployment](./deployment)** - Packaging and deploying extensions
-- **[MCP Integration](./mcp)** - Adding AI agent capabilities using extensions
-- **[Security](./security)** - Security considerations and best practices
-- **[Tasks](./tasks)** - Framework for creating and managing long running tasks
-- **[Community Extensions](./registry)** - Browse extensions shared by the community
+- **[Quick Start](./quick-start.md)** - Build your first extension with a complete walkthrough
+- **[Architecture](./architecture.md)** - Design principles and system overview
+- **[Dependencies](./dependencies.md)** - Managing dependencies and understanding API stability
+- **[Contribution Types](./contribution-types.md)** - Available extension points
+- **[Development](./development.md)** - Project structure, APIs, and development workflow
+- **[Deployment](./deployment.md)** - Packaging and deploying extensions
+- **[MCP Integration](./mcp.md)** - Adding AI agent capabilities using extensions
+- **[Security](./security.md)** - Security considerations and best practices
+- **[Tasks](./tasks.md)** - Framework for creating and managing long running tasks
+- **[Community Extensions](./registry.md)** - Browse extensions shared by the community
@@ -168,7 +168,7 @@ class HelloWorldAPI(RestApi):

 **Key points:**

-- Uses [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
+- Uses [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
 - Extends `RestApi` from `superset_core.rest_api.api`
 - Uses Flask-AppBuilder decorators (`@expose`, `@protect`, `@safe`)
 - Returns responses using `self.response(status_code, result=data)`

@@ -184,7 +184,7 @@ Replace the generated print statement with API import to trigger registration:
 from .api import HelloWorldAPI # noqa: F401
 ```

-The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.
+The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.

 ## Step 5: Create Frontend Component
@@ -225,7 +225,7 @@ The `@apache-superset/core` package must be listed in both `peerDependencies` (t

 The webpack configuration requires specific settings for Module Federation. Key settings include `externalsType: "window"` and `externals` to map `@apache-superset/core` to `window.superset` at runtime, `import: false` for shared modules to use the host's React instead of bundling a separate copy, and `remoteEntry.[contenthash].js` for cache busting.

-**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture#module-federation) for a full explanation.
+**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture.md#module-federation) for a full explanation.

 ```javascript
 const path = require('path');
@@ -496,7 +496,7 @@ Superset will extract and validate the extension metadata, load the assets, regi
 Here's what happens when your extension loads:

 1. **Superset starts**: Reads `manifest.json` from the `.supx` bundle and loads the backend entrypoint
-2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
+2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
 3. **Frontend loads**: When SQL Lab opens, Superset fetches the remote entry file
 4. **Module Federation**: Webpack loads your extension module and resolves `@apache-superset/core` to `window.superset`
 5. **Registration**: The module executes at load time, calling `views.registerView` to register your panel

@@ -509,9 +509,9 @@ Here's what happens when your extension loads:

 Now that you have a working extension, explore:

-- **[Development](./development)** - Project structure, APIs, and development workflow
-- **[Contribution Types](./contribution-types)** - Other contribution points beyond panels
-- **[Deployment](./deployment)** - Packaging and deploying your extension
-- **[Security](./security)** - Security best practices for extensions
+- **[Development](./development.md)** - Project structure, APIs, and development workflow
+- **[Contribution Types](./contribution-types.md)** - Other contribution points beyond panels
+- **[Deployment](./deployment.md)** - Packaging and deploying your extension
+- **[Security](./security.md)** - Security best practices for extensions

 For a complete real-world example, examine the query insights extension in the Superset codebase.
@@ -28,7 +28,7 @@ By default, extensions are disabled and must be explicitly enabled by setting th

 For external extensions, administrators are responsible for evaluating and verifying the security of any extensions they choose to install, just as they would when installing third-party NPM or PyPI packages. At this stage, all extensions run in the same context as the host application, without additional sandboxing. This means that external extensions can impact the security and performance of a Superset environment in the same way as any other installed dependency.

-We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
+We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry.md) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.

 **Any performance or security vulnerabilities introduced by external extensions should be reported directly to the extension author, not as Superset vulnerabilities.**
@@ -114,7 +114,7 @@ class CreateDashboardCommand(BaseCommand):

 ### Data Access Objects (DAOs)

-See: [DAO Style Guidelines and Best Practices](./backend/dao-style-guidelines)
+See: [DAO Style Guidelines and Best Practices](./backend/dao-style-guidelines.md)

 ## Testing
@@ -29,16 +29,16 @@ This is a list of statements that describe how we do frontend development in Sup
 - We develop using TypeScript.
   - See: [SIP-36](https://github.com/apache/superset/issues/9101)
 - We use React for building components, and Redux to manage app/global state.
-  - See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines)
+  - See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines.md)
 - We prefer functional components to class components and use hooks for local component state.
 - We use [Ant Design](https://ant.design/) components from our component library whenever possible, only building our own custom components when it's required.
   - See: [SIP-48](https://github.com/apache/superset/issues/11283)
 - We use [@emotion](https://emotion.sh/docs/introduction) to provide styling for our components, co-locating styling within component files.
   - See: [SIP-37](https://github.com/apache/superset/issues/9145)
-  - See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines)
+  - See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines.md)
 - We use Jest for unit tests, React Testing Library for component tests, and Cypress for end-to-end tests.
   - See: [SIP-56](https://github.com/apache/superset/issues/11830)
-  - See: [Testing Guidelines and Best Practices](../testing/testing-guidelines)
+  - See: [Testing Guidelines and Best Practices](../testing/testing-guidelines.md)
 - We add tests for every new component or file added to the frontend.
 - We organize our repo so similar files live near each other, and tests are co-located with the files they test.
   - See: [SIP-61](https://github.com/apache/superset/issues/12098)

@@ -46,6 +46,6 @@ This is a list of statements that describe how we do frontend development in Sup
 - We use OXC (oxlint) and Prettier to automatically fix lint errors and format the code.
 - We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
   - If there's not a linting rule, we don't have a rule!
-  - See: [Linting How-Tos](../contributing/howtos#typescript--javascript)
+  - See: [Linting How-Tos](../contributing/howtos.md#typescript--javascript)
 - We use [React Storybook](https://storybook.js.org/) to help preview/test and stabilize our components
   - A public Storybook with components from the `master` branch is available [here](https://apache-superset.github.io/superset-ui/?path=/story/*)
@@ -31,7 +31,7 @@ This guide is intended primarily for reusable components. Whenever possible, all
 ## General Guidelines

 - We use [Ant Design](https://ant.design/) as our component library. Do not build a new component if Ant Design provides one but rather instead extend or customize what the library provides
-- Always style your component using Emotion and always prefer the theme variables whenever applicable. See: [Emotion Styling Guidelines and Best Practices](./emotion-styling-guidelines)
+- Always style your component using Emotion and always prefer the theme variables whenever applicable. See: [Emotion Styling Guidelines and Best Practices](./emotion-styling-guidelines.md)
 - All components should be made to be reusable whenever possible
 - All components should follow the structure and best practices as detailed below

@@ -53,7 +53,7 @@ superset-frontend/src/components

 **Storybook:** Components should come with a storybook file whenever applicable, with the following naming convention `\{ComponentName\}.stories.tsx`. More details about Storybook below

-**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines) for more details
+**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines.md) for more details

 **Reference naming:** Use `PascalCase` for React components and `camelCase` for component instances
@@ -37,16 +37,16 @@ Superset embraces a testing pyramid approach:
 ## Testing Documentation

 ### Frontend Testing
-- **[Frontend Testing](./frontend-testing)** - Jest, React Testing Library, and component testing strategies
+- **[Frontend Testing](./frontend-testing.md)** - Jest, React Testing Library, and component testing strategies

 ### Backend Testing
-- **[Backend Testing](./backend-testing)** - pytest, database testing, and API testing patterns
+- **[Backend Testing](./backend-testing.md)** - pytest, database testing, and API testing patterns

 ### End-to-End Testing
-- **[E2E Testing](./e2e-testing)** - Playwright testing for complete user workflows
+- **[E2E Testing](./e2e-testing.md)** - Playwright testing for complete user workflows

 ### CI/CD Integration
-- **[CI/CD](./ci-cd)** - Continuous integration, automated testing, and deployment pipelines
+- **[CI/CD](./ci-cd.md)** - Continuous integration, automated testing, and deployment pipelines

 ## Testing Tools & Frameworks
@@ -254,7 +254,7 @@ const config: Config = {
     'Apache Superset is a modern data exploration and visualization platform',
   url: 'https://superset.apache.org',
   baseUrl: '/',
-  onBrokenLinks: 'warn',
+  onBrokenLinks: 'throw',
   markdown: {
     mermaid: true,
     hooks: {
@@ -30,6 +30,7 @@
     "lint:db-metadata:report": "python3 ../superset/db_engine_specs/lint_metadata.py --markdown -o ../superset/db_engine_specs/METADATA_STATUS.md",
     "update:readme-db-logos": "node scripts/generate-database-docs.mjs --update-readme",
     "eslint": "eslint .",
+    "lint:docs-links": "node scripts/lint-docs-links.mjs",
     "version:add": "node scripts/manage-versions.mjs add",
     "version:remove": "node scripts/manage-versions.mjs remove",
     "version:add:docs": "node scripts/manage-versions.mjs add docs",
@@ -1260,7 +1260,15 @@ function generateCategoryIndex(category, components) {
   };
   const componentList = components
     .sort((a, b) => a.componentName.localeCompare(b.componentName))
-    .map(c => `- [${c.componentName}](./${c.componentName.toLowerCase()})`)
+    // `.mdx` suffix matches the actual component page files emitted
+    // by this generator (see the MDX wrappers below). The extension
+    // is required: Docusaurus only validates and rewrites *file-based*
+    // references (.md/.mdx). Bare relative paths bypass the file
+    // resolver and get emitted as raw HTML hrefs that the browser
+    // resolves against the current URL — which gives the wrong
+    // directory for trailing-slash routes and breaks SPA navigation.
+    // See docs/scripts/lint-docs-links.mjs.
+    .map(c => `- [${c.componentName}](./${c.componentName.toLowerCase()}.mdx)`)
     .join('\n');

   return `---
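To make the effect of this change concrete, here is a minimal stand-alone sketch (not part of the diff) of the entry the generator now emits for a single component; `MetadataBar` is just an example name taken from the component lists earlier in this changeset:

```javascript
// Illustration only: mirrors the link template used in the .map() call above.
const c = { componentName: 'MetadataBar' };
const entry = `- [${c.componentName}](./${c.componentName.toLowerCase()}.mdx)`;
console.log(entry); // -> "- [MetadataBar](./metadatabar.mdx)"
```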
@@ -1366,7 +1374,7 @@ This documentation is auto-generated from Storybook stories. To add or update co
 4. Run \`yarn generate:superset-components\` in the \`docs/\` directory

 :::info Work in Progress
-This component library is actively being documented. See the [Components TODO](./TODO) page for a list of components awaiting documentation.
+This component library is actively being documented. See the [Components TODO](./TODO.md) page for a list of components awaiting documentation.
 :::

 ---
docs/scripts/lint-docs-links.mjs (new file, 230 lines)

@@ -0,0 +1,230 @@
#!/usr/bin/env node
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * lint-docs-links — source-level checks for internal markdown links.
 *
 * Catches three failure modes that combine to break SPA navigation in
 * a Docusaurus build:
 *
 * 1. BARE            — `[X](../foo)` with no extension. Skips
 *                      Docusaurus's file resolver entirely. Emitted
 *                      as a raw href and resolved by the browser
 *                      against the current page URL — usually the
 *                      wrong directory for trailing-slash routes.
 *                      `onBrokenLinks: 'throw'` cannot catch this.
 *
 * 2. MISSING_TARGET  — `[X](./gone.md)` with an extension, but no
 *                      file at that path. The Docusaurus build
 *                      catches this too (via
 *                      `onBrokenMarkdownLinks: 'throw'`) but only
 *                      after a multi-minute build. This script
 *                      flags it in ~1s.
 *
 * 3. WRONG_EXTENSION — `[X](./foo.md)` where the file is actually
 *                      `foo.mdx` (or vice versa). Same end result
 *                      as MISSING_TARGET, but the fix is one
 *                      character — so we report it as its own
 *                      category with the actual extension on disk.
 *
 * Skips: fenced code blocks, asset-style targets (.png/.json/etc.),
 * external URLs, in-page anchors, and the `versioned_docs/`
 * snapshots (those are frozen historical content).
 *
 * Run from `docs/`:
 *   node scripts/lint-docs-links.mjs
 *
 * Exits 0 on clean, 1 on any finding.
 */

import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const docsRoot = path.join(__dirname, '..');

const ROOTS = ['docs', 'admin_docs', 'developer_docs', 'components'];

const NON_DOC_EXTENSIONS = new Set([
  '.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg', '.ico',
  '.json', '.yaml', '.yml', '.txt', '.csv',
  '.zip', '.tar', '.gz',
  '.pdf',
  '.mp4', '.webm', '.mov',
]);

const LINK_RE = /\[[^\]\n]+?\]\((?<url>\.{1,2}\/[^)\s]+?)\)/g;

/**
 * Classify a single markdown link from a source file.
 * Returns one of: ok / bare / asset / missing-target / wrong-extension.
 */
function classifyLink(sourceFile, url) {
  const stripped = url.split('#', 1)[0].split('?', 1)[0];
  const ext = path.extname(stripped).toLowerCase();

  // Non-doc assets — legit bare extensions, leave alone.
  if (ext && NON_DOC_EXTENSIONS.has(ext)) {
    return { kind: 'asset' };
  }

  // Anything that doesn't end in .md/.mdx is a bare relative URL.
  if (ext !== '.md' && ext !== '.mdx') {
    return { kind: 'bare' };
  }

  // Has a .md/.mdx extension — make sure the target exists.
  const target = path.normalize(path.join(path.dirname(sourceFile), stripped));
  if (fs.existsSync(target)) {
    return { kind: 'ok' };
  }

  // Target doesn't exist — check if the OTHER extension does.
  const otherExt = ext === '.md' ? '.mdx' : '.md';
  const otherTarget = target.slice(0, -ext.length) + otherExt;
  if (fs.existsSync(otherTarget)) {
    return { kind: 'wrong-extension', actualExt: otherExt };
  }

  return { kind: 'missing-target' };
}

function* walk(dir) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (
        entry.name.startsWith('.') ||
        entry.name === 'node_modules' ||
        entry.name.endsWith('_versioned_docs') ||
        entry.name === 'versioned_docs'
      ) {
        continue;
      }
      yield* walk(full);
    } else if (entry.isFile()) {
      if (entry.name.endsWith('.md') || entry.name.endsWith('.mdx')) {
        yield full;
      }
    }
  }
}

function lintFile(file) {
  const src = fs.readFileSync(file, 'utf8');
  const findings = [];
  let inFence = false;
  const lines = src.split('\n');
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    if (line.trimStart().startsWith('```')) {
      inFence = !inFence;
      continue;
    }
    if (inFence) continue;
    for (const m of line.matchAll(LINK_RE)) {
      const url = m.groups.url;
      const result = classifyLink(file, url);
      if (result.kind !== 'ok' && result.kind !== 'asset') {
        findings.push({ line: i + 1, url, ...result });
      }
    }
  }
  return findings;
}

const findings = [];
for (const root of ROOTS) {
  const abs = path.join(docsRoot, root);
  if (!fs.existsSync(abs)) continue;
  for (const file of walk(abs)) {
    for (const f of lintFile(file)) {
      findings.push({ file: path.relative(docsRoot, file), ...f });
    }
  }
}

if (findings.length === 0) {
  console.log('✓ lint-docs-links: no broken internal links found');
  process.exit(0);
}

// Group by kind for readable output.
const groups = {
  bare: [],
  'wrong-extension': [],
  'missing-target': [],
};
for (const f of findings) {
  groups[f.kind].push(f);
}

console.error(
  `✗ lint-docs-links: found ${findings.length} broken internal link(s)`
);
console.error('');

if (groups.bare.length) {
  console.error(
    ` ${groups.bare.length} bare relative link(s) (no .md/.mdx extension)`
  );
  console.error(
    " Docusaurus's file resolver skips these; the browser resolves them"
  );
  console.error(
    ' against the current page URL — wrong directory for trailing-slash routes.'
  );
  console.error(' Add the extension so the file resolver picks them up.');
  console.error('');
  for (const f of groups.bare) {
    console.error(` ${f.file}:${f.line} ${f.url}`);
  }
  console.error('');
}

if (groups['wrong-extension'].length) {
  console.error(
    ` ${groups['wrong-extension'].length} wrong-extension link(s) (.md vs .mdx mismatch)`
  );
  console.error(' The target file exists with the other extension on disk.');
  console.error('');
  for (const f of groups['wrong-extension']) {
    console.error(
      ` ${f.file}:${f.line} ${f.url} → use ${f.actualExt}`
    );
  }
  console.error('');
}

if (groups['missing-target'].length) {
  console.error(
    ` ${groups['missing-target'].length} missing-target link(s) (file doesn't exist)`
  );
  console.error('');
  for (const f of groups['missing-target']) {
    console.error(` ${f.file}:${f.line} ${f.url}`);
  }
  console.error('');
}

process.exit(1);
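As a quick illustration of the first failure mode the script targets, here is a small stand-alone sketch (not part of the diff) that runs the same `LINK_RE` pattern over a few sample lines. The sample strings are hypothetical but mirror links touched elsewhere in this changeset; only relative `./` or `../` targets are captured, and the bare/ok/asset classification happens afterwards in `classifyLink`:

```javascript
// Stand-alone sketch: LINK_RE is copied from scripts/lint-docs-links.mjs above;
// the sample lines below are made up for illustration.
const LINK_RE = /\[[^\]\n]+?\]\((?<url>\.{1,2}\/[^)\s]+?)\)/g;

const samples = [
  'See [how tos](./howtos#linting)',                  // bare: no .md/.mdx extension
  'See [how tos](./howtos.md#linting)',               // ok: the file resolver can validate it
  'A [diagram](./img/architecture.png)',              // matched, but classified as an asset
  'An [external link](https://superset.apache.org)',  // not matched: only ./ and ../ targets
];

for (const line of samples) {
  for (const m of line.matchAll(LINK_RE)) {
    console.log(m.groups.url); // prints the three matched relative URLs
  }
}
```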
@@ -20,12 +20,12 @@ Alerts and reports are disabled by default. To turn them on, you need to do some

 #### In your `superset_config.py` or `superset_config_docker.py`

-- `"ALERT_REPORTS"` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) must be turned to True.
+- `"ALERT_REPORTS"` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) must be turned to True.
 - `beat_schedule` in CeleryConfig must contain schedule for `reports.scheduler`.
 - At least one of those must be configured, depending on what you want to use:
   - emails: `SMTP_*` settings
   - Slack messages: `SLACK_API_TOKEN`
-- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
+- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
   - Use date codes from [strftime.org](https://strftime.org/) to create the email subject.
   - If no date code is provided, the original string will be used as the email subject.
@@ -38,7 +38,7 @@ Screenshots will be taken but no messages actually sent as long as `ALERT_REPORT
 - You must install a headless browser, for taking screenshots of the charts and dashboards. Only Firefox and Chrome are currently supported.
   > If you choose Chrome, you must also change the value of `WEBDRIVER_TYPE` to `"chrome"` in your `superset_config.py`.

-Note: All the components required (Firefox headless browser, Redis, Postgres db, celery worker and celery beat) are present in the *dev* docker image if you are following [Installing Superset Locally](/docs/6.0.0/installation/docker-compose/).
+Note: All the components required (Firefox headless browser, Redis, Postgres db, celery worker and celery beat) are present in the *dev* docker image if you are following [Installing Superset Locally](/user-docs/6.0.0/installation/docker-compose/).
 All you need to do is add the required config variables described in this guide (See `Detailed Config`).

 If you are running a non-dev docker image, e.g., a stable release like `apache/superset:3.1.0`, that image does not include a headless browser. Only the `superset_worker` container needs this headless browser to browse to the target chart or dashboard.
@@ -70,7 +70,7 @@ Note: when you configure an alert or a report, the Slack channel list takes chan
 ### Kubernetes-specific

 - You must have a `celery beat` pod running. If you're using the chart included in the GitHub repository under [helm/superset](https://github.com/apache/superset/tree/master/helm/superset), you need to put `supersetCeleryBeat.enabled = true` in your values override.
-- You can see the dedicated docs about [Kubernetes installation](/docs/6.0.0/installation/kubernetes) for more details.
+- You can see the dedicated docs about [Kubernetes installation](/user-docs/6.0.0/installation/kubernetes) for more details.

 ### Docker Compose specific
@@ -78,11 +78,11 @@ Caching for SQL Lab query results is used when async queries are enabled and is
 Note that this configuration does not use a flask-caching dictionary for its configuration, but
 instead requires a cachelib object.

-See [Async Queries via Celery](/docs/6.0.0/configuration/async-queries-celery) for details.
+See [Async Queries via Celery](/user-docs/6.0.0/configuration/async-queries-celery) for details.

 ## Caching Thumbnails

-This is an optional feature that can be turned on by activating its [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) on config:
+This is an optional feature that can be turned on by activating its [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) on config:

 ```
 FEATURE_FLAGS = {
@@ -37,7 +37,7 @@ ENV SUPERSET_CONFIG_PATH /app/superset_config.py
 ```

 Docker compose deployments handle application configuration differently using specific conventions.
-Refer to the [docker compose tips & configuration](/docs/6.0.0/installation/docker-compose#docker-compose-tips--configuration)
+Refer to the [docker compose tips & configuration](/user-docs/6.0.0/installation/docker-compose#docker-compose-tips--configuration)
 for details.

 The following is an example of just a few of the parameters you can set in your `superset_config.py` file:

@@ -254,7 +254,7 @@ flask --app "superset.app:create_app(superset_app_root='/analytics')"

 ### Docker builds

-The [docker compose](/docs/6.0.0/installation/docker-compose#configuring-further) developer
+The [docker compose](/user-docs/6.0.0/installation/docker-compose#configuring-further) developer
 configuration includes an additional environmental variable,
 [`SUPERSET_APP_ROOT`](https://github.com/apache/superset/blob/master/docker/.env),
 to simplify the process of setting up a non-default root path across the services.

@@ -449,4 +449,4 @@ FEATURE_FLAGS = {
 }
 ```

-A current list of feature flags can be found in the [Feature Flags](/docs/6.0.0/configuration/feature-flags) documentation.
+A current list of feature flags can be found in the [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) documentation.
@@ -14,7 +14,7 @@ in your environment.
 You’ll need to install the required packages for the database you want to use as your metadata database
 as well as the packages needed to connect to the databases you want to access through Superset.
 For information about setting up Superset's metadata database, please refer to
-installation documentations ([Docker Compose](/docs/6.0.0/installation/docker-compose), [Kubernetes](/docs/6.0.0/installation/kubernetes))
+installation documentations ([Docker Compose](/user-docs/6.0.0/installation/docker-compose), [Kubernetes](/user-docs/6.0.0/installation/kubernetes))
 :::

 This documentation tries to keep pointer to the different drivers for commonly used database

@@ -26,7 +26,7 @@ Superset requires a Python [DB-API database driver](https://peps.python.org/pep-
 and a [SQLAlchemy dialect](https://docs.sqlalchemy.org/en/20/dialects/) to be installed for
 each database engine you want to connect to.

-You can read more [here](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
+You can read more [here](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
 install new database drivers into your Superset configuration.

 ### Supported Databases and Dependencies
@@ -37,53 +37,53 @@ are compatible with Superset.
| <div style={{width: '150px'}}>Database</div> | PyPI package | Connection String |
| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ |
| [AWS Athena](/docs/6.0.0/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
| [AWS DynamoDB](/docs/6.0.0/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
| [AWS Redshift](/docs/6.0.0/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [Apache Doris](/docs/6.0.0/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Apache Drill](/docs/6.0.0/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
| [Apache Druid](/docs/6.0.0/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
| [Apache Hive](/docs/6.0.0/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Apache Impala](/docs/6.0.0/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
| [Apache Kylin](/docs/6.0.0/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` |
| [Apache Pinot](/docs/6.0.0/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` |
| [Apache Solr](/docs/6.0.0/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` |
| [Apache Spark SQL](/docs/6.0.0/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Ascend.io](/docs/6.0.0/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
| [Azure MS SQL](/docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
| [ClickHouse](/docs/6.0.0/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/docs/6.0.0/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/docs/6.0.0/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [CrateDB](/docs/6.0.0/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
| [Denodo](/docs/6.0.0/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
| [Dremio](/docs/6.0.0/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
| [Elasticsearch](/docs/6.0.0/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/docs/6.0.0/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/docs/6.0.0/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
| [Google Sheets](/docs/6.0.0/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` |
| [Firebolt](/docs/6.0.0/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` |
| [Hologres](/docs/6.0.0/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [IBM Db2](/docs/6.0.0/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
|
||||
| [IBM Netezza Performance Server](/docs/6.0.0/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [MySQL](/docs/6.0.0/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [OceanBase](/docs/6.0.0/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Oracle](/docs/6.0.0/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
|
||||
| [Parseable](/docs/6.0.0/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
|
||||
| [PostgreSQL](/docs/6.0.0/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Presto](/docs/6.0.0/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [SAP Hana](/docs/6.0.0/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
|
||||
| [SingleStore](/docs/6.0.0/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` |
|
||||
| [StarRocks](/docs/6.0.0/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Snowflake](/docs/6.0.0/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
|
||||
| [AWS Athena](/user-docs/6.0.0/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
|
||||
| [AWS DynamoDB](/user-docs/6.0.0/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
|
||||
| [AWS Redshift](/user-docs/6.0.0/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
|
||||
| [Apache Doris](/user-docs/6.0.0/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Apache Drill](/user-docs/6.0.0/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
|
||||
| [Apache Druid](/user-docs/6.0.0/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
|
||||
| [Apache Hive](/user-docs/6.0.0/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
|
||||
| [Apache Impala](/user-docs/6.0.0/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
|
||||
| [Apache Kylin](/user-docs/6.0.0/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` |
|
||||
| [Apache Pinot](/user-docs/6.0.0/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` |
|
||||
| [Apache Solr](/user-docs/6.0.0/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` |
|
||||
| [Apache Spark SQL](/user-docs/6.0.0/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
|
||||
| [Ascend.io](/user-docs/6.0.0/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
|
||||
| [Azure MS SQL](/user-docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
|
||||
| [ClickHouse](/user-docs/6.0.0/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [CockroachDB](/user-docs/6.0.0/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
|
||||
| [Couchbase](/user-docs/6.0.0/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
|
||||
| [CrateDB](/user-docs/6.0.0/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
|
||||
| [Denodo](/user-docs/6.0.0/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [Dremio](/user-docs/6.0.0/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
|
||||
| [Elasticsearch](/user-docs/6.0.0/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
|
||||
| [Exasol](/user-docs/6.0.0/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
|
||||
| [Google BigQuery](/user-docs/6.0.0/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
|
||||
| [Google Sheets](/user-docs/6.0.0/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` |
|
||||
| [Firebolt](/user-docs/6.0.0/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` |
|
||||
| [Hologres](/user-docs/6.0.0/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [IBM Db2](/user-docs/6.0.0/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
|
||||
| [IBM Netezza Performance Server](/user-docs/6.0.0/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [MySQL](/user-docs/6.0.0/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [OceanBase](/user-docs/6.0.0/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Oracle](/user-docs/6.0.0/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
|
||||
| [Parseable](/user-docs/6.0.0/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
|
||||
| [PostgreSQL](/user-docs/6.0.0/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Presto](/user-docs/6.0.0/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [SAP Hana](/user-docs/6.0.0/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
|
||||
| [SingleStore](/user-docs/6.0.0/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` |
|
||||
| [StarRocks](/user-docs/6.0.0/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Snowflake](/user-docs/6.0.0/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
|
||||
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
|
||||
| [SQL Server](/docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
|
||||
| [TDengine](/docs/6.0.0/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
|
||||
| [Teradata](/docs/6.0.0/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
|
||||
| [TimescaleDB](/docs/6.0.0/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
|
||||
| [Trino](/docs/6.0.0/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
|
||||
| [Vertica](/docs/6.0.0/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [YDB](/docs/6.0.0/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
|
||||
| [YugabyteDB](/docs/6.0.0/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [SQL Server](/user-docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
|
||||
| [TDengine](/user-docs/6.0.0/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
|
||||
| [Teradata](/user-docs/6.0.0/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
|
||||
| [TimescaleDB](/user-docs/6.0.0/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
|
||||
| [Trino](/user-docs/6.0.0/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
|
||||
| [Vertica](/user-docs/6.0.0/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [YDB](/user-docs/6.0.0/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
|
||||
| [YugabyteDB](/user-docs/6.0.0/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
|
||||
---
@@ -109,7 +109,7 @@ The connector library installation process is the same for all additional librar

#### 1. Determine the driver you need

Consult the [list of database drivers](/docs/6.0.0/configuration/databases)
Consult the [list of database drivers](/user-docs/6.0.0/configuration/databases)
and find the PyPI package needed to connect to your database. In this example, we're connecting
to a MySQL database, so we'll need the `mysqlclient` connector library.

@@ -165,11 +165,11 @@ to your database via the Superset web UI.

As an admin user, go to Settings -> Data: Database Connections and click the +DATABASE button.
From there, follow the steps on the
[Using Database Connection UI page](/docs/6.0.0/configuration/databases#connecting-through-the-ui).
[Using Database Connection UI page](/user-docs/6.0.0/configuration/databases#connecting-through-the-ui).

Consult the page for your specific database type in the Superset documentation to determine
the connection string and any other parameters you need to input. For instance,
on the [MySQL page](/docs/6.0.0/configuration/databases#mysql), we see that the connection string
on the [MySQL page](/user-docs/6.0.0/configuration/databases#mysql), we see that the connection string
to a local MySQL database differs depending on whether the setup is running on Linux or Mac.

Click the “Test Connection” button, which should result in a popup message saying,

@@ -407,7 +407,7 @@ this:

crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
```

Follow the steps [here](/docs/6.0.0/configuration/databases#installing-database-drivers)
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-database-drivers)
to install the CrateDB connector package when setting up Superset locally using
Docker Compose.
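
Before adding the database through the Superset UI, it can be worth confirming that the driver and connection string work on their own. The following is a minimal sketch using plain SQLAlchemy, assuming the `sqlalchemy-cratedb` package is installed; the hostname and credentials are placeholders:

```python
# Hypothetical sanity check: verify the CrateDB dialect loads and the cluster
# is reachable before configuring the connection in Superset.
from sqlalchemy import create_engine, text

engine = create_engine(
    "crate://my_user:my_password@my-cluster.cratedb.net:4200/?ssl=true"
)
with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())  # expect: 1
```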

@@ -782,7 +782,7 @@ The recommended connector library for BigQuery is

##### Install BigQuery Driver

Follow the steps [here](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
install new database drivers when setting up Superset locally via docker compose.

```bash

@@ -1177,7 +1177,7 @@ risingwave://root@{hostname}:{port}/{database}?sslmode=disable

##### Install Snowflake Driver

Follow the steps [here](/docs/6.0.0/configuration/databases#installing-database-drivers) about how to
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-database-drivers) about how to
install new database drivers when setting up Superset locally via docker compose.

```bash

@@ -51,7 +51,7 @@ Restart Superset for this configuration change to take effect.

#### Making a Dashboard Public

1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/docs/6.0.0/configuration/feature-flags) to `superset_config.py`
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) to `superset_config.py`
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)

#### Embedding a Public Dashboard

@@ -10,7 +10,7 @@ version: 1

## Jinja Templates

SQL Lab and Explore support [Jinja templating](https://jinja.palletsprojects.com/en/2.11.x/) in queries.
To enable templating, the `ENABLE_TEMPLATE_PROCESSING` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) needs to be enabled in
To enable templating, the `ENABLE_TEMPLATE_PROCESSING` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) needs to be enabled in
`superset_config.py`. When templating is enabled, python code can be embedded in virtual datasets and
in Custom SQL in the filter and metric controls in Explore. By default, the following variables are
made available in the Jinja context:
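
Enabling the flag itself is a one-line change. A minimal sketch of `superset_config.py`, assuming the default configuration layout; the sample Jinja expression in the comment is only an illustration:

```python
# superset_config.py (sketch): turn on Jinja templating for SQL Lab and Explore.
FEATURE_FLAGS = {
    "ENABLE_TEMPLATE_PROCESSING": True,
}

# With the flag enabled, a virtual dataset or Custom SQL control can use
# expressions such as:
#   SELECT * FROM orders WHERE user_id = {{ current_user_id() }}
```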

@@ -20,7 +20,7 @@ To help make the problem somewhat tractable—given that Apache Superset has no

To strive for data consistency (regardless of the timezone of the client) the Apache Superset backend tries to ensure that any timestamp sent to the client has an explicit (or semi-explicit as in the case with [Epoch time](https://en.wikipedia.org/wiki/Unix_time) which is always in reference to UTC) timezone encoded within.

The challenge however lies with the slew of [database engines](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects), relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
The challenge however lies with the slew of [database engines](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects), relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
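
To illustrate the underlying issue with a standalone pandas snippet (a sketch, not Superset's actual serialization path): a naive timestamp carries no offset when serialized, while localizing it first makes the timezone explicit.

```python
import pandas as pd

naive = pd.Timestamp("2024-01-01 12:00:00")  # what many DB-API drivers return
print(naive.isoformat())                     # 2024-01-01T12:00:00 (no offset)

aware = naive.tz_localize("UTC")             # make the server timezone explicit
print(aware.isoformat())                     # 2024-01-01T12:00:00+00:00
```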

For example the following is a comparison of MySQL and Presto,

@@ -77,7 +77,7 @@ Look through the GitHub issues. Issues tagged with

Superset could always use better documentation,
whether as part of the official Superset docs,
in docstrings, `docs/*.rst` or even on the web as blog posts or
articles. See [Documentation](/docs/6.0.0/contributing/howtos#contributing-to-documentation) for more details.
articles. See [Documentation](/user-docs/6.0.0/contributing/howtos#contributing-to-documentation) for more details.

### Add Translations

@@ -599,7 +599,7 @@ export enum FeatureFlag {

those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.

The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/6.0.0/configuration/feature-flags) documentation.
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) documentation.
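
The merge behaves like a plain dictionary override in which keys from `superset_config.py` win, as a quick sketch of the semantics shows:

```python
DEFAULT_FEATURE_FLAGS = {"FOO": True, "BAR": False}  # superset/config.py
FEATURE_FLAGS = {"BAR": True, "BAZ": True}           # superset_config.py

combined = {**DEFAULT_FEATURE_FLAGS, **FEATURE_FLAGS}
print(combined)  # {'FOO': True, 'BAR': True, 'BAZ': True}
```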

## Git Hooks

@@ -614,7 +614,7 @@ A series of checks will now run when you make a git commit.

## Linting

See [how tos](/docs/6.0.0/contributing/howtos#linting)
See [how tos](/user-docs/6.0.0/contributing/howtos#linting)

## GitHub Actions and `act`

@@ -57,7 +57,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P

in `requirements.txt` pinned to a specific version which ensures that the application
build is deterministic.
- For TypeScript/JavaScript, include new libraries in `package.json`
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](/docs/6.0.0/contributing/howtos#testing) for how to run tests.
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](/user-docs/6.0.0/contributing/howtos#testing) for how to run tests.
- **Documentation:** If the pull request adds functionality, the docs should be updated as part of the same PR.
- **CI:** Reviewers will not review the code until all CI tests have passed. Sometimes there can be flaky tests. You can close and reopen the PR to re-run the CI tests. Please report if the issue persists. After the CI fix has been deployed to `master`, please rebase your PR.
- **Code coverage:** Please ensure that code coverage does not decrease.

@@ -51,11 +51,11 @@ multiple tables as long as your database account has access to the tables.

## How do I create my own visualization?

We recommend reading the instructions in
[Creating Visualization Plugins](/docs/6.0.0/contributing/howtos#creating-visualization-plugins).
[Creating Visualization Plugins](/user-docs/6.0.0/contributing/howtos#creating-visualization-plugins).

## Can I upload and visualize CSV data?

Absolutely! Read the instructions [here](/docs/using-superset/exploring-data) to learn
Absolutely! Read the instructions [here](/user-docs/using-superset/exploring-data) to learn
how to enable and use CSV upload.

## Why are my queries timing out?

@@ -142,7 +142,7 @@ SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db?check_same_thread
```

You can read more about customizing Superset using the configuration file
[here](/docs/6.0.0/configuration/configuring-superset).
[here](/user-docs/6.0.0/configuration/configuring-superset).
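
For production deployments the metadata store is typically pointed at PostgreSQL or MySQL instead of SQLite. A hedged sketch of the relevant `superset_config.py` line, with placeholder host and credentials:

```python
# superset_config.py (sketch): metadata database location.
SQLALCHEMY_DATABASE_URI = (
    "postgresql+psycopg2://superset:superset@db-host:5432/superset_metadata"
)
```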

## What if the table schema changed?

@@ -157,7 +157,7 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s

To clarify, the database backend is an OLTP database used by Superset to store its internal
information like your list of users and dashboard definitions. While Superset supports a
[variety of databases as data _sources_](/docs/6.0.0/configuration/databases#installing-database-drivers),
[variety of databases as data _sources_](/user-docs/6.0.0/configuration/databases#installing-database-drivers),
only a few database engines are supported for use as the OLTP backend / metadata store.

Superset is tested using MySQL, PostgreSQL, and SQLite backends. It’s recommended you install

@@ -190,7 +190,7 @@ second etc). Example:

## Does Superset work with [insert database engine here]?

The [Connecting to Databases section](/docs/6.0.0/configuration/databases) provides the best
The [Connecting to Databases section](/user-docs/6.0.0/configuration/databases) provides the best
overview for supported databases. Database engines not listed on that page may work too. We rely on
the community to contribute to this knowledge base.

@@ -226,7 +226,7 @@ are typical in basic SQL:

## Does Superset offer a public API?

Yes, a public REST API, and the surface of that API is expanding steadily. You can read more about this API and
interact with it using Swagger [here](/docs/api).
interact with it using Swagger [here](/developer-docs/api).
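
As a rough sketch of how a client might use that API (the endpoints follow the documented `/api/v1` surface, but the host, credentials, and fields below are placeholders):

```python
import requests

BASE = "http://localhost:8088"

# Exchange credentials for a JWT access token, then list dashboards.
login = requests.post(
    f"{BASE}/api/v1/security/login",
    json={"username": "admin", "password": "admin", "provider": "db", "refresh": True},
)
token = login.json()["access_token"]

dashboards = requests.get(
    f"{BASE}/api/v1/dashboard/",
    headers={"Authorization": f"Bearer {token}"},
)
print(dashboards.json()["count"])
```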

Some of the
original vision for the collection of endpoints under **/api/v1** was originally specified in

@@ -266,7 +266,7 @@ Superset uses [Scarf](https://about.scarf.sh/) by default to collect basic telem

We use the [Scarf Gateway](https://docs.scarf.sh/gateway/) to sit in front of container registries, the [scarf-js](https://about.scarf.sh/package-sdks) package to track `npm` installations, and a Scarf pixel to gather anonymous analytics on Superset page views.
Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track) and [here](https://docs.scarf.sh/package-analytics/#as-a-user-of-a-package-using-scarf-js-how-can-i-opt-out-of-analytics).
Superset maintainers can also opt out of telemetry data collection by setting the `SCARF_ANALYTICS` environment variable to `false` in the Superset container (or anywhere Superset/webpack are run).
Additional opt-out instructions for Docker users are available on the [Docker Installation](/docs/6.0.0/installation/docker-compose) page.
Additional opt-out instructions for Docker users are available on the [Docker Installation](/user-docs/6.0.0/installation/docker-compose) page.

## Does Superset have an archive panel or trash bin from which a user can recover deleted assets?

@@ -24,10 +24,10 @@ A Superset installation is made up of these components:

The optional components above are necessary to enable these features:

- [Alerts and Reports](/docs/6.0.0/configuration/alerts-reports)
- [Caching](/docs/6.0.0/configuration/cache)
- [Async Queries](/docs/6.0.0/configuration/async-queries-celery/)
- [Dashboard Thumbnails](/docs/6.0.0/configuration/cache/#caching-thumbnails)
- [Alerts and Reports](/user-docs/6.0.0/configuration/alerts-reports)
- [Caching](/user-docs/6.0.0/configuration/cache)
- [Async Queries](/user-docs/6.0.0/configuration/async-queries-celery/)
- [Dashboard Thumbnails](/user-docs/6.0.0/configuration/cache/#caching-thumbnails)

If you install with Kubernetes or Docker Compose, all of these components will be created.

@@ -59,7 +59,7 @@ The caching layer serves two main functions:

- Store the results of queries to your data warehouse so that when a chart is loaded twice, it pulls from the cache the second time, speeding up the application and reducing load on your data warehouse.
- Act as a message broker for the worker, enabling the Alerts & Reports, async queries, and thumbnail caching features.

Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/docs/6.0.0/configuration/cache/) for more.
Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/user-docs/6.0.0/configuration/cache/) for more.
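
A hedged sketch of what a Redis-backed cache looks like in `superset_config.py`, using Flask-Caching style keys; the Redis URL is a placeholder:

```python
# superset_config.py (sketch): metadata and chart-data caches backed by Redis.
CACHE_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": 300,
    "CACHE_KEY_PREFIX": "superset_",
    "CACHE_REDIS_URL": "redis://localhost:6379/0",
}
DATA_CACHE_CONFIG = CACHE_CONFIG
```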

### Worker and Beat

@@ -67,6 +67,6 @@ This is one or more workers who execute tasks like run async queries or take sna

## Other components

Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/docs/6.0.0/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.
Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/user-docs/6.0.0/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.

Superset won't even start without certain configuration settings established, so it's essential to review that page.

@@ -21,7 +21,7 @@ with our [installing on k8s](https://superset.apache.org/docs/installation/runni

documentation.
:::

As mentioned in our [quickstart guide](/docs/quickstart), the fastest way to try
As mentioned in our [quickstart guide](/user-docs/quickstart), the fastest way to try
Superset locally is using Docker Compose on a Linux or Mac OSX
computer. Superset does not have official support for Windows. It's also the easiest
way to launch a fully functioning **development environment** quickly.

@@ -9,11 +9,11 @@ import useBaseUrl from "@docusaurus/useBaseUrl";

# Installation Methods

How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/docs/6.0.0/installation/architecture page to understand Superset's different components.
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/user-docs/6.0.0/installation/architecture) page to understand Superset's different components.

The fundamental trade-off is between you needing to do more of the detail work yourself vs. using a more complex deployment route that handles those details.

## [Docker Compose](/docs/6.0.0/installation/docker-compose
## [Docker Compose](/user-docs/6.0.0/installation/docker-compose)

**Summary:** This takes advantage of containerization while remaining simpler than Kubernetes. This is the best way to try out Superset; it's also useful for developing & contributing back to Superset.

@@ -27,9 +27,9 @@ You will need to back up your metadata DB. That could mean backing up the servic

You will also need to extend the Superset docker image. The default `lean` images do not contain drivers needed to access your metadata database (Postgres or MySQL), nor to access your data warehouse, nor the headless browser needed for Alerts & Reports. You could run a `-dev` image while demoing Superset, which has some of this, but you'll still need to install the driver for your data warehouse. The `-dev` images run as root, which is not recommended for production.

Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs. See [Building your own production Docker image](/docs/6.0.0/installation/docker-builds/#building-your-own-production-docker-image).
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs. See [Building your own production Docker image](/user-docs/6.0.0/installation/docker-builds/#building-your-own-production-docker-image).

## [Kubernetes (K8s)](/docs/6.0.0/installation/kubernetes
## [Kubernetes (K8s)](/user-docs/6.0.0/installation/kubernetes)

**Summary:** This is the best-practice way to deploy a production instance of Superset, but has the steepest skill requirement - someone who knows Kubernetes.

@@ -41,7 +41,7 @@ A K8s deployment can scale up and down based on usage and deploy rolling updates

You will need to build your own Docker image, and back up your metadata DB, both as described in Docker Compose above. You'll also need to customize your Helm chart values and deploy and maintain your Kubernetes cluster.

## [PyPI (Python)](/docs/6.0.0/installation/pypi
## [PyPI (Python)](/user-docs/6.0.0/installation/pypi)

**Summary:** This is the only method that requires no knowledge of containers. It requires the most hands-on work to deploy, connect, and maintain each component.
@@ -149,7 +149,7 @@ For production clusters it's recommended to build own image with this step done

Superset requires a Python DB-API database driver and a SQLAlchemy
dialect to be installed for each datastore you want to connect to.

See [Install Database Drivers](/docs/6.0.0/configuration/databases) for more information.
See [Install Database Drivers](/user-docs/6.0.0/configuration/databases) for more information.
It is recommended that you refer to versions listed in
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
instead of hard-coding them in your bootstrap script, as seen below.

@@ -310,7 +310,7 @@ configOverrides:

### Enable Alerts and Reports

For this, as per the [Alerts and Reports doc](/docs/6.0.0/configuration/alerts-reports), you will need to:
For this, as per the [Alerts and Reports doc](/user-docs/6.0.0/configuration/alerts-reports), you will need to:

#### Install a supported webdriver in the Celery worker

@@ -172,7 +172,7 @@ how to set up a development environment.

## Resources

- [Superset "In the Wild"](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md) - open a PR to add your org to the list!
- [Feature Flags](/docs/6.0.0/configuration/feature-flags) - the status of Superset's Feature Flags.
- [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) - the status of Superset's Feature Flags.
- [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
- [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
- [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.

@@ -15,7 +15,7 @@ Although we recommend using `Docker Compose` for a quick start in a sandbox-type

environment and for other development-type use cases, **we
do not recommend this setup for production**. For this purpose please
refer to our
[Installing on Kubernetes](/docs/6.0.0/installation/kubernetes/)
[Installing on Kubernetes](/user-docs/6.0.0/installation/kubernetes/)
page.
:::

@@ -73,10 +73,10 @@ processes by running Docker Compose `stop` command. By doing so, you can avoid d

From this point on, you can head on to:

- [Create your first Dashboard](/docs/6.0.0/using-superset/creating-your-first-dashboard)
- [Connect to a Database](/docs/6.0.0/configuration/databases)
- [Using Docker Compose](/docs/6.0.0/installation/docker-compose)
- [Configure Superset](/docs/6.0.0/configuration/configuring-superset/)
- [Installing on Kubernetes](/docs/6.0.0/installation/kubernetes/)
- [Create your first Dashboard](/user-docs/6.0.0/using-superset/creating-your-first-dashboard)
- [Connect to a Database](/user-docs/6.0.0/configuration/databases)
- [Using Docker Compose](/user-docs/6.0.0/installation/docker-compose)
- [Configure Superset](/user-docs/6.0.0/configuration/configuring-superset/)
- [Installing on Kubernetes](/user-docs/6.0.0/installation/kubernetes/)

Or just explore our [Documentation](https://superset.apache.org/docs/intro)!

@@ -31,7 +31,7 @@ your existing SQL-speaking database or data store.

First things first, we need to add the connection credentials to your database to be able
to query and visualize data from it. If you're using Superset locally via
[Docker compose](/docs/6.0.0/installation/docker-compose), you can
[Docker compose](/user-docs/6.0.0/installation/docker-compose), you can
skip this step because a Postgres database, named **examples**, is included and
pre-configured in Superset for you.

@@ -188,7 +188,7 @@ Access to dashboards is managed via owners (users that have edit permissions to

Access for non-owner users can be managed in two different ways. The dashboard needs to be published to be visible to other users.

1. Dataset permissions - if you grant the relevant role permissions to datasets, it automatically grants implicit access to all dashboards that use those permitted datasets.
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
   - Granting a role access to a dashboard will bypass dataset level checks. Having dashboard access implicitly grants read access to all the featured charts in the dashboard, and thereby also all the associated datasets.
   - If no roles are specified for a dashboard, regular **Dataset permissions** will apply.
@@ -20,7 +20,7 @@
|
||||
import * as reduxHooks from 'react-redux';
|
||||
import { Provider } from 'react-redux';
|
||||
import { createStore } from 'redux';
|
||||
import { render, fireEvent, waitFor } from 'spec/helpers/testing-library';
|
||||
import { render, waitFor } from 'spec/helpers/testing-library';
|
||||
import { ErrorLevel, ErrorSource, ErrorTypeEnum } from '@superset-ui/core';
|
||||
import { reRunQuery } from 'src/SqlLab/actions/sqlLab';
|
||||
import { triggerQuery } from 'src/components/Chart/chartAction';
|
||||
@@ -58,25 +58,33 @@ jest.mock('src/dashboard/actions/dashboardState', () => ({
|
||||
const mockDispatch = jest.fn();
|
||||
jest.spyOn(reduxHooks, 'useDispatch').mockReturnValue(mockDispatch);
|
||||
|
||||
// Mock global window functions
|
||||
const mockOpen = jest.spyOn(window, 'open').mockImplementation(() => null);
|
||||
const mockAddEventListener = jest.spyOn(window, 'addEventListener');
|
||||
const mockRemoveEventListener = jest.spyOn(window, 'removeEventListener');
|
||||
|
||||
// Mock window.postMessage
|
||||
const originalPostMessage = window.postMessage;
|
||||
// Capture the channel instance created by the component so tests can drive its
|
||||
// onmessage handler and assert it gets closed on unmount.
|
||||
let capturedChannel: {
|
||||
onmessage: ((event: any) => void) | null;
|
||||
close: jest.Mock;
|
||||
};
|
||||
const channelCloseMock = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
window.postMessage = jest.fn();
|
||||
jest.clearAllMocks();
|
||||
capturedChannel = { onmessage: null, close: channelCloseMock };
|
||||
(global as any).BroadcastChannel = jest
|
||||
.fn()
|
||||
.mockImplementation(() => capturedChannel);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
window.postMessage = originalPostMessage;
|
||||
});
|
||||
function simulateBroadcastMessage(data: any) {
|
||||
capturedChannel.onmessage?.({ data });
|
||||
}
|
||||
|
||||
function simulateMessageEvent(data: any, origin: string) {
|
||||
const messageEvent = new MessageEvent('message', { data, origin });
|
||||
window.dispatchEvent(messageEvent);
|
||||
function simulateStorageMessage(data: any) {
|
||||
window.dispatchEvent(
|
||||
new StorageEvent('storage', {
|
||||
key: 'oauth2_auth_complete',
|
||||
newValue: JSON.stringify(data),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const defaultProps = {
|
||||
@@ -108,27 +116,36 @@ describe('OAuth2RedirectMessage Component', () => {
|
||||
expect(getByText(/provide authorization/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('opens a new window with the correct URL when the link is clicked', () => {
|
||||
test('renders the authorization link pointing at the OAuth2 URL', () => {
|
||||
const { getByText } = render(setup());
|
||||
|
||||
const linkElement = getByText(/provide authorization/i);
|
||||
fireEvent.click(linkElement);
|
||||
|
||||
expect(mockOpen).toHaveBeenCalledWith('https://example.com', '_blank');
|
||||
const linkElement = getByText(/provide authorization/i).closest('a');
|
||||
expect(linkElement).toHaveAttribute('href', 'https://example.com');
|
||||
expect(linkElement).toHaveAttribute('target', '_blank');
|
||||
});
|
||||
|
||||
test('cleans up the message event listener on unmount', () => {
|
||||
test('closes the BroadcastChannel on unmount', () => {
|
||||
const { unmount } = render(setup());
|
||||
|
||||
expect(mockAddEventListener).toHaveBeenCalled();
|
||||
expect((global as any).BroadcastChannel).toHaveBeenCalledWith('oauth');
|
||||
unmount();
|
||||
expect(mockRemoveEventListener).toHaveBeenCalled();
|
||||
expect(channelCloseMock).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('dispatches reRunQuery action when a message with correct tab ID is received for SQL Lab', async () => {
|
||||
render(setup());
|
||||
|
||||
simulateMessageEvent({ tabId: 'tabId' }, 'https://redirect.example.com');
|
||||
simulateBroadcastMessage({ tabId: 'tabId' });
|
||||
|
||||
await waitFor(() => {
|
||||
expect(reRunQuery).toHaveBeenCalledWith({ sql: 'SELECT * FROM table' });
|
||||
});
|
||||
});
|
||||
|
||||
test('dispatches reRunQuery action when storage event has matching tab ID', async () => {
|
||||
render(setup());
|
||||
|
||||
simulateStorageMessage({ tabId: 'tabId' });
|
||||
|
||||
await waitFor(() => {
|
||||
expect(reRunQuery).toHaveBeenCalledWith({ sql: 'SELECT * FROM table' });
|
||||
@@ -138,7 +155,7 @@ describe('OAuth2RedirectMessage Component', () => {
|
||||
test('dispatches triggerQuery action for explore source upon receiving a correct message', async () => {
|
||||
render(setup({ source: 'explore' }));
|
||||
|
||||
simulateMessageEvent({ tabId: 'tabId' }, 'https://redirect.example.com');
|
||||
simulateBroadcastMessage({ tabId: 'tabId' });
|
||||
|
||||
await waitFor(() => {
|
||||
expect(triggerQuery).toHaveBeenCalledWith(true, 123);
|
||||
@@ -148,11 +165,19 @@ describe('OAuth2RedirectMessage Component', () => {
|
||||
test('dispatches onRefresh action for dashboard source upon receiving a correct message', async () => {
|
||||
render(setup({ source: 'dashboard' }));
|
||||
|
||||
simulateMessageEvent({ tabId: 'tabId' }, 'https://redirect.example.com');
|
||||
simulateBroadcastMessage({ tabId: 'tabId' });
|
||||
|
||||
await waitFor(() => {
|
||||
// Chart IDs are converted to numbers by the component via chartList.map(Number)
|
||||
expect(onRefresh).toHaveBeenCalledWith([1, 2], true, 0, 'dashboard-id');
|
||||
});
|
||||
});
|
||||
|
||||
test('ignores messages with a mismatched tab ID', () => {
|
||||
render(setup());
|
||||
|
||||
simulateBroadcastMessage({ tabId: 'someOtherTab' });
|
||||
|
||||
expect(reRunQuery).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { useEffect, useRef, MouseEvent } from 'react';
|
||||
import { useEffect } from 'react';
|
||||
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { QueryEditor, SqlLabRootState } from 'src/SqlLab/types';
|
||||
@@ -31,10 +31,12 @@ import { QueryResponse } from '@superset-ui/core';
|
||||
import type { ErrorMessageComponentProps } from './types';
|
||||
import { ErrorAlert } from './ErrorAlert';
|
||||
|
||||
const OAUTH_CHANNEL_NAME = 'oauth';
|
||||
const OAUTH_STORAGE_EVENT_KEY = 'oauth2_auth_complete';
|
||||
|
||||
interface OAuth2RedirectExtra {
|
||||
url: string;
|
||||
tab_id: string;
|
||||
redirect_uri: string;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -52,29 +54,20 @@ interface OAuth2RedirectExtra {
|
||||
* be used in subsequent connections. If a refresh token is also present in the response,
|
||||
* it will also be stored.
|
||||
*
|
||||
* After the token has been stored, the opened tab will send a message to the original
|
||||
* tab and close itself. This component, running on the original tab, will listen for
|
||||
* message events, and once it receives the success message from the opened tab it will
|
||||
* re-run the query for the user, be it in SQL Lab, Explore, or a dashboard. In order to
|
||||
* communicate securely, both tabs share a "tab ID", which is a UUID that is generated
|
||||
* by the backend and sent from the opened tab to the original tab. For extra security,
|
||||
* we also check that the source of the message is the opened tab via a ref.
|
||||
* After the token has been stored, the opened tab will broadcast a message to the
|
||||
* original tab and close itself. This component, running on the original tab, listens
|
||||
* on a same-origin BroadcastChannel and re-runs the query for the user once it
|
||||
* receives the success message — be it in SQL Lab, Explore, or a dashboard. Both tabs
|
||||
* share a "tab ID" (a UUID generated by the backend) which is echoed back through the
|
||||
* channel so the original tab only reacts to its own OAuth2 flow.
|
||||
*/
|
||||
export function OAuth2RedirectMessage({
|
||||
error,
|
||||
source,
|
||||
closable,
|
||||
}: ErrorMessageComponentProps<OAuth2RedirectExtra>) {
|
||||
const oAuthTab = useRef<Window | null>(null);
|
||||
const { extra, level } = error;
|
||||
|
||||
// store a reference to the OAuth2 browser tab, so we can check that the success
|
||||
// message is coming from it
|
||||
const handleOAuthClick = (event: MouseEvent<HTMLAnchorElement>) => {
|
||||
event.preventDefault();
|
||||
oAuthTab.current = window.open(extra.url, '_blank');
|
||||
};
|
||||
|
||||
// state needed for re-running the SQL Lab query
|
||||
const queries = useSelector<
|
||||
SqlLabRootState,
|
||||
@@ -107,43 +100,50 @@ export function OAuth2RedirectMessage({
|
||||
const dispatch = useDispatch();
|
||||
|
||||
useEffect(() => {
|
||||
/* Listen for messages from the OAuth2 tab.
|
||||
*
|
||||
* After OAuth2 is successful the opened tab will send a message before
|
||||
* closing itself. Once we receive the message we can retrigger the
|
||||
* original query in SQL Lab, explore, or in a dashboard.
|
||||
*/
|
||||
const redirectUrl = new URL(extra.redirect_uri);
|
||||
const handleMessage = (event: MessageEvent) => {
|
||||
if (
|
||||
event.origin === redirectUrl.origin &&
|
||||
event.data.tabId === extra.tab_id &&
|
||||
event.source === oAuthTab.current
|
||||
) {
|
||||
if (source === 'sqllab' && query) {
|
||||
dispatch(reRunQuery(query));
|
||||
} else if (source === 'explore' && chartId) {
|
||||
dispatch(triggerQuery(true, chartId));
|
||||
} else if (source === 'dashboard') {
|
||||
dispatch(onRefresh(chartList.map(Number), true, 0, dashboardId));
|
||||
}
|
||||
const handleOAuthComplete = (tabId?: string) => {
|
||||
if (tabId !== extra.tab_id) {
|
||||
return;
|
||||
}
|
||||
if (source === 'sqllab' && query) {
|
||||
dispatch(reRunQuery(query));
|
||||
} else if (source === 'explore' && chartId) {
|
||||
dispatch(triggerQuery(true, chartId));
|
||||
} else if (source === 'dashboard') {
|
||||
dispatch(onRefresh(chartList.map(Number), true, 0, dashboardId));
|
||||
}
|
||||
};
|
||||
window.addEventListener('message', handleMessage);
|
||||
|
||||
const channel =
|
||||
typeof BroadcastChannel !== 'undefined'
|
||||
? new BroadcastChannel(OAUTH_CHANNEL_NAME)
|
||||
: null;
|
||||
|
||||
if (channel) {
|
||||
channel.onmessage = event => {
|
||||
handleOAuthComplete(event.data?.tabId);
|
||||
};
|
||||
}
|
||||
|
||||
const handleStorage = (event: StorageEvent) => {
|
||||
if (event.key !== OAUTH_STORAGE_EVENT_KEY || !event.newValue) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const message = JSON.parse(event.newValue) as { tabId?: string };
|
||||
handleOAuthComplete(message.tabId);
|
||||
} catch {
|
||||
// ignore malformed storage payloads
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('storage', handleStorage);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('message', handleMessage);
|
||||
window.removeEventListener('storage', handleStorage);
|
||||
channel?.close();
|
||||
};
|
||||
}, [
|
||||
source,
|
||||
extra.redirect_uri,
|
||||
extra.tab_id,
|
||||
dispatch,
|
||||
query,
|
||||
chartId,
|
||||
chartList,
|
||||
dashboardId,
|
||||
]);
|
||||
}, [source, extra.tab_id, dispatch, query, chartId, chartList, dashboardId]);
|
||||
|
||||
const body = (
|
||||
<p>
|
||||
@@ -155,12 +155,7 @@ export function OAuth2RedirectMessage({
|
||||
const subtitle = (
|
||||
<>
|
||||
{t('You need to')}{' '}
|
||||
<a
|
||||
href={extra.url}
|
||||
onClick={handleOAuthClick}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
>
|
||||
<a href={extra.url} target="_blank" rel="noreferrer">
|
||||
{t('provide authorization')}
|
||||
</a>{' '}
|
||||
{t('in order to run this operation.')}
|
||||
|
||||
@@ -146,6 +146,27 @@ class ExportDashboardsCommand(ExportModelsCommand):
|
||||
if dataset:
|
||||
target["datasetUuid"] = str(dataset.uuid)
|
||||
|
||||
# Replace display control dataset references with uuid.
|
||||
# datasetId is intentionally preserved alongside datasetUuid so that
|
||||
# bundles remain importable by older versions that do not yet understand
|
||||
# datasetUuid for display-control targets.
|
||||
for customization in (
|
||||
payload.get("metadata", {}).get("chart_customization_config") or []
|
||||
):
|
||||
for target in customization.get("targets") or []:
|
||||
dataset_id = target.get("datasetId")
|
||||
if dataset_id is not None:
|
||||
dataset = DatasetDAO.find_by_id(dataset_id)
|
||||
if dataset:
|
||||
target["datasetUuid"] = str(dataset.uuid)
|
||||
else:
|
||||
logger.warning(
|
||||
"Dashboard '%s': display control target references "
|
||||
"missing dataset %s; datasetUuid will not be set",
|
||||
model.dashboard_title,
|
||||
dataset_id,
|
||||
)
|
||||
|
||||
# the mapping between dashboard -> charts is inferred from the position
|
||||
# attribute, so if it's not present we need to add a default config
|
||||
if not payload.get("position"):
|
||||
@@ -230,3 +251,14 @@ class ExportDashboardsCommand(ExportModelsCommand):
|
||||
dataset = DatasetDAO.find_by_id(dataset_id)
|
||||
if dataset:
|
||||
yield from ExportDatasetsCommand([dataset_id]).run()
|
||||
|
||||
# Export datasets referenced by display controls
|
||||
for customization in (
|
||||
payload.get("metadata", {}).get("chart_customization_config") or []
|
||||
):
|
||||
for target in customization.get("targets") or []:
|
||||
dataset_id = target.get("datasetId")
|
||||
if dataset_id is not None:
|
||||
dataset = DatasetDAO.find_by_id(dataset_id)
|
||||
if dataset:
|
||||
yield from ExportDatasetsCommand([dataset_id]).run()
|
||||
|
||||
@@ -42,6 +42,11 @@ def find_native_filter_datasets(metadata: dict[str, Any]) -> set[str]:
|
||||
dataset_uuid = target.get("datasetUuid")
|
||||
if dataset_uuid:
|
||||
uuids.add(dataset_uuid)
|
||||
for customization in metadata.get("chart_customization_config") or []:
|
||||
for target in customization.get("targets") or []:
|
||||
dataset_uuid = target.get("datasetUuid")
|
||||
if dataset_uuid:
|
||||
uuids.add(dataset_uuid)
|
||||
return uuids
|
||||
|
||||
|
||||
@@ -139,6 +144,28 @@ def update_id_refs( # pylint: disable=too-many-locals # noqa: C901
|
||||
native_filter["scope"]["excluded"] = [
|
||||
id_map[old_id] for old_id in scope_excluded if old_id in id_map
|
||||
]
|
||||
|
||||
# fix display control dataset references
|
||||
for customization in (
|
||||
fixed.get("metadata", {}).get("chart_customization_config") or []
|
||||
):
|
||||
for target in customization.get("targets") or []:
|
||||
dataset_uuid = target.pop("datasetUuid", None)
|
||||
if dataset_uuid:
|
||||
info = dataset_info.get(dataset_uuid)
|
||||
if info:
|
||||
target["datasetId"] = info["datasource_id"]
|
||||
else:
|
||||
# UUID present but unresolvable — remove stale integer ID
|
||||
# so the control fails visibly rather than binding to
|
||||
# whatever dataset happens to own that ID in this environment
|
||||
target.pop("datasetId", None)
|
||||
logger.warning(
|
||||
"Display control target references unknown dataset UUID %s; "
|
||||
"datasetId will not be restored",
|
||||
dataset_uuid,
|
||||
)
|
||||
|
||||
fixed = update_cross_filter_scoping(fixed, id_map)
|
||||
return fixed
|
||||
|
||||
|
||||
@@ -26,14 +26,23 @@ URL parameter extraction. Config mapping logic lives in chart_utils.py.
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any, TYPE_CHECKING
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from superset.constants import EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from superset.models.slice import Slice
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
QUERY_CONTEXT_EXTRA_FORM_DATA_OVERRIDE_KEYS = {
|
||||
"granularity",
|
||||
"time_grain",
|
||||
"time_grain_sqla",
|
||||
"time_range",
|
||||
}
|
||||
|
||||
|
||||
def find_chart_by_identifier(identifier: int | str) -> Slice | None:
|
||||
"""Find a chart by numeric ID or UUID string.
|
||||
@@ -69,6 +78,446 @@ def get_cached_form_data(form_data_key: str) -> str | None:
|
||||
return None
|
||||
|
||||
|
||||
def resolve_datasource_engine(datasource_id: Any, datasource_type: str) -> str:
|
||||
"""Return the datasource engine name, or ``"base"`` if it cannot be resolved."""
|
||||
if not isinstance(datasource_id, (int, str)):
|
||||
return "base"
|
||||
try:
|
||||
# avoid circular import
|
||||
from superset.daos.datasource import DatasourceDAO
|
||||
from superset.utils.core import DatasourceType
|
||||
|
||||
datasource = DatasourceDAO.get_datasource(
|
||||
datasource_type=DatasourceType(datasource_type),
|
||||
database_id_or_uuid=datasource_id,
|
||||
)
|
||||
return datasource.database.db_engine_spec.engine
|
||||
except Exception: # noqa: BLE001
|
||||
# Engine lookup is best-effort; fall back to generic filter normalization.
|
||||
logger.debug("Could not resolve engine for datasource %s", datasource_id)
|
||||
return "base"
|
||||
|
||||
|
||||
def prepare_form_data_for_query(
|
||||
form_data: dict[str, Any],
|
||||
datasource_id: Any,
|
||||
datasource_type: str,
|
||||
extra_form_data: dict[str, Any] | None = None,
|
||||
datasource_engine: str | None = None,
|
||||
) -> None:
|
||||
"""Normalize form_data filters before building a QueryObject payload.
|
||||
|
||||
Explore and legacy viz query construction merge dashboard/native filter payloads
|
||||
and split adhoc filters into the concrete ``filters``/``where``/``having``
|
||||
fields consumed by QueryObject. MCP tools that build query payloads directly
|
||||
must perform the same normalization before calling QueryContextFactory.
|
||||
|
||||
Mutates ``form_data`` in place.
|
||||
"""
|
||||
# avoid circular import
|
||||
from superset.utils.core import (
|
||||
convert_legacy_filters_into_adhoc,
|
||||
form_data_to_adhoc,
|
||||
merge_extra_filters,
|
||||
simple_filter_to_adhoc,
|
||||
split_adhoc_filters_into_base_filters,
|
||||
)
|
||||
|
||||
if isinstance(form_data.get("adhoc_filters"), list):
|
||||
adhoc_filters = [
|
||||
*(
|
||||
form_data_to_adhoc(form_data, clause)
|
||||
for clause in ("having", "where")
|
||||
if form_data.get(clause)
|
||||
),
|
||||
*(
|
||||
simple_filter_to_adhoc(filter_, "where")
|
||||
for filter_ in form_data.get("filters") or []
|
||||
if filter_ is not None
|
||||
),
|
||||
*form_data["adhoc_filters"],
|
||||
]
|
||||
form_data["adhoc_filters"] = adhoc_filters
|
||||
|
||||
if extra_form_data:
|
||||
form_data["extra_form_data"] = merge_extra_form_data(
|
||||
form_data.get("extra_form_data"),
|
||||
extra_form_data,
|
||||
)
|
||||
convert_legacy_filters_into_adhoc(form_data)
|
||||
merge_extra_filters(form_data)
|
||||
split_adhoc_filters_into_base_filters(
|
||||
form_data,
|
||||
datasource_engine or resolve_datasource_engine(datasource_id, datasource_type),
|
||||
)
|
||||
|
||||
|
||||
def merge_extra_form_data(
    existing: Any,
    incoming: dict[str, Any],
) -> dict[str, Any]:
    """Merge cached and request-level extra_form_data payloads."""
    merged: dict[str, Any] = dict(existing) if isinstance(existing, dict) else {}
    for key, value in incoming.items():
        current = merged.get(key)
        if isinstance(current, list) and isinstance(value, list):
            merged[key] = [*current, *value]
        elif isinstance(current, dict) and isinstance(value, dict):
            merged[key] = {**current, **value}
        else:
            merged[key] = value
    return merged
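The merge is shallow but type-aware: lists concatenate, dicts merge key-wise, and anything else is overridden by the incoming value. A quick illustration (field values are made up):

```python
cached = {"filters": [{"col": "region", "op": "==", "val": "EU"}], "granularity_sqla": "ds"}
incoming = {"filters": [{"col": "year", "op": ">=", "val": 2020}], "granularity_sqla": "order_date"}
merge_extra_form_data(cached, incoming)
# -> {"filters": [<EU filter>, <year filter>], "granularity_sqla": "order_date"}
```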
def apply_form_data_filters_to_query(
|
||||
query: dict[str, Any],
|
||||
form_data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Copy normalized form_data filter fields into a fresh query payload."""
|
||||
if filters := form_data.get("filters"):
|
||||
query["filters"] = filters
|
||||
else:
|
||||
query.setdefault("filters", [])
|
||||
|
||||
if time_range := form_data.get("time_range"):
|
||||
query["time_range"] = time_range
|
||||
if where := form_data.get("where"):
|
||||
query["where"] = where
|
||||
if having := form_data.get("having"):
|
||||
query["having"] = having
|
||||
|
||||
|
||||
def _join_sql_clause(existing_clause: str, additional_clause: str) -> str:
|
||||
"""AND two SQL filter clauses while preserving their original grouping."""
|
||||
return f"({existing_clause}) AND ({additional_clause})"
|
||||
|
||||
|
||||
def _is_temporal_override_filter(
|
||||
filter_: dict[str, Any],
|
||||
form_data: dict[str, Any],
|
||||
) -> bool:
|
||||
return (
|
||||
filter_.get("op") == "TEMPORAL_RANGE"
|
||||
and form_data.get("time_range") is not None
|
||||
and filter_.get("val") == form_data.get("time_range")
|
||||
and (
|
||||
form_data.get("granularity") is None
|
||||
or filter_.get("col") == form_data.get("granularity")
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def merge_form_data_filters_into_query(
|
||||
query: dict[str, Any],
|
||||
form_data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Merge normalized form_data filters into an existing query payload.
|
||||
|
||||
Saved query contexts can contain query-specific filter, where, or having
|
||||
fields. This helper adds normalized predicates while applying request-level
|
||||
extra_form_data overrides for temporal query fields.
|
||||
"""
|
||||
if filters := [
|
||||
filter_
|
||||
for filter_ in form_data.get("filters") or []
|
||||
if not _is_temporal_override_filter(filter_, form_data)
|
||||
]:
|
||||
query["filters"] = [
|
||||
*(query.get("filters") or []),
|
||||
*filters,
|
||||
]
|
||||
|
||||
for key in EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS.values():
|
||||
if (
|
||||
key in QUERY_CONTEXT_EXTRA_FORM_DATA_OVERRIDE_KEYS
|
||||
and key in form_data
|
||||
and form_data[key] is not None
|
||||
):
|
||||
query[key] = form_data[key]
|
||||
|
||||
for clause in ("where", "having"):
|
||||
if additional_clause := form_data.get(clause):
|
||||
if existing_clause := query.get(clause):
|
||||
query[clause] = _join_sql_clause(existing_clause, additional_clause)
|
||||
else:
|
||||
query[clause] = additional_clause
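Concretely, list filters are appended, redundant TEMPORAL_RANGE filters that merely restate ``time_range`` are dropped, and free-form clauses are ANDed with their original grouping preserved. An illustrative trace (the query and form_data values are made up):

```python
query = {"filters": [], "where": "region = 'EU'"}
normalized_fd = {
    "filters": [{"col": "year", "op": ">=", "val": 2020}],
    "where": "is_active = 1",
}
merge_form_data_filters_into_query(query, normalized_fd)
# query["filters"] -> [{"col": "year", "op": ">=", "val": 2020}]
# query["where"]   -> "(region = 'EU') AND (is_active = 1)"
```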
def merge_extra_form_data_filters_into_query(
|
||||
query: dict[str, Any],
|
||||
extra_form_data: dict[str, Any],
|
||||
datasource_id: Any,
|
||||
datasource_type: str,
|
||||
) -> None:
|
||||
"""Merge request extra_form_data predicates into an existing query payload."""
|
||||
extra_query_form_data: dict[str, Any] = {"adhoc_filters": []}
|
||||
prepare_form_data_for_query(
|
||||
extra_query_form_data,
|
||||
datasource_id,
|
||||
datasource_type,
|
||||
extra_form_data,
|
||||
)
|
||||
merge_form_data_filters_into_query(query, extra_query_form_data)
|
||||
|
||||
|
||||
def resolve_metrics(form_data: dict[str, Any], viz_type: str) -> list[Any]:
|
||||
"""Extract metrics from form_data, handling chart-type-specific fields."""
|
||||
if viz_type == "bubble":
|
||||
return [m for field in ("x", "y", "size") if (m := form_data.get(field))]
|
||||
|
||||
metrics = form_data.get("metrics", [])
|
||||
if not metrics and (metric := form_data.get("metric")):
|
||||
metrics = [metric]
|
||||
return metrics
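Two quick examples of the chart-type handling, using illustrative metric names:

```python
resolve_metrics({"x": "life_expectancy", "y": "gdp", "size": "population"}, "bubble")
# -> ["life_expectancy", "gdp", "population"]

resolve_metrics({"metric": "count"}, "big_number_total")
# -> ["count"]  (falls back to the singular "metric" field)
```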
def resolve_groupby(form_data: dict[str, Any]) -> list[Any]:
|
||||
"""Extract groupby columns from form_data with fallback aliases."""
|
||||
raw_columns = form_data.get("all_columns")
|
||||
if form_data.get("query_mode") == "raw" and isinstance(raw_columns, list):
|
||||
return list(raw_columns)
|
||||
|
||||
raw_groupby = form_data.get("groupby") or []
|
||||
if isinstance(raw_groupby, str):
|
||||
groupby: list[Any] = [raw_groupby]
|
||||
else:
|
||||
groupby = list(raw_groupby)
|
||||
|
||||
if groupby:
|
||||
return groupby
|
||||
|
||||
for field in ("entity", "series"):
|
||||
value = form_data.get(field)
|
||||
if isinstance(value, str) and value not in groupby:
|
||||
groupby.append(value)
|
||||
|
||||
form_columns = form_data.get("columns")
|
||||
if isinstance(form_columns, list):
|
||||
for col in form_columns:
|
||||
if isinstance(col, str) and col not in groupby:
|
||||
groupby.append(col)
|
||||
|
||||
if not groupby and isinstance(raw_columns, list):
|
||||
groupby.extend(raw_columns)
|
||||
|
||||
return groupby
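The fallback order matters: an explicit ``groupby`` wins, otherwise ``entity``/``series``/``columns`` are collected, and raw-mode tables short-circuit to ``all_columns``. For instance (column names are illustrative):

```python
resolve_groupby({"groupby": "country"})                            # -> ["country"] (scalar normalized)
resolve_groupby({"entity": "country", "series": "year"})           # -> ["country", "year"]
resolve_groupby({"query_mode": "raw", "all_columns": ["a", "b"]})  # -> ["a", "b"]
```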
def resolve_metrics_and_groupby(
|
||||
form_data: dict[str, Any],
|
||||
chart: Any | None = None,
|
||||
) -> tuple[list[Any], list[Any]]:
|
||||
"""Resolve metrics and groupby columns from form_data."""
|
||||
viz_type = form_data.get(
|
||||
"viz_type", getattr(chart, "viz_type", "") if chart else ""
|
||||
)
|
||||
singular_metric_no_groupby = (
|
||||
"big_number",
|
||||
"big_number_total",
|
||||
"pop_kpi",
|
||||
)
|
||||
if viz_type in singular_metric_no_groupby:
|
||||
metrics: list[Any] = [metric] if (metric := form_data.get("metric")) else []
|
||||
return metrics, []
|
||||
|
||||
return resolve_metrics(form_data, viz_type), resolve_groupby(form_data)
|
||||
|
||||
|
||||
def extract_x_axis_col(form_data: dict[str, Any]) -> str | None:
|
||||
"""Return the x_axis column name from form_data, or None if not set."""
|
||||
x_axis = form_data.get("x_axis")
|
||||
if isinstance(x_axis, str) and x_axis:
|
||||
return x_axis
|
||||
if isinstance(x_axis, dict):
|
||||
col_name = x_axis.get("column_name")
|
||||
return col_name if isinstance(col_name, str) and col_name else None
|
||||
return None
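Both storage shapes resolve to the same column name (example column name only):

```python
extract_x_axis_col({"x_axis": "order_date"})                   # -> "order_date"
extract_x_axis_col({"x_axis": {"column_name": "order_date"}})  # -> "order_date"
extract_x_axis_col({})                                         # -> None
```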
def _build_single_query_dict(
|
||||
form_data: dict[str, Any],
|
||||
columns: list[Any],
|
||||
metrics: list[Any],
|
||||
row_limit: int | None = None,
|
||||
order_desc: bool | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Build one query entry for QueryContextFactory from form_data fields."""
|
||||
qd: dict[str, Any] = {"columns": columns, "metrics": metrics}
|
||||
effective_row_limit = row_limit
|
||||
if effective_row_limit is None:
|
||||
effective_row_limit = form_data.get("row_limit")
|
||||
if effective_row_limit is not None:
|
||||
qd["row_limit"] = effective_row_limit
|
||||
if order_desc is not None:
|
||||
qd["order_desc"] = order_desc
|
||||
apply_form_data_filters_to_query(qd, form_data)
|
||||
return qd
|
||||
|
||||
|
||||
def _build_mixed_timeseries_secondary(
|
||||
form_data: dict[str, Any],
|
||||
x_axis_col: str | None,
|
||||
engine: str,
|
||||
row_limit: int | None = None,
|
||||
order_desc: bool | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Build the secondary query dict for the ``mixed_timeseries`` viz type."""
|
||||
# avoid circular import
|
||||
from superset.utils.core import split_adhoc_filters_into_base_filters
|
||||
|
||||
metrics_b: list[Any] = list(form_data.get("metrics_b") or [])
|
||||
raw_b = form_data.get("groupby_b") or []
|
||||
groupby_b: list[Any] = [raw_b] if isinstance(raw_b, str) else list(raw_b)
|
||||
if x_axis_col and x_axis_col not in groupby_b:
|
||||
groupby_b = [x_axis_col] + groupby_b
|
||||
|
||||
qd = _build_single_query_dict(
|
||||
form_data,
|
||||
groupby_b,
|
||||
metrics_b,
|
||||
row_limit=row_limit,
|
||||
order_desc=order_desc,
|
||||
)
|
||||
if time_range_b := form_data.get("time_range_b"):
|
||||
qd["time_range"] = time_range_b
|
||||
if row_limit is None and (row_limit_b := form_data.get("row_limit_b")) is not None:
|
||||
qd["row_limit"] = row_limit_b
|
||||
|
||||
if adhoc_filters_b := form_data.get("adhoc_filters_b"):
|
||||
secondary_fd: dict[str, Any] = {"adhoc_filters": adhoc_filters_b}
|
||||
split_adhoc_filters_into_base_filters(secondary_fd, engine)
|
||||
if secondary_filters := secondary_fd.get("filters"):
|
||||
qd["filters"] = secondary_filters
|
||||
else:
|
||||
qd.pop("filters", None)
|
||||
for clause in ("where", "having"):
|
||||
if secondary_clause := secondary_fd.get(clause):
|
||||
qd[clause] = secondary_clause
|
||||
else:
|
||||
qd.pop(clause, None)
|
||||
return qd
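A hedged trace of the secondary layer; the helper is private, so this is for illustration only and the field values are made up:

```python
fd = {
    "metrics": ["sum__sales"],
    "metrics_b": ["sum__profit"],
    "groupby_b": ["region"],
    "time_range_b": "Last month",
}
qd = _build_mixed_timeseries_secondary(fd, x_axis_col="order_date", engine="base")
# qd["metrics"]    -> ["sum__profit"]
# qd["columns"]    -> ["order_date", "region"]
# qd["time_range"] -> "Last month"
```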
def build_query_dicts_from_form_data(
|
||||
form_data: dict[str, Any],
|
||||
datasource_id: Any,
|
||||
datasource_type: str,
|
||||
chart: Any | None = None,
|
||||
extra_form_data: dict[str, Any] | None = None,
|
||||
row_limit: int | None = None,
|
||||
order_desc: bool | None = None,
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Build chart-type-aware query dicts from Explore form_data."""
|
||||
engine = resolve_datasource_engine(datasource_id, datasource_type)
|
||||
prepare_form_data_for_query(
|
||||
form_data,
|
||||
datasource_id,
|
||||
datasource_type,
|
||||
extra_form_data,
|
||||
datasource_engine=engine,
|
||||
)
|
||||
|
||||
metrics, groupby = resolve_metrics_and_groupby(form_data, chart)
|
||||
viz_type: str = (
|
||||
form_data.get("viz_type")
|
||||
or (getattr(chart, "viz_type", "") if chart else "")
|
||||
or ""
|
||||
)
|
||||
is_timeseries = (
|
||||
viz_type.startswith("echarts_timeseries") or viz_type == "mixed_timeseries"
|
||||
)
|
||||
|
||||
x_axis_col: str | None = None
|
||||
if is_timeseries:
|
||||
x_axis_col = extract_x_axis_col(form_data)
|
||||
if x_axis_col and x_axis_col not in groupby:
|
||||
groupby = [x_axis_col] + groupby
|
||||
|
||||
queries = [
|
||||
_build_single_query_dict(
|
||||
form_data,
|
||||
groupby,
|
||||
metrics,
|
||||
row_limit=row_limit,
|
||||
order_desc=order_desc,
|
||||
)
|
||||
]
|
||||
if viz_type == "mixed_timeseries":
|
||||
queries.append(
|
||||
_build_mixed_timeseries_secondary(
|
||||
form_data,
|
||||
x_axis_col,
|
||||
engine,
|
||||
row_limit=row_limit,
|
||||
order_desc=order_desc,
|
||||
)
|
||||
)
|
||||
return queries
|
||||
|
||||
|
||||
def resolve_form_data_datasource(
|
||||
form_data: dict[str, Any],
|
||||
chart: Any | None = None,
|
||||
) -> tuple[int | str | None, str]:
|
||||
"""Resolve datasource id/type from form_data with chart fallbacks."""
|
||||
datasource_id = form_data.get("datasource_id")
|
||||
datasource_type = form_data.get("datasource_type")
|
||||
|
||||
if not datasource_id and (combined := form_data.get("datasource")):
|
||||
if isinstance(combined, str) and "__" in combined:
|
||||
parts = combined.split("__", 1)
|
||||
datasource_id = int(parts[0]) if parts[0].isdigit() else parts[0]
|
||||
datasource_type = parts[1] if len(parts) > 1 else None
|
||||
|
||||
if not datasource_id and chart:
|
||||
datasource_id = getattr(chart, "datasource_id", None)
|
||||
if not datasource_type and chart:
|
||||
datasource_type = getattr(chart, "datasource_type", None)
|
||||
|
||||
    return (
        datasource_id,
        datasource_type if isinstance(datasource_type, str) else "table",
    )
def build_query_context_from_form_data(
|
||||
form_data: dict[str, Any],
|
||||
chart: Any | None = None,
|
||||
extra_form_data: dict[str, Any] | None = None,
|
||||
row_limit: int | None = None,
|
||||
order_desc: bool | None = None,
|
||||
result_type: Any = None,
|
||||
force: bool = False,
|
||||
) -> Any:
|
||||
"""Build a QueryContext from chart-type-aware Explore form_data."""
|
||||
# avoid circular import
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
|
||||
datasource_id, datasource_type = resolve_form_data_datasource(form_data, chart)
|
||||
if not isinstance(datasource_id, (int, str)):
|
||||
raise ValueError(
|
||||
"Cannot determine datasource ID from form_data. "
|
||||
"Provide a chart identifier or ensure form_data contains "
|
||||
"'datasource_id' or 'datasource'."
|
||||
)
|
||||
|
||||
queries = build_query_dicts_from_form_data(
|
||||
form_data,
|
||||
datasource_id,
|
||||
datasource_type,
|
||||
chart=chart,
|
||||
extra_form_data=extra_form_data,
|
||||
row_limit=row_limit,
|
||||
order_desc=order_desc,
|
||||
)
|
||||
return QueryContextFactory().create(
|
||||
datasource={"id": datasource_id, "type": datasource_type},
|
||||
queries=queries,
|
||||
form_data=form_data,
|
||||
result_type=result_type,
|
||||
force=force,
|
||||
)
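This is the entry point the preview strategies and the cached-form_data path later in this diff switch to. A sketch of direct use, which assumes a running Superset app context and an existing dataset (the id 12 and field values are illustrative):

```python
from superset.mcp_service.chart.chart_helpers import build_query_context_from_form_data

form_data = {
    "datasource": "12__table",
    "viz_type": "table",
    "metrics": ["count"],
    "groupby": ["country"],
}
query_context = build_query_context_from_form_data(
    form_data,
    row_limit=100,
    order_desc=True,
    force=False,
)
```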
def extract_form_data_key_from_url(url: str | None) -> str | None:
|
||||
"""Extract the form_data_key query parameter from an explore URL.
|
||||
|
||||
|
||||
@@ -35,8 +35,11 @@ from superset.commands.exceptions import CommandException
|
||||
from superset.exceptions import OAuth2Error, OAuth2RedirectError, SupersetException
|
||||
from superset.extensions import event_logger
|
||||
from superset.mcp_service.chart.chart_helpers import (
|
||||
build_query_context_from_form_data,
|
||||
build_query_dicts_from_form_data,
|
||||
find_chart_by_identifier,
|
||||
get_cached_form_data,
|
||||
merge_extra_form_data_filters_into_query,
|
||||
)
|
||||
from superset.mcp_service.chart.chart_utils import validate_chart_dataset
|
||||
from superset.mcp_service.chart.schemas import (
|
||||
@@ -55,7 +58,6 @@ from superset.mcp_service.utils.oauth2_utils import (
|
||||
build_oauth2_redirect_message,
|
||||
OAUTH2_CONFIG_ERROR_MESSAGE,
|
||||
)
|
||||
from superset.utils.core import merge_extra_filters
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -94,16 +96,6 @@ def _sanitize_chart_data_for_llm_context(chart_data: ChartData) -> ChartData:
|
||||
return ChartData.model_validate(payload)
|
||||
|
||||
|
||||
def _apply_extra_form_data(
|
||||
form_data: dict[str, Any], extra_form_data: dict[str, Any] | None
|
||||
) -> None:
|
||||
"""Merge dashboard native filters into chart form_data in-place."""
|
||||
if not extra_form_data:
|
||||
return
|
||||
form_data["extra_form_data"] = extra_form_data
|
||||
merge_extra_filters(form_data)
|
||||
|
||||
|
||||
@tool(
|
||||
tags=["data"],
|
||||
class_permission_name="Chart",
|
||||
@@ -293,65 +285,18 @@ async def get_chart_data( # noqa: C901
|
||||
# If using cached form_data, we need to build query_context from it
|
||||
if using_unsaved_state and cached_form_data_dict is not None:
|
||||
# Build query context from cached form_data (unsaved state)
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
|
||||
factory = QueryContextFactory()
|
||||
row_limit = (
|
||||
request.limit
|
||||
or cached_form_data_dict.get("row_limit")
|
||||
or current_app.config["ROW_LIMIT"]
|
||||
)
|
||||
|
||||
# Get datasource info from cached form_data or fall back to chart
|
||||
datasource_id = cached_form_data_dict.get(
|
||||
"datasource_id", chart.datasource_id
|
||||
)
|
||||
datasource_type = cached_form_data_dict.get(
|
||||
"datasource_type", chart.datasource_type
|
||||
)
|
||||
|
||||
# Handle different chart types that have different form_data
|
||||
# structures. Some charts use "metric" (singular), not "metrics"
|
||||
# (plural): big_number, big_number_total, pop_kpi.
|
||||
# These charts also don't have groupby columns.
|
||||
cached_viz_type = cached_form_data_dict.get(
|
||||
"viz_type", chart.viz_type or ""
|
||||
)
|
||||
if cached_viz_type in ("big_number", "big_number_total", "pop_kpi"):
|
||||
metric = cached_form_data_dict.get("metric")
|
||||
cached_metrics = [metric] if metric else []
|
||||
cached_groupby: list[str] = []
|
||||
else:
|
||||
cached_metrics = cached_form_data_dict.get("metrics", [])
|
||||
raw_groupby = cached_form_data_dict.get("groupby", [])
|
||||
# Guard against string groupby (e.g. heatmap_v2 migrated
|
||||
# from legacy heatmap where all_columns_y was a string)
|
||||
if isinstance(raw_groupby, str):
|
||||
cached_groupby = [raw_groupby]
|
||||
else:
|
||||
cached_groupby = list(raw_groupby)
|
||||
|
||||
_apply_extra_form_data(cached_form_data_dict, request.extra_form_data)
|
||||
|
||||
cached_query: dict[str, Any] = {
|
||||
"filters": cached_form_data_dict.get("filters", []),
|
||||
"columns": cached_groupby,
|
||||
"metrics": cached_metrics,
|
||||
"row_limit": row_limit,
|
||||
"order_desc": cached_form_data_dict.get("order_desc", True),
|
||||
}
|
||||
# Include adhoc_filters so dashboard native filters are applied
|
||||
cached_adhoc = cached_form_data_dict.get("adhoc_filters")
|
||||
if cached_adhoc:
|
||||
cached_query["adhoc_filters"] = cached_adhoc
|
||||
|
||||
query_context = factory.create(
|
||||
datasource={
|
||||
"id": datasource_id,
|
||||
"type": datasource_type,
|
||||
},
|
||||
queries=[cached_query],
|
||||
form_data=cached_form_data_dict,
|
||||
query_context = build_query_context_from_form_data(
|
||||
cached_form_data_dict,
|
||||
chart=chart,
|
||||
extra_form_data=request.extra_form_data,
|
||||
row_limit=row_limit,
|
||||
order_desc=cached_form_data_dict.get("order_desc", True),
|
||||
force=request.force_refresh,
|
||||
)
|
||||
await ctx.debug(
|
||||
@@ -420,102 +365,23 @@ async def get_chart_data( # noqa: C901
|
||||
error_type="MissingQueryContext",
|
||||
)
|
||||
|
||||
singular_metric_no_groupby = (
|
||||
"big_number",
|
||||
"big_number_total",
|
||||
"pop_kpi",
|
||||
fallback_queries = build_query_dicts_from_form_data(
|
||||
form_data,
|
||||
chart.datasource_id,
|
||||
chart.datasource_type,
|
||||
chart=chart,
|
||||
extra_form_data=request.extra_form_data,
|
||||
row_limit=row_limit,
|
||||
order_desc=True,
|
||||
)
|
||||
singular_metric_types = (
|
||||
*singular_metric_no_groupby,
|
||||
"world_map",
|
||||
"treemap_v2",
|
||||
"sunburst_v2",
|
||||
"gauge_chart",
|
||||
)
|
||||
|
||||
if viz_type == "bubble":
|
||||
# Bubble charts store metrics in x, y, size fields
|
||||
bubble_metrics = []
|
||||
for field in ("x", "y", "size"):
|
||||
m = form_data.get(field)
|
||||
if m:
|
||||
bubble_metrics.append(m)
|
||||
metrics = bubble_metrics
|
||||
groupby_columns: list[str] = list(
|
||||
form_data.get("entity", None) and [form_data["entity"]] or []
|
||||
)
|
||||
series_field = form_data.get("series")
|
||||
if series_field and series_field not in groupby_columns:
|
||||
groupby_columns.append(series_field)
|
||||
elif viz_type in singular_metric_types:
|
||||
# These chart types use "metric" (singular)
|
||||
metric = form_data.get("metric")
|
||||
metrics = [metric] if metric else []
|
||||
if viz_type in singular_metric_no_groupby:
|
||||
groupby_columns = []
|
||||
else:
|
||||
# Some singular-metric charts use groupby, entity,
|
||||
# series, or columns for dimensional breakdown
|
||||
groupby_columns = list(form_data.get("groupby") or [])
|
||||
entity = form_data.get("entity")
|
||||
if entity and entity not in groupby_columns:
|
||||
groupby_columns.append(entity)
|
||||
series = form_data.get("series")
|
||||
if series and series not in groupby_columns:
|
||||
groupby_columns.append(series)
|
||||
form_columns = form_data.get("columns")
|
||||
if form_columns and isinstance(form_columns, list):
|
||||
for col in form_columns:
|
||||
if isinstance(col, str) and col not in groupby_columns:
|
||||
groupby_columns.append(col)
|
||||
else:
|
||||
# Standard charts use "metrics" (plural) and "groupby"
|
||||
metrics = form_data.get("metrics", [])
|
||||
raw_groupby = form_data.get("groupby") or []
|
||||
# Guard against string groupby (e.g. heatmap_v2 migrated
|
||||
# from legacy heatmap where all_columns_y was a string)
|
||||
if isinstance(raw_groupby, str):
|
||||
groupby_columns = [raw_groupby]
|
||||
else:
|
||||
groupby_columns = list(raw_groupby)
|
||||
# Some chart types use "columns" instead of "groupby"
|
||||
if not groupby_columns:
|
||||
form_columns = form_data.get("columns")
|
||||
if form_columns and isinstance(form_columns, list):
|
||||
for col in form_columns:
|
||||
if isinstance(col, str):
|
||||
groupby_columns.append(col)
|
||||
|
||||
# Fallback: if metrics is still empty, try singular "metric"
|
||||
if not metrics:
|
||||
fallback_metric = form_data.get("metric")
|
||||
if fallback_metric:
|
||||
metrics = [fallback_metric]
|
||||
|
||||
# Fallback: try entity/series if groupby is still empty
|
||||
if not groupby_columns:
|
||||
entity = form_data.get("entity")
|
||||
if entity:
|
||||
groupby_columns.append(entity)
|
||||
series = form_data.get("series")
|
||||
if series and series not in groupby_columns:
|
||||
groupby_columns.append(series)
|
||||
|
||||
# Build query columns list: include both x_axis and groupby
|
||||
x_axis_config = form_data.get("x_axis")
|
||||
query_columns = groupby_columns.copy()
|
||||
if x_axis_config and isinstance(x_axis_config, str):
|
||||
if x_axis_config not in query_columns:
|
||||
query_columns.insert(0, x_axis_config)
|
||||
elif x_axis_config and isinstance(x_axis_config, dict):
|
||||
col_name = x_axis_config.get("column_name")
|
||||
if col_name and col_name not in query_columns:
|
||||
query_columns.insert(0, col_name)
|
||||
|
||||
# Safety net: if we could not extract any metrics or
|
||||
# columns, return a clear error instead of the cryptic
|
||||
# "Empty query?" that comes from deeper in the stack.
|
||||
if not metrics and not query_columns:
|
||||
if all(
|
||||
not query.get("metrics") and not query.get("columns")
|
||||
for query in fallback_queries
|
||||
):
|
||||
await ctx.warning(
|
||||
"Cannot construct fallback query for chart %s "
|
||||
"(viz_type=%s): no metrics, columns, or groupby "
|
||||
@@ -534,26 +400,12 @@ async def get_chart_data( # noqa: C901
|
||||
error_type="MissingQueryContext",
|
||||
)
|
||||
|
||||
_apply_extra_form_data(form_data, request.extra_form_data)
|
||||
|
||||
fallback_query: dict[str, Any] = {
|
||||
"filters": form_data.get("filters", []),
|
||||
"columns": query_columns,
|
||||
"metrics": metrics,
|
||||
"row_limit": row_limit,
|
||||
"order_desc": True,
|
||||
}
|
||||
# Include adhoc_filters so dashboard native filters are applied
|
||||
fallback_adhoc = form_data.get("adhoc_filters")
|
||||
if fallback_adhoc:
|
||||
fallback_query["adhoc_filters"] = fallback_adhoc
|
||||
|
||||
query_context = factory.create(
|
||||
datasource={
|
||||
"id": chart.datasource_id,
|
||||
"type": chart.datasource_type,
|
||||
},
|
||||
queries=[fallback_query],
|
||||
queries=fallback_queries,
|
||||
form_data=form_data,
|
||||
force=request.force_refresh,
|
||||
)
|
||||
@@ -566,9 +418,14 @@ async def get_chart_data( # noqa: C901
|
||||
for query in query_context_json.get("queries", []):
|
||||
query["row_limit"] = request.limit
|
||||
|
||||
# Merge dashboard native filters into query_context's form_data
|
||||
qc_form_data = query_context_json.setdefault("form_data", {})
|
||||
_apply_extra_form_data(qc_form_data, request.extra_form_data)
|
||||
if request.extra_form_data:
|
||||
for query in query_context_json.get("queries", []):
|
||||
merge_extra_form_data_filters_into_query(
|
||||
query,
|
||||
request.extra_form_data,
|
||||
query_context_json["datasource"]["id"],
|
||||
query_context_json["datasource"]["type"],
|
||||
)
|
||||
|
||||
# Create QueryContext from the saved context using the schema
|
||||
# This is exactly how the API does it
|
||||
@@ -871,16 +728,14 @@ async def _query_from_form_data(
|
||||
Used for unsaved charts where we only have form_data_key.
|
||||
"""
|
||||
from superset.commands.chart.data.get_data_command import ChartDataCommand
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
|
||||
datasource_id = form_data.get("datasource_id")
|
||||
datasource_type: str = form_data.get("datasource_type") or "table"
|
||||
|
||||
# Handle combined datasource field (e.g., "1__table")
|
||||
if not datasource_id and form_data.get("datasource"):
|
||||
parts = str(form_data["datasource"]).split("__")
|
||||
if len(parts) == 2:
|
||||
datasource_id, datasource_type = parts[0], parts[1]
|
||||
datasource_id = parts[0]
|
||||
|
||||
if not datasource_id:
|
||||
return ChartError(
|
||||
@@ -888,34 +743,17 @@ async def _query_from_form_data(
|
||||
error_type="InvalidFormData",
|
||||
)
|
||||
|
||||
viz_type = form_data.get("viz_type", "unknown")
|
||||
row_limit = (
|
||||
request.limit or form_data.get("row_limit") or current_app.config["ROW_LIMIT"]
|
||||
)
|
||||
|
||||
# Extract metrics and groupby based on chart type
|
||||
if viz_type in ("big_number", "big_number_total", "pop_kpi"):
|
||||
metric = form_data.get("metric")
|
||||
metrics = [metric] if metric else []
|
||||
groupby: list[str] = []
|
||||
else:
|
||||
metrics = form_data.get("metrics", [])
|
||||
groupby = list(form_data.get("groupby") or [])
|
||||
viz_type = form_data.get("viz_type", "unknown")
|
||||
|
||||
try:
|
||||
factory = QueryContextFactory()
|
||||
query_context = factory.create(
|
||||
datasource={"id": datasource_id, "type": datasource_type},
|
||||
queries=[
|
||||
{
|
||||
"filters": form_data.get("filters", []),
|
||||
"columns": groupby,
|
||||
"metrics": metrics,
|
||||
"row_limit": row_limit,
|
||||
"order_desc": form_data.get("order_desc", True),
|
||||
}
|
||||
],
|
||||
form_data=form_data,
|
||||
query_context = build_query_context_from_form_data(
|
||||
form_data,
|
||||
extra_form_data=request.extra_form_data,
|
||||
row_limit=row_limit,
|
||||
order_desc=form_data.get("order_desc", True),
|
||||
force=request.force_refresh,
|
||||
)
|
||||
|
||||
|
||||
@@ -33,7 +33,10 @@ from superset.mcp_service.chart.ascii_charts import (
|
||||
generate_ascii_chart,
|
||||
generate_ascii_table,
|
||||
)
|
||||
from superset.mcp_service.chart.chart_helpers import find_chart_by_identifier
|
||||
from superset.mcp_service.chart.chart_helpers import (
|
||||
build_query_context_from_form_data,
|
||||
find_chart_by_identifier,
|
||||
)
|
||||
from superset.mcp_service.chart.chart_utils import validate_chart_dataset
|
||||
from superset.mcp_service.chart.schemas import (
|
||||
AccessibilityMetadata,
|
||||
@@ -197,7 +200,6 @@ class ASCIIPreviewStrategy(PreviewFormatStrategy):
|
||||
def generate(self) -> ASCIIPreview | ChartError:
|
||||
try:
|
||||
from superset.commands.chart.data.get_data_command import ChartDataCommand
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
from superset.utils import json as utils_json
|
||||
|
||||
form_data = utils_json.loads(self.chart.params) if self.chart.params else {}
|
||||
@@ -214,50 +216,11 @@ class ASCIIPreviewStrategy(PreviewFormatStrategy):
|
||||
error_type="InvalidChart",
|
||||
)
|
||||
|
||||
# Build query for chart data
|
||||
x_axis_config = form_data.get("x_axis")
|
||||
groupby_columns = form_data.get("groupby", [])
|
||||
metrics = form_data.get("metrics", [])
|
||||
|
||||
# Table charts in raw mode use all_columns or columns
|
||||
all_columns = form_data.get("all_columns", [])
|
||||
raw_columns = form_data.get("columns", [])
|
||||
if form_data.get("query_mode") == "raw" and (all_columns or raw_columns):
|
||||
columns = list(all_columns or raw_columns)
|
||||
else:
|
||||
columns = groupby_columns.copy()
|
||||
if x_axis_config and isinstance(x_axis_config, str):
|
||||
columns.append(x_axis_config)
|
||||
elif x_axis_config and isinstance(x_axis_config, dict):
|
||||
if "column_name" in x_axis_config:
|
||||
columns.append(x_axis_config["column_name"])
|
||||
|
||||
if not columns and not metrics:
|
||||
return ChartError(
|
||||
error=(
|
||||
"Cannot generate ASCII preview: chart has no columns or "
|
||||
"metrics in its configuration. This chart type may not "
|
||||
"support ASCII preview."
|
||||
),
|
||||
error_type="UnsupportedChart",
|
||||
)
|
||||
|
||||
factory = QueryContextFactory()
|
||||
query_context = factory.create(
|
||||
datasource={
|
||||
"id": self.chart.datasource_id,
|
||||
"type": self.chart.datasource_type,
|
||||
},
|
||||
queries=[
|
||||
{
|
||||
"filters": form_data.get("filters", []),
|
||||
"columns": columns,
|
||||
"metrics": metrics,
|
||||
"row_limit": 50,
|
||||
"order_desc": True,
|
||||
}
|
||||
],
|
||||
form_data=form_data,
|
||||
query_context = build_query_context_from_form_data(
|
||||
form_data,
|
||||
chart=self.chart,
|
||||
row_limit=50,
|
||||
order_desc=True,
|
||||
force=False,
|
||||
)
|
||||
|
||||
@@ -303,7 +266,6 @@ class TablePreviewStrategy(PreviewFormatStrategy):
|
||||
def generate(self) -> TablePreview | ChartError:
|
||||
try:
|
||||
from superset.commands.chart.data.get_data_command import ChartDataCommand
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
from superset.utils import json as utils_json
|
||||
|
||||
form_data = utils_json.loads(self.chart.params) if self.chart.params else {}
|
||||
@@ -315,24 +277,11 @@ class TablePreviewStrategy(PreviewFormatStrategy):
|
||||
error_type="InvalidChart",
|
||||
)
|
||||
|
||||
columns = _build_query_columns(form_data)
|
||||
|
||||
factory = QueryContextFactory()
|
||||
query_context = factory.create(
|
||||
datasource={
|
||||
"id": self.chart.datasource_id,
|
||||
"type": self.chart.datasource_type,
|
||||
},
|
||||
queries=[
|
||||
{
|
||||
"filters": form_data.get("filters", []),
|
||||
"columns": columns,
|
||||
"metrics": form_data.get("metrics", []),
|
||||
"row_limit": 20,
|
||||
"order_desc": True,
|
||||
}
|
||||
],
|
||||
form_data=form_data,
|
||||
query_context = build_query_context_from_form_data(
|
||||
form_data,
|
||||
chart=self.chart,
|
||||
row_limit=20,
|
||||
order_desc=True,
|
||||
force=False,
|
||||
)
|
||||
|
||||
@@ -386,7 +335,6 @@ class VegaLitePreviewStrategy(PreviewFormatStrategy):
|
||||
# Get chart data directly using the same logic as get_chart_data tool
|
||||
# but without calling the MCP tool wrapper
|
||||
from superset.commands.chart.data.get_data_command import ChartDataCommand
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
from superset.daos.chart import ChartDAO
|
||||
from superset.utils import json as utils_json
|
||||
|
||||
@@ -419,26 +367,11 @@ class VegaLitePreviewStrategy(PreviewFormatStrategy):
|
||||
utils_json.loads(self.chart.params) if self.chart.params else {}
|
||||
)
|
||||
|
||||
# Build columns list: include both x_axis and groupby
|
||||
columns = _build_query_columns(form_data)
|
||||
|
||||
# Create query context for data retrieval
|
||||
factory = QueryContextFactory()
|
||||
query_context = factory.create(
|
||||
datasource={
|
||||
"id": self.chart.datasource_id,
|
||||
"type": self.chart.datasource_type,
|
||||
},
|
||||
queries=[
|
||||
{
|
||||
"filters": form_data.get("filters", []),
|
||||
"columns": columns,
|
||||
"metrics": form_data.get("metrics", []),
|
||||
"row_limit": 1000, # More data for visualization
|
||||
"order_desc": True,
|
||||
}
|
||||
],
|
||||
form_data=form_data,
|
||||
query_context = build_query_context_from_form_data(
|
||||
form_data,
|
||||
chart=self.chart,
|
||||
row_limit=1000,
|
||||
order_desc=True,
|
||||
force=self.request.force_refresh,
|
||||
)
|
||||
|
||||
|
||||
@@ -32,6 +32,13 @@ from superset.commands.exceptions import CommandException
|
||||
from superset.commands.explore.form_data.parameters import CommandParameters
|
||||
from superset.exceptions import SupersetException, SupersetSecurityException
|
||||
from superset.extensions import event_logger
|
||||
from superset.mcp_service.chart.chart_helpers import (
|
||||
build_query_context_from_form_data,
|
||||
extract_x_axis_col,
|
||||
resolve_groupby,
|
||||
resolve_metrics,
|
||||
resolve_metrics_and_groupby,
|
||||
)
|
||||
from superset.mcp_service.chart.chart_utils import validate_chart_dataset
|
||||
from superset.mcp_service.chart.schemas import (
|
||||
ChartError,
|
||||
@@ -73,160 +80,25 @@ def _get_cached_form_data(form_data_key: str) -> str | None:
|
||||
|
||||
def _resolve_metrics(form_data: dict[str, Any], viz_type: str) -> list[Any]:
|
||||
"""Extract metrics from form_data, handling chart-type-specific fields."""
|
||||
# Bubble charts store measures in x, y, size fields
|
||||
if viz_type == "bubble":
|
||||
return [m for field in ("x", "y", "size") if (m := form_data.get(field))]
|
||||
|
||||
metrics = form_data.get("metrics", [])
|
||||
# Fallback: some chart types store the measure as singular "metric"
|
||||
if not metrics and (metric := form_data.get("metric")):
|
||||
metrics = [metric]
|
||||
return metrics
|
||||
return resolve_metrics(form_data, viz_type)
|
||||
|
||||
|
||||
def _resolve_groupby(form_data: dict[str, Any]) -> list[str]:
|
||||
"""Extract groupby columns from form_data with fallback aliases.
|
||||
|
||||
Normalises scalar strings (e.g. heatmap_v2 migrated from legacy
|
||||
``all_columns_y``) so that ``list("country")`` does not split into
|
||||
individual characters.
|
||||
"""
|
||||
raw_groupby = form_data.get("groupby") or []
|
||||
if isinstance(raw_groupby, str):
|
||||
groupby: list[str] = [raw_groupby]
|
||||
else:
|
||||
groupby = list(raw_groupby)
|
||||
|
||||
if groupby:
|
||||
return groupby
|
||||
|
||||
# Fallback: some chart types store dimensions in entity/series/columns
|
||||
for field in ("entity", "series"):
|
||||
value = form_data.get(field)
|
||||
if isinstance(value, str) and value not in groupby:
|
||||
groupby.append(value)
|
||||
|
||||
form_columns = form_data.get("columns")
|
||||
if isinstance(form_columns, list):
|
||||
for col in form_columns:
|
||||
if isinstance(col, str) and col not in groupby:
|
||||
groupby.append(col)
|
||||
|
||||
return groupby
|
||||
def _resolve_groupby(form_data: dict[str, Any]) -> list[Any]:
|
||||
"""Extract groupby columns from form_data with fallback aliases."""
|
||||
return resolve_groupby(form_data)
|
||||
|
||||
|
||||
def _resolve_metrics_and_groupby(
|
||||
form_data: dict[str, Any],
|
||||
chart: "Slice | None",
|
||||
) -> tuple[list[Any], list[str]]:
|
||||
"""Resolve metrics and groupby columns from form_data.
|
||||
|
||||
Handles chart-type-specific field names: singular ``metric`` for
|
||||
big-number variants, bubble ``x``/``y``/``size``, and fallback
|
||||
fields ``entity``, ``series``, and ``columns`` for dimensions.
|
||||
"""
|
||||
viz_type = form_data.get(
|
||||
"viz_type", getattr(chart, "viz_type", "") if chart else ""
|
||||
)
|
||||
|
||||
singular_metric_no_groupby = (
|
||||
"big_number",
|
||||
"big_number_total",
|
||||
"pop_kpi",
|
||||
)
|
||||
if viz_type in singular_metric_no_groupby:
|
||||
metrics: list[Any] = [metric] if (metric := form_data.get("metric")) else []
|
||||
return metrics, []
|
||||
|
||||
return _resolve_metrics(form_data, viz_type), _resolve_groupby(form_data)
|
||||
) -> tuple[list[Any], list[Any]]:
|
||||
"""Resolve metrics and groupby columns from form_data."""
|
||||
return resolve_metrics_and_groupby(form_data, chart)
|
||||
|
||||
|
||||
def _extract_x_axis_col(form_data: dict[str, Any]) -> str | None:
|
||||
"""Return the x_axis column name from form_data, or None if not set.
|
||||
|
||||
``x_axis`` may be stored as a plain column-name string or as an adhoc
|
||||
column dict (``{"column_name": "...", ...}``).
|
||||
"""
|
||||
x_axis = form_data.get("x_axis")
|
||||
if isinstance(x_axis, str) and x_axis:
|
||||
return x_axis
|
||||
if isinstance(x_axis, dict):
|
||||
col_name = x_axis.get("column_name")
|
||||
return col_name if isinstance(col_name, str) and col_name else None
|
||||
return None
|
||||
|
||||
|
||||
def _resolve_engine(
|
||||
datasource_id: Any,
|
||||
datasource_type: str,
|
||||
) -> str:
|
||||
"""Return the DB engine name for *datasource_id*, or ``"base"`` on any error."""
|
||||
if not isinstance(datasource_id, (int, str)):
|
||||
return "base"
|
||||
try:
|
||||
from superset.daos.datasource import DatasourceDAO
|
||||
from superset.utils.core import DatasourceType
|
||||
|
||||
ds = DatasourceDAO.get_datasource(
|
||||
datasource_type=DatasourceType(datasource_type),
|
||||
database_id_or_uuid=datasource_id,
|
||||
)
|
||||
return ds.database.db_engine_spec.engine
|
||||
except Exception: # noqa: BLE001
|
||||
logger.debug("Could not resolve engine for datasource %s", datasource_id)
|
||||
return "base"
|
||||
|
||||
|
||||
def _build_single_query_dict(
|
||||
form_data: dict[str, Any],
|
||||
columns: list[Any],
|
||||
metrics: list[Any],
|
||||
) -> dict[str, Any]:
|
||||
"""Build one query entry for QueryContextFactory from form_data fields."""
|
||||
qd: dict[str, Any] = {"columns": columns, "metrics": metrics}
|
||||
if time_range := form_data.get("time_range"):
|
||||
qd["time_range"] = time_range
|
||||
if filters := form_data.get("filters"):
|
||||
qd["filters"] = filters
|
||||
if (row_limit := form_data.get("row_limit")) is not None:
|
||||
qd["row_limit"] = row_limit
|
||||
return qd
|
||||
|
||||
|
||||
def _build_mixed_timeseries_secondary(
|
||||
form_data: dict[str, Any],
|
||||
x_axis_col: str | None,
|
||||
engine: str = "base",
|
||||
) -> dict[str, Any]:
|
||||
"""Build the secondary query dict for the ``mixed_timeseries`` viz type.
|
||||
|
||||
``mixed_timeseries`` has two independent series layers; the secondary
|
||||
layer uses ``metrics_b`` / ``groupby_b`` instead of the primary fields.
|
||||
Secondary-specific overrides (``time_range_b``, ``row_limit_b``,
|
||||
``adhoc_filters_b``) replace the corresponding primary values so the
|
||||
generated SQL accurately reflects each series' independent configuration.
|
||||
"""
|
||||
metrics_b: list[Any] = list(form_data.get("metrics_b") or [])
|
||||
raw_b = form_data.get("groupby_b") or []
|
||||
groupby_b: list[Any] = [raw_b] if isinstance(raw_b, str) else list(raw_b)
|
||||
if x_axis_col and x_axis_col not in groupby_b:
|
||||
groupby_b = [x_axis_col] + groupby_b
|
||||
qd = _build_single_query_dict(form_data, groupby_b, metrics_b)
|
||||
if time_range_b := form_data.get("time_range_b"):
|
||||
qd["time_range"] = time_range_b
|
||||
if (row_limit_b := form_data.get("row_limit_b")) is not None:
|
||||
qd["row_limit"] = row_limit_b
|
||||
# Process adhoc_filters_b into concrete filter clauses for the secondary
|
||||
# query, mirroring how split_adhoc_filters_into_base_filters handles the
|
||||
# primary adhoc_filters in _build_query_context_from_form_data.
|
||||
if adhoc_filters_b := form_data.get("adhoc_filters_b"):
|
||||
from superset.utils.core import split_adhoc_filters_into_base_filters
|
||||
|
||||
secondary_fd: dict[str, Any] = {"adhoc_filters": adhoc_filters_b}
|
||||
split_adhoc_filters_into_base_filters(secondary_fd, engine)
|
||||
if secondary_filters := secondary_fd.get("filters"):
|
||||
qd["filters"] = secondary_filters
|
||||
return qd
|
||||
"""Return the x_axis column name from form_data, or None if not set."""
|
||||
return extract_x_axis_col(form_data)
|
||||
|
||||
|
||||
def _build_query_context_from_form_data(
|
||||
@@ -239,85 +111,10 @@ def _build_query_context_from_form_data(
|
||||
instead of executing the query.
|
||||
"""
|
||||
from superset.common.chart_data import ChartDataResultType
|
||||
from superset.common.query_context_factory import QueryContextFactory
|
||||
|
||||
factory = QueryContextFactory()
|
||||
|
||||
datasource_id = form_data.get("datasource_id")
|
||||
datasource_type = form_data.get("datasource_type")
|
||||
|
||||
# Unsaved Explore state often stores datasource as a combined field
|
||||
# like "123__table" instead of separate datasource_id/datasource_type.
|
||||
if not datasource_id and (combined := form_data.get("datasource")):
|
||||
if isinstance(combined, str) and "__" in combined:
|
||||
parts = combined.split("__", 1)
|
||||
datasource_id = int(parts[0]) if parts[0].isdigit() else parts[0]
|
||||
datasource_type = parts[1] if len(parts) > 1 else None
|
||||
|
||||
if not datasource_id and chart:
|
||||
datasource_id = getattr(chart, "datasource_id", None)
|
||||
if not datasource_type and chart:
|
||||
datasource_type = getattr(chart, "datasource_type", None)
|
||||
|
||||
metrics, groupby = _resolve_metrics_and_groupby(form_data, chart)
|
||||
|
||||
# Preprocess adhoc_filters into where/having/filters on form_data so
|
||||
# that the QueryObject receives concrete filter clauses. This mirrors
|
||||
# the view-layer call in viz.py:process_query_filters.
|
||||
from superset.utils.core import (
|
||||
merge_extra_filters,
|
||||
split_adhoc_filters_into_base_filters,
|
||||
)
|
||||
|
||||
resolved_type_str: str = (
|
||||
datasource_type if isinstance(datasource_type, str) else "table"
|
||||
)
|
||||
engine = _resolve_engine(datasource_id, resolved_type_str)
|
||||
merge_extra_filters(form_data)
|
||||
split_adhoc_filters_into_base_filters(form_data, engine)
|
||||
|
||||
viz_type: str = (
|
||||
form_data.get("viz_type")
|
||||
or (getattr(chart, "viz_type", "") if chart else "")
|
||||
or ""
|
||||
)
|
||||
is_timeseries = (
|
||||
viz_type.startswith("echarts_timeseries") or viz_type == "mixed_timeseries"
|
||||
)
|
||||
|
||||
# For echarts_timeseries_* and mixed_timeseries charts the temporal
|
||||
# column is stored in x_axis rather than groupby. Prepend it so the
|
||||
# generated SQL includes the time axis.
|
||||
x_axis_col: str | None = None
|
||||
if is_timeseries:
|
||||
x_axis_col = _extract_x_axis_col(form_data)
|
||||
if x_axis_col and x_axis_col not in groupby:
|
||||
groupby = [x_axis_col] + groupby
|
||||
|
||||
queries: list[dict[str, Any]] = [
|
||||
_build_single_query_dict(form_data, groupby, metrics)
|
||||
]
|
||||
|
||||
# mixed_timeseries exposes two independent query layers (primary and
|
||||
# secondary). Build the second query from metrics_b / groupby_b so
|
||||
# that get_chart_sql returns SQL for both and neither is silently lost.
|
||||
if viz_type == "mixed_timeseries":
|
||||
queries.append(_build_mixed_timeseries_secondary(form_data, x_axis_col, engine))
|
||||
|
||||
# Ensure datasource fields satisfy DatasourceDict typing requirements.
|
||||
# datasource_id must be int | str; datasource_type must be str.
|
||||
if not isinstance(datasource_id, (int, str)):
|
||||
raise ValueError(
|
||||
"Cannot determine datasource ID from form_data. "
|
||||
"Provide a chart identifier or ensure form_data contains "
|
||||
"'datasource_id' or 'datasource'."
|
||||
)
|
||||
resolved_id: int | str = datasource_id
|
||||
|
||||
return factory.create(
|
||||
datasource={"id": resolved_id, "type": resolved_type_str},
|
||||
queries=queries,
|
||||
form_data=form_data,
|
||||
return build_query_context_from_form_data(
|
||||
form_data,
|
||||
chart=chart,
|
||||
result_type=ChartDataResultType.QUERY,
|
||||
force=False,
|
||||
)
|
||||
|
||||
@@ -22,8 +22,15 @@ under the License.
|
||||
<meta charset="utf-8">
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
window.opener.postMessage({ tabId: '{{ tab_id }}' });
|
||||
<script nonce="{{ get_nonce() }}">
|
||||
const message = { tabId: '{{ tab_id }}' };
|
||||
if (typeof BroadcastChannel !== 'undefined') {
|
||||
const channel = new BroadcastChannel('oauth');
|
||||
channel.postMessage(message);
|
||||
channel.close();
|
||||
}
|
||||
localStorage.setItem('oauth2_auth_complete', JSON.stringify(message));
|
||||
localStorage.removeItem('oauth2_auth_complete');
|
||||
window.close();
|
||||
</script>
|
||||
<p>You can close this window and re-run the query.</p>
|
||||
|
||||
226
tests/unit_tests/commands/dashboard/export_test.py
Normal file
226
tests/unit_tests/commands/dashboard/export_test.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from typing import Any
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import yaml
|
||||
|
||||
from superset.utils import json
|
||||
|
||||
|
||||
def _make_mock_dashboard(json_metadata: dict[str, Any]) -> MagicMock:
|
||||
dashboard = MagicMock()
|
||||
dashboard.dashboard_title = "Test Dashboard"
|
||||
dashboard.theme = None
|
||||
dashboard.slices = []
|
||||
dashboard.tags = []
|
||||
dashboard.export_to_dict.return_value = {
|
||||
"position_json": json.dumps(
|
||||
{
|
||||
"DASHBOARD_VERSION_KEY": "v2",
|
||||
"ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
|
||||
"GRID_ID": {
|
||||
"children": [],
|
||||
"id": "GRID_ID",
|
||||
"parents": ["ROOT_ID"],
|
||||
"type": "GRID",
|
||||
},
|
||||
"HEADER_ID": {
|
||||
"id": "HEADER_ID",
|
||||
"meta": {"text": "Test Dashboard"},
|
||||
"type": "HEADER",
|
||||
},
|
||||
}
|
||||
),
|
||||
"json_metadata": json.dumps(json_metadata),
|
||||
}
|
||||
return dashboard
|
||||
|
||||
|
||||
def test_file_content_replaces_dataset_id_with_uuid_in_display_controls():
|
||||
"""
|
||||
_file_content must replace datasetId with datasetUuid in chart_customization_config
|
||||
targets, mirroring what it already does for native_filter_configuration.
|
||||
"""
|
||||
from superset.commands.dashboard.export import ExportDashboardsCommand
|
||||
|
||||
dataset_uuid = str(uuid.uuid4())
|
||||
|
||||
mock_dashboard = _make_mock_dashboard(
|
||||
{
|
||||
"native_filter_configuration": [],
|
||||
"chart_customization_config": [
|
||||
{
|
||||
"id": "CUSTOMIZATION-abc",
|
||||
"type": "CHART_CUSTOMIZATION",
|
||||
"targets": [{"datasetId": 99, "column": {"name": "col"}}],
|
||||
},
|
||||
{
|
||||
"id": "CUSTOMIZATION-divider",
|
||||
"type": "CHART_CUSTOMIZATION_DIVIDER",
|
||||
"targets": [],
|
||||
},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
mock_dataset = MagicMock()
|
||||
mock_dataset.uuid = dataset_uuid
|
||||
|
||||
with (
|
||||
patch(
|
||||
"superset.commands.dashboard.export.DatasetDAO.find_by_id",
|
||||
return_value=mock_dataset,
|
||||
),
|
||||
patch(
|
||||
"superset.commands.dashboard.export.feature_flag_manager.is_feature_enabled",
|
||||
return_value=False,
|
||||
),
|
||||
):
|
||||
content = ExportDashboardsCommand._file_content(mock_dashboard)
|
||||
|
||||
result = yaml.safe_load(content)
|
||||
customizations = result["metadata"]["chart_customization_config"]
|
||||
|
||||
# datasetUuid must be added; datasetId preserved for backward compat
|
||||
target = customizations[0]["targets"][0]
|
||||
assert target["datasetUuid"] == dataset_uuid
|
||||
assert target["datasetId"] == 99
|
||||
|
||||
# Dividers with no targets must be unaffected
|
||||
assert customizations[1]["targets"] == []
|
||||
|
||||
|
||||
def test_export_yields_dataset_files_for_display_controls():
|
||||
"""
|
||||
_export must yield dataset files for datasets referenced by display controls.
|
||||
|
||||
The _export generator has a second pass over json_metadata (separate from
|
||||
_file_content) whose job is to emit dataset YAML files into the bundle.
|
||||
Without this, the round-trip fails: the UUID is in the dashboard YAML but
|
||||
the dataset file is absent from the ZIP.
|
||||
"""
|
||||
from superset.commands.dashboard.export import ExportDashboardsCommand
|
||||
|
||||
dataset_id = 42
|
||||
mock_dashboard = _make_mock_dashboard(
|
||||
{
|
||||
"native_filter_configuration": [],
|
||||
"chart_customization_config": [
|
||||
{
|
||||
"id": "CUSTOMIZATION-abc",
|
||||
"type": "CHART_CUSTOMIZATION",
|
||||
"targets": [{"datasetId": dataset_id}],
|
||||
},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
mock_dataset = MagicMock()
|
||||
sentinel_file = ("datasets/my_dataset.yaml", lambda: "dataset_content")
|
||||
mock_datasets_cmd = MagicMock()
|
||||
mock_datasets_cmd.run.return_value = iter([sentinel_file])
|
||||
|
||||
with (
|
||||
patch(
|
||||
"superset.commands.dashboard.export.DatasetDAO.find_by_id",
|
||||
return_value=mock_dataset,
|
||||
),
|
||||
patch(
|
||||
"superset.commands.dashboard.export.ExportDatasetsCommand",
|
||||
return_value=mock_datasets_cmd,
|
||||
) as mock_datasets_cls,
|
||||
patch(
|
||||
"superset.commands.dashboard.export.ExportChartsCommand"
|
||||
) as mock_charts_cls,
|
||||
patch(
|
||||
"superset.commands.dashboard.export.feature_flag_manager.is_feature_enabled",
|
||||
return_value=False,
|
||||
),
|
||||
):
|
||||
mock_charts_cls.return_value.run.return_value = iter([])
|
||||
results = list(ExportDashboardsCommand._export(mock_dashboard))
|
||||
|
||||
mock_datasets_cls.assert_called_once_with([dataset_id])
|
||||
mock_datasets_cmd.run.assert_called_once()
|
||||
filenames = [name for name, _ in results]
|
||||
assert "datasets/my_dataset.yaml" in filenames
|
||||
|
||||
|
||||
def test_file_content_null_chart_customization_config_does_not_raise():
|
||||
"""
|
||||
When chart_customization_config is explicitly null in metadata,
|
||||
_file_content must not raise — the `or []` guard handles it.
|
||||
"""
|
||||
from superset.commands.dashboard.export import ExportDashboardsCommand
|
||||
|
||||
mock_dashboard = _make_mock_dashboard(
|
||||
{
|
||||
"native_filter_configuration": [],
|
||||
"chart_customization_config": None,
|
||||
}
|
||||
)
|
||||
|
||||
with patch(
|
||||
"superset.commands.dashboard.export.feature_flag_manager.is_feature_enabled",
|
||||
return_value=False,
|
||||
):
|
||||
content = ExportDashboardsCommand._file_content(mock_dashboard)
|
||||
|
||||
result = yaml.safe_load(content)
|
||||
assert result["metadata"]["chart_customization_config"] is None
|
||||
|
||||
|
||||
def test_file_content_missing_dataset_preserves_dataset_id():
|
||||
"""
|
||||
When DatasetDAO.find_by_id returns None for a display control target,
|
||||
datasetId is preserved (dual-write: it was never popped) and no
|
||||
datasetUuid is added — the target is not silently emptied.
|
||||
"""
|
||||
from superset.commands.dashboard.export import ExportDashboardsCommand
|
||||
|
||||
mock_dashboard = _make_mock_dashboard(
|
||||
{
|
||||
"chart_customization_config": [
|
||||
{
|
||||
"id": "CUSTOMIZATION-orphan",
|
||||
"type": "CHART_CUSTOMIZATION",
|
||||
"targets": [{"datasetId": 9999}],
|
||||
},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"superset.commands.dashboard.export.DatasetDAO.find_by_id",
|
||||
return_value=None,
|
||||
),
|
||||
patch(
|
||||
"superset.commands.dashboard.export.feature_flag_manager.is_feature_enabled",
|
||||
return_value=False,
|
||||
),
|
||||
):
|
||||
content = ExportDashboardsCommand._file_content(mock_dashboard)
|
||||
|
||||
result = yaml.safe_load(content)
|
||||
target = result["metadata"]["chart_customization_config"][0]["targets"][0]
|
||||
assert target["datasetId"] == 9999
|
||||
assert "datasetUuid" not in target
|
||||
@@ -244,6 +244,143 @@ def test_update_id_refs_preserves_time_grains_in_native_filters():
|
||||
assert filter_config.get("filterType") == "filter_timegrain"
|
||||
|
||||
|
||||
def test_find_native_filter_datasets_includes_display_controls():
|
||||
"""
|
||||
Test that find_native_filter_datasets also returns dataset UUIDs
|
||||
from chart_customization_config (display controls).
|
||||
"""
|
||||
from superset.commands.dashboard.importers.v1.utils import (
|
||||
find_native_filter_datasets,
|
||||
)
|
||||
|
||||
metadata = {
|
||||
"native_filter_configuration": [
|
||||
{"targets": [{"datasetUuid": "uuid-native-1"}]},
|
||||
],
|
||||
"chart_customization_config": [
|
||||
{"targets": [{"datasetUuid": "uuid-display-1"}]},
|
||||
{"targets": [{"datasetUuid": "uuid-display-2"}]},
|
||||
{"targets": []},
|
||||
],
|
||||
}
|
||||
|
||||
uuids = find_native_filter_datasets(metadata)
|
||||
assert uuids == {"uuid-native-1", "uuid-display-1", "uuid-display-2"}
|
||||
|
||||
|
||||
def test_update_id_refs_fixes_display_control_dataset_references():
|
||||
"""
|
||||
Test that update_id_refs converts datasetUuid back to datasetId in
|
||||
chart_customization_config (display controls) during import.
|
||||
"""
|
||||
from superset.commands.dashboard.importers.v1.utils import update_id_refs
|
||||
|
||||
config: dict[str, Any] = {
|
||||
"position": {
|
||||
"CHART1": {
|
||||
"id": "CHART1",
|
||||
"meta": {"chartId": 101, "uuid": "uuid1"},
|
||||
"type": "CHART",
|
||||
},
|
||||
},
|
||||
"metadata": {
|
||||
"native_filter_configuration": [],
|
||||
"chart_customization_config": [
|
||||
{
|
||||
"id": "CUSTOMIZATION-abc",
|
||||
"type": "CHART_CUSTOMIZATION",
|
||||
# dual-write format: both fields present in exported bundle
|
||||
"targets": [
|
||||
{
|
||||
"datasetId": 99,
|
||||
"datasetUuid": "ds-uuid-1",
|
||||
"column": {"name": "col"},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": "CUSTOMIZATION-divider",
|
||||
"type": "CHART_CUSTOMIZATION_DIVIDER",
|
||||
"targets": [],
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
chart_ids = {"uuid1": 1}
|
||||
dataset_info: dict[str, dict[str, Any]] = {
|
||||
"ds-uuid-1": {"datasource_id": 42},
|
||||
}
|
||||
|
||||
fixed = update_id_refs(config, chart_ids, dataset_info)
|
||||
|
||||
customizations = fixed["metadata"]["chart_customization_config"]
|
||||
target = customizations[0]["targets"][0]
|
||||
assert target["datasetId"] == 42 # updated to destination-env ID
|
||||
assert "datasetUuid" not in target # consumed by import
|
||||
assert customizations[1]["targets"] == []
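Taken together with the export tests above, this pins a dual-write round trip for display-control targets. Schematically, a sketch using the same illustrative ids and UUID as the tests:

```python
# Source environment: the dashboard stores only the numeric dataset id.
target = {"datasetId": 99, "column": {"name": "col"}}

# Export (_file_content) adds the portable UUID and keeps the id for
# backward compatibility.
exported = {"datasetId": 99, "datasetUuid": "ds-uuid-1", "column": {"name": "col"}}

# Import (update_id_refs) consumes the UUID and rewrites the id to the
# destination environment's dataset.
imported = {"datasetId": 42, "column": {"name": "col"}}
```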
def test_update_id_refs_removes_stale_dataset_id_when_uuid_unresolvable():
|
||||
"""
|
||||
When a target has both datasetId and datasetUuid but the UUID is absent
|
||||
from dataset_info, the stale datasetId must also be removed. A visibly
|
||||
broken control is safer than one silently bound to whatever dataset
|
||||
happens to own that integer ID in the destination environment.
|
||||
"""
|
||||
from superset.commands.dashboard.importers.v1.utils import update_id_refs
|
||||
|
||||
config: dict[str, Any] = {
|
||||
"position": {},
|
||||
"metadata": {
|
||||
"native_filter_configuration": [],
|
||||
"chart_customization_config": [
|
||||
{
|
||||
"id": "CUSTOMIZATION-abc",
|
||||
"type": "CHART_CUSTOMIZATION",
|
||||
"targets": [{"datasetId": 99, "datasetUuid": "uuid-missing"}],
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
fixed = update_id_refs(config, {}, {})
|
||||
|
||||
target = fixed["metadata"]["chart_customization_config"][0]["targets"][0]
|
||||
assert "datasetUuid" not in target
|
||||
assert "datasetId" not in target
|
||||
|
||||
|
||||
def test_update_id_refs_skips_display_control_target_on_missing_uuid():
|
||||
"""
|
||||
When a display control target's datasetUuid is absent from dataset_info
|
||||
(e.g. a partially corrupt export bundle), update_id_refs skips the target
|
||||
silently rather than raising KeyError — the datasetUuid is popped and no
|
||||
datasetId is written, leaving the target without a dataset reference.
|
||||
"""
|
||||
from superset.commands.dashboard.importers.v1.utils import update_id_refs
|
||||
|
||||
config: dict[str, Any] = {
|
||||
"position": {},
|
||||
"metadata": {
|
||||
"native_filter_configuration": [],
|
||||
"chart_customization_config": [
|
||||
{
|
||||
"id": "CUSTOMIZATION-abc",
|
||||
"type": "CHART_CUSTOMIZATION",
|
||||
"targets": [{"datasetUuid": "uuid-missing-from-bundle"}],
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
fixed = update_id_refs(config, {}, {})
|
||||
|
||||
target = fixed["metadata"]["chart_customization_config"][0]["targets"][0]
|
||||
assert "datasetUuid" not in target
|
||||
assert "datasetId" not in target
|
||||
|
||||
|
||||
def test_update_id_refs_handles_missing_time_grains():
|
||||
"""
|
||||
Test backward compatibility when time_grains is not present.
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
import yaml
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from superset import db
|
||||
@@ -304,3 +305,54 @@ version: 1.0.0
|
||||
""",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_export_two_datasets_same_table_name_different_schema(
|
||||
session: Session,
|
||||
) -> None:
|
||||
"""
|
||||
Regression coverage for GitHub issue #16141.
|
||||
|
||||
Exporting two datasets that share a `table_name` but live in
|
||||
different schemas (e.g. prod.users + dev.users) must produce two
|
||||
    distinct entries in the export. Historically the pair could collide
    onto a single filename; the export filename is now disambiguated by
    dataset id, and this test pins that behavior so it cannot silently
    regress.
|
||||
"""
|
||||
from superset.commands.dataset.export import ExportDatasetsCommand
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.models.core import Database
|
||||
|
||||
engine = db.session.get_bind()
|
||||
SqlaTable.metadata.create_all(engine) # pylint: disable=no-member
|
||||
|
||||
database = Database(database_name="my_database", sqlalchemy_uri="sqlite://")
|
||||
db.session.add(database)
|
||||
db.session.flush()
|
||||
|
||||
prod = SqlaTable(table_name="users", schema="prod", database=database)
|
||||
dev = SqlaTable(table_name="users", schema="dev", database=database)
|
||||
db.session.add_all([prod, dev])
|
||||
db.session.flush()
|
||||
|
||||
paths: list[str] = []
|
||||
contents: list[str] = []
|
||||
for ds in (prod, dev):
|
||||
for path, content_fn in ExportDatasetsCommand._export( # pylint: disable=protected-access
|
||||
ds, export_related=False
|
||||
):
|
||||
paths.append(path)
|
||||
contents.append(content_fn())
|
||||
|
||||
# Both datasets must produce distinct export paths — no collision.
|
||||
assert len(paths) == len(set(paths)), (
|
||||
f"Export filenames collided for same-table-name datasets: {paths}"
|
||||
)
|
||||
|
||||
# And both YAML payloads must reflect their own schema, not be
|
||||
# silently merged or overwritten.
|
||||
schemas_in_yaml = {yaml.safe_load(c)["schema"] for c in contents}
|
||||
assert schemas_in_yaml == {"prod", "dev"}, (
|
||||
f"Expected both prod and dev schemas in export, got {schemas_in_yaml}"
|
||||
)
|
||||
|
||||
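For context on the regression above: the collision comes from keying the export filename on the table name alone, so appending the dataset id is enough to keep prod.users and dev.users apart. A rough sketch of that disambiguation; the real path template used by ExportDatasetsCommand may differ:

# Illustrative only; the actual export path format is defined by the command.
def export_path(dataset):
    # prod.users (id=1) -> datasets/my_database/users_1.yaml
    # dev.users  (id=2) -> datasets/my_database/users_2.yaml
    return (
        f"datasets/{dataset.database.database_name}/"
        f"{dataset.table_name}_{dataset.id}.yaml"
    )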
@@ -18,9 +18,14 @@
from unittest.mock import MagicMock, patch

from superset.mcp_service.chart.chart_helpers import (
    apply_form_data_filters_to_query,
    build_query_dicts_from_form_data,
    extract_form_data_key_from_url,
    find_chart_by_identifier,
    get_cached_form_data,
    merge_extra_form_data_filters_into_query,
    merge_form_data_filters_into_query,
    prepare_form_data_for_query,
)


@@ -106,3 +111,177 @@ def test_get_cached_form_data_key_error(mock_init, mock_run):
    mock_init.return_value = None
    result = get_cached_form_data("bad_key")
    assert result is None


def test_prepare_form_data_for_query_preserves_existing_filters_with_adhoc(
    monkeypatch,
):
    monkeypatch.setattr(
        "superset.mcp_service.chart.chart_helpers.resolve_datasource_engine",
        lambda datasource_id, datasource_type: "base",
    )
    form_data = {
        "filters": [{"col": "gender", "op": "==", "val": "boy"}],
        "adhoc_filters": [
            {
                "clause": "WHERE",
                "expressionType": "SIMPLE",
                "subject": "gender",
                "operator": "==",
                "comparator": "girl",
            }
        ],
    }
    query = {}

    prepare_form_data_for_query(form_data, 1, "table")
    apply_form_data_filters_to_query(query, form_data)

    assert query["filters"] == [
        {"col": "gender", "op": "==", "val": "boy"},
        {"col": "gender", "op": "==", "val": "girl"},
    ]
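The assertion above fixes the SIMPLE adhoc-filter conversion: entries already present in filters are kept, and each adhoc filter is appended in query-filter form. Sketched for reference with a hypothetical helper name (this is not the chart_helpers API):

# Hypothetical helper illustrating the field mapping the test relies on.
def _adhoc_to_filter(adhoc):
    # {"subject": "gender", "operator": "==", "comparator": "girl"}
    #   -> {"col": "gender", "op": "==", "val": "girl"}
    return {
        "col": adhoc["subject"],
        "op": adhoc["operator"],
        "val": adhoc["comparator"],
    }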
def test_prepare_form_data_for_query_merges_cached_and_request_extra_form_data(
    monkeypatch,
):
    monkeypatch.setattr(
        "superset.mcp_service.chart.chart_helpers.resolve_datasource_engine",
        lambda datasource_id, datasource_type: "base",
    )
    form_data = {
        "adhoc_filters": [],
        "extra_form_data": {
            "adhoc_filters": [
                {
                    "clause": "WHERE",
                    "expressionType": "SIMPLE",
                    "subject": "country",
                    "operator": "==",
                    "comparator": "US",
                }
            ],
            "time_range": "Last year",
        },
    }
    query = {}

    prepare_form_data_for_query(
        form_data,
        1,
        "table",
        {
            "adhoc_filters": [
                {
                    "clause": "WHERE",
                    "expressionType": "SIMPLE",
                    "subject": "gender",
                    "operator": "==",
                    "comparator": "boy",
                }
            ],
            "time_range": "No filter",
        },
    )
    apply_form_data_filters_to_query(query, form_data)

    assert query["filters"] == [
        {"col": "country", "op": "==", "val": "US"},
        {"col": "gender", "op": "==", "val": "boy"},
    ]
    assert query["time_range"] == "No filter"


def test_build_query_dicts_from_form_data_uses_raw_all_columns(monkeypatch):
    monkeypatch.setattr(
        "superset.mcp_service.chart.chart_helpers.resolve_datasource_engine",
        lambda datasource_id, datasource_type: "base",
    )
    form_data = {
        "viz_type": "handlebars",
        "query_mode": "raw",
        "all_columns": ["state", "city"],
        "adhoc_filters": [],
    }

    queries = build_query_dicts_from_form_data(form_data, 1, "table")

    assert queries == [
        {
            "columns": ["state", "city"],
            "metrics": [],
            "filters": [],
        }
    ]


def test_merge_form_data_filters_into_query_applies_regular_overrides():
    query = {
        "filters": [{"col": "country", "op": "==", "val": "US"}],
        "time_range": "Last year",
        "granularity": "created_at",
        "time_grain": "P1Y",
        "time_grain_sqla": "P1Y",
        "where": "region = 'NA'",
        "having": "SUM(num) > 10",
    }

    merge_form_data_filters_into_query(
        query,
        {
            "filters": [{"col": "gender", "op": "==", "val": "boy"}],
            "time_range": "No filter",
            "granularity": "updated_at",
            "time_grain": "P1D",
            "time_grain_sqla": "P1D",
            "where": "name IS NOT NULL",
            "having": "COUNT(*) > 1",
        },
    )

    assert query["filters"] == [
        {"col": "country", "op": "==", "val": "US"},
        {"col": "gender", "op": "==", "val": "boy"},
    ]
    assert query["time_range"] == "No filter"
    assert query["granularity"] == "updated_at"
    assert query["time_grain"] == "P1D"
    assert query["time_grain_sqla"] == "P1D"
    assert query["where"] == "(region = 'NA') AND (name IS NOT NULL)"
    assert query["having"] == "(SUM(num) > 10) AND (COUNT(*) > 1)"


def test_merge_extra_form_data_filters_into_query_adds_only_extra_predicates(
    monkeypatch,
):
    monkeypatch.setattr(
        "superset.mcp_service.chart.chart_helpers.resolve_datasource_engine",
        lambda datasource_id, datasource_type: "base",
    )
    query = {
        "filters": [{"col": "country", "op": "==", "val": "US"}],
        "time_range": "Last year",
        "granularity": "created_at",
        "time_grain_sqla": "P1Y",
    }

    merge_extra_form_data_filters_into_query(
        query,
        {
            "filters": [{"col": "gender", "op": "==", "val": "boy"}],
            "granularity_sqla": "updated_at",
            "time_range": "No filter",
            "time_grain_sqla": "P1D",
        },
        1,
        "table",
    )

    assert query["filters"] == [
        {"col": "country", "op": "==", "val": "US"},
        {"col": "gender", "op": "==", "val": "boy"},
    ]
    assert query["time_range"] == "No filter"
    assert query["granularity"] == "updated_at"
    assert query["time_grain_sqla"] == "P1D"
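The where/having assertions above fix the merge semantics for free-form SQL clauses: both sides are parenthesized and joined with AND, so neither override clobbers the other. A minimal sketch of that composition, with an assumed helper name:

# Assumed helper name; shows only the clause composition the asserts check.
def _and_combine(existing, extra):
    # "region = 'NA'" + "name IS NOT NULL" -> "(region = 'NA') AND (name IS NOT NULL)"
    if existing and extra:
        return f"({existing}) AND ({extra})"
    return existing or extra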
@@ -19,6 +19,9 @@
Tests for the get_chart_data request schema and chart type fallback handling.
"""

import importlib
from contextlib import nullcontext
from types import SimpleNamespace
from typing import Any

import pytest
@@ -30,6 +33,7 @@ from superset.mcp_service.chart.schemas import (
    PerformanceMetadata,
)
from superset.mcp_service.chart.tool.get_chart_data import (
    _query_from_form_data,
    _sanitize_chart_data_for_llm_context,
)
from superset.mcp_service.utils import sanitize_for_llm_context
@@ -356,6 +360,181 @@ class TestChartDataSanitization:
        assert result.data[0][escaped_key] == sanitize_for_llm_context("value")


class _AsyncContext:
    async def report_progress(self, *args: Any, **kwargs: Any) -> None:
        pass


class TestUnsavedChartDataQueryConstruction:
    @pytest.mark.asyncio
    async def test_form_data_key_adhoc_filters_become_query_filters(
        self,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """Cached form_data adhoc filters should constrain unsaved chart data."""
        chart_data_module = importlib.import_module(
            "superset.mcp_service.chart.tool.get_chart_data"
        )
        query_context_factory_module = importlib.import_module(
            "superset.common.query_context_factory"
        )
        get_data_command_module = importlib.import_module(
            "superset.commands.chart.data.get_data_command"
        )

        captured_query_contexts: list[dict[str, Any]] = []

        class QueryContextFactory:
            def create(self, **kwargs: Any) -> object:
                captured_query_contexts.append(kwargs)
                return object()

        class ChartDataCommand:
            def __init__(self, query_context: object) -> None:
                self.query_context = query_context

            def validate(self) -> None:
                pass

            def run(self) -> dict[str, Any]:
                return {
                    "queries": [
                        {
                            "data": [{"gender": "boy", "count": 1}],
                            "colnames": ["gender", "count"],
                            "rowcount": 1,
                        }
                    ]
                }

        monkeypatch.setattr(
            query_context_factory_module,
            "QueryContextFactory",
            QueryContextFactory,
        )
        monkeypatch.setattr(
            get_data_command_module, "ChartDataCommand", ChartDataCommand
        )
        monkeypatch.setattr(
            chart_data_module,
            "event_logger",
            SimpleNamespace(log_context=lambda **kwargs: nullcontext()),
        )

        adhoc_filter = {
            "clause": "WHERE",
            "expressionType": "SIMPLE",
            "subject": "gender",
            "operator": "==",
            "comparator": "boy",
        }

        await _query_from_form_data(
            {
                "datasource_id": 1,
                "datasource_type": "table",
                "viz_type": "table",
                "groupby": ["gender"],
                "metrics": ["count"],
                "row_limit": 10,
                "adhoc_filters": [adhoc_filter],
            },
            GetChartDataRequest(form_data_key="cached-key"),
            _AsyncContext(),
        )

        query = captured_query_contexts[0]["queries"][0]
        assert query["filters"] == [{"col": "gender", "op": "==", "val": "boy"}]
        assert "adhoc_filters" not in query

    @pytest.mark.asyncio
    async def test_form_data_key_mixed_timeseries_builds_secondary_query(
        self,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """Unsaved mixed-timeseries form_data should preserve both query layers."""
        chart_data_module = importlib.import_module(
            "superset.mcp_service.chart.tool.get_chart_data"
        )
        query_context_factory_module = importlib.import_module(
            "superset.common.query_context_factory"
        )
        get_data_command_module = importlib.import_module(
            "superset.commands.chart.data.get_data_command"
        )

        captured_query_contexts: list[dict[str, Any]] = []

        class QueryContextFactory:
            def create(self, **kwargs: Any) -> object:
                captured_query_contexts.append(kwargs)
                return object()

        class ChartDataCommand:
            def __init__(self, query_context: object) -> None:
                self.query_context = query_context

            def validate(self) -> None:
                pass

            def run(self) -> dict[str, Any]:
                return {
                    "queries": [
                        {
                            "data": [{"ds": "2024-01-01", "sales": 1}],
                            "colnames": ["ds", "sales"],
                            "rowcount": 1,
                        },
                        {
                            "data": [{"ds": "2024-01-01", "profit": 2}],
                            "colnames": ["ds", "profit"],
                            "rowcount": 1,
                        },
                    ]
                }

        monkeypatch.setattr(
            query_context_factory_module,
            "QueryContextFactory",
            QueryContextFactory,
        )
        monkeypatch.setattr(
            get_data_command_module, "ChartDataCommand", ChartDataCommand
        )
        monkeypatch.setattr(
            chart_data_module,
            "event_logger",
            SimpleNamespace(log_context=lambda **kwargs: nullcontext()),
        )
        monkeypatch.setattr(
            "superset.mcp_service.chart.chart_helpers.resolve_datasource_engine",
            lambda datasource_id, datasource_type: "base",
        )

        await _query_from_form_data(
            {
                "datasource": "1__table",
                "viz_type": "mixed_timeseries",
                "x_axis": "ds",
                "groupby": ["country"],
                "metrics": ["sum__sales"],
                "groupby_b": ["state"],
                "metrics_b": ["sum__profit"],
            },
            GetChartDataRequest(form_data_key="cached-key", limit=99),
            _AsyncContext(),
        )

        queries = captured_query_contexts[0]["queries"]
        assert len(queries) == 2
        assert queries[0]["columns"] == ["ds", "country"]
        assert queries[0]["metrics"] == ["sum__sales"]
        assert queries[0]["row_limit"] == 99
        assert queries[1]["columns"] == ["ds", "state"]
        assert queries[1]["metrics"] == ["sum__profit"]
        assert queries[1]["row_limit"] == 99


class TestWorldMapChartFallback:
    """Tests for world_map chart fallback query construction."""
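The mixed-timeseries assertions above encode how the *_b form_data fields pair off with the primary fields: each layer contributes its own groupby and metrics on top of the shared x-axis and row limit. A sketch of that pairing under assumed names (not the actual query builder):

# Assumed shape; illustrates the primary/secondary pairing the test asserts.
def _mixed_timeseries_queries(form_data, limit):
    x_axis = form_data["x_axis"]
    return [
        {
            "columns": [x_axis, *form_data.get("groupby", [])],
            "metrics": form_data.get("metrics", []),
            "row_limit": limit,
        },
        {
            "columns": [x_axis, *form_data.get("groupby_b", [])],
            "metrics": form_data.get("metrics_b", []),
            "row_limit": limit,
        },
    ]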
@@ -19,6 +19,10 @@
Unit tests for get_chart_preview MCP tool
"""

import importlib
from types import SimpleNamespace
from typing import Any

import pytest

from superset.mcp_service.chart.schemas import (
@@ -34,8 +38,11 @@ from superset.mcp_service.chart.schemas import (
)
from superset.mcp_service.chart.tool.get_chart_preview import (
    _sanitize_chart_preview_for_llm_context,
    ASCIIPreviewStrategy,
    TablePreviewStrategy,
)
from superset.mcp_service.utils import sanitize_for_llm_context
from superset.utils import json as utils_json


class TestPreviewXAxisInQueryContext:
@@ -277,6 +284,385 @@ class TestGetChartPreview:
        # This is a structural test - actual integration tests would verify
        # the tool returns data matching this structure

    def test_table_preview_converts_saved_adhoc_filters_to_query_filters(
        self,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """Saved chart adhoc filters should constrain table previews."""
        query_context_factory_module = importlib.import_module(
            "superset.common.query_context_factory"
        )
        get_data_command_module = importlib.import_module(
            "superset.commands.chart.data.get_data_command"
        )

        captured_query_contexts: list[dict[str, Any]] = []

        class QueryContextFactory:
            def create(self, **kwargs: Any) -> object:
                captured_query_contexts.append(kwargs)
                return object()

        class ChartDataCommand:
            def __init__(self, query_context: object) -> None:
                self.query_context = query_context

            def validate(self) -> None:
                pass

            def run(self) -> dict[str, Any]:
                return {
                    "queries": [
                        {
                            "data": [{"gender": "boy", "count": 1}],
                            "colnames": ["gender", "count"],
                            "rowcount": 1,
                        }
                    ]
                }

        monkeypatch.setattr(
            query_context_factory_module,
            "QueryContextFactory",
            QueryContextFactory,
        )
        monkeypatch.setattr(
            get_data_command_module, "ChartDataCommand", ChartDataCommand
        )

        adhoc_filter = {
            "clause": "WHERE",
            "expressionType": "SIMPLE",
            "subject": "gender",
            "operator": "==",
            "comparator": "boy",
        }
        chart = SimpleNamespace(
            id=0,
            slice_name="Unsaved Chart Preview",
            viz_type="table",
            datasource_id=1,
            datasource_type="table",
            params=utils_json.dumps(
                {
                    "viz_type": "table",
                    "groupby": ["gender"],
                    "metrics": ["count"],
                    "adhoc_filters": [adhoc_filter],
                }
            ),
        )

        preview = TablePreviewStrategy(
            chart,
            GetChartPreviewRequest(identifier=1, format="table"),
        ).generate()

        assert isinstance(preview, TablePreview)
        query = captured_query_contexts[0]["queries"][0]
        assert query["filters"] == [{"col": "gender", "op": "==", "val": "boy"}]
        assert "adhoc_filters" not in query

    def test_table_preview_uses_singular_metric(
        self,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """Preview query construction should handle charts without metrics[]."""
        query_context_factory_module = importlib.import_module(
            "superset.common.query_context_factory"
        )
        get_data_command_module = importlib.import_module(
            "superset.commands.chart.data.get_data_command"
        )

        captured_query_contexts: list[dict[str, Any]] = []

        class QueryContextFactory:
            def create(self, **kwargs: Any) -> object:
                captured_query_contexts.append(kwargs)
                return object()

        class ChartDataCommand:
            def __init__(self, query_context: object) -> None:
                self.query_context = query_context

            def validate(self) -> None:
                pass

            def run(self) -> dict[str, Any]:
                return {
                    "queries": [
                        {
                            "data": [{"count": 10}],
                            "colnames": ["count"],
                            "rowcount": 1,
                        }
                    ]
                }

        monkeypatch.setattr(
            query_context_factory_module,
            "QueryContextFactory",
            QueryContextFactory,
        )
        monkeypatch.setattr(
            get_data_command_module, "ChartDataCommand", ChartDataCommand
        )

        metric = {"label": "count", "expressionType": "SIMPLE"}
        chart = SimpleNamespace(
            id=0,
            slice_name="Big Number Preview",
            viz_type="big_number",
            datasource_id=1,
            datasource_type="table",
            params=utils_json.dumps(
                {
                    "viz_type": "big_number",
                    "metric": metric,
                }
            ),
        )

        preview = TablePreviewStrategy(
            chart,
            GetChartPreviewRequest(identifier=1, format="table"),
        ).generate()

        assert isinstance(preview, TablePreview)
        query = captured_query_contexts[0]["queries"][0]
        assert query["columns"] == []
        assert query["metrics"] == [metric]

    def test_ascii_preview_uses_shared_query_builder(
        self,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """ASCII preview should use chart-type-aware query construction."""
        query_context_factory_module = importlib.import_module(
            "superset.common.query_context_factory"
        )
        get_data_command_module = importlib.import_module(
            "superset.commands.chart.data.get_data_command"
        )

        captured_query_contexts: list[dict[str, Any]] = []

        class QueryContextFactory:
            def create(self, **kwargs: Any) -> object:
                captured_query_contexts.append(kwargs)
                return object()

        class ChartDataCommand:
            def __init__(self, query_context: object) -> None:
                self.query_context = query_context

            def validate(self) -> None:
                pass

            def run(self) -> dict[str, Any]:
                return {
                    "queries": [
                        {
                            "data": [{"count": 10}],
                            "colnames": ["count"],
                            "rowcount": 1,
                        }
                    ]
                }

        monkeypatch.setattr(
            query_context_factory_module,
            "QueryContextFactory",
            QueryContextFactory,
        )
        monkeypatch.setattr(
            get_data_command_module, "ChartDataCommand", ChartDataCommand
        )

        metric = {"label": "count", "expressionType": "SIMPLE"}
        chart = SimpleNamespace(
            id=0,
            slice_name="Big Number Preview",
            viz_type="big_number",
            datasource_id=1,
            datasource_type="table",
            params=utils_json.dumps(
                {
                    "viz_type": "big_number",
                    "metric": metric,
                }
            ),
        )

        preview = ASCIIPreviewStrategy(
            chart,
            GetChartPreviewRequest(identifier=1, format="ascii"),
        ).generate()

        assert isinstance(preview, ASCIIPreview)
        query = captured_query_contexts[0]["queries"][0]
        assert query["columns"] == []
        assert query["metrics"] == [metric]
        assert query["row_limit"] == 50

    @pytest.mark.asyncio
    async def test_form_data_key_overrides_saved_params_for_table_preview(
        self,
        monkeypatch: pytest.MonkeyPatch,
    ) -> None:
        """form_data_key should drive table preview query construction."""
        from contextlib import nullcontext

        get_chart_preview_module = importlib.import_module(
            "superset.mcp_service.chart.tool.get_chart_preview"
        )
        query_context_factory_module = importlib.import_module(
            "superset.common.query_context_factory"
        )
        get_data_command_module = importlib.import_module(
            "superset.commands.chart.data.get_data_command"
        )
        get_form_data_module = importlib.import_module(
            "superset.commands.explore.form_data.get"
        )

        class AsyncContext:
            async def debug(self, *args: Any, **kwargs: Any) -> None:
                pass

            async def error(self, *args: Any, **kwargs: Any) -> None:
                pass

            async def info(self, *args: Any, **kwargs: Any) -> None:
                pass

            async def report_progress(self, *args: Any, **kwargs: Any) -> None:
                pass

            async def warning(self, *args: Any, **kwargs: Any) -> None:
                pass

        captured_query_contexts: list[dict[str, Any]] = []

        class QueryContextFactory:
            def create(self, **kwargs: Any) -> object:
                captured_query_contexts.append(kwargs)
                return object()

        class ChartDataCommand:
            def __init__(self, query_context: object) -> None:
                self.query_context = query_context

            def validate(self) -> None:
                pass

            def run(self) -> dict[str, Any]:
                return {
                    "queries": [
                        {
                            "data": [{"gender": "boy", "count": 1}],
                            "colnames": ["gender", "count"],
                            "rowcount": 1,
                        }
                    ]
                }

        saved_filter = {
            "clause": "WHERE",
            "expressionType": "SIMPLE",
            "subject": "gender",
            "operator": "==",
            "comparator": "girl",
        }
        cached_filter = {
            "clause": "WHERE",
            "expressionType": "SIMPLE",
            "subject": "gender",
            "operator": "==",
            "comparator": "boy",
        }
        chart = SimpleNamespace(
            id=42,
            slice_name="Saved Chart Preview",
            viz_type="table",
            datasource_id=1,
            datasource_type="table",
            params=utils_json.dumps(
                {
                    "viz_type": "table",
                    "groupby": ["gender"],
                    "metrics": ["count"],
                    "adhoc_filters": [saved_filter],
                }
            ),
        )

        monkeypatch.setattr(
            get_chart_preview_module,
            "find_chart_by_identifier",
            lambda identifier: chart,
        )
        monkeypatch.setattr(
            get_chart_preview_module,
            "validate_chart_dataset",
            lambda *args, **kwargs: SimpleNamespace(
                is_valid=True,
                warnings=[],
                error=None,
            ),
        )
        monkeypatch.setattr(
            get_chart_preview_module.db.session,
            "refresh",
            lambda chart: None,
        )
        monkeypatch.setattr(
            get_chart_preview_module.event_logger,
            "log_context",
            lambda **kwargs: nullcontext(),
        )
        monkeypatch.setattr(
            query_context_factory_module,
            "QueryContextFactory",
            QueryContextFactory,
        )
        monkeypatch.setattr(
            get_data_command_module,
            "ChartDataCommand",
            ChartDataCommand,
        )
        monkeypatch.setattr(
            get_form_data_module.GetFormDataCommand,
            "__init__",
            lambda self, cmd_params: None,
        )
        monkeypatch.setattr(
            get_form_data_module.GetFormDataCommand,
            "run",
            lambda self: utils_json.dumps(
                {
                    "viz_type": "table",
                    "groupby": ["gender"],
                    "metrics": ["count"],
                    "adhoc_filters": [cached_filter],
                }
            ),
        )

        result = await get_chart_preview_module._get_chart_preview_internal(
            GetChartPreviewRequest(
                identifier=42,
                form_data_key="cached-key",
                format="table",
            ),
            AsyncContext(),
        )

        assert isinstance(result, ChartPreview)
        query = captured_query_contexts[0]["queries"][0]
        assert query["filters"] == [{"col": "gender", "op": "==", "val": "boy"}]

    @pytest.mark.asyncio
    async def test_preview_dimensions(self):
        """Test preview dimensions in response."""
@@ -869,6 +869,61 @@ class TestBuildQueryContextTimeseriesAndMixed:
        secondary_filters = queries[1].get("filters", [])
        assert {"col": "channel", "op": "==", "val": "organic"} in secondary_filters

    @patch("superset.common.query_context_factory.QueryContextFactory")
    @patch("superset.daos.datasource.DatasourceDAO.get_datasource")
    def test_mixed_timeseries_adhoc_filters_b_replaces_primary_sql_clauses(
        self, mock_get_ds, mock_factory_cls
    ):
        """Secondary adhoc filters should not inherit primary SQL where/having."""
        mock_ds = Mock()
        mock_ds.database.db_engine_spec.engine = "postgresql"
        mock_get_ds.return_value = mock_ds

        mock_factory = Mock()
        mock_factory.create.return_value = Mock()
        mock_factory_cls.return_value = mock_factory

        form_data = {
            "datasource_id": 1,
            "datasource_type": "table",
            "viz_type": "mixed_timeseries",
            "x_axis": "ds",
            "metrics": ["sum__revenue"],
            "groupby": [],
            "metrics_b": ["count"],
            "groupby_b": [],
            "adhoc_filters": [
                {
                    "clause": "WHERE",
                    "expressionType": "SQL",
                    "sqlExpression": "country = 'US'",
                },
                {
                    "clause": "HAVING",
                    "expressionType": "SQL",
                    "sqlExpression": "SUM(revenue) > 100",
                },
            ],
            "adhoc_filters_b": [
                {
                    "clause": "WHERE",
                    "expressionType": "SQL",
                    "sqlExpression": "channel = 'organic'",
                }
            ],
        }

        with patch("superset.common.chart_data.ChartDataResultType") as mock_rt:
            mock_rt.QUERY = "QUERY"
            _build_query_context_from_form_data(form_data, chart=None)

        primary, secondary = mock_factory.create.call_args[1]["queries"]
        assert primary["where"] == "(country = 'US')"
        assert primary["having"] == "(SUM(revenue) > 100)"
        assert secondary["where"] == "(channel = 'organic')"
        assert "country = 'US'" not in secondary["where"]
        assert "having" not in secondary


class TestResolveDatasourceName:
    """Tests for _resolve_datasource_name helper."""