mirror of
https://github.com/apache/superset.git
synced 2026-05-16 21:35:08 +00:00
Compare commits
24 Commits
fix-flakey
...
work-pr-39
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aabf6c2a6e | ||
|
|
a1c99167a2 | ||
|
|
20d4271c53 | ||
|
|
d19470f60c | ||
|
|
add2c38787 | ||
|
|
d98d5e4fe6 | ||
|
|
41003686ab | ||
|
|
f322a50193 | ||
|
|
6dc0dc02b8 | ||
|
|
06a9b10068 | ||
|
|
342c536358 | ||
|
|
3a4f6024c2 | ||
|
|
e8d6779b5a | ||
|
|
9458c25c95 | ||
|
|
686ad08bb5 | ||
|
|
d0b77211fc | ||
|
|
11e44ac5bf | ||
|
|
afb3d086e2 | ||
|
|
2b71d964cc | ||
|
|
f02e5b7e83 | ||
|
|
5fa9657528 | ||
|
|
d853930840 | ||
|
|
4e09889607 | ||
|
|
672e9a1477 |
7
.github/workflows/superset-docs-verify.yml
vendored
7
.github/workflows/superset-docs-verify.yml
vendored
@@ -78,6 +78,13 @@ jobs:
|
||||
- name: yarn install
|
||||
run: |
|
||||
yarn install --check-cache
|
||||
- name: Lint docs links
|
||||
# Fast source-level check for bare relative internal links
|
||||
# like `[Foo](../foo)` that Docusaurus's onBrokenLinks
|
||||
# setting can't catch. Runs in seconds; fails fast before
|
||||
# the expensive build step.
|
||||
run: |
|
||||
yarn lint:docs-links
|
||||
- name: yarn typecheck
|
||||
run: |
|
||||
yarn typecheck
|
||||
|
||||
@@ -29,10 +29,10 @@ sidebar_position: 1
|
||||
|
||||
## Components
|
||||
|
||||
- [DropdownContainer](./dropdowncontainer)
|
||||
- [Flex](./flex)
|
||||
- [Grid](./grid)
|
||||
- [Layout](./layout)
|
||||
- [MetadataBar](./metadatabar)
|
||||
- [Space](./space)
|
||||
- [Table](./table)
|
||||
- [DropdownContainer](./dropdowncontainer.mdx)
|
||||
- [Flex](./flex.mdx)
|
||||
- [Grid](./grid.mdx)
|
||||
- [Layout](./layout.mdx)
|
||||
- [MetadataBar](./metadatabar.mdx)
|
||||
- [Space](./space.mdx)
|
||||
- [Table](./table.mdx)
|
||||
|
||||
@@ -62,7 +62,7 @@ This documentation is auto-generated from Storybook stories. To add or update co
|
||||
4. Run `yarn generate:superset-components` in the `docs/` directory
|
||||
|
||||
:::info Work in Progress
|
||||
This component library is actively being documented. See the [Components TODO](./TODO) page for a list of components awaiting documentation.
|
||||
This component library is actively being documented. See the [Components TODO](./TODO.md) page for a list of components awaiting documentation.
|
||||
:::
|
||||
|
||||
---
|
||||
|
||||
@@ -29,49 +29,49 @@ sidebar_position: 1
|
||||
|
||||
## Components
|
||||
|
||||
- [AutoComplete](./autocomplete)
|
||||
- [Avatar](./avatar)
|
||||
- [Badge](./badge)
|
||||
- [Breadcrumb](./breadcrumb)
|
||||
- [Button](./button)
|
||||
- [ButtonGroup](./buttongroup)
|
||||
- [CachedLabel](./cachedlabel)
|
||||
- [Card](./card)
|
||||
- [Checkbox](./checkbox)
|
||||
- [Collapse](./collapse)
|
||||
- [DatePicker](./datepicker)
|
||||
- [Divider](./divider)
|
||||
- [EditableTitle](./editabletitle)
|
||||
- [EmptyState](./emptystate)
|
||||
- [FaveStar](./favestar)
|
||||
- [IconButton](./iconbutton)
|
||||
- [Icons](./icons)
|
||||
- [IconTooltip](./icontooltip)
|
||||
- [InfoTooltip](./infotooltip)
|
||||
- [Input](./input)
|
||||
- [Label](./label)
|
||||
- [List](./list)
|
||||
- [ListViewCard](./listviewcard)
|
||||
- [Loading](./loading)
|
||||
- [Menu](./menu)
|
||||
- [Modal](./modal)
|
||||
- [ModalTrigger](./modaltrigger)
|
||||
- [Popover](./popover)
|
||||
- [ProgressBar](./progressbar)
|
||||
- [Radio](./radio)
|
||||
- [SafeMarkdown](./safemarkdown)
|
||||
- [Select](./select)
|
||||
- [Skeleton](./skeleton)
|
||||
- [Slider](./slider)
|
||||
- [Steps](./steps)
|
||||
- [Switch](./switch)
|
||||
- [TableCollection](./tablecollection)
|
||||
- [TableView](./tableview)
|
||||
- [Tabs](./tabs)
|
||||
- [Timer](./timer)
|
||||
- [Tooltip](./tooltip)
|
||||
- [Tree](./tree)
|
||||
- [TreeSelect](./treeselect)
|
||||
- [Typography](./typography)
|
||||
- [UnsavedChangesModal](./unsavedchangesmodal)
|
||||
- [Upload](./upload)
|
||||
- [AutoComplete](./autocomplete.mdx)
|
||||
- [Avatar](./avatar.mdx)
|
||||
- [Badge](./badge.mdx)
|
||||
- [Breadcrumb](./breadcrumb.mdx)
|
||||
- [Button](./button.mdx)
|
||||
- [ButtonGroup](./buttongroup.mdx)
|
||||
- [CachedLabel](./cachedlabel.mdx)
|
||||
- [Card](./card.mdx)
|
||||
- [Checkbox](./checkbox.mdx)
|
||||
- [Collapse](./collapse.mdx)
|
||||
- [DatePicker](./datepicker.mdx)
|
||||
- [Divider](./divider.mdx)
|
||||
- [EditableTitle](./editabletitle.mdx)
|
||||
- [EmptyState](./emptystate.mdx)
|
||||
- [FaveStar](./favestar.mdx)
|
||||
- [IconButton](./iconbutton.mdx)
|
||||
- [Icons](./icons.mdx)
|
||||
- [IconTooltip](./icontooltip.mdx)
|
||||
- [InfoTooltip](./infotooltip.mdx)
|
||||
- [Input](./input.mdx)
|
||||
- [Label](./label.mdx)
|
||||
- [List](./list.mdx)
|
||||
- [ListViewCard](./listviewcard.mdx)
|
||||
- [Loading](./loading.mdx)
|
||||
- [Menu](./menu.mdx)
|
||||
- [Modal](./modal.mdx)
|
||||
- [ModalTrigger](./modaltrigger.mdx)
|
||||
- [Popover](./popover.mdx)
|
||||
- [ProgressBar](./progressbar.mdx)
|
||||
- [Radio](./radio.mdx)
|
||||
- [SafeMarkdown](./safemarkdown.mdx)
|
||||
- [Select](./select.mdx)
|
||||
- [Skeleton](./skeleton.mdx)
|
||||
- [Slider](./slider.mdx)
|
||||
- [Steps](./steps.mdx)
|
||||
- [Switch](./switch.mdx)
|
||||
- [TableCollection](./tablecollection.mdx)
|
||||
- [TableView](./tableview.mdx)
|
||||
- [Tabs](./tabs.mdx)
|
||||
- [Timer](./timer.mdx)
|
||||
- [Tooltip](./tooltip.mdx)
|
||||
- [Tree](./tree.mdx)
|
||||
- [TreeSelect](./treeselect.mdx)
|
||||
- [Typography](./typography.mdx)
|
||||
- [UnsavedChangesModal](./unsavedchangesmodal.mdx)
|
||||
- [Upload](./upload.mdx)
|
||||
|
||||
@@ -327,13 +327,13 @@ stats.sort_stats('cumulative').print_stats(10)
|
||||
## Resources
|
||||
|
||||
### Internal
|
||||
- [Coding Guidelines](../guidelines/design-guidelines)
|
||||
- [Testing Guide](../testing/overview)
|
||||
- [Extension Architecture](../extensions/architecture)
|
||||
- [Coding Guidelines](../guidelines/design-guidelines.md)
|
||||
- [Testing Guide](../testing/overview.md)
|
||||
- [Extension Architecture](../extensions/architecture.md)
|
||||
|
||||
### External
|
||||
- [Google's Code Review Guide](https://google.github.io/eng-practices/review/)
|
||||
- [Best Practices for Code Review](https://smartbear.com/learn/code-review/best-practices-for-peer-code-review/)
|
||||
- [The Art of Readable Code](https://www.oreilly.com/library/view/the-art-of/9781449318482/)
|
||||
|
||||
Next: [Reporting issues effectively](./issue-reporting)
|
||||
Next: [Reporting issues effectively](./issue-reporting.md)
|
||||
|
||||
@@ -668,7 +668,7 @@ A series of checks will now run when you make a git commit.
|
||||
|
||||
## Linting
|
||||
|
||||
See [how tos](./howtos#linting)
|
||||
See [how tos](./howtos.md#linting)
|
||||
|
||||
## GitHub Actions and `act`
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P
|
||||
in `requirements.txt` pinned to a specific version which ensures that the application
|
||||
build is deterministic.
|
||||
- For TypeScript/JavaScript, include new libraries in `package.json`
|
||||
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](./howtos#testing) for how to run tests.
|
||||
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](./howtos.md#testing) for how to run tests.
|
||||
- **Documentation:** If the pull request adds functionality, the docs should be updated as part of the same PR.
|
||||
- **CI:** Reviewers will not review the code until all CI tests are passed. Sometimes there can be flaky tests. You can close and open PR to re-run CI test. Please report if the issue persists. After the CI fix has been deployed to `master`, please rebase your PR.
|
||||
- **Code coverage:** Please ensure that code coverage does not decrease.
|
||||
|
||||
@@ -282,7 +282,7 @@ You can now launch your VSCode debugger with the same config as above. VSCode wi
|
||||
|
||||
### Storybook
|
||||
|
||||
See the dedicated [Storybook documentation](../testing/storybook) for information on running Storybook locally and adding new stories.
|
||||
See the dedicated [Storybook documentation](../testing/storybook.md) for information on running Storybook locally and adding new stories.
|
||||
|
||||
## Contributing Translations
|
||||
|
||||
|
||||
@@ -413,6 +413,6 @@ Consider:
|
||||
- **Feature Request**: Use feature request template
|
||||
- **Question**: Use GitHub Discussions
|
||||
- **Configuration Help**: Ask in Slack
|
||||
- **Development Help**: See [Contributing Guide](./overview)
|
||||
- **Development Help**: See [Contributing Guide](./overview.md)
|
||||
|
||||
Next: [Understanding the release process](./release-process)
|
||||
Next: [Understanding the release process](./release-process.md)
|
||||
|
||||
@@ -94,7 +94,7 @@ Look through the GitHub issues. Issues tagged with
|
||||
Superset could always use better documentation,
|
||||
whether as part of the official Superset docs,
|
||||
in docstrings, `docs/*.rst` or even on the web as blog posts or
|
||||
articles. See [Documentation](./howtos#contributing-to-documentation) for more details.
|
||||
articles. See [Documentation](./howtos.md#contributing-to-documentation) for more details.
|
||||
|
||||
### Add Translations
|
||||
|
||||
@@ -103,7 +103,7 @@ text strings from Superset's UI. You can jump into the existing
|
||||
language dictionaries at
|
||||
`superset/translations/<language_code>/LC_MESSAGES/messages.po`, or
|
||||
even create a dictionary for a new language altogether.
|
||||
See [Translating](./howtos#contributing-translations) for more details.
|
||||
See [Translating](./howtos.md#contributing-translations) for more details.
|
||||
|
||||
### Ask Questions
|
||||
|
||||
@@ -158,9 +158,9 @@ Security team members should also follow these general expectations:
|
||||
|
||||
Ready to contribute? Here's how to get started:
|
||||
|
||||
1. **[Set up your environment](./development-setup)** - Get Superset running locally
|
||||
1. **[Set up your environment](./development-setup.md)** - Get Superset running locally
|
||||
2. **[Find something to work on](#types-of-contributions)** - Pick an issue or feature
|
||||
3. **[Submit your contribution](./submitting-pr)** - Create a pull request
|
||||
4. **[Follow guidelines](./guidelines)** - Ensure code quality
|
||||
3. **[Submit your contribution](./submitting-pr.md)** - Create a pull request
|
||||
4. **[Follow guidelines](./guidelines.md)** - Ensure code quality
|
||||
|
||||
Welcome to the Apache Superset community! 🚀
|
||||
|
||||
@@ -466,4 +466,4 @@ Credit:
|
||||
- [Release Scripts](https://github.com/apache/superset/tree/master/scripts/release)
|
||||
- [Superset Repository Scripts](https://github.com/apache/superset/tree/master/scripts)
|
||||
|
||||
Next: Return to [Contributing Overview](./overview)
|
||||
Next: Return to [Contributing Overview](./overview.md)
|
||||
|
||||
@@ -31,11 +31,11 @@ Learn how to create and submit high-quality pull requests to Apache Superset.
|
||||
### Prerequisites
|
||||
- [ ] Development environment is set up
|
||||
- [ ] You've forked and cloned the repository
|
||||
- [ ] You've read the [contributing overview](./overview)
|
||||
- [ ] You've read the [contributing overview](./overview.md)
|
||||
- [ ] You've found or created an issue to work on
|
||||
|
||||
### PR Readiness Checklist
|
||||
- [ ] Code follows [coding guidelines](../guidelines/design-guidelines)
|
||||
- [ ] Code follows [coding guidelines](../guidelines/design-guidelines.md)
|
||||
- [ ] Tests are passing locally
|
||||
- [ ] Linting passes (`pre-commit run --all-files`)
|
||||
- [ ] Documentation is updated if needed
|
||||
@@ -318,4 +318,4 @@ git push origin master
|
||||
- **GitHub**: Tag @apache/superset-committers for attention
|
||||
- **Mailing List**: dev@superset.apache.org
|
||||
|
||||
Next: [Understanding code review process](./code-review)
|
||||
Next: [Understanding code review process](./code-review.md)
|
||||
|
||||
@@ -233,7 +233,7 @@ This architecture provides several key benefits:
|
||||
|
||||
Now that you understand the architecture, explore:
|
||||
|
||||
- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
|
||||
- **[Quick Start](./quick-start)** - Build your first extension
|
||||
- **[Contribution Types](./contribution-types)** - What kinds of extensions you can build
|
||||
- **[Development](./development)** - Project structure, APIs, and development workflow
|
||||
- **[Dependencies](./dependencies.md)** - Managing dependencies and understanding API stability
|
||||
- **[Quick Start](./quick-start.md)** - Build your first extension
|
||||
- **[Contribution Types](./contribution-types.md)** - What kinds of extensions you can build
|
||||
- **[Development](./development.md)** - Project structure, APIs, and development workflow
|
||||
|
||||
@@ -29,7 +29,7 @@ These UI components are available to Superset extension developers through the `
|
||||
|
||||
## Available Components
|
||||
|
||||
- [Alert](./alert)
|
||||
- [Alert](./alert.mdx)
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -90,4 +90,4 @@ InteractiveMyComponent.argTypes = {
|
||||
|
||||
## Interactive Documentation
|
||||
|
||||
For interactive examples with controls, visit the [Storybook](/storybook/?path=/docs/extension-components--docs).
|
||||
For interactive examples with controls, run Storybook locally — see the [Storybook documentation](/developer-docs/testing/storybook).
|
||||
|
||||
@@ -110,7 +110,7 @@ editors.registerEditor(
|
||||
);
|
||||
```
|
||||
|
||||
See [Editors Extension Point](./extension-points/editors) for implementation details.
|
||||
See [Editors Extension Point](./extension-points/editors.md) for implementation details.
|
||||
|
||||
## Backend
|
||||
|
||||
@@ -146,7 +146,7 @@ class MyExtensionAPI(RestApi):
|
||||
from .api import MyExtensionAPI
|
||||
```
|
||||
|
||||
**Note**: The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:
|
||||
**Note**: The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:
|
||||
|
||||
- **Extension context**: `/extensions/{publisher}/{name}/` with ID prefixed as `extensions.{publisher}.{name}.{id}`
|
||||
- **Host context**: `/api/v1/` with original ID
|
||||
@@ -193,7 +193,7 @@ def get_summary() -> dict:
|
||||
return {"status": "success", "result": {"queries_today": 42}}
|
||||
```
|
||||
|
||||
See [MCP Integration](./mcp) for implementation details.
|
||||
See [MCP Integration](./mcp.md) for implementation details.
|
||||
|
||||
### MCP Prompts
|
||||
|
||||
@@ -223,7 +223,7 @@ async def analysis_guide(ctx: Context) -> str:
|
||||
"""
|
||||
```
|
||||
|
||||
See [MCP Integration](./mcp) for implementation details.
|
||||
See [MCP Integration](./mcp.md) for implementation details.
|
||||
|
||||
### Semantic Layers
|
||||
|
||||
|
||||
@@ -161,6 +161,6 @@ Until then, monitor the Superset release notes and test your extensions with eac
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Architecture](./architecture)** - Understand the extension system design
|
||||
- **[Development](./development)** - Learn about APIs and development workflow
|
||||
- **[Quick Start](./quick-start)** - Build your first extension
|
||||
- **[Architecture](./architecture.md)** - Understand the extension system design
|
||||
- **[Development](./development.md)** - Learn about APIs and development workflow
|
||||
- **[Quick Start](./quick-start.md)** - Build your first extension
|
||||
|
||||
@@ -252,7 +252,7 @@ class DatasetReferencesAPI(RestApi):
|
||||
|
||||
### Automatic Context Detection
|
||||
|
||||
The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:
|
||||
The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:
|
||||
|
||||
- **Extension APIs**: Registered under `/extensions/{publisher}/{name}/` with IDs prefixed as `extensions.{publisher}.{name}.{id}`
|
||||
- **Host APIs**: Registered under `/api/v1/` with original IDs
|
||||
|
||||
@@ -217,6 +217,6 @@ const disposable = handle.registerCompletionProvider(provider);
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[SQL Lab Extension Points](./sqllab)** - Learn about other SQL Lab customizations
|
||||
- **[Contribution Types](../contribution-types)** - Explore other contribution types
|
||||
- **[Development](../development)** - Set up your development environment
|
||||
- **[SQL Lab Extension Points](./sqllab.md)** - Learn about other SQL Lab customizations
|
||||
- **[Contribution Types](../contribution-types.md)** - Explore other contribution types
|
||||
- **[Development](../development.md)** - Set up your development environment
|
||||
|
||||
@@ -51,7 +51,7 @@ SQL Lab provides 4 extension points where extensions can contribute custom UI co
|
||||
| **Right Sidebar** | `sqllab.rightSidebar` | ✓ | — | Custom panels (AI assistants, query analysis) |
|
||||
| **Panels** | `sqllab.panels` | ✓ | ✓ | Custom tabs + toolbar actions (data profiling) |
|
||||
|
||||
\*Editor views are contributed via [Editor Contributions](./editors), not standard view contributions.
|
||||
\*Editor views are contributed via [Editor Contributions](./editors.md), not standard view contributions.
|
||||
|
||||
## Customization Types
|
||||
|
||||
@@ -78,7 +78,7 @@ Extensions can add toolbar actions to **Left Sidebar**, **Editor**, and **Panels
|
||||
|
||||
### Custom Editors
|
||||
|
||||
Extensions can replace the default SQL editor with custom implementations (Monaco, CodeMirror, etc.). See [Editor Contributions](./editors) for details.
|
||||
Extensions can replace the default SQL editor with custom implementations (Monaco, CodeMirror, etc.). See [Editor Contributions](./editors.md) for details.
|
||||
|
||||
## Examples
|
||||
|
||||
@@ -157,6 +157,6 @@ menus.registerMenuItem(
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Contribution Types](../contribution-types)** - Learn about other contribution types (commands, menus)
|
||||
- **[Development](../development)** - Set up your development environment
|
||||
- **[Quick Start](../quick-start)** - Build a complete extension
|
||||
- **[Contribution Types](../contribution-types.md)** - Learn about other contribution types (commands, menus)
|
||||
- **[Development](../development.md)** - Set up your development environment
|
||||
- **[Quick Start](../quick-start.md)** - Build a complete extension
|
||||
|
||||
@@ -455,5 +455,5 @@ async def metrics_guide(ctx: Context) -> str:
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Development](./development)** - Project structure, APIs, and dev workflow
|
||||
- **[Security](./security)** - Security best practices for extensions
|
||||
- **[Development](./development.md)** - Project structure, APIs, and dev workflow
|
||||
- **[Security](./security.md)** - Security best practices for extensions
|
||||
|
||||
@@ -47,13 +47,13 @@ Extension developers have access to pre-built UI components via `@apache-superse
|
||||
|
||||
## Next Steps
|
||||
|
||||
- **[Quick Start](./quick-start)** - Build your first extension with a complete walkthrough
|
||||
- **[Architecture](./architecture)** - Design principles and system overview
|
||||
- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
|
||||
- **[Contribution Types](./contribution-types)** - Available extension points
|
||||
- **[Development](./development)** - Project structure, APIs, and development workflow
|
||||
- **[Deployment](./deployment)** - Packaging and deploying extensions
|
||||
- **[MCP Integration](./mcp)** - Adding AI agent capabilities using extensions
|
||||
- **[Security](./security)** - Security considerations and best practices
|
||||
- **[Tasks](./tasks)** - Framework for creating and managing long running tasks
|
||||
- **[Community Extensions](./registry)** - Browse extensions shared by the community
|
||||
- **[Quick Start](./quick-start.md)** - Build your first extension with a complete walkthrough
|
||||
- **[Architecture](./architecture.md)** - Design principles and system overview
|
||||
- **[Dependencies](./dependencies.md)** - Managing dependencies and understanding API stability
|
||||
- **[Contribution Types](./contribution-types.md)** - Available extension points
|
||||
- **[Development](./development.md)** - Project structure, APIs, and development workflow
|
||||
- **[Deployment](./deployment.md)** - Packaging and deploying extensions
|
||||
- **[MCP Integration](./mcp.md)** - Adding AI agent capabilities using extensions
|
||||
- **[Security](./security.md)** - Security considerations and best practices
|
||||
- **[Tasks](./tasks.md)** - Framework for creating and managing long running tasks
|
||||
- **[Community Extensions](./registry.md)** - Browse extensions shared by the community
|
||||
|
||||
@@ -168,7 +168,7 @@ class HelloWorldAPI(RestApi):
|
||||
|
||||
**Key points:**
|
||||
|
||||
- Uses [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
|
||||
- Uses [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
|
||||
- Extends `RestApi` from `superset_core.rest_api.api`
|
||||
- Uses Flask-AppBuilder decorators (`@expose`, `@protect`, `@safe`)
|
||||
- Returns responses using `self.response(status_code, result=data)`
|
||||
@@ -184,7 +184,7 @@ Replace the generated print statement with API import to trigger registration:
|
||||
from .api import HelloWorldAPI # noqa: F401
|
||||
```
|
||||
|
||||
The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.
|
||||
The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.
|
||||
|
||||
## Step 5: Create Frontend Component
|
||||
|
||||
@@ -225,7 +225,7 @@ The `@apache-superset/core` package must be listed in both `peerDependencies` (t
|
||||
|
||||
The webpack configuration requires specific settings for Module Federation. Key settings include `externalsType: "window"` and `externals` to map `@apache-superset/core` to `window.superset` at runtime, `import: false` for shared modules to use the host's React instead of bundling a separate copy, and `remoteEntry.[contenthash].js` for cache busting.
|
||||
|
||||
**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture#module-federation) for a full explanation.
|
||||
**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture.md#module-federation) for a full explanation.
|
||||
|
||||
```javascript
|
||||
const path = require('path');
|
||||
@@ -496,7 +496,7 @@ Superset will extract and validate the extension metadata, load the assets, regi
|
||||
Here's what happens when your extension loads:
|
||||
|
||||
1. **Superset starts**: Reads `manifest.json` from the `.supx` bundle and loads the backend entrypoint
|
||||
2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
|
||||
2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
|
||||
3. **Frontend loads**: When SQL Lab opens, Superset fetches the remote entry file
|
||||
4. **Module Federation**: Webpack loads your extension module and resolves `@apache-superset/core` to `window.superset`
|
||||
5. **Registration**: The module executes at load time, calling `views.registerView` to register your panel
|
||||
@@ -509,9 +509,9 @@ Here's what happens when your extension loads:
|
||||
|
||||
Now that you have a working extension, explore:
|
||||
|
||||
- **[Development](./development)** - Project structure, APIs, and development workflow
|
||||
- **[Contribution Types](./contribution-types)** - Other contribution points beyond panels
|
||||
- **[Deployment](./deployment)** - Packaging and deploying your extension
|
||||
- **[Security](./security)** - Security best practices for extensions
|
||||
- **[Development](./development.md)** - Project structure, APIs, and development workflow
|
||||
- **[Contribution Types](./contribution-types.md)** - Other contribution points beyond panels
|
||||
- **[Deployment](./deployment.md)** - Packaging and deploying your extension
|
||||
- **[Security](./security.md)** - Security best practices for extensions
|
||||
|
||||
For a complete real-world example, examine the query insights extension in the Superset codebase.
|
||||
|
||||
@@ -28,7 +28,7 @@ By default, extensions are disabled and must be explicitly enabled by setting th
|
||||
|
||||
For external extensions, administrators are responsible for evaluating and verifying the security of any extensions they choose to install, just as they would when installing third-party NPM or PyPI packages. At this stage, all extensions run in the same context as the host application, without additional sandboxing. This means that external extensions can impact the security and performance of a Superset environment in the same way as any other installed dependency.
|
||||
|
||||
We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
|
||||
We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry.md) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
|
||||
|
||||
**Any performance or security vulnerabilities introduced by external extensions should be reported directly to the extension author, not as Superset vulnerabilities.**
|
||||
|
||||
|
||||
@@ -114,7 +114,7 @@ class CreateDashboardCommand(BaseCommand):
|
||||
|
||||
### Data Access Objects (DAOs)
|
||||
|
||||
See: [DAO Style Guidelines and Best Practices](./backend/dao-style-guidelines)
|
||||
See: [DAO Style Guidelines and Best Practices](./backend/dao-style-guidelines.md)
|
||||
|
||||
## Testing
|
||||
|
||||
|
||||
@@ -29,16 +29,16 @@ This is a list of statements that describe how we do frontend development in Sup
|
||||
- We develop using TypeScript.
|
||||
- See: [SIP-36](https://github.com/apache/superset/issues/9101)
|
||||
- We use React for building components, and Redux to manage app/global state.
|
||||
- See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines)
|
||||
- See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines.md)
|
||||
- We prefer functional components to class components and use hooks for local component state.
|
||||
- We use [Ant Design](https://ant.design/) components from our component library whenever possible, only building our own custom components when it's required.
|
||||
- See: [SIP-48](https://github.com/apache/superset/issues/11283)
|
||||
- We use [@emotion](https://emotion.sh/docs/introduction) to provide styling for our components, co-locating styling within component files.
|
||||
- See: [SIP-37](https://github.com/apache/superset/issues/9145)
|
||||
- See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines)
|
||||
- See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines.md)
|
||||
- We use Jest for unit tests, React Testing Library for component tests, and Cypress for end-to-end tests.
|
||||
- See: [SIP-56](https://github.com/apache/superset/issues/11830)
|
||||
- See: [Testing Guidelines and Best Practices](../testing/testing-guidelines)
|
||||
- See: [Testing Guidelines and Best Practices](../testing/testing-guidelines.md)
|
||||
- We add tests for every new component or file added to the frontend.
|
||||
- We organize our repo so similar files live near each other, and tests are co-located with the files they test.
|
||||
- See: [SIP-61](https://github.com/apache/superset/issues/12098)
|
||||
@@ -46,6 +46,6 @@ This is a list of statements that describe how we do frontend development in Sup
|
||||
- We use OXC (oxlint) and Prettier to automatically fix lint errors and format the code.
|
||||
- We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
|
||||
- If there's not a linting rule, we don't have a rule!
|
||||
- See: [Linting How-Tos](../contributing/howtos#typescript--javascript)
|
||||
- See: [Linting How-Tos](../contributing/howtos.md#typescript--javascript)
|
||||
- We use [React Storybook](https://storybook.js.org/) to help preview/test and stabilize our components
|
||||
- A public Storybook with components from the `master` branch is available [here](https://apache-superset.github.io/superset-ui/?path=/story/*)
|
||||
|
||||
@@ -31,7 +31,7 @@ This guide is intended primarily for reusable components. Whenever possible, all
|
||||
## General Guidelines
|
||||
|
||||
- We use [Ant Design](https://ant.design/) as our component library. Do not build a new component if Ant Design provides one but rather instead extend or customize what the library provides
|
||||
- Always style your component using Emotion and always prefer the theme variables whenever applicable. See: [Emotion Styling Guidelines and Best Practices](./emotion-styling-guidelines)
|
||||
- Always style your component using Emotion and always prefer the theme variables whenever applicable. See: [Emotion Styling Guidelines and Best Practices](./emotion-styling-guidelines.md)
|
||||
- All components should be made to be reusable whenever possible
|
||||
- All components should follow the structure and best practices as detailed below
|
||||
|
||||
@@ -53,7 +53,7 @@ superset-frontend/src/components
|
||||
|
||||
**Storybook:** Components should come with a storybook file whenever applicable, with the following naming convention `\{ComponentName\}.stories.tsx`. More details about Storybook below
|
||||
|
||||
**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines) for more details
|
||||
**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines.md) for more details
|
||||
|
||||
**Reference naming:** Use `PascalCase` for React components and `camelCase` for component instances
|
||||
|
||||
|
||||
@@ -37,16 +37,16 @@ Superset embraces a testing pyramid approach:
|
||||
## Testing Documentation
|
||||
|
||||
### Frontend Testing
|
||||
- **[Frontend Testing](./frontend-testing)** - Jest, React Testing Library, and component testing strategies
|
||||
- **[Frontend Testing](./frontend-testing.md)** - Jest, React Testing Library, and component testing strategies
|
||||
|
||||
### Backend Testing
|
||||
- **[Backend Testing](./backend-testing)** - pytest, database testing, and API testing patterns
|
||||
- **[Backend Testing](./backend-testing.md)** - pytest, database testing, and API testing patterns
|
||||
|
||||
### End-to-End Testing
|
||||
- **[E2E Testing](./e2e-testing)** - Playwright testing for complete user workflows
|
||||
- **[E2E Testing](./e2e-testing.md)** - Playwright testing for complete user workflows
|
||||
|
||||
### CI/CD Integration
|
||||
- **[CI/CD](./ci-cd)** - Continuous integration, automated testing, and deployment pipelines
|
||||
- **[CI/CD](./ci-cd.md)** - Continuous integration, automated testing, and deployment pipelines
|
||||
|
||||
## Testing Tools & Frameworks
|
||||
|
||||
|
||||
@@ -254,7 +254,7 @@ const config: Config = {
|
||||
'Apache Superset is a modern data exploration and visualization platform',
|
||||
url: 'https://superset.apache.org',
|
||||
baseUrl: '/',
|
||||
onBrokenLinks: 'warn',
|
||||
onBrokenLinks: 'throw',
|
||||
markdown: {
|
||||
mermaid: true,
|
||||
hooks: {
|
||||
|
||||
@@ -30,6 +30,7 @@
|
||||
"lint:db-metadata:report": "python3 ../superset/db_engine_specs/lint_metadata.py --markdown -o ../superset/db_engine_specs/METADATA_STATUS.md",
|
||||
"update:readme-db-logos": "node scripts/generate-database-docs.mjs --update-readme",
|
||||
"eslint": "eslint .",
|
||||
"lint:docs-links": "node scripts/lint-docs-links.mjs",
|
||||
"version:add": "node scripts/manage-versions.mjs add",
|
||||
"version:remove": "node scripts/manage-versions.mjs remove",
|
||||
"version:add:docs": "node scripts/manage-versions.mjs add docs",
|
||||
|
||||
@@ -1260,7 +1260,15 @@ function generateCategoryIndex(category, components) {
|
||||
};
|
||||
const componentList = components
|
||||
.sort((a, b) => a.componentName.localeCompare(b.componentName))
|
||||
.map(c => `- [${c.componentName}](./${c.componentName.toLowerCase()})`)
|
||||
// `.mdx` suffix matches the actual component page files emitted
|
||||
// by this generator (see the MDX wrappers below). The extension
|
||||
// is required: Docusaurus only validates and rewrites *file-based*
|
||||
// references (.md/.mdx). Bare relative paths bypass the file
|
||||
// resolver and get emitted as raw HTML hrefs that the browser
|
||||
// resolves against the current URL — which gives the wrong
|
||||
// directory for trailing-slash routes and breaks SPA navigation.
|
||||
// See docs/scripts/lint-docs-links.mjs.
|
||||
.map(c => `- [${c.componentName}](./${c.componentName.toLowerCase()}.mdx)`)
|
||||
.join('\n');
|
||||
|
||||
return `---
|
||||
@@ -1366,7 +1374,7 @@ This documentation is auto-generated from Storybook stories. To add or update co
|
||||
4. Run \`yarn generate:superset-components\` in the \`docs/\` directory
|
||||
|
||||
:::info Work in Progress
|
||||
This component library is actively being documented. See the [Components TODO](./TODO) page for a list of components awaiting documentation.
|
||||
This component library is actively being documented. See the [Components TODO](./TODO.md) page for a list of components awaiting documentation.
|
||||
:::
|
||||
|
||||
---
|
||||
|
||||
230
docs/scripts/lint-docs-links.mjs
Normal file
230
docs/scripts/lint-docs-links.mjs
Normal file
@@ -0,0 +1,230 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* lint-docs-links — source-level checks for internal markdown links.
|
||||
*
|
||||
* Catches three failure modes that combine to break SPA navigation in
|
||||
* a Docusaurus build:
|
||||
*
|
||||
* 1. BARE — `[X](../foo)` with no extension. Skips
|
||||
* Docusaurus's file resolver entirely. Emitted
|
||||
* as a raw href and resolved by the browser
|
||||
* against the current page URL — usually the
|
||||
* wrong directory for trailing-slash routes.
|
||||
* `onBrokenLinks: 'throw'` cannot catch this.
|
||||
*
|
||||
* 2. MISSING_TARGET — `[X](./gone.md)` with an extension, but no
|
||||
* file at that path. The Docusaurus build
|
||||
* catches this too (via
|
||||
* `onBrokenMarkdownLinks: 'throw'`) but only
|
||||
* after a multi-minute build. This script
|
||||
* flags it in ~1s.
|
||||
*
|
||||
* 3. WRONG_EXTENSION — `[X](./foo.md)` where the file is actually
|
||||
* `foo.mdx` (or vice versa). Same end result
|
||||
* as MISSING_TARGET, but the fix is one
|
||||
* character — so we report it as its own
|
||||
* category with the actual extension on disk.
|
||||
*
|
||||
* Skips: fenced code blocks, asset-style targets (.png/.json/etc.),
|
||||
* external URLs, in-page anchors, and the `versioned_docs/`
|
||||
* snapshots (those are frozen historical content).
|
||||
*
|
||||
* Run from `docs/`:
|
||||
* node scripts/lint-docs-links.mjs
|
||||
*
|
||||
* Exits 0 on clean, 1 on any finding.
|
||||
*/
|
||||
|
||||
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

// ESM modules have no __filename/__dirname; derive them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// The docs site root — this script lives in docs/scripts/, so go up one level.
const docsRoot = path.join(__dirname, '..');

// Content trees to scan, relative to docsRoot. Roots absent from a given
// checkout are skipped at scan time.
const ROOTS = ['docs', 'admin_docs', 'developer_docs', 'components'];

// Extensions that are legitimately linked without being doc pages
// (images, data files, archives, media). Links to these are never flagged.
const NON_DOC_EXTENSIONS = new Set([
  '.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg', '.ico',
  '.json', '.yaml', '.yml', '.txt', '.csv',
  '.zip', '.tar', '.gz',
  '.pdf',
  '.mp4', '.webm', '.mov',
]);

// Matches markdown links whose target is a *relative* path (starts with
// ./ or ../). The `url` named group captures the target. External URLs,
// absolute paths, and bare #anchors never match; the lazy quantifiers and
// the [^)\s] class stop the match at the closing paren or any whitespace.
const LINK_RE = /\[[^\]\n]+?\]\((?<url>\.{1,2}\/[^)\s]+?)\)/g;
|
||||
|
||||
/**
 * Decide what a single relative markdown link is.
 *
 * @param {string} sourceFile path of the markdown file containing the link
 * @param {string} url        raw relative target, e.g. `../foo.md#anchor`
 * @returns {{kind: string, actualExt?: string}} one of:
 *   `ok` / `asset` / `bare` / `wrong-extension` / `missing-target`
 */
function classifyLink(sourceFile, url) {
  // Inspect only the path portion — drop any #fragment or ?query first.
  const pathPart = url.split('#', 1)[0].split('?', 1)[0];
  const extension = path.extname(pathPart).toLowerCase();

  // Non-doc assets — legit bare extensions, leave alone.
  if (extension !== '' && NON_DOC_EXTENSIONS.has(extension)) {
    return { kind: 'asset' };
  }

  // Anything that doesn't end in .md/.mdx is a bare relative URL.
  const isDocLink = extension === '.md' || extension === '.mdx';
  if (!isDocLink) {
    return { kind: 'bare' };
  }

  // Has a doc extension — resolve against the linking file and check disk.
  const resolved = path.normalize(
    path.join(path.dirname(sourceFile), pathPart),
  );
  if (fs.existsSync(resolved)) {
    return { kind: 'ok' };
  }

  // Target is missing — maybe the author just used the wrong doc extension.
  const flippedExt = extension === '.md' ? '.mdx' : '.md';
  const flippedTarget = resolved.slice(0, -extension.length) + flippedExt;
  return fs.existsSync(flippedTarget)
    ? { kind: 'wrong-extension', actualExt: flippedExt }
    : { kind: 'missing-target' };
}
|
||||
|
||||
function* walk(dir) {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const full = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
if (
|
||||
entry.name.startsWith('.') ||
|
||||
entry.name === 'node_modules' ||
|
||||
entry.name.endsWith('_versioned_docs') ||
|
||||
entry.name === 'versioned_docs'
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
yield* walk(full);
|
||||
} else if (entry.isFile()) {
|
||||
if (entry.name.endsWith('.md') || entry.name.endsWith('.mdx')) {
|
||||
yield full;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Scan one markdown file and return a finding for every problematic
 * relative link outside fenced code blocks.
 *
 * Each finding carries a 1-based `line`, the raw `url`, and the
 * classification fields produced by classifyLink().
 */
function lintFile(file) {
  const problems = [];
  let insideFence = false;

  const text = fs.readFileSync(file, 'utf8');
  text.split('\n').forEach((line, idx) => {
    // Toggle fence state on ``` markers; links inside fences are examples.
    if (line.trimStart().startsWith('```')) {
      insideFence = !insideFence;
      return;
    }
    if (insideFence) {
      return;
    }

    for (const match of line.matchAll(LINK_RE)) {
      const url = match.groups.url;
      const verdict = classifyLink(file, url);
      if (verdict.kind === 'ok' || verdict.kind === 'asset') {
        continue;
      }
      problems.push({ line: idx + 1, url, ...verdict });
    }
  });

  return problems;
}
|
||||
|
||||
// ---- Scan phase: collect findings across every content root. ----
const findings = [];
for (const root of ROOTS) {
  const abs = path.join(docsRoot, root);
  // Some roots may not exist in every checkout; skip silently.
  if (!fs.existsSync(abs)) continue;
  for (const file of walk(abs)) {
    for (const f of lintFile(file)) {
      // Record paths relative to docsRoot for stable, readable output.
      findings.push({ file: path.relative(docsRoot, file), ...f });
    }
  }
}

// Clean run: report success and exit 0 so CI passes.
if (findings.length === 0) {
  console.log('✓ lint-docs-links: no broken internal links found');
  process.exit(0);
}

// Group by kind for readable output.
const groups = {
  bare: [],
  'wrong-extension': [],
  'missing-target': [],
};
for (const f of findings) {
  groups[f.kind].push(f);
}

// ---- Report phase: one section per finding kind, then exit 1. ----
console.error(
  `✗ lint-docs-links: found ${findings.length} broken internal link(s)`
);
console.error('');

if (groups.bare.length) {
  console.error(
    ` ${groups.bare.length} bare relative link(s) (no .md/.mdx extension)`
  );
  console.error(
    " Docusaurus's file resolver skips these; the browser resolves them"
  );
  console.error(
    ' against the current page URL — wrong directory for trailing-slash routes.'
  );
  console.error(' Add the extension so the file resolver picks them up.');
  console.error('');
  for (const f of groups.bare) {
    console.error(` ${f.file}:${f.line} ${f.url}`);
  }
  console.error('');
}

if (groups['wrong-extension'].length) {
  console.error(
    ` ${groups['wrong-extension'].length} wrong-extension link(s) (.md vs .mdx mismatch)`
  );
  console.error(' The target file exists with the other extension on disk.');
  console.error('');
  for (const f of groups['wrong-extension']) {
    // `actualExt` is only present on wrong-extension findings (see classifyLink).
    console.error(
      ` ${f.file}:${f.line} ${f.url} → use ${f.actualExt}`
    );
  }
  console.error('');
}

if (groups['missing-target'].length) {
  console.error(
    ` ${groups['missing-target'].length} missing-target link(s) (file doesn't exist)`
  );
  console.error('');
  for (const f of groups['missing-target']) {
    console.error(` ${f.file}:${f.line} ${f.url}`);
  }
  console.error('');
}

// Non-zero exit fails the CI step.
process.exit(1);
|
||||
@@ -20,12 +20,12 @@ Alerts and reports are disabled by default. To turn them on, you need to do some
|
||||
|
||||
#### In your `superset_config.py` or `superset_config_docker.py`
|
||||
|
||||
- `"ALERT_REPORTS"` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) must be turned to True.
|
||||
- `"ALERT_REPORTS"` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) must be turned to True.
|
||||
- `beat_schedule` in CeleryConfig must contain schedule for `reports.scheduler`.
|
||||
- At least one of those must be configured, depending on what you want to use:
|
||||
- emails: `SMTP_*` settings
|
||||
- Slack messages: `SLACK_API_TOKEN`
|
||||
- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
|
||||
- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
|
||||
- Use date codes from [strftime.org](https://strftime.org/) to create the email subject.
|
||||
- If no date code is provided, the original string will be used as the email subject.
|
||||
|
||||
@@ -38,7 +38,7 @@ Screenshots will be taken but no messages actually sent as long as `ALERT_REPORT
|
||||
- You must install a headless browser, for taking screenshots of the charts and dashboards. Only Firefox and Chrome are currently supported.
|
||||
> If you choose Chrome, you must also change the value of `WEBDRIVER_TYPE` to `"chrome"` in your `superset_config.py`.
|
||||
|
||||
Note: All the components required (Firefox headless browser, Redis, Postgres db, celery worker and celery beat) are present in the *dev* docker image if you are following [Installing Superset Locally](/docs/6.0.0/installation/docker-compose/).
|
||||
Note: All the components required (Firefox headless browser, Redis, Postgres db, celery worker and celery beat) are present in the *dev* docker image if you are following [Installing Superset Locally](/user-docs/6.0.0/installation/docker-compose/).
|
||||
All you need to do is add the required config variables described in this guide (See `Detailed Config`).
|
||||
|
||||
If you are running a non-dev docker image, e.g., a stable release like `apache/superset:3.1.0`, that image does not include a headless browser. Only the `superset_worker` container needs this headless browser to browse to the target chart or dashboard.
|
||||
@@ -70,7 +70,7 @@ Note: when you configure an alert or a report, the Slack channel list takes chan
|
||||
### Kubernetes-specific
|
||||
|
||||
- You must have a `celery beat` pod running. If you're using the chart included in the GitHub repository under [helm/superset](https://github.com/apache/superset/tree/master/helm/superset), you need to put `supersetCeleryBeat.enabled = true` in your values override.
|
||||
- You can see the dedicated docs about [Kubernetes installation](/docs/6.0.0/installation/kubernetes) for more details.
|
||||
- You can see the dedicated docs about [Kubernetes installation](/user-docs/6.0.0/installation/kubernetes) for more details.
|
||||
|
||||
### Docker Compose specific
|
||||
|
||||
|
||||
@@ -78,11 +78,11 @@ Caching for SQL Lab query results is used when async queries are enabled and is
|
||||
Note that this configuration does not use a flask-caching dictionary for its configuration, but
|
||||
instead requires a cachelib object.
|
||||
|
||||
See [Async Queries via Celery](/docs/6.0.0/configuration/async-queries-celery) for details.
|
||||
See [Async Queries via Celery](/user-docs/6.0.0/configuration/async-queries-celery) for details.
|
||||
|
||||
## Caching Thumbnails
|
||||
|
||||
This is an optional feature that can be turned on by activating its [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) on config:
|
||||
This is an optional feature that can be turned on by activating its [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) on config:
|
||||
|
||||
```
|
||||
FEATURE_FLAGS = {
|
||||
|
||||
@@ -37,7 +37,7 @@ ENV SUPERSET_CONFIG_PATH /app/superset_config.py
|
||||
```
|
||||
|
||||
Docker compose deployments handle application configuration differently using specific conventions.
|
||||
Refer to the [docker compose tips & configuration](/docs/6.0.0/installation/docker-compose#docker-compose-tips--configuration)
|
||||
Refer to the [docker compose tips & configuration](/user-docs/6.0.0/installation/docker-compose#docker-compose-tips--configuration)
|
||||
for details.
|
||||
|
||||
The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
|
||||
@@ -254,7 +254,7 @@ flask --app "superset.app:create_app(superset_app_root='/analytics')"
|
||||
|
||||
### Docker builds
|
||||
|
||||
The [docker compose](/docs/6.0.0/installation/docker-compose#configuring-further) developer
|
||||
The [docker compose](/user-docs/6.0.0/installation/docker-compose#configuring-further) developer
|
||||
configuration includes an additional environmental variable,
|
||||
[`SUPERSET_APP_ROOT`](https://github.com/apache/superset/blob/master/docker/.env),
|
||||
to simplify the process of setting up a non-default root path across the services.
|
||||
@@ -449,4 +449,4 @@ FEATURE_FLAGS = {
|
||||
}
|
||||
```
|
||||
|
||||
A current list of feature flags can be found in the [Feature Flags](/docs/6.0.0/configuration/feature-flags) documentation.
|
||||
A current list of feature flags can be found in the [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) documentation.
|
||||
|
||||
@@ -14,7 +14,7 @@ in your environment.
|
||||
You’ll need to install the required packages for the database you want to use as your metadata database
|
||||
as well as the packages needed to connect to the databases you want to access through Superset.
|
||||
For information about setting up Superset's metadata database, please refer to
|
||||
installation documentations ([Docker Compose](/docs/6.0.0/installation/docker-compose), [Kubernetes](/docs/6.0.0/installation/kubernetes))
|
||||
installation documentations ([Docker Compose](/user-docs/6.0.0/installation/docker-compose), [Kubernetes](/user-docs/6.0.0/installation/kubernetes))
|
||||
:::
|
||||
|
||||
This documentation tries to keep pointer to the different drivers for commonly used database
|
||||
@@ -26,7 +26,7 @@ Superset requires a Python [DB-API database driver](https://peps.python.org/pep-
|
||||
and a [SQLAlchemy dialect](https://docs.sqlalchemy.org/en/20/dialects/) to be installed for
|
||||
each database engine you want to connect to.
|
||||
|
||||
You can read more [here](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
|
||||
You can read more [here](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
|
||||
install new database drivers into your Superset configuration.
|
||||
|
||||
### Supported Databases and Dependencies
|
||||
@@ -37,53 +37,53 @@ are compatible with Superset.
|
||||
|
||||
| <div style={{width: '150px'}}>Database</div> | PyPI package | Connection String |
|
||||
| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| [AWS Athena](/docs/6.0.0/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
|
||||
| [AWS DynamoDB](/docs/6.0.0/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
|
||||
| [AWS Redshift](/docs/6.0.0/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
|
||||
| [Apache Doris](/docs/6.0.0/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Apache Drill](/docs/6.0.0/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
|
||||
| [Apache Druid](/docs/6.0.0/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
|
||||
| [Apache Hive](/docs/6.0.0/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
|
||||
| [Apache Impala](/docs/6.0.0/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
|
||||
| [Apache Kylin](/docs/6.0.0/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` |
|
||||
| [Apache Pinot](/docs/6.0.0/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` |
|
||||
| [Apache Solr](/docs/6.0.0/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` |
|
||||
| [Apache Spark SQL](/docs/6.0.0/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
|
||||
| [Ascend.io](/docs/6.0.0/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
|
||||
| [Azure MS SQL](/docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
|
||||
| [ClickHouse](/docs/6.0.0/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [CockroachDB](/docs/6.0.0/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
|
||||
| [Couchbase](/docs/6.0.0/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
|
||||
| [CrateDB](/docs/6.0.0/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
|
||||
| [Denodo](/docs/6.0.0/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [Dremio](/docs/6.0.0/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
|
||||
| [Elasticsearch](/docs/6.0.0/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
|
||||
| [Exasol](/docs/6.0.0/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
|
||||
| [Google BigQuery](/docs/6.0.0/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
|
||||
| [Google Sheets](/docs/6.0.0/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` |
|
||||
| [Firebolt](/docs/6.0.0/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` |
|
||||
| [Hologres](/docs/6.0.0/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [IBM Db2](/docs/6.0.0/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
|
||||
| [IBM Netezza Performance Server](/docs/6.0.0/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [MySQL](/docs/6.0.0/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [OceanBase](/docs/6.0.0/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Oracle](/docs/6.0.0/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
|
||||
| [Parseable](/docs/6.0.0/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
|
||||
| [PostgreSQL](/docs/6.0.0/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Presto](/docs/6.0.0/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [SAP Hana](/docs/6.0.0/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
|
||||
| [SingleStore](/docs/6.0.0/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` |
|
||||
| [StarRocks](/docs/6.0.0/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Snowflake](/docs/6.0.0/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
|
||||
| [AWS Athena](/user-docs/6.0.0/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
|
||||
| [AWS DynamoDB](/user-docs/6.0.0/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
|
||||
| [AWS Redshift](/user-docs/6.0.0/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
|
||||
| [Apache Doris](/user-docs/6.0.0/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Apache Drill](/user-docs/6.0.0/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
|
||||
| [Apache Druid](/user-docs/6.0.0/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
|
||||
| [Apache Hive](/user-docs/6.0.0/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
|
||||
| [Apache Impala](/user-docs/6.0.0/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
|
||||
| [Apache Kylin](/user-docs/6.0.0/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` |
|
||||
| [Apache Pinot](/user-docs/6.0.0/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` |
|
||||
| [Apache Solr](/user-docs/6.0.0/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` |
|
||||
| [Apache Spark SQL](/user-docs/6.0.0/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
|
||||
| [Ascend.io](/user-docs/6.0.0/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
|
||||
| [Azure MS SQL](/user-docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
|
||||
| [ClickHouse](/user-docs/6.0.0/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [CockroachDB](/user-docs/6.0.0/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
|
||||
| [Couchbase](/user-docs/6.0.0/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
|
||||
| [CrateDB](/user-docs/6.0.0/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
|
||||
| [Denodo](/user-docs/6.0.0/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [Dremio](/user-docs/6.0.0/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
|
||||
| [Elasticsearch](/user-docs/6.0.0/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
|
||||
| [Exasol](/user-docs/6.0.0/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
|
||||
| [Google BigQuery](/user-docs/6.0.0/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
|
||||
| [Google Sheets](/user-docs/6.0.0/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` |
|
||||
| [Firebolt](/user-docs/6.0.0/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` |
|
||||
| [Hologres](/user-docs/6.0.0/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [IBM Db2](/user-docs/6.0.0/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
|
||||
| [IBM Netezza Performance Server](/user-docs/6.0.0/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [MySQL](/user-docs/6.0.0/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [OceanBase](/user-docs/6.0.0/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Oracle](/user-docs/6.0.0/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
|
||||
| [Parseable](/user-docs/6.0.0/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
|
||||
| [PostgreSQL](/user-docs/6.0.0/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [Presto](/user-docs/6.0.0/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
|
||||
| [SAP Hana](/user-docs/6.0.0/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
|
||||
| [SingleStore](/user-docs/6.0.0/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` |
|
||||
| [StarRocks](/user-docs/6.0.0/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
|
||||
| [Snowflake](/user-docs/6.0.0/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
|
||||
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
|
||||
| [SQL Server](/docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
|
||||
| [TDengine](/docs/6.0.0/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
|
||||
| [Teradata](/docs/6.0.0/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
|
||||
| [TimescaleDB](/docs/6.0.0/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
|
||||
| [Trino](/docs/6.0.0/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
|
||||
| [Vertica](/docs/6.0.0/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [YDB](/docs/6.0.0/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
|
||||
| [YugabyteDB](/docs/6.0.0/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [SQL Server](/user-docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
|
||||
| [TDengine](/user-docs/6.0.0/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
|
||||
| [Teradata](/user-docs/6.0.0/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
|
||||
| [TimescaleDB](/user-docs/6.0.0/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
|
||||
| [Trino](/user-docs/6.0.0/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
|
||||
| [Vertica](/user-docs/6.0.0/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
| [YDB](/user-docs/6.0.0/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
|
||||
| [YugabyteDB](/user-docs/6.0.0/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
|
||||
|
||||
---
|
||||
|
||||
@@ -109,7 +109,7 @@ The connector library installation process is the same for all additional librar
|
||||
|
||||
#### 1. Determine the driver you need
|
||||
|
||||
Consult the [list of database drivers](/docs/6.0.0/configuration/databases)
|
||||
Consult the [list of database drivers](/user-docs/6.0.0/configuration/databases)
|
||||
and find the PyPI package needed to connect to your database. In this example, we're connecting
|
||||
to a MySQL database, so we'll need the `mysqlclient` connector library.
|
||||
|
||||
@@ -165,11 +165,11 @@ to your database via the Superset web UI.
|
||||
|
||||
As an admin user, go to Settings -> Data: Database Connections and click the +DATABASE button.
|
||||
From there, follow the steps on the
|
||||
[Using Database Connection UI page](/docs/6.0.0/configuration/databases#connecting-through-the-ui).
|
||||
[Using Database Connection UI page](/user-docs/6.0.0/configuration/databases#connecting-through-the-ui).
|
||||
|
||||
Consult the page for your specific database type in the Superset documentation to determine
|
||||
the connection string and any other parameters you need to input. For instance,
|
||||
on the [MySQL page](/docs/6.0.0/configuration/databases#mysql), we see that the connection string
|
||||
on the [MySQL page](/user-docs/6.0.0/configuration/databases#mysql), we see that the connection string
|
||||
to a local MySQL database differs depending on whether the setup is running on Linux or Mac.
|
||||
|
||||
Click the “Test Connection” button, which should result in a popup message saying,
|
||||
@@ -407,7 +407,7 @@ this:
|
||||
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
|
||||
```
|
||||
|
||||
Follow the steps [here](/docs/6.0.0/configuration/databases#installing-database-drivers)
|
||||
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-database-drivers)
|
||||
to install the CrateDB connector package when setting up Superset locally using
|
||||
Docker Compose.
|
||||
|
||||
@@ -782,7 +782,7 @@ The recommended connector library for BigQuery is
|
||||
|
||||
##### Install BigQuery Driver
|
||||
|
||||
Follow the steps [here](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
|
||||
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
|
||||
install new database drivers when setting up Superset locally via docker compose.
|
||||
|
||||
```bash
|
||||
@@ -1177,7 +1177,7 @@ risingwave://root@{hostname}:{port}/{database}?sslmode=disable
|
||||
|
||||
##### Install Snowflake Driver
|
||||
|
||||
Follow the steps [here](/docs/6.0.0/configuration/databases#installing-database-drivers) about how to
|
||||
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-database-drivers) about how to
|
||||
install new database drivers when setting up Superset locally via docker compose.
|
||||
|
||||
```bash
|
||||
|
||||
@@ -51,7 +51,7 @@ Restart Superset for this configuration change to take effect.
|
||||
|
||||
#### Making a Dashboard Public
|
||||
|
||||
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/docs/6.0.0/configuration/feature-flags) to `superset_config.py`
|
||||
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) to `superset_config.py`
|
||||
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)
|
||||
|
||||
#### Embedding a Public Dashboard
|
||||
|
||||
@@ -10,7 +10,7 @@ version: 1
|
||||
## Jinja Templates
|
||||
|
||||
SQL Lab and Explore supports [Jinja templating](https://jinja.palletsprojects.com/en/2.11.x/) in queries.
|
||||
To enable templating, the `ENABLE_TEMPLATE_PROCESSING` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) needs to be enabled in
|
||||
To enable templating, the `ENABLE_TEMPLATE_PROCESSING` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) needs to be enabled in
|
||||
`superset_config.py`. When templating is enabled, python code can be embedded in virtual datasets and
|
||||
in Custom SQL in the filter and metric controls in Explore. By default, the following variables are
|
||||
made available in the Jinja context:
|
||||
|
||||
@@ -20,7 +20,7 @@ To help make the problem somewhat tractable—given that Apache Superset has no
|
||||
|
||||
To strive for data consistency (regardless of the timezone of the client) the Apache Superset backend tries to ensure that any timestamp sent to the client has an explicit (or semi-explicit as in the case with [Epoch time](https://en.wikipedia.org/wiki/Unix_time) which is always in reference to UTC) timezone encoded within.
|
||||
|
||||
The challenge however lies with the slew of [database engines](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects) relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timestamp are serializd to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
|
||||
The challenge however lies with the slew of [database engines](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects) relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
|
||||
|
||||
For example the following is a comparison of MySQL and Presto,
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ Look through the GitHub issues. Issues tagged with
|
||||
Superset could always use better documentation,
|
||||
whether as part of the official Superset docs,
|
||||
in docstrings, `docs/*.rst` or even on the web as blog posts or
|
||||
articles. See [Documentation](/docs/6.0.0/contributing/howtos#contributing-to-documentation) for more details.
|
||||
articles. See [Documentation](/user-docs/6.0.0/contributing/howtos#contributing-to-documentation) for more details.
|
||||
|
||||
### Add Translations
|
||||
|
||||
|
||||
@@ -599,7 +599,7 @@ export enum FeatureFlag {
|
||||
those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
|
||||
in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.
|
||||
|
||||
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/6.0.0/configuration/feature-flags) documentation.
|
||||
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) documentation.
|
||||
|
||||
## Git Hooks
|
||||
|
||||
@@ -614,7 +614,7 @@ A series of checks will now run when you make a git commit.
|
||||
|
||||
## Linting
|
||||
|
||||
See [how tos](/docs/6.0.0/contributing/howtos#linting)
|
||||
See [how tos](/user-docs/6.0.0/contributing/howtos#linting)
|
||||
|
||||
## GitHub Actions and `act`
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P
|
||||
in `requirements.txt` pinned to a specific version which ensures that the application
|
||||
build is deterministic.
|
||||
- For TypeScript/JavaScript, include new libraries in `package.json`
|
||||
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](/docs/6.0.0/contributing/howtos#testing) for how to run tests.
|
||||
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](/user-docs/6.0.0/contributing/howtos#testing) for how to run tests.
|
||||
- **Documentation:** If the pull request adds functionality, the docs should be updated as part of the same PR.
|
||||
- **CI:** Reviewers will not review the code until all CI tests are passed. Sometimes there can be flaky tests. You can close and open PR to re-run CI test. Please report if the issue persists. After the CI fix has been deployed to `master`, please rebase your PR.
|
||||
- **Code coverage:** Please ensure that code coverage does not decrease.
|
||||
|
||||
@@ -51,11 +51,11 @@ multiple tables as long as your database account has access to the tables.
|
||||
## How do I create my own visualization?
|
||||
|
||||
We recommend reading the instructions in
|
||||
[Creating Visualization Plugins](/docs/6.0.0/contributing/howtos#creating-visualization-plugins).
|
||||
[Creating Visualization Plugins](/user-docs/6.0.0/contributing/howtos#creating-visualization-plugins).
|
||||
|
||||
## Can I upload and visualize CSV data?
|
||||
|
||||
Absolutely! Read the instructions [here](/docs/using-superset/exploring-data) to learn
|
||||
Absolutely! Read the instructions [here](/user-docs/using-superset/exploring-data) to learn
|
||||
how to enable and use CSV upload.
|
||||
|
||||
## Why are my queries timing out?
|
||||
@@ -142,7 +142,7 @@ SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db?check_same_thread
|
||||
```
|
||||
|
||||
You can read more about customizing Superset using the configuration file
|
||||
[here](/docs/6.0.0/configuration/configuring-superset).
|
||||
[here](/user-docs/6.0.0/configuration/configuring-superset).
|
||||
|
||||
## What if the table schema changed?
|
||||
|
||||
@@ -157,7 +157,7 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s
|
||||
|
||||
To clarify, the database backend is an OLTP database used by Superset to store its internal
|
||||
information like your list of users and dashboard definitions. While Superset supports a
|
||||
[variety of databases as data _sources_](/docs/6.0.0/configuration/databases#installing-database-drivers),
|
||||
[variety of databases as data _sources_](/user-docs/6.0.0/configuration/databases#installing-database-drivers),
|
||||
only a few database engines are supported for use as the OLTP backend / metadata store.
|
||||
|
||||
Superset is tested using MySQL, PostgreSQL, and SQLite backends. It’s recommended you install
|
||||
@@ -190,7 +190,7 @@ second etc). Example:
|
||||
|
||||
## Does Superset work with [insert database engine here]?
|
||||
|
||||
The [Connecting to Databases section](/docs/6.0.0/configuration/databases) provides the best
|
||||
The [Connecting to Databases section](/user-docs/6.0.0/configuration/databases) provides the best
|
||||
overview for supported databases. Database engines not listed on that page may work too. We rely on
|
||||
the community to contribute to this knowledge base.
|
||||
|
||||
@@ -226,7 +226,7 @@ are typical in basic SQL:
|
||||
## Does Superset offer a public API?
|
||||
|
||||
Yes, a public REST API, and the formal surface of that API is expanding steadily. You can read more about this API and
|
||||
interact with it using Swagger [here](/docs/api).
|
||||
interact with it using Swagger [here](/developer-docs/api).
|
||||
|
||||
Some of the
|
||||
original vision for the collection of endpoints under **/api/v1** was originally specified in
|
||||
@@ -266,7 +266,7 @@ Superset uses [Scarf](https://about.scarf.sh/) by default to collect basic telem
|
||||
We use the [Scarf Gateway](https://docs.scarf.sh/gateway/) to sit in front of container registries, the [scarf-js](https://about.scarf.sh/package-sdks) package to track `npm` installations, and a Scarf pixel to gather anonymous analytics on Superset page views.
|
||||
Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track) and [here](https://docs.scarf.sh/package-analytics/#as-a-user-of-a-package-using-scarf-js-how-can-i-opt-out-of-analytics).
|
||||
Superset maintainers can also opt out of telemetry data collection by setting the `SCARF_ANALYTICS` environment variable to `false` in the Superset container (or anywhere Superset/webpack are run).
|
||||
Additional opt-out instructions for Docker users are available on the [Docker Installation](/docs/6.0.0/installation/docker-compose) page.
|
||||
Additional opt-out instructions for Docker users are available on the [Docker Installation](/user-docs/6.0.0/installation/docker-compose) page.
|
||||
|
||||
## Does Superset have an archive panel or trash bin from which a user can recover deleted assets?
|
||||
|
||||
|
||||
@@ -24,10 +24,10 @@ A Superset installation is made up of these components:
|
||||
|
||||
The optional components above are necessary to enable these features:
|
||||
|
||||
- [Alerts and Reports](/docs/6.0.0/configuration/alerts-reports)
|
||||
- [Caching](/docs/6.0.0/configuration/cache)
|
||||
- [Async Queries](/docs/6.0.0/configuration/async-queries-celery/)
|
||||
- [Dashboard Thumbnails](/docs/6.0.0/configuration/cache/#caching-thumbnails)
|
||||
- [Alerts and Reports](/user-docs/6.0.0/configuration/alerts-reports)
|
||||
- [Caching](/user-docs/6.0.0/configuration/cache)
|
||||
- [Async Queries](/user-docs/6.0.0/configuration/async-queries-celery/)
|
||||
- [Dashboard Thumbnails](/user-docs/6.0.0/configuration/cache/#caching-thumbnails)
|
||||
|
||||
If you install with Kubernetes or Docker Compose, all of these components will be created.
|
||||
|
||||
@@ -59,7 +59,7 @@ The caching layer serves two main functions:
|
||||
- Store the results of queries to your data warehouse so that when a chart is loaded twice, it pulls from the cache the second time, speeding up the application and reducing load on your data warehouse.
|
||||
- Act as a message broker for the worker, enabling the Alerts & Reports, async queries, and thumbnail caching features.
|
||||
|
||||
Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/docs/6.0.0/configuration/cache/) for more.
|
||||
Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/user-docs/6.0.0/configuration/cache/) for more.
|
||||
|
||||
### Worker and Beat
|
||||
|
||||
@@ -67,6 +67,6 @@ This is one or more workers who execute tasks like run async queries or take sna
|
||||
|
||||
## Other components
|
||||
|
||||
Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/docs/6.0.0/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.
|
||||
Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/user-docs/6.0.0/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.
|
||||
|
||||
Superset won't even start without certain configuration settings established, so it's essential to review that page.
|
||||
|
||||
@@ -21,7 +21,7 @@ with our [installing on k8s](https://superset.apache.org/docs/installation/runni
|
||||
documentation.
|
||||
:::
|
||||
|
||||
As mentioned in our [quickstart guide](/docs/quickstart), the fastest way to try
|
||||
As mentioned in our [quickstart guide](/user-docs/quickstart), the fastest way to try
|
||||
Superset locally is using Docker Compose on a Linux or Mac OSX
|
||||
computer. Superset does not have official support for Windows. It's also the easiest
|
||||
way to launch a fully functioning **development environment** quickly.
|
||||
|
||||
@@ -9,11 +9,11 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
|
||||
|
||||
# Installation Methods
|
||||
|
||||
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/docs/6.0.0/installation/architecture page to understand Superset's different components.
|
||||
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/user-docs/6.0.0/installation/architecture) page to understand Superset's different components.
|
||||
|
||||
The fundamental trade-off is between you needing to do more of the detail work yourself vs. using a more complex deployment route that handles those details.
|
||||
|
||||
## [Docker Compose](/docs/6.0.0/installation/docker-compose
|
||||
## [Docker Compose](/user-docs/6.0.0/installation/docker-compose)
|
||||
|
||||
**Summary:** This takes advantage of containerization while remaining simpler than Kubernetes. This is the best way to try out Superset; it's also useful for developing & contributing back to Superset.
|
||||
|
||||
@@ -27,9 +27,9 @@ You will need to back up your metadata DB. That could mean backing up the servic
|
||||
|
||||
You will also need to extend the Superset docker image. The default `lean` images do not contain drivers needed to access your metadata database (Postgres or MySQL), nor to access your data warehouse, nor the headless browser needed for Alerts & Reports. You could run a `-dev` image while demoing Superset, which has some of this, but you'll still need to install the driver for your data warehouse. The `-dev` images run as root, which is not recommended for production.
|
||||
|
||||
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs. See [Building your own production Docker image](/docs/6.0.0/installation/docker-builds/#building-your-own-production-docker-image).
|
||||
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs. See [Building your own production Docker image](/user-docs/6.0.0/installation/docker-builds/#building-your-own-production-docker-image).
|
||||
|
||||
## [Kubernetes (K8s)](/docs/6.0.0/installation/kubernetes
|
||||
## [Kubernetes (K8s)](/user-docs/6.0.0/installation/kubernetes)
|
||||
|
||||
**Summary:** This is the best-practice way to deploy a production instance of Superset, but has the steepest skill requirement - someone who knows Kubernetes.
|
||||
|
||||
@@ -41,7 +41,7 @@ A K8s deployment can scale up and down based on usage and deploy rolling updates
|
||||
|
||||
You will need to build your own Docker image, and back up your metadata DB, both as described in Docker Compose above. You'll also need to customize your Helm chart values and deploy and maintain your Kubernetes cluster.
|
||||
|
||||
## [PyPI (Python)](/docs/6.0.0/installation/pypi
|
||||
## [PyPI (Python)](/user-docs/6.0.0/installation/pypi)
|
||||
|
||||
**Summary:** This is the only method that requires no knowledge of containers. It requires the most hands-on work to deploy, connect, and maintain each component.
|
||||
|
||||
|
||||
@@ -149,7 +149,7 @@ For production clusters it's recommended to build own image with this step done
|
||||
Superset requires a Python DB-API database driver and a SQLAlchemy
|
||||
dialect to be installed for each datastore you want to connect to.
|
||||
|
||||
See [Install Database Drivers](/docs/6.0.0/configuration/databases) for more information.
|
||||
See [Install Database Drivers](/user-docs/6.0.0/configuration/databases) for more information.
|
||||
It is recommended that you refer to versions listed in
|
||||
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
|
||||
instead of hard-coding them in your bootstrap script, as seen below.
|
||||
@@ -310,7 +310,7 @@ configOverrides:
|
||||
|
||||
### Enable Alerts and Reports
|
||||
|
||||
For this, as per the [Alerts and Reports doc](/docs/6.0.0/configuration/alerts-reports), you will need to:
|
||||
For this, as per the [Alerts and Reports doc](/user-docs/6.0.0/configuration/alerts-reports), you will need to:
|
||||
|
||||
#### Install a supported webdriver in the Celery worker
|
||||
|
||||
|
||||
@@ -172,7 +172,7 @@ how to set up a development environment.
|
||||
## Resources
|
||||
|
||||
- [Superset "In the Wild"](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md) - open a PR to add your org to the list!
|
||||
- [Feature Flags](/docs/6.0.0/configuration/feature-flags) - the status of Superset's Feature Flags.
|
||||
- [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) - the status of Superset's Feature Flags.
|
||||
- [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
|
||||
- [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
|
||||
- [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.
|
||||
|
||||
@@ -15,7 +15,7 @@ Although we recommend using `Docker Compose` for a quick start in a sandbox-type
|
||||
environment and for other development-type use cases, **we
|
||||
do not recommend this setup for production**. For this purpose please
|
||||
refer to our
|
||||
[Installing on Kubernetes](/docs/6.0.0/installation/kubernetes/)
|
||||
[Installing on Kubernetes](/user-docs/6.0.0/installation/kubernetes/)
|
||||
page.
|
||||
:::
|
||||
|
||||
@@ -73,10 +73,10 @@ processes by running Docker Compose `stop` command. By doing so, you can avoid d
|
||||
|
||||
From this point on, you can head on to:
|
||||
|
||||
- [Create your first Dashboard](/docs/6.0.0/using-superset/creating-your-first-dashboard)
|
||||
- [Connect to a Database](/docs/6.0.0/configuration/databases)
|
||||
- [Using Docker Compose](/docs/6.0.0/installation/docker-compose)
|
||||
- [Configure Superset](/docs/6.0.0/configuration/configuring-superset/)
|
||||
- [Installing on Kubernetes](/docs/6.0.0/installation/kubernetes/)
|
||||
- [Create your first Dashboard](/user-docs/6.0.0/using-superset/creating-your-first-dashboard)
|
||||
- [Connect to a Database](/user-docs/6.0.0/configuration/databases)
|
||||
- [Using Docker Compose](/user-docs/6.0.0/installation/docker-compose)
|
||||
- [Configure Superset](/user-docs/6.0.0/configuration/configuring-superset/)
|
||||
- [Installing on Kubernetes](/user-docs/6.0.0/installation/kubernetes/)
|
||||
|
||||
Or just explore our [Documentation](https://superset.apache.org/docs/intro)!
|
||||
|
||||
@@ -31,7 +31,7 @@ your existing SQL-speaking database or data store.
|
||||
|
||||
First things first, we need to add the connection credentials to your database to be able
|
||||
to query and visualize data from it. If you're using Superset locally via
|
||||
[Docker compose](/docs/6.0.0/installation/docker-compose), you can
|
||||
[Docker compose](/user-docs/6.0.0/installation/docker-compose), you can
|
||||
skip this step because a Postgres database, named **examples**, is included and
|
||||
pre-configured in Superset for you.
|
||||
|
||||
@@ -188,7 +188,7 @@ Access to dashboards is managed via owners (users that have edit permissions to
|
||||
Non-owner users access can be managed in two different ways. The dashboard needs to be published to be visible to other users.
|
||||
|
||||
1. Dataset permissions - if you add to the relevant role permissions to datasets it automatically grants implicit access to all dashboards that uses those permitted datasets.
|
||||
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
|
||||
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
|
||||
- Granting a role access to a dashboard will bypass dataset level checks. Having dashboard access implicitly grants read access to all the featured charts in the dashboard, and thereby also all the associated datasets.
|
||||
- If no roles are specified for a dashboard, regular **Dataset permissions** will apply.
|
||||
|
||||
|
||||
266
superset-frontend/package-lock.json
generated
266
superset-frontend/package-lock.json
generated
@@ -224,7 +224,7 @@
|
||||
"@types/unzipper": "^0.10.11",
|
||||
"@typescript-eslint/eslint-plugin": "^8.59.3",
|
||||
"@typescript-eslint/parser": "^8.59.3",
|
||||
"babel-jest": "^30.0.2",
|
||||
"babel-jest": "^30.4.1",
|
||||
"babel-loader": "^10.1.1",
|
||||
"babel-plugin-dynamic-import-node": "^2.3.3",
|
||||
"babel-plugin-jsx-remove-data-test-id": "^3.0.0",
|
||||
@@ -17027,16 +17027,16 @@
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/babel-jest": {
|
||||
"version": "30.3.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.3.0.tgz",
|
||||
"integrity": "sha512-gRpauEU2KRrCox5Z296aeVHR4jQ98BCnu0IO332D/xpHNOsIH/bgSRk9k6GbKIbBw8vFeN6ctuu6tV8WOyVfYQ==",
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.4.1.tgz",
|
||||
"integrity": "sha512-fATAbM8piYxkiXQp3RBXmZHxZVNJZAVXXfyeyCN2Tida3+qJ8ea9UxhiJ2y4fLO90ZImKt6k9FlcH2+rLkJGhw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jest/transform": "30.3.0",
|
||||
"@jest/transform": "30.4.1",
|
||||
"@types/babel__core": "^7.20.5",
|
||||
"babel-plugin-istanbul": "^7.0.1",
|
||||
"babel-preset-jest": "30.3.0",
|
||||
"babel-preset-jest": "30.4.0",
|
||||
"chalk": "^4.1.2",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"slash": "^3.0.0"
|
||||
@@ -17048,6 +17048,85 @@
|
||||
"@babel/core": "^7.11.0 || ^8.0.0-0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/@jest/pattern": {
|
||||
"version": "30.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@jest/pattern/-/pattern-30.4.0.tgz",
|
||||
"integrity": "sha512-RAWn3+f9u8BsHijKJ71uHcFp6vmyEt6VvoWXkl6hKF3qVIuWNmudVjg12DlBPGup/frIl5UcUlH5HfEuvHpEXg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"jest-regex-util": "30.4.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/@jest/schemas": {
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-30.4.1.tgz",
|
||||
"integrity": "sha512-i6b4qw5qnP8c5FEeBJg/uZQ4ddrkN6Ca8qISJh0pr7a5hfn3h3v5x60BEbOC7OYAGZNMs1LfFLwnW2CuK8F57Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@sinclair/typebox": "^0.34.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/@jest/transform": {
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@jest/transform/-/transform-30.4.1.tgz",
|
||||
"integrity": "sha512-Wz0LyktlTvRefoymh+n64hQ84KNXsRGcwdoZ8CSa0Ea+fgYcHZlnk+hDP7v2MS7il2bQ5uTEIxf4/NNfhMN4KQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.27.4",
|
||||
"@jest/types": "30.4.1",
|
||||
"@jridgewell/trace-mapping": "^0.3.25",
|
||||
"babel-plugin-istanbul": "^7.0.1",
|
||||
"chalk": "^4.1.2",
|
||||
"convert-source-map": "^2.0.0",
|
||||
"fast-json-stable-stringify": "^2.1.0",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"jest-haste-map": "30.4.1",
|
||||
"jest-regex-util": "30.4.0",
|
||||
"jest-util": "30.4.1",
|
||||
"pirates": "^4.0.7",
|
||||
"slash": "^3.0.0",
|
||||
"write-file-atomic": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/@jest/types": {
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@jest/types/-/types-30.4.1.tgz",
|
||||
"integrity": "sha512-f1x/vJXIfjOlEmejYpbkbgw1gOqpPECwMvMEtBqe47j7H2Hg8h8w3o3ikhSXq3MI15kg+oQ0exWO0uCtTNJLoQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jest/pattern": "30.4.0",
|
||||
"@jest/schemas": "30.4.1",
|
||||
"@types/istanbul-lib-coverage": "^2.0.6",
|
||||
"@types/istanbul-reports": "^3.0.4",
|
||||
"@types/node": "*",
|
||||
"@types/yargs": "^17.0.33",
|
||||
"chalk": "^4.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/@sinclair/typebox": {
|
||||
"version": "0.34.49",
|
||||
"resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.34.49.tgz",
|
||||
"integrity": "sha512-brySQQs7Jtn0joV8Xh9ZV/hZb9Ozb0pmazDIASBkYKCjXrXU3mpcFahmK/z4YDhGkQvP9mWJbVyahdtU5wQA+A==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/chalk": {
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||
@@ -17065,6 +17144,105 @@
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/jest-haste-map": {
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-30.4.1.tgz",
|
||||
"integrity": "sha512-rFrcONd8jeFsyw+Z9CrScJgglRf2+NFmNam8dKu7n+SoHqNYT47mn0DdEcVUZJpvh7Iz6/si7f7yUH7GJHVgnw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jest/types": "30.4.1",
|
||||
"@types/node": "*",
|
||||
"anymatch": "^3.1.3",
|
||||
"fb-watchman": "^2.0.2",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"jest-regex-util": "30.4.0",
|
||||
"jest-util": "30.4.1",
|
||||
"jest-worker": "30.4.1",
|
||||
"picomatch": "^4.0.3",
|
||||
"walker": "^1.0.8"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "^2.3.3"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/jest-regex-util": {
|
||||
"version": "30.4.0",
|
||||
"resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-30.4.0.tgz",
|
||||
"integrity": "sha512-mWlvLviKIgIQ8VCuM1xRdD0TWp3zlzionlmDBjuXVBs+VkmXq6FgW9T4Emr7oGz/Rk6feDCGyiugolcQEyp3mg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/jest-util": {
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/jest-util/-/jest-util-30.4.1.tgz",
|
||||
"integrity": "sha512-vjQb1sACEiv13DKJMDToJpzVW0joCsIQrmbg0fi7CyOOt+g9jTuQl2A216pWRBYhOVt53XbL/2LbMKg1BECWOw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jest/types": "30.4.1",
|
||||
"@types/node": "*",
|
||||
"chalk": "^4.1.2",
|
||||
"ci-info": "^4.2.0",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"picomatch": "^4.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/jest-worker": {
|
||||
"version": "30.4.1",
|
||||
"resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-30.4.1.tgz",
|
||||
"integrity": "sha512-SHynN/q/QD++iNyvMdy+WMmbCGk8jIsNcRxycXbWubSOhvo6T+j2afcfUSl+3hYsiBebOTo0cT7c2H7CXugu1g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"@ungap/structured-clone": "^1.3.0",
|
||||
"jest-util": "30.4.1",
|
||||
"merge-stream": "^2.0.0",
|
||||
"supports-color": "^8.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/jest-worker/node_modules/supports-color": {
|
||||
"version": "8.1.1",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
|
||||
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-flag": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/supports-color?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/picomatch": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz",
|
||||
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/babel-jest/node_modules/slash": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
|
||||
@@ -17132,9 +17310,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/babel-plugin-jest-hoist": {
|
||||
"version": "30.3.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.3.0.tgz",
|
||||
"integrity": "sha512-+TRkByhsws6sfPjVaitzadk1I0F5sPvOVUH5tyTSzhePpsGIVrdeunHSw/C36QeocS95OOk8lunc4rlu5Anwsg==",
|
||||
"version": "30.4.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.4.0.tgz",
|
||||
"integrity": "sha512-9EdtWM/sSfXLOGLwSn+GS6pIXyBnL07/8gyJlwFXjWy4DxMOyItqyUT29d4lQiS380EZwYlX7/At4PgBS+m2aA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -17288,13 +17466,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/babel-preset-jest": {
|
||||
"version": "30.3.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.3.0.tgz",
|
||||
"integrity": "sha512-6ZcUbWHC+dMz2vfzdNwi87Z1gQsLNK2uLuK1Q89R11xdvejcivlYYwDlEv0FHX3VwEXpbBQ9uufB/MUNpZGfhQ==",
|
||||
"version": "30.4.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.4.0.tgz",
|
||||
"integrity": "sha512-lBY4jxsNmCnSiu7kquw8ZC9F4+XLMOKypT3RnNHPvU2Kpd4W0xaPuLr5ZkRyOsvLYAY4yaW1ZwTW4xB7NIiZzg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"babel-plugin-jest-hoist": "30.3.0",
|
||||
"babel-plugin-jest-hoist": "30.4.0",
|
||||
"babel-preset-current-node-syntax": "^1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
@@ -28917,6 +29095,58 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/jest-config/node_modules/babel-jest": {
|
||||
"version": "30.3.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.3.0.tgz",
|
||||
"integrity": "sha512-gRpauEU2KRrCox5Z296aeVHR4jQ98BCnu0IO332D/xpHNOsIH/bgSRk9k6GbKIbBw8vFeN6ctuu6tV8WOyVfYQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jest/transform": "30.3.0",
|
||||
"@types/babel__core": "^7.20.5",
|
||||
"babel-plugin-istanbul": "^7.0.1",
|
||||
"babel-preset-jest": "30.3.0",
|
||||
"chalk": "^4.1.2",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"slash": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@babel/core": "^7.11.0 || ^8.0.0-0"
|
||||
}
|
||||
},
|
||||
"node_modules/jest-config/node_modules/babel-plugin-jest-hoist": {
|
||||
"version": "30.3.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-30.3.0.tgz",
|
||||
"integrity": "sha512-+TRkByhsws6sfPjVaitzadk1I0F5sPvOVUH5tyTSzhePpsGIVrdeunHSw/C36QeocS95OOk8lunc4rlu5Anwsg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/babel__core": "^7.20.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/jest-config/node_modules/babel-preset-jest": {
|
||||
"version": "30.3.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-30.3.0.tgz",
|
||||
"integrity": "sha512-6ZcUbWHC+dMz2vfzdNwi87Z1gQsLNK2uLuK1Q89R11xdvejcivlYYwDlEv0FHX3VwEXpbBQ9uufB/MUNpZGfhQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"babel-plugin-jest-hoist": "30.3.0",
|
||||
"babel-preset-current-node-syntax": "^1.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@babel/core": "^7.11.0 || ^8.0.0-beta.1"
|
||||
}
|
||||
},
|
||||
"node_modules/jest-config/node_modules/brace-expansion": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz",
|
||||
@@ -50174,7 +50404,7 @@
|
||||
"version": "0.20.4",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^6.2.2",
|
||||
"@ant-design/icons": "^6.2.3",
|
||||
"@apache-superset/core": "*",
|
||||
"@babel/runtime": "^7.29.2",
|
||||
"@types/json-bigint": "^1.0.4",
|
||||
@@ -50206,7 +50436,7 @@
|
||||
"react-js-cron": "^5.2.0",
|
||||
"react-markdown": "^8.0.7",
|
||||
"react-resize-detector": "^7.1.2",
|
||||
"react-syntax-highlighter": "^16.1.1",
|
||||
"react-syntax-highlighter": "^16.1.0",
|
||||
"react-ultimate-pagination": "^1.3.2",
|
||||
"regenerator-runtime": "^0.14.1",
|
||||
"rehype-raw": "^7.0.0",
|
||||
@@ -50275,9 +50505,9 @@
|
||||
}
|
||||
},
|
||||
"packages/superset-ui-core/node_modules/@ant-design/icons": {
|
||||
"version": "6.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-6.2.2.tgz",
|
||||
"integrity": "sha512-zlJtE7AMbG12TeYVPhtBXwNpFInNy8mjLzcIm+0BPw16/b8ODG87YJ1G37VIF5VFscdgfsf6EweAFPTobu/3iQ==",
|
||||
"version": "6.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-6.2.3.tgz",
|
||||
"integrity": "sha512-Pl3aoAtxQeKryYnt6VvDJtOxMOtA8wrRSACe/pTjOAIG3fdHrWm6Ivb4ku9tsFjYroSXBKirvuxG4QkwBXD9gg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@ant-design/colors": "^8.0.1",
|
||||
|
||||
@@ -305,7 +305,7 @@
|
||||
"@types/unzipper": "^0.10.11",
|
||||
"@typescript-eslint/eslint-plugin": "^8.59.3",
|
||||
"@typescript-eslint/parser": "^8.59.3",
|
||||
"babel-jest": "^30.0.2",
|
||||
"babel-jest": "^30.4.1",
|
||||
"babel-loader": "^10.1.1",
|
||||
"babel-plugin-dynamic-import-node": "^2.3.3",
|
||||
"babel-plugin-jsx-remove-data-test-id": "^3.0.0",
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
"lib"
|
||||
],
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^6.2.2",
|
||||
"@ant-design/icons": "^6.2.3",
|
||||
"@apache-superset/core": "*",
|
||||
"@babel/runtime": "^7.29.2",
|
||||
"@types/json-bigint": "^1.0.4",
|
||||
@@ -56,7 +56,7 @@
|
||||
"react-js-cron": "^5.2.0",
|
||||
"react-markdown": "^8.0.7",
|
||||
"react-resize-detector": "^7.1.2",
|
||||
"react-syntax-highlighter": "^16.1.1",
|
||||
"react-syntax-highlighter": "^16.1.0",
|
||||
"react-ultimate-pagination": "^1.3.2",
|
||||
"regenerator-runtime": "^0.14.1",
|
||||
"rehype-raw": "^7.0.0",
|
||||
|
||||
@@ -113,7 +113,7 @@ const EstimateQueryCostButton = ({
|
||||
modalBody={renderModalBody()}
|
||||
triggerNode={
|
||||
<Button
|
||||
color="primary"
|
||||
color="default"
|
||||
variant="text"
|
||||
style={{ height: 32, padding: '4px 15px' }}
|
||||
onClick={onClickHandler}
|
||||
|
||||
@@ -49,7 +49,9 @@ from contextlib import AbstractContextManager
|
||||
from typing import Any, Callable, TYPE_CHECKING, TypeVar
|
||||
|
||||
from flask import g, has_request_context
|
||||
from flask_appbuilder.security.sqla.models import Group, User
|
||||
from flask_appbuilder.security.sqla.models import User
|
||||
|
||||
from superset.mcp_service.composite_token_verifier import API_KEY_PASSTHROUGH_CLAIM
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
@@ -148,23 +150,14 @@ def check_tool_permission(func: Callable[..., Any]) -> bool:
|
||||
def load_user_with_relationships(
|
||||
username: str | None = None, email: str | None = None
|
||||
) -> User | None:
|
||||
"""
|
||||
Load a user with all relationships needed for permission checks.
|
||||
"""Load a user with roles and group roles eagerly loaded.
|
||||
|
||||
This function eagerly loads User.roles, User.groups, and Group.roles
|
||||
to prevent detached instance errors when the session is closed/rolled back.
|
||||
|
||||
IMPORTANT: Always use this function instead of security_manager.find_user()
|
||||
when loading users for MCP tool execution. The find_user() method doesn't
|
||||
eagerly load Group.roles, causing "detached instance" errors when permission
|
||||
checks access group.roles after the session is rolled back.
|
||||
|
||||
Args:
|
||||
username: The username to look up (optional if email provided)
|
||||
email: The email to look up (optional if username provided)
|
||||
|
||||
Returns:
|
||||
User object with relationships loaded, or None if not found
|
||||
Delegates to :meth:`SupersetSecurityManager.find_user_with_relationships`,
|
||||
which mirrors FAB's ``find_user`` (including ``auth_username_ci`` and
|
||||
``MultipleResultsFound`` handling) while adding eager loading of
|
||||
``User.roles`` and ``User.groups.roles`` to prevent detached-instance
|
||||
errors when the SQLAlchemy session is closed or rolled back after the
|
||||
lookup — as happens in MCP tool-execution contexts.
|
||||
|
||||
Raises:
|
||||
ValueError: If neither username nor email is provided
|
||||
@@ -172,21 +165,9 @@ def load_user_with_relationships(
|
||||
if not username and not email:
|
||||
raise ValueError("Either username or email must be provided")
|
||||
|
||||
from sqlalchemy.orm import joinedload
|
||||
from superset import security_manager
|
||||
|
||||
from superset.extensions import db
|
||||
|
||||
query = db.session.query(User).options(
|
||||
joinedload(User.roles),
|
||||
joinedload(User.groups).joinedload(Group.roles),
|
||||
)
|
||||
|
||||
if username:
|
||||
query = query.filter(User.username == username)
|
||||
else:
|
||||
query = query.filter(User.email == email)
|
||||
|
||||
return query.first()
|
||||
return security_manager.find_user_with_relationships(username=username, email=email)
|
||||
|
||||
|
||||
def _resolve_user_from_jwt_context(app: Any) -> User | None:
|
||||
@@ -218,6 +199,25 @@ def _resolve_user_from_jwt_context(app: Any) -> User | None:
|
||||
if access_token is None:
|
||||
return None
|
||||
|
||||
# API key pass-through: CompositeTokenVerifier accepted this token
|
||||
# at the transport layer but defers actual validation to
|
||||
# _resolve_user_from_api_key() (priority 2 in get_user_from_request).
|
||||
# Require client_id=="api_key" (set by CompositeTokenVerifier) in addition
|
||||
# to the claim so that an external IdP JWT that happens to include the
|
||||
# claim name is not misclassified as an API-key pass-through.
|
||||
claims = getattr(access_token, "claims", None)
|
||||
if isinstance(claims, dict) and claims.get(API_KEY_PASSTHROUGH_CLAIM):
|
||||
if getattr(access_token, "client_id", None) == "api_key":
|
||||
logger.debug(
|
||||
"API key pass-through token detected, deferring to API key auth"
|
||||
)
|
||||
return None
|
||||
logger.debug(
|
||||
"Ignoring %s claim on non-API-key token (client_id=%r); processing as JWT",
|
||||
API_KEY_PASSTHROUGH_CLAIM,
|
||||
getattr(access_token, "client_id", None),
|
||||
)
|
||||
|
||||
# Use configurable resolver or default
|
||||
from superset.mcp_service.mcp_config import default_user_resolver
|
||||
|
||||
@@ -238,9 +238,12 @@ def _resolve_user_from_jwt_context(app: Any) -> User | None:
|
||||
if not user:
|
||||
# Fail closed: JWT says this user should exist but they don't.
|
||||
# Do NOT fall through to MCP_DEV_USERNAME or stale g.user.
|
||||
# Avoid echoing the JWT-extracted username in the exception message
|
||||
# (CodeQL py/clear-text-logging-sensitive-data).
|
||||
logger.debug("JWT-authenticated user not found in database (identity from JWT)")
|
||||
raise ValueError(
|
||||
f"JWT authenticated user '{username}' not found in Superset database. "
|
||||
f"Ensure the user exists before granting MCP access."
|
||||
"JWT authenticated user not found in Superset database. "
|
||||
"Ensure the user exists before granting MCP access."
|
||||
)
|
||||
|
||||
return user
|
||||
@@ -248,37 +251,57 @@ def _resolve_user_from_jwt_context(app: Any) -> User | None:
|
||||
|
||||
def _resolve_user_from_api_key(app: Any) -> User | None:
|
||||
"""
|
||||
Resolve the current user from an API key in the Authorization header.
|
||||
Resolve the current user from an API key passed via Bearer token.
|
||||
|
||||
Uses FAB SecurityManager's API key validation. Only attempts when
|
||||
FAB_API_KEY_ENABLED is True and a request context is active.
|
||||
Reads the token from FastMCP's per-request ``AccessToken`` (set by
|
||||
``CompositeTokenVerifier`` when a Bearer token matches an API key
|
||||
prefix). The streamable-http transport does not push a Flask request
|
||||
context, so we cannot rely on ``flask.request`` headers — the verifier
|
||||
already saw the token and stashed it on the ``AccessToken``.
|
||||
|
||||
Returns:
|
||||
User object with relationships loaded, or None if no API key present
|
||||
or API key auth is not enabled/available.
|
||||
User object with relationships loaded, or None if no API key
|
||||
pass-through token is present or API key auth is not enabled.
|
||||
|
||||
Raises:
|
||||
PermissionError: If an API key is present but invalid/expired,
|
||||
or if validation is not available in this FAB version.
|
||||
PermissionError: If an API key pass-through token is present but
|
||||
invalid/expired (fail closed — do NOT fall through to weaker
|
||||
auth sources like ``MCP_DEV_USERNAME``), or if validation is
|
||||
not available in this FAB version.
|
||||
"""
|
||||
if not app.config.get("FAB_API_KEY_ENABLED", False) or not has_request_context():
|
||||
if not app.config.get("FAB_API_KEY_ENABLED", False):
|
||||
return None
|
||||
|
||||
try:
|
||||
from fastmcp.server.dependencies import get_access_token
|
||||
except ImportError:
|
||||
logger.debug("fastmcp.server.dependencies not available, skipping API key auth")
|
||||
return None
|
||||
|
||||
access_token = get_access_token()
|
||||
if access_token is None:
|
||||
return None
|
||||
|
||||
# Only validate tokens that the CompositeTokenVerifier flagged as
|
||||
# API key pass-throughs. Plain JWTs were already validated by the JWT
|
||||
# verifier and resolved in _resolve_user_from_jwt_context.
|
||||
claims = getattr(access_token, "claims", None)
|
||||
if not (isinstance(claims, dict) and claims.get(API_KEY_PASSTHROUGH_CLAIM)):
|
||||
return None
|
||||
# Defense-in-depth: require client_id=="api_key" (set by CompositeTokenVerifier)
|
||||
# to guard against rogue external IdP JWTs that include the passthrough claim.
|
||||
if getattr(access_token, "client_id", None) != "api_key":
|
||||
return None
|
||||
|
||||
api_key_string = getattr(access_token, "token", None)
|
||||
if not api_key_string:
|
||||
# Passthrough claim is set but the raw token is absent — fail closed
|
||||
# rather than silently falling through to weaker auth sources.
|
||||
raise PermissionError(
|
||||
"API key pass-through token is missing the raw token value."
|
||||
)
|
||||
|
||||
sm = app.appbuilder.sm
|
||||
# extract_api_key_from_request is FAB's method for reading
|
||||
# the Bearer token from the Authorization header and matching prefixes.
|
||||
# Not all FAB versions include this method, so guard with hasattr.
|
||||
if not hasattr(sm, "extract_api_key_from_request"):
|
||||
logger.debug(
|
||||
"FAB SecurityManager does not have extract_api_key_from_request; "
|
||||
"API key authentication is not available in this FAB version"
|
||||
)
|
||||
return None
|
||||
|
||||
api_key_string = sm.extract_api_key_from_request()
|
||||
if api_key_string is None:
|
||||
return None
|
||||
|
||||
if not hasattr(sm, "validate_api_key"):
|
||||
logger.warning(
|
||||
"FAB SecurityManager does not have validate_api_key; "
|
||||
@@ -445,7 +468,6 @@ def _setup_user_context() -> User | None:
|
||||
# tool calls when no per-request middleware refreshes it.
|
||||
# Only clear in app-context-only mode; preserve g.user when
|
||||
# a request context is active (external middleware set it).
|
||||
from flask import has_request_context
|
||||
|
||||
if not has_request_context():
|
||||
g.pop("user", None)
|
||||
@@ -489,7 +511,7 @@ def _setup_user_context() -> User | None:
|
||||
logger.error("DB connection failed on retry during user setup: %s", e)
|
||||
_cleanup_session_on_error()
|
||||
raise
|
||||
except ValueError as e:
|
||||
except (ValueError, PermissionError) as e:
|
||||
# User resolution failed — fail closed. Do not fall back to
|
||||
# g.user from middleware, as that could allow a request to
|
||||
# proceed as a different user in multi-tenant deployments.
|
||||
@@ -576,7 +598,7 @@ def mcp_auth_hook(tool_func: F) -> F: # noqa: C901
|
||||
import inspect
|
||||
import types
|
||||
|
||||
from flask import current_app, has_app_context, has_request_context
|
||||
from flask import current_app, has_app_context
|
||||
|
||||
def _get_app_context_manager() -> AbstractContextManager[None]:
|
||||
"""Push a fresh app context unless a request context is active.
|
||||
|
||||
117
superset/mcp_service/composite_token_verifier.py
Normal file
117
superset/mcp_service/composite_token_verifier.py
Normal file
@@ -0,0 +1,117 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Composite token verifier for MCP authentication.
|
||||
|
||||
Routes Bearer tokens to the appropriate verifier based on prefix:
|
||||
- Tokens matching FAB_API_KEY_PREFIXES (e.g. ``sst_``) are passed through
|
||||
to the Flask layer where ``_resolve_user_from_api_key()`` handles
|
||||
actual validation via FAB SecurityManager.
|
||||
- All other tokens are delegated to the wrapped JWT verifier (when one is
|
||||
configured); when no JWT verifier is configured, non-API-key tokens are
|
||||
rejected at the transport layer.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from fastmcp.server.auth import AccessToken
|
||||
from fastmcp.server.auth.providers.jwt import TokenVerifier
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Namespaced claim that flags an AccessToken as an API-key pass-through.
|
||||
# Namespacing avoids collision with custom claims an external IdP might
|
||||
# happen to mint on a JWT — a plain ``_api_key_passthrough`` claim could
|
||||
# be silently misidentified as a Superset API-key request.
|
||||
API_KEY_PASSTHROUGH_CLAIM = "_superset_mcp_api_key_passthrough"
|
||||
|
||||
|
||||
class CompositeTokenVerifier(TokenVerifier):
|
||||
"""Routes Bearer tokens between API key pass-through and JWT verification.
|
||||
|
||||
API key tokens (identified by prefix) are accepted at the transport layer
|
||||
with a marker claim so that ``_resolve_user_from_jwt_context()`` can
|
||||
detect them and fall through to ``_resolve_user_from_api_key()`` for
|
||||
actual validation.
|
||||
|
||||
Args:
|
||||
jwt_verifier: The wrapped JWT verifier for non-API-key tokens.
|
||||
When ``None``, only API-key tokens are accepted; all other
|
||||
Bearer tokens are rejected at the transport layer (used when
|
||||
``MCP_AUTH_ENABLED=False`` but ``FAB_API_KEY_ENABLED=True``).
|
||||
api_key_prefixes: List of prefixes that identify API key tokens
|
||||
(e.g. ``["sst_"]``).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
jwt_verifier: TokenVerifier | None,
|
||||
api_key_prefixes: list[str],
|
||||
) -> None:
|
||||
super().__init__(
|
||||
base_url=getattr(jwt_verifier, "base_url", None),
|
||||
required_scopes=getattr(jwt_verifier, "required_scopes", None) or [],
|
||||
)
|
||||
self._jwt_verifier = jwt_verifier
|
||||
valid: list[str] = [
|
||||
p for p in api_key_prefixes if isinstance(p, str) and p.strip()
|
||||
]
|
||||
invalid = [p for p in api_key_prefixes if p not in valid]
|
||||
if invalid:
|
||||
# Log count only — actual values may be config secrets
|
||||
# (CodeQL py/clear-text-logging-sensitive-data).
|
||||
logger.warning(
|
||||
"FAB_API_KEY_PREFIXES has %d invalid entries (empty/non-string)"
|
||||
" — ignored",
|
||||
len(invalid),
|
||||
)
|
||||
self._api_key_prefixes = tuple(valid)
|
||||
|
||||
async def verify_token(self, token: str) -> AccessToken | None:
|
||||
"""Verify a Bearer token.
|
||||
|
||||
If the token starts with an API key prefix, return a pass-through
|
||||
AccessToken with the namespaced ``API_KEY_PASSTHROUGH_CLAIM``
|
||||
(``_superset_mcp_api_key_passthrough``). The Flask-layer
|
||||
``_resolve_user_from_api_key()`` performs the real validation.
|
||||
|
||||
Otherwise, delegate to the wrapped JWT verifier when one is
|
||||
configured; if no JWT verifier is configured, reject the token.
|
||||
"""
|
||||
if any(token.startswith(prefix) for prefix in self._api_key_prefixes):
|
||||
logger.debug("API key token detected (prefix match), passing through")
|
||||
# Populate ``scopes`` from ``self.required_scopes`` so FastMCP's
|
||||
# ``RequireAuthMiddleware`` (transport-layer scope check) is
|
||||
# satisfied for API-key requests. Without this, MCP_REQUIRED_SCOPES
|
||||
# being non-empty would 403 every API-key call before
|
||||
# ``_resolve_user_from_api_key`` even runs.
|
||||
return AccessToken(
|
||||
token=token,
|
||||
client_id="api_key",
|
||||
scopes=list(self.required_scopes or []),
|
||||
claims={API_KEY_PASSTHROUGH_CLAIM: True},
|
||||
)
|
||||
|
||||
if self._jwt_verifier is None:
|
||||
logger.debug(
|
||||
"Bearer token does not match any API key prefix and no JWT "
|
||||
"verifier is configured; rejecting"
|
||||
)
|
||||
return None
|
||||
|
||||
return await self._jwt_verifier.verify_token(token)
|
||||
@@ -20,12 +20,15 @@ import logging
|
||||
import secrets
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastmcp.server.auth.providers.jwt import JWTVerifier
|
||||
from flask import Flask
|
||||
|
||||
from superset.mcp_service.composite_token_verifier import CompositeTokenVerifier
|
||||
from superset.mcp_service.constants import (
|
||||
DEFAULT_TOKEN_LIMIT,
|
||||
DEFAULT_WARN_THRESHOLD_PCT,
|
||||
)
|
||||
from superset.mcp_service.jwt_verifier import DetailedJWTVerifier
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -291,56 +294,94 @@ MCP_TOOL_SEARCH_CONFIG: Dict[str, Any] = {
|
||||
|
||||
|
||||
def create_default_mcp_auth_factory(app: Flask) -> Optional[Any]:
|
||||
"""Default MCP auth factory using app.config values."""
|
||||
if not app.config.get("MCP_AUTH_ENABLED", False):
|
||||
"""Default MCP auth factory using app.config values.
|
||||
|
||||
Returns an auth provider when ``MCP_AUTH_ENABLED=True`` (JWT verifier,
|
||||
optionally wrapped with ``CompositeTokenVerifier`` for API keys) or
|
||||
when only ``FAB_API_KEY_ENABLED=True`` (API-key-only verifier that
|
||||
rejects all non-API-key Bearer tokens at the transport).
|
||||
"""
|
||||
auth_enabled = app.config.get("MCP_AUTH_ENABLED", False)
|
||||
api_key_enabled = app.config.get("FAB_API_KEY_ENABLED", False)
|
||||
|
||||
if not (auth_enabled or api_key_enabled):
|
||||
return None
|
||||
|
||||
jwks_uri = app.config.get("MCP_JWKS_URI")
|
||||
public_key = app.config.get("MCP_JWT_PUBLIC_KEY")
|
||||
secret = app.config.get("MCP_JWT_SECRET")
|
||||
jwt_verifier: Any | None = None
|
||||
|
||||
if not (jwks_uri or public_key or secret):
|
||||
logger.warning("MCP_AUTH_ENABLED is True but no JWT keys/secret configured")
|
||||
return None
|
||||
if auth_enabled:
|
||||
jwks_uri = app.config.get("MCP_JWKS_URI")
|
||||
public_key = app.config.get("MCP_JWT_PUBLIC_KEY")
|
||||
secret = app.config.get("MCP_JWT_SECRET")
|
||||
|
||||
try:
|
||||
debug_errors = app.config.get("MCP_JWT_DEBUG_ERRORS", False)
|
||||
|
||||
common_kwargs: dict[str, Any] = {
|
||||
"issuer": app.config.get("MCP_JWT_ISSUER"),
|
||||
"audience": app.config.get("MCP_JWT_AUDIENCE"),
|
||||
"required_scopes": app.config.get("MCP_REQUIRED_SCOPES", []),
|
||||
}
|
||||
|
||||
# For HS256 (symmetric), use the secret as the public_key parameter
|
||||
if app.config.get("MCP_JWT_ALGORITHM") == "HS256" and secret:
|
||||
common_kwargs["public_key"] = secret
|
||||
common_kwargs["algorithm"] = "HS256"
|
||||
if not (jwks_uri or public_key or secret):
|
||||
logger.warning("MCP_AUTH_ENABLED is True but no JWT keys/secret configured")
|
||||
if not api_key_enabled:
|
||||
return None
|
||||
else:
|
||||
# For RS256 (asymmetric), use public key or JWKS
|
||||
common_kwargs["jwks_uri"] = jwks_uri
|
||||
common_kwargs["public_key"] = public_key
|
||||
common_kwargs["algorithm"] = app.config.get("MCP_JWT_ALGORITHM", "RS256")
|
||||
try:
|
||||
jwt_verifier = _build_jwt_verifier(
|
||||
app=app,
|
||||
jwks_uri=jwks_uri,
|
||||
public_key=public_key,
|
||||
secret=secret,
|
||||
)
|
||||
except Exception: # noqa: BLE001 — JWT lib raises many types; broad catch intentional
|
||||
# Do not log the exception — it may contain secrets (e.g., key material)
|
||||
logger.error("Failed to create MCP JWT verifier")
|
||||
if not api_key_enabled:
|
||||
return None
|
||||
|
||||
if debug_errors:
|
||||
# DetailedJWTVerifier: detailed server-side logging of JWT
|
||||
# validation failures. HTTP responses are always generic per
|
||||
# RFC 6750 Section 3.1.
|
||||
from superset.mcp_service.jwt_verifier import DetailedJWTVerifier
|
||||
|
||||
auth_provider = DetailedJWTVerifier(**common_kwargs)
|
||||
if api_key_enabled:
|
||||
raw_prefixes = app.config.get("FAB_API_KEY_PREFIXES", ["sst_"])
|
||||
# Normalize: a plain string (e.g. "sst_") would iterate as characters;
|
||||
# wrap it in a list so CompositeTokenVerifier receives a proper sequence.
|
||||
if isinstance(raw_prefixes, str):
|
||||
api_key_prefixes = [raw_prefixes]
|
||||
else:
|
||||
# Default JWTVerifier: minimal logging, generic error responses.
|
||||
from fastmcp.server.auth.providers.jwt import JWTVerifier
|
||||
api_key_prefixes = list(raw_prefixes)
|
||||
logger.info("API key auth enabled for MCP")
|
||||
return CompositeTokenVerifier(
|
||||
jwt_verifier=jwt_verifier,
|
||||
api_key_prefixes=api_key_prefixes,
|
||||
)
|
||||
|
||||
auth_provider = JWTVerifier(**common_kwargs)
|
||||
return jwt_verifier
|
||||
|
||||
return auth_provider
|
||||
except Exception:
|
||||
# Do not log the exception — it may contain the HS256 secret
|
||||
# from common_kwargs["public_key"]
|
||||
logger.error("Failed to create MCP auth provider")
|
||||
return None
|
||||
|
||||
def _build_jwt_verifier(
|
||||
app: Flask,
|
||||
jwks_uri: Optional[str],
|
||||
public_key: Optional[str],
|
||||
secret: Optional[str],
|
||||
) -> Any:
|
||||
"""Construct the JWT verifier from configured keys/secret."""
|
||||
debug_errors = app.config.get("MCP_JWT_DEBUG_ERRORS", False)
|
||||
|
||||
common_kwargs: Dict[str, Any] = {
|
||||
"issuer": app.config.get("MCP_JWT_ISSUER"),
|
||||
"audience": app.config.get("MCP_JWT_AUDIENCE"),
|
||||
"required_scopes": app.config.get("MCP_REQUIRED_SCOPES", []),
|
||||
}
|
||||
|
||||
# For HS256 (symmetric), use the secret as the public_key parameter
|
||||
if app.config.get("MCP_JWT_ALGORITHM") == "HS256" and secret:
|
||||
common_kwargs["public_key"] = secret
|
||||
common_kwargs["algorithm"] = "HS256"
|
||||
else:
|
||||
# For RS256 (asymmetric), use public key or JWKS
|
||||
common_kwargs["jwks_uri"] = jwks_uri
|
||||
common_kwargs["public_key"] = public_key
|
||||
common_kwargs["algorithm"] = app.config.get("MCP_JWT_ALGORITHM", "RS256")
|
||||
|
||||
if debug_errors:
|
||||
# DetailedJWTVerifier: detailed server-side logging of JWT
|
||||
# validation failures. HTTP responses are always generic per
|
||||
# RFC 6750 Section 3.1.
|
||||
return DetailedJWTVerifier(**common_kwargs)
|
||||
|
||||
# Default JWTVerifier: minimal logging, generic error responses.
|
||||
return JWTVerifier(**common_kwargs)
|
||||
|
||||
|
||||
def default_user_resolver(app: Any, access_token: Any) -> str | None:
|
||||
|
||||
@@ -429,13 +429,13 @@ def _tool_allowed_for_current_user(tool: Any) -> bool:
|
||||
if not getattr(g, "user", None):
|
||||
try:
|
||||
g.user = get_user_from_request()
|
||||
except ValueError:
|
||||
except (ValueError, PermissionError):
|
||||
return False
|
||||
|
||||
method_permission_name = getattr(tool_func, METHOD_PERMISSION_ATTR, "read")
|
||||
permission_name = f"{PERMISSION_PREFIX}{method_permission_name}"
|
||||
return security_manager.can_access(permission_name, class_permission_name)
|
||||
except (AttributeError, RuntimeError, ValueError):
|
||||
except (AttributeError, RuntimeError, ValueError, PermissionError):
|
||||
logger.debug("Could not evaluate tool search permission", exc_info=True)
|
||||
return False
|
||||
|
||||
@@ -673,7 +673,9 @@ def _create_auth_provider(flask_app: Any) -> Any | None:
|
||||
"""Create an auth provider from Flask app config.
|
||||
|
||||
Tries MCP_AUTH_FACTORY first, then falls back to the default factory
|
||||
when MCP_AUTH_ENABLED is True.
|
||||
when either ``MCP_AUTH_ENABLED`` (JWT auth) or ``FAB_API_KEY_ENABLED``
|
||||
(API key auth) is True. The default factory builds a
|
||||
``CompositeTokenVerifier`` that handles either or both auth modes.
|
||||
"""
|
||||
auth_provider = None
|
||||
if auth_factory := flask_app.config.get("MCP_AUTH_FACTORY"):
|
||||
@@ -686,7 +688,9 @@ def _create_auth_provider(flask_app: Any) -> Any | None:
|
||||
except Exception:
|
||||
# Do not log the exception — it may contain secrets
|
||||
logger.error("Failed to create auth provider from MCP_AUTH_FACTORY")
|
||||
elif flask_app.config.get("MCP_AUTH_ENABLED", False):
|
||||
elif flask_app.config.get("MCP_AUTH_ENABLED", False) or flask_app.config.get(
|
||||
"FAB_API_KEY_ENABLED", False
|
||||
):
|
||||
from superset.mcp_service.mcp_config import (
|
||||
create_default_mcp_auth_factory,
|
||||
)
|
||||
|
||||
@@ -52,7 +52,8 @@ from flask_login import AnonymousUserMixin, LoginManager
|
||||
from jwt.api_jwt import _jwt_global_obj
|
||||
from sqlalchemy import and_, inspect, or_
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.orm import eagerload
|
||||
from sqlalchemy.orm import eagerload, joinedload
|
||||
from sqlalchemy.orm.exc import MultipleResultsFound
|
||||
from sqlalchemy.orm.mapper import Mapper
|
||||
from sqlalchemy.orm.query import Query as SqlaQuery
|
||||
from sqlalchemy.sql import exists
|
||||
@@ -462,6 +463,10 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
|
||||
"PermissionViewMenu",
|
||||
"ViewMenu",
|
||||
"User",
|
||||
# FAB ApiKeyApi blueprint (active when FAB_API_KEY_ENABLED=True).
|
||||
# Listed unconditionally — harmless when the feature is off because
|
||||
# no PVMs exist under this view menu.
|
||||
"ApiKey",
|
||||
} | USER_MODEL_VIEWS
|
||||
|
||||
ALPHA_ONLY_VIEW_MENUS = {
|
||||
@@ -3164,6 +3169,60 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
|
||||
.one_or_none()
|
||||
)
|
||||
|
||||
def find_user_with_relationships(
|
||||
self,
|
||||
username: Optional[str] = None,
|
||||
email: Optional[str] = None,
|
||||
) -> Optional[User]:
|
||||
"""Find a user with roles and group roles eagerly loaded.
|
||||
|
||||
Mirrors FAB's ``SecurityManager.find_user``
|
||||
(including ``auth_username_ci`` case-insensitive handling and
|
||||
``MultipleResultsFound`` guard) and additionally eager-loads
|
||||
``User.roles`` and ``User.groups.roles`` to prevent detached-instance
|
||||
errors when the SQLAlchemy session is closed or rolled back after the
|
||||
lookup — as happens in MCP tool-execution contexts.
|
||||
"""
|
||||
eager = [
|
||||
joinedload(self.user_model.roles),
|
||||
joinedload(self.user_model.groups).joinedload("roles"),
|
||||
]
|
||||
if username:
|
||||
try:
|
||||
if self.auth_username_ci:
|
||||
from sqlalchemy import func as sa_func
|
||||
|
||||
return (
|
||||
self.session.query(self.user_model)
|
||||
.options(*eager)
|
||||
.filter(
|
||||
sa_func.lower(self.user_model.username)
|
||||
== sa_func.lower(username)
|
||||
)
|
||||
.one_or_none()
|
||||
)
|
||||
return (
|
||||
self.session.query(self.user_model)
|
||||
.options(*eager)
|
||||
.filter(self.user_model.username == username)
|
||||
.one_or_none()
|
||||
)
|
||||
except MultipleResultsFound:
|
||||
logger.error("Multiple results found for user %s", username)
|
||||
return None
|
||||
if email:
|
||||
try:
|
||||
return (
|
||||
self.session.query(self.user_model)
|
||||
.options(*eager)
|
||||
.filter_by(email=email)
|
||||
.one_or_none()
|
||||
)
|
||||
except MultipleResultsFound:
|
||||
logger.error("Multiple results found for user with email %s", email)
|
||||
return None
|
||||
return None
|
||||
|
||||
def get_anonymous_user(self) -> User:
|
||||
return AnonymousUserMixin()
|
||||
|
||||
|
||||
@@ -31,6 +31,7 @@ DATABASE_KEYS = [
|
||||
"allow_cvas",
|
||||
"allow_dml",
|
||||
"allow_run_async",
|
||||
"allows_cost_estimate",
|
||||
"allows_subquery",
|
||||
"backend",
|
||||
"database_name",
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
import yaml
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from superset import db
|
||||
@@ -304,3 +305,54 @@ version: 1.0.0
|
||||
""",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_export_two_datasets_same_table_name_different_schema(
|
||||
session: Session,
|
||||
) -> None:
|
||||
"""
|
||||
Regression coverage for GitHub issue #16141.
|
||||
|
||||
Exporting two datasets that share a `table_name` but live in
|
||||
different schemas (e.g. prod.users + dev.users) must produce two
|
||||
distinct entries in the export. Historically the pair could collide
|
||||
onto a single filename — the export filename is now disambiguated by
|
||||
dataset id, so this test pins that behavior so it can't silently
|
||||
regress.
|
||||
"""
|
||||
from superset.commands.dataset.export import ExportDatasetsCommand
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.models.core import Database
|
||||
|
||||
engine = db.session.get_bind()
|
||||
SqlaTable.metadata.create_all(engine) # pylint: disable=no-member
|
||||
|
||||
database = Database(database_name="my_database", sqlalchemy_uri="sqlite://")
|
||||
db.session.add(database)
|
||||
db.session.flush()
|
||||
|
||||
prod = SqlaTable(table_name="users", schema="prod", database=database)
|
||||
dev = SqlaTable(table_name="users", schema="dev", database=database)
|
||||
db.session.add_all([prod, dev])
|
||||
db.session.flush()
|
||||
|
||||
paths: list[str] = []
|
||||
contents: list[str] = []
|
||||
for ds in (prod, dev):
|
||||
for path, content_fn in ExportDatasetsCommand._export( # pylint: disable=protected-access
|
||||
ds, export_related=False
|
||||
):
|
||||
paths.append(path)
|
||||
contents.append(content_fn())
|
||||
|
||||
# Both datasets must produce distinct export paths — no collision.
|
||||
assert len(paths) == len(set(paths)), (
|
||||
f"Export filenames collided for same-table-name datasets: {paths}"
|
||||
)
|
||||
|
||||
# And both YAML payloads must reflect their own schema, not be
|
||||
# silently merged or overwritten.
|
||||
schemas_in_yaml = {yaml.safe_load(c)["schema"] for c in contents}
|
||||
assert schemas_in_yaml == {"prod", "dev"}, (
|
||||
f"Expected both prod and dev schemas in export, got {schemas_in_yaml}"
|
||||
)
|
||||
|
||||
@@ -15,25 +15,55 @@
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Tests for API key authentication in get_user_from_request()."""
|
||||
"""Tests for API key authentication in get_user_from_request().
|
||||
|
||||
The streamable-http transport does not push a Flask request context, so
|
||||
``_resolve_user_from_api_key`` reads the token from FastMCP's per-request
|
||||
``AccessToken`` (populated by ``CompositeTokenVerifier``) rather than from
|
||||
``flask.request``. These tests mock ``get_access_token`` accordingly.
|
||||
"""
|
||||
|
||||
from collections.abc import Generator
|
||||
from contextlib import contextmanager
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from flask import g
|
||||
|
||||
from superset.mcp_service.auth import get_user_from_request
|
||||
from superset.app import SupersetApp
|
||||
from superset.mcp_service.auth import (
|
||||
_resolve_user_from_jwt_context,
|
||||
get_user_from_request,
|
||||
)
|
||||
from superset.mcp_service.composite_token_verifier import API_KEY_PASSTHROUGH_CLAIM
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_user():
|
||||
def mock_user() -> MagicMock:
|
||||
user = MagicMock()
|
||||
user.username = "api_key_user"
|
||||
return user
|
||||
|
||||
|
||||
def _passthrough_access_token(token: str) -> MagicMock:
|
||||
"""Build an AccessToken matching what CompositeTokenVerifier emits."""
|
||||
access_token = MagicMock()
|
||||
access_token.token = token
|
||||
access_token.client_id = "api_key"
|
||||
access_token.claims = {API_KEY_PASSTHROUGH_CLAIM: True}
|
||||
return access_token
|
||||
|
||||
|
||||
def _patch_access_token(access_token: MagicMock | None):
|
||||
"""Patch get_access_token where _resolve_user_from_api_key imports it."""
|
||||
return patch(
|
||||
"fastmcp.server.dependencies.get_access_token",
|
||||
return_value=access_token,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _enable_api_keys(app):
|
||||
def _enable_api_keys(app: SupersetApp) -> Generator[None, None, None]:
|
||||
"""Enable FAB API key auth and clear MCP_DEV_USERNAME so the API key
|
||||
path is exercised instead of falling through to the dev-user fallback."""
|
||||
app.config["FAB_API_KEY_ENABLED"] = True
|
||||
@@ -45,7 +75,7 @@ def _enable_api_keys(app):
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _disable_api_keys(app):
|
||||
def _disable_api_keys(app: SupersetApp) -> Generator[None, None, None]:
|
||||
app.config["FAB_API_KEY_ENABLED"] = False
|
||||
old_dev = app.config.pop("MCP_DEV_USERNAME", None)
|
||||
yield
|
||||
@@ -54,24 +84,45 @@ def _disable_api_keys(app):
|
||||
app.config["MCP_DEV_USERNAME"] = old_dev
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _mock_sm_ctx(app: SupersetApp, mock_sm: MagicMock):
|
||||
"""Push an app context with g.user cleared and appbuilder.sm mocked."""
|
||||
with app.app_context():
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
yield
|
||||
|
||||
|
||||
def _patch_load_user_not_found():
|
||||
"""Patch load_user_with_relationships to return None (user not found).
|
||||
|
||||
load_user_with_relationships delegates to the global security_manager
|
||||
(not app.appbuilder.sm), so tests that need the JWT path to raise
|
||||
ValueError("not found") must patch it directly at the module level.
|
||||
"""
|
||||
return patch(
|
||||
"superset.mcp_service.auth.load_user_with_relationships",
|
||||
return_value=None,
|
||||
)
|
||||
|
||||
|
||||
# -- Valid API key -> user loaded --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_valid_api_key_returns_user(app, mock_user) -> None:
|
||||
"""A valid API key should authenticate and return the user."""
|
||||
def test_valid_api_key_returns_user(app: SupersetApp, mock_user: MagicMock) -> None:
|
||||
"""A valid API key pass-through token should authenticate and return the user."""
|
||||
mock_sm = MagicMock()
|
||||
mock_sm.extract_api_key_from_request.return_value = "sst_abc123"
|
||||
mock_sm.validate_api_key.return_value = mock_user
|
||||
|
||||
with app.test_request_context(headers={"Authorization": "Bearer sst_abc123"}):
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
with patch(
|
||||
"superset.mcp_service.auth.load_user_with_relationships",
|
||||
return_value=mock_user,
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with (
|
||||
_patch_access_token(_passthrough_access_token("sst_abc123")),
|
||||
patch(
|
||||
"superset.mcp_service.auth.load_user_with_relationships",
|
||||
return_value=mock_user,
|
||||
),
|
||||
):
|
||||
result = get_user_from_request()
|
||||
|
||||
@@ -79,75 +130,70 @@ def test_valid_api_key_returns_user(app, mock_user) -> None:
|
||||
mock_sm.validate_api_key.assert_called_once_with("sst_abc123")
|
||||
|
||||
|
||||
# -- Invalid API key -> PermissionError --
|
||||
# -- Invalid API key -> PermissionError (does not silently fall back) --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_invalid_api_key_raises(app) -> None:
|
||||
"""An invalid API key should raise PermissionError."""
|
||||
def test_invalid_api_key_raises(app: SupersetApp) -> None:
|
||||
"""An invalid API key pass-through token should raise PermissionError
|
||||
(fail closed — do NOT fall through to MCP_DEV_USERNAME)."""
|
||||
mock_sm = MagicMock()
|
||||
mock_sm.extract_api_key_from_request.return_value = "sst_bad_key"
|
||||
mock_sm.validate_api_key.return_value = None
|
||||
|
||||
with app.test_request_context(headers={"Authorization": "Bearer sst_bad_key"}):
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
with pytest.raises(PermissionError, match="Invalid or expired API key"):
|
||||
get_user_from_request()
|
||||
# The dangerous fallthrough scenario: dev username IS set, but the
|
||||
# request presented an invalid API key. The dev fallback must not
|
||||
# mask the rejection.
|
||||
app.config["MCP_DEV_USERNAME"] = "admin"
|
||||
try:
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(_passthrough_access_token("sst_bad_key")):
|
||||
with pytest.raises(PermissionError, match="Invalid or expired API key"):
|
||||
get_user_from_request()
|
||||
finally:
|
||||
app.config.pop("MCP_DEV_USERNAME", None)
|
||||
|
||||
|
||||
# -- API key disabled -> falls through to next auth method --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_disable_api_keys")
|
||||
def test_api_key_disabled_skips_auth(app) -> None:
|
||||
"""When FAB_API_KEY_ENABLED is False, API key auth is skipped entirely."""
|
||||
def test_api_key_disabled_skips_auth(app: SupersetApp) -> None:
|
||||
"""When FAB_API_KEY_ENABLED is False, API key auth is skipped entirely
|
||||
even if an AccessToken is present."""
|
||||
mock_sm = MagicMock()
|
||||
|
||||
with app.test_request_context(headers={"Authorization": "Bearer sst_abc123"}):
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
# Without API key auth or MCP_DEV_USERNAME, should raise ValueError
|
||||
# about no authenticated user (not about invalid API key)
|
||||
with pytest.raises(ValueError, match="No authenticated user found"):
|
||||
get_user_from_request()
|
||||
|
||||
# SecurityManager API key methods should never be called
|
||||
mock_sm.extract_api_key_from_request.assert_not_called()
|
||||
|
||||
|
||||
# -- No request context -> API key auth skipped --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_no_request_context_skips_api_key_auth(app) -> None:
|
||||
"""Without a request context, API key auth should be skipped
|
||||
(e.g., during MCP tool discovery with only an app context)."""
|
||||
mock_sm = MagicMock()
|
||||
|
||||
with app.app_context():
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
# Explicitly mock has_request_context to False because the test
|
||||
# framework's app fixture may implicitly provide a request context.
|
||||
with patch("superset.mcp_service.auth.has_request_context", return_value=False):
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(_passthrough_access_token("sst_abc123")):
|
||||
with pytest.raises(ValueError, match="No authenticated user found"):
|
||||
get_user_from_request()
|
||||
|
||||
mock_sm.extract_api_key_from_request.assert_not_called()
|
||||
mock_sm.validate_api_key.assert_not_called()
|
||||
|
||||
|
||||
# -- No AccessToken -> API key auth skipped --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_no_access_token_skips_api_key_auth(app: SupersetApp) -> None:
|
||||
"""Without a FastMCP AccessToken (e.g., MCP_AUTH_ENABLED=False and no
|
||||
auth provider installed), API key auth is skipped."""
|
||||
mock_sm = MagicMock()
|
||||
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(None):
|
||||
with pytest.raises(ValueError, match="No authenticated user found"):
|
||||
get_user_from_request()
|
||||
|
||||
mock_sm.validate_api_key.assert_not_called()
|
||||
|
||||
|
||||
# -- g.user fallback when no higher-priority auth succeeds --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_disable_api_keys")
|
||||
def test_g_user_fallback_when_no_jwt_or_api_key(app, mock_user) -> None:
|
||||
def test_g_user_fallback_when_no_jwt_or_api_key(
|
||||
app: SupersetApp, mock_user: MagicMock
|
||||
) -> None:
|
||||
"""When no JWT or API key auth succeeds and MCP_DEV_USERNAME is not set,
|
||||
g.user (set by external middleware) is used as fallback."""
|
||||
with app.test_request_context():
|
||||
@@ -158,89 +204,174 @@ def test_g_user_fallback_when_no_jwt_or_api_key(app, mock_user) -> None:
|
||||
assert result.username == "api_key_user"
|
||||
|
||||
|
||||
# -- FAB version without extract_api_key_from_request --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_fab_without_extract_method_skips_gracefully(app) -> None:
|
||||
"""If FAB SecurityManager lacks extract_api_key_from_request,
|
||||
API key auth should be skipped with a debug log, not crash."""
|
||||
mock_sm = MagicMock(spec=[]) # empty spec = no attributes
|
||||
|
||||
with app.test_request_context():
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
with pytest.raises(ValueError, match="No authenticated user found"):
|
||||
get_user_from_request()
|
||||
|
||||
|
||||
# -- FAB version without validate_api_key --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_fab_without_validate_method_raises(app) -> None:
|
||||
"""If FAB has extract_api_key_from_request but not validate_api_key,
|
||||
should raise PermissionError about unavailable validation."""
|
||||
mock_sm = MagicMock(spec=["extract_api_key_from_request"])
|
||||
mock_sm.extract_api_key_from_request.return_value = "sst_abc123"
|
||||
def test_fab_without_validate_method_raises(app: SupersetApp) -> None:
|
||||
"""If FAB SecurityManager lacks validate_api_key, should raise
|
||||
PermissionError about unavailable validation."""
|
||||
mock_sm = MagicMock(spec=[]) # empty spec = no attributes
|
||||
|
||||
with app.test_request_context(headers={"Authorization": "Bearer sst_abc123"}):
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
with pytest.raises(
|
||||
PermissionError, match="API key validation is not available"
|
||||
):
|
||||
get_user_from_request()
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(_passthrough_access_token("sst_abc123")):
|
||||
with pytest.raises(
|
||||
PermissionError, match="API key validation is not available"
|
||||
):
|
||||
get_user_from_request()
|
||||
|
||||
|
||||
# -- Relationship reload fallback --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_relationship_reload_failure_returns_original_user(app, mock_user) -> None:
|
||||
def test_relationship_reload_failure_returns_original_user(
|
||||
app: SupersetApp, mock_user: MagicMock
|
||||
) -> None:
|
||||
"""If load_user_with_relationships fails, the original user from
|
||||
validate_api_key should be returned as fallback."""
|
||||
mock_sm = MagicMock()
|
||||
mock_sm.extract_api_key_from_request.return_value = "sst_abc123"
|
||||
mock_sm.validate_api_key.return_value = mock_user
|
||||
|
||||
with app.test_request_context(headers={"Authorization": "Bearer sst_abc123"}):
|
||||
g.user = None
|
||||
app.appbuilder = MagicMock()
|
||||
app.appbuilder.sm = mock_sm
|
||||
|
||||
with patch(
|
||||
"superset.mcp_service.auth.load_user_with_relationships",
|
||||
return_value=None,
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with (
|
||||
_patch_access_token(_passthrough_access_token("sst_abc123")),
|
||||
patch(
|
||||
"superset.mcp_service.auth.load_user_with_relationships",
|
||||
return_value=None,
|
||||
),
|
||||
):
|
||||
result = get_user_from_request()
|
||||
|
||||
assert result is mock_user
|
||||
|
||||
|
||||
# -- AccessToken without passthrough claim (plain JWT) -> skip API key auth --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_jwt_access_token_skips_api_key_auth(app: SupersetApp) -> None:
|
||||
"""When the AccessToken is a plain JWT (no API_KEY_PASSTHROUGH_CLAIM),
|
||||
API key auth is skipped — the JWT was already validated by the JWT
|
||||
verifier and resolved in _resolve_user_from_jwt_context."""
|
||||
mock_sm = MagicMock()
|
||||
|
||||
jwt_access_token = MagicMock()
|
||||
jwt_access_token.token = "eyJhbGciOiJIUzI1NiJ9.not-an-api-key" # noqa: S105
|
||||
jwt_access_token.claims = {"sub": "alice"}
|
||||
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(jwt_access_token), _patch_load_user_not_found():
|
||||
# _resolve_user_from_jwt_context resolves "alice" from JWT claims
|
||||
# and raises ValueError because the username is not a real user.
|
||||
# We assert that _resolve_user_from_api_key did NOT short-circuit
|
||||
# to the API key path.
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
get_user_from_request()
|
||||
|
||||
mock_sm.validate_api_key.assert_not_called()
|
||||
|
||||
|
||||
# -- API key pass-through detection in JWT context resolver --
|
||||
|
||||
|
||||
def test_jwt_context_with_api_key_passthrough_returns_none(app: SupersetApp) -> None:
|
||||
"""When CompositeTokenVerifier passes through an API key token,
|
||||
_resolve_user_from_jwt_context should detect the namespaced
|
||||
pass-through claim AND client_id=="api_key" and return None so
|
||||
get_user_from_request falls through to _resolve_user_from_api_key."""
|
||||
mock_access_token = MagicMock()
|
||||
mock_access_token.client_id = "api_key"
|
||||
mock_access_token.claims = {API_KEY_PASSTHROUGH_CLAIM: True}
|
||||
|
||||
with patch(
|
||||
"fastmcp.server.dependencies.get_access_token",
|
||||
return_value=mock_access_token,
|
||||
):
|
||||
result = _resolve_user_from_jwt_context(app)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
def test_namespaced_claim_without_api_key_client_id_is_ignored(
|
||||
app: SupersetApp,
|
||||
) -> None:
|
||||
"""An external IdP JWT that includes the namespaced API_KEY_PASSTHROUGH_CLAIM
|
||||
but does NOT have client_id=='api_key' must NOT divert into the API-key path.
|
||||
The client_id guard prevents misclassification / DoS for affected JWT users."""
|
||||
mock_sm = MagicMock()
|
||||
|
||||
rogue_token = MagicMock()
|
||||
rogue_token.token = "eyJhbGciOiJSUzI1NiJ9.idp_jwt_with_rogue_claim" # noqa: S105
|
||||
rogue_token.client_id = "some-idp-client"
|
||||
rogue_token.claims = {API_KEY_PASSTHROUGH_CLAIM: True, "sub": "alice"}
|
||||
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(rogue_token), _patch_load_user_not_found():
|
||||
# JWT path resolves "alice" from claims and raises ValueError
|
||||
# because no such user exists.
|
||||
# validate_api_key must NOT be called — the rogue claim was ignored.
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
get_user_from_request()
|
||||
|
||||
mock_sm.validate_api_key.assert_not_called()
|
||||
|
||||
|
||||
# -- Plain JWT with a colliding non-namespaced claim is NOT mistaken for API key --
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_api_keys")
|
||||
def test_unnamespaced_passthrough_claim_does_not_trigger_api_key_path(
|
||||
app: SupersetApp,
|
||||
) -> None:
|
||||
"""A JWT minted by an external IdP that happens to include a custom
|
||||
``_api_key_passthrough`` claim (legacy unnamespaced name) must NOT be
|
||||
treated as an API-key pass-through. Only the namespaced
|
||||
``API_KEY_PASSTHROUGH_CLAIM`` triggers the API-key path."""
|
||||
mock_sm = MagicMock()
|
||||
|
||||
rogue_token = MagicMock()
|
||||
rogue_token.token = "eyJhbGciOiJSUzI1NiJ9.rogue_jwt" # noqa: S105
|
||||
rogue_token.claims = {"_api_key_passthrough": True, "sub": "alice"}
|
||||
|
||||
with _mock_sm_ctx(app, mock_sm):
|
||||
with _patch_access_token(rogue_token), _patch_load_user_not_found():
|
||||
# JWT path resolves "alice" from claims and raises ValueError.
|
||||
# validate_api_key must NOT be called — the rogue claim was ignored.
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
get_user_from_request()
|
||||
|
||||
mock_sm.validate_api_key.assert_not_called()
|
||||
|
||||
|
||||
# -- SecurityManager method name regression test --
|
||||
|
||||
|
||||
def test_security_manager_has_expected_api_key_methods(app: SupersetApp) -> None:
    """Regression test: verify the SecurityManager method names referenced in
    auth._resolve_user_from_api_key() actually exist on the FAB SecurityManager
    class. This catches future renames before they silently break API key auth
    at runtime (SC-99414: _extract_api_key_from_request vs
    extract_api_key_from_request; see also PR #39437).

    Fixes a merge artifact where the test body re-defined a nested function of
    the same name (which never ran) and duplicated the ``validate_api_key``
    assertion outside the app context.
    """
    with app.app_context():
        # Import inside the app context: the security_manager proxy is only
        # usable once the Superset app has been initialized.
        from superset import security_manager

        sm = security_manager
        assert hasattr(sm, "extract_api_key_from_request"), (
            "FAB SecurityManager is missing 'extract_api_key_from_request'. "
            "auth._resolve_user_from_api_key() references this method by name — "
            "update auth.py if the FAB API changed."
        )
        assert hasattr(sm, "validate_api_key"), (
            "FAB SecurityManager is missing 'validate_api_key'. "
            "auth._resolve_user_from_api_key() references this method by name — "
            "update auth.py if the FAB API changed."
        )
|
||||
|
||||
|
||||
def test_security_manager_has_find_user_with_relationships(app: SupersetApp) -> None:
    """Guard against renames of
    SupersetSecurityManager.find_user_with_relationships:
    auth.load_user_with_relationships() delegates to it, so its removal
    would silently break MCP user resolution at runtime."""
    with app.app_context():
        from superset import security_manager

        sm = security_manager
        assert hasattr(sm, "find_user_with_relationships"), (
            "SupersetSecurityManager is missing 'find_user_with_relationships'. "
            "auth.load_user_with_relationships() delegates to this method — "
            "update auth.py if the method was renamed or removed."
        )
|
||||
|
||||
@@ -285,7 +285,7 @@ def test_mcp_auth_hook_clears_stale_g_user(app) -> None:
|
||||
# framework's autouse app_context fixture may implicitly provide
|
||||
# a request context in some CI environments.
|
||||
with (
|
||||
patch("flask.has_request_context", return_value=False),
|
||||
patch("superset.mcp_service.auth.has_request_context", return_value=False),
|
||||
patch(
|
||||
"superset.mcp_service.auth.get_user_from_request",
|
||||
side_effect=lambda: _assert_cleared_then_return(),
|
||||
@@ -324,7 +324,7 @@ def test_mcp_auth_hook_clears_stale_g_user_async(app) -> None:
|
||||
with app.app_context():
|
||||
g.user = stale_user
|
||||
with (
|
||||
patch("flask.has_request_context", return_value=False),
|
||||
patch("superset.mcp_service.auth.has_request_context", return_value=False),
|
||||
patch(
|
||||
"superset.mcp_service.auth.get_user_from_request",
|
||||
side_effect=lambda: _assert_cleared_then_return(),
|
||||
|
||||
218
tests/unit_tests/mcp_service/test_composite_token_verifier.py
Normal file
218
tests/unit_tests/mcp_service/test_composite_token_verifier.py
Normal file
@@ -0,0 +1,218 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Tests for CompositeTokenVerifier."""
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from fastmcp.server.auth import AccessToken
|
||||
|
||||
from superset.mcp_service.composite_token_verifier import (
|
||||
API_KEY_PASSTHROUGH_CLAIM,
|
||||
CompositeTokenVerifier,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def mock_jwt_verifier() -> MagicMock:
    """A stand-in JWT verifier: async ``verify_token`` and no required scopes."""
    mock = MagicMock()
    mock.verify_token = AsyncMock()
    mock.required_scopes = []
    return mock
|
||||
|
||||
|
||||
@pytest.fixture
def composite_verifier(mock_jwt_verifier: MagicMock) -> CompositeTokenVerifier:
    """A CompositeTokenVerifier wrapping the mock JWT verifier with two prefixes."""
    prefixes = ["sst_", "pat_"]
    return CompositeTokenVerifier(
        jwt_verifier=mock_jwt_verifier,
        api_key_prefixes=prefixes,
    )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_api_key_token_returns_passthrough(
    composite_verifier: CompositeTokenVerifier,
) -> None:
    """A token carrying a configured API-key prefix yields a pass-through
    AccessToken instead of being handed to the JWT verifier."""
    raw_key = "sst_abc123secret"  # noqa: S105

    access = await composite_verifier.verify_token(raw_key)

    assert access is not None
    assert access.token == raw_key
    assert access.client_id == "api_key"
    assert access.claims.get(API_KEY_PASSTHROUGH_CLAIM) is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_second_prefix_matches(
    composite_verifier: CompositeTokenVerifier,
) -> None:
    """Every configured prefix participates in matching — not only the first."""
    access = await composite_verifier.verify_token("pat_mytoken")

    assert access is not None
    assert access.claims.get(API_KEY_PASSTHROUGH_CLAIM) is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_jwt_token_delegates_to_wrapped_verifier(
    composite_verifier: CompositeTokenVerifier, mock_jwt_verifier: MagicMock
) -> None:
    """A token with no API-key prefix is forwarded to the wrapped JWT
    verifier and its result is returned unchanged."""
    jwt_token = "eyJhbGciOiJSUzI1NiJ9.jwt_payload"  # noqa: S105
    expected = AccessToken(
        token=jwt_token,
        client_id="oauth_client",
        scopes=["read"],
        claims={"sub": "user1"},
    )
    mock_jwt_verifier.verify_token.return_value = expected

    outcome = await composite_verifier.verify_token(jwt_token)

    assert outcome is expected
    mock_jwt_verifier.verify_token.assert_awaited_once_with(jwt_token)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_invalid_jwt_returns_none(
    composite_verifier: CompositeTokenVerifier, mock_jwt_verifier: MagicMock
) -> None:
    """A token the JWT verifier rejects comes back as None from the composite."""
    mock_jwt_verifier.verify_token.return_value = None

    verified = await composite_verifier.verify_token("not_a_valid_token")

    assert verified is None
    mock_jwt_verifier.verify_token.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_api_key_does_not_call_jwt_verifier(
    composite_verifier: CompositeTokenVerifier, mock_jwt_verifier: MagicMock
) -> None:
    """Matching an API-key prefix short-circuits: the JWT verifier is never awaited."""
    # Return value is irrelevant here; only the delegation (non-)call matters.
    await composite_verifier.verify_token("sst_test_key")

    mock_jwt_verifier.verify_token.assert_not_awaited()
|
||||
|
||||
|
||||
# -- API-key-only mode (no JWT verifier configured) --
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_api_key_only_mode_accepts_api_keys() -> None:
    """With no JWT verifier configured, API-key tokens still pass through."""
    ctv = CompositeTokenVerifier(jwt_verifier=None, api_key_prefixes=["sst_"])

    access = await ctv.verify_token("sst_abc123")

    assert access is not None
    assert access.claims.get(API_KEY_PASSTHROUGH_CLAIM) is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_api_key_only_mode_rejects_non_api_key_tokens() -> None:
    """With no JWT verifier configured, a non-API-key Bearer token is
    rejected at the transport rather than silently accepted."""
    ctv = CompositeTokenVerifier(jwt_verifier=None, api_key_prefixes=["sst_"])

    access = await ctv.verify_token("eyJhbGciOiJSUzI1NiJ9.jwt_payload")

    assert access is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_empty_string_prefix_is_filtered_out() -> None:
    """An empty-string prefix would match every Bearer token (a DoS vector),
    so it must be dropped and never stored in _api_key_prefixes."""
    ctv = CompositeTokenVerifier(jwt_verifier=None, api_key_prefixes=[""])

    assert "" not in ctv._api_key_prefixes
    # A plain JWT must NOT be misidentified as an API key.
    access = await ctv.verify_token("eyJhbGciOiJSUzI1NiJ9.jwt_payload")
    assert access is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_whitespace_only_prefix_is_filtered_out() -> None:
    """Whitespace-only prefixes are invalid configuration and must be dropped."""
    ctv = CompositeTokenVerifier(jwt_verifier=None, api_key_prefixes=[" "])

    assert " " not in ctv._api_key_prefixes
    access = await ctv.verify_token(" starts_with_spaces")
    assert access is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_non_string_prefix_is_filtered_out() -> None:
    """Non-string entries (None, ints, …) must be discarded at construction
    time rather than raising a TypeError later inside verify_token."""
    ctv = CompositeTokenVerifier(
        jwt_verifier=None,
        api_key_prefixes=[None, 42, "sst_"],  # type: ignore[list-item]
    )

    assert None not in ctv._api_key_prefixes
    assert 42 not in ctv._api_key_prefixes
    assert ctv._api_key_prefixes == ("sst_",)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_invalid_prefixes_emit_warning(caplog: pytest.LogCaptureFixture) -> None:
    """Dropping an invalid prefix must come with a logger.warning so
    operators can spot a bad FAB_API_KEY_PREFIXES value."""
    import logging

    target_logger = "superset.mcp_service.composite_token_verifier"
    with caplog.at_level(logging.WARNING, logger=target_logger):
        CompositeTokenVerifier(jwt_verifier=None, api_key_prefixes=["", "sst_"])

    assert any("invalid" in rec.message.lower() for rec in caplog.records)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_all_invalid_prefixes_accepts_no_api_keys() -> None:
    """If every configured prefix is invalid, nothing may take the API-key path."""
    ctv = CompositeTokenVerifier(jwt_verifier=None, api_key_prefixes=["", " "])

    assert ctv._api_key_prefixes == ()
    access = await ctv.verify_token("sst_abc123")
    assert access is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_api_key_passthrough_propagates_required_scopes() -> None:
    """The pass-through AccessToken must inherit the verifier's
    required_scopes, otherwise FastMCP's transport-level
    ``RequireAuthMiddleware`` would 403 the request before
    ``_resolve_user_from_api_key`` ever runs."""
    wrapped_verifier = MagicMock()
    wrapped_verifier.required_scopes = ["read", "write"]
    wrapped_verifier.verify_token = AsyncMock()

    ctv = CompositeTokenVerifier(
        jwt_verifier=wrapped_verifier, api_key_prefixes=["sst_"]
    )

    access = await ctv.verify_token("sst_abc123")

    assert access is not None
    assert access.scopes == ["read", "write"]
|
||||
@@ -91,6 +91,17 @@ def test_is_gamma_pvm_excludes_export_image(app_context: None) -> None:
|
||||
assert sm._is_gamma_pvm(pvm) is False
|
||||
|
||||
|
||||
def test_api_key_view_menu_is_admin_only() -> None:
    """Regression test: 'ApiKey' must be in ADMIN_ONLY_VIEW_MENUS.

    FAB registers an ApiKeyApi blueprint when FAB_API_KEY_ENABLED=True.
    Without this guard any Gamma user could reach the API key management
    endpoints. A rename or removal of the entry would silently re-open
    that access hole.
    """
    admin_only_menus = SupersetSecurityManager.ADMIN_ONLY_VIEW_MENUS
    assert "ApiKey" in admin_only_menus
|
||||
|
||||
|
||||
def test_is_gamma_pvm_allows_copy_clipboard(app_context: None) -> None:
|
||||
"""Verify _is_gamma_pvm returns True for can_copy_clipboard."""
|
||||
from superset.extensions import appbuilder
|
||||
|
||||
Reference in New Issue
Block a user