Compare commits

..

2 Commits

Author SHA1 Message Date
Joe Li
1f54d52aa5 chore: update comment 2026-04-30 11:46:24 -07:00
Joe Li
9b4c74f74a fix(playwright): use superset shell instead of python -c in bashlib
Replace inline `python -c` block with `superset shell` heredoc, which has
the Flask app context pre-loaded. Drops the create_app()/app_context()
boilerplate and addresses review feedback.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-30 11:46:24 -07:00
641 changed files with 23528 additions and 55875 deletions

4
.github/CODEOWNERS vendored
View File

@@ -36,10 +36,6 @@
**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas
# Notify translation maintainers of changes to translations
/superset/translations/ @sfirke
# Notify PMC members of changes to extension-related files
/docs/developer_portal/extensions/ @michael-s-molina @villebro @rusackas

View File

@@ -17,12 +17,13 @@ jobs:
steps:
- name: Check if user is allowed
id: check
env:
COMMENTER: ${{ github.event.comment.user.login }}
run: |
# List of allowed users
ALLOWED_USERS="mistercrunch,rusackas"
# Get the commenter's username
COMMENTER="${{ github.event.comment.user.login }}"
echo "Checking permissions for user: $COMMENTER"
# Check if user is in allowed list
@@ -44,12 +45,9 @@ jobs:
steps:
- name: Comment access denied
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
COMMENTER_LOGIN: ${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}
with:
script: |
const commenter = process.env.COMMENTER_LOGIN;
const message = `👋 Hi @${commenter}!
const message = `👋 Hi @${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}!
Thanks for trying to use Claude Code, but currently only certain team members have access to this feature.

View File

@@ -41,7 +41,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4
uses: github/codeql-action/init@v4
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -53,6 +53,6 @@ jobs:
- name: Perform CodeQL Analysis
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: github/codeql-action/analyze@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4
uses: github/codeql-action/analyze@v4
with:
category: "/language:${{matrix.language}}"

View File

@@ -29,7 +29,7 @@ jobs:
- name: "Checkout Repository"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: "Dependency Review"
uses: actions/dependency-review-action@a1d282b36b6f3519aa1f3fc636f609c47dddb294 # v5.0.0
uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
continue-on-error: true
with:
fail-on-severity: critical

View File

@@ -58,7 +58,7 @@ jobs:
- name: Login to Amazon ECR
if: steps.describe-services.outputs.active == 'true'
id: login-ecr
uses: aws-actions/amazon-ecr-login@fa648b43de3d4d023bcb3f89ed6940096949c419 # v2
uses: aws-actions/amazon-ecr-login@19d944daaa35f0fa1d3f7f8af1d3f2e5de25c5b7 # v2
- name: Delete ECR image tag
if: steps.describe-services.outputs.active == 'true'

View File

@@ -199,7 +199,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@fa648b43de3d4d023bcb3f89ed6940096949c419 # v2
uses: aws-actions/amazon-ecr-login@19d944daaa35f0fa1d3f7f8af1d3f2e5de25c5b7 # v2
- name: Load, tag and push image to ECR
id: push-image
@@ -235,7 +235,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@fa648b43de3d4d023bcb3f89ed6940096949c419 # v2
uses: aws-actions/amazon-ecr-login@19d944daaa35f0fa1d3f7f8af1d3f2e5de25c5b7 # v2
- name: Check target image exists in ECR
id: check-image
@@ -265,7 +265,7 @@ jobs:
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@6853cfae8c3a7d978fbf68b5a55453395541dfbb # v1
uses: aws-actions/amazon-ecs-render-task-definition@77954e213ba1f9f9cb016b86a1d4f6fcdea0d57e # v1
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
@@ -300,7 +300,7 @@ jobs:
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@a310a830f5c14e583e35d84e4e1ec7dd177c3c9c # v2
uses: aws-actions/amazon-ecs-deploy-task-definition@fc8fc60f3a60ffd500fcb13b209c59d221ac8c8c # v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service

View File

@@ -29,7 +29,7 @@ jobs:
- name: Run latest-tag
uses: ./.github/actions/latest-tag
if: steps.latest-tag.outputs.SKIP_TAG != 'true'
if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
with:
description: Superset latest release
tag-name: latest

View File

@@ -17,16 +17,6 @@ on:
workflow_dispatch: {}
# Serialize deploys: the action pushes to apache/superset-site without
# rebasing, so concurrent runs race on the final push and the loser fails
# with `! [rejected] asf-site -> asf-site (fetch first)`. Cancel any
# in-progress run as soon as a newer one starts — the destination repo
# isn't touched until the final push step, so canceling mid-build is safe,
# and the freshest content always wins.
concurrency:
group: docs-deploy-asf-site
cancel-in-progress: true
jobs:
config:
runs-on: ubuntu-24.04

View File

@@ -78,13 +78,6 @@ jobs:
- name: yarn install
run: |
yarn install --check-cache
- name: Lint docs links
# Fast source-level check for bare relative internal links
# like `[Foo](../foo)` that Docusaurus's onBrokenLinks
# setting can't catch. Runs in seconds; fails fast before
# the expensive build step.
run: |
yarn lint:docs-links
- name: yarn typecheck
run: |
yarn typecheck

View File

@@ -54,7 +54,6 @@ jobs:
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
pytest --durations-min=0.5 --cov=superset/semantic_layers/ ./tests/unit_tests/semantic_layers/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:

View File

@@ -43,9 +43,6 @@ _build/*
_static/*
.buildinfo
searchindex.js
# auto-generated by docs/scripts/convert-api-sidebar.mjs from openapi.json
sidebar.js
sidebar.ts
# auto generated
requirements/*
# vendorized

View File

@@ -56,33 +56,8 @@ def verify_sha512(filename: str) -> str:
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file # noqa: E501
KEYS_URL = "https://downloads.apache.org/superset/KEYS"
def ensure_keys_imported() -> None:
"""Import the Apache Superset KEYS file into the local GPG keyring.
Without this, `gpg --verify` returns "No public key" and the signature
cannot actually be verified — only the key ID in the signature metadata
is visible.
"""
try:
keys = requests.get(KEYS_URL, timeout=30)
except requests.RequestException as exc:
print(f"Warning: could not fetch KEYS file for import: {exc}")
return
if keys.status_code != 200:
print(f"Warning: could not fetch KEYS file (HTTP {keys.status_code})")
return
subprocess.run( # noqa: S603
["gpg", "--import"], # noqa: S607
input=keys.content,
capture_output=True,
)
def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
"""Run the GPG verify command and extract RSA/EDDSA key and email address."""
"""Run the GPG verify command and extract RSA key and email address."""
asc_filename = filename + ".asc"
result = subprocess.run( # noqa: S603
["gpg", "--verify", asc_filename, filename], # noqa: S607
@@ -90,50 +65,25 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
)
output = result.stderr.decode()
# If no public key was available, import KEYS and retry so that
# `Good signature from "Name <email>"` appears in the output.
if "No public key" in output:
ensure_keys_imported()
result = subprocess.run( # noqa: S603
["gpg", "--verify", asc_filename, filename], # noqa: S607
capture_output=True, # noqa: S607
)
output = result.stderr.decode()
rsa_key = re.search(r"RSA key ([0-9A-F]+)", output)
eddsa_key = re.search(r"EDDSA key ([0-9A-F]+)", output)
# Try multiple patterns — `Good signature from` is the most reliable
# source of the email; `issuer` is a fallback for older gpg output.
email_patterns = (
r'Good signature from ".*?<([^>]+)>"',
r'aka ".*?<([^>]+)>"',
r'issuer "([^"]+)"',
)
email_result: Optional[str] = None
for pattern in email_patterns:
match = re.search(pattern, output)
if match:
email_result = match.group(1)
break
email = re.search(r'issuer "([^"]+)"', output)
rsa_key_result = rsa_key.group(1) if rsa_key else None
eddsa_key_result = eddsa_key.group(1) if eddsa_key else None
email_result = email.group(1) if email else None
key_result = rsa_key_result or eddsa_key_result
# Debugging:
if key_result:
print("RSA or EDDSA Key found")
else:
print("Warning: No RSA or EDDSA key found in GPG verification output.")
if email_result:
print(f"Email found: {email_result}")
print("email found")
else:
print("Warning: No email address found in GPG verification output.")
if "No public key" in output:
print(
"Hint: public key is not in your keyring. Import it with:\n"
f" curl -s {KEYS_URL} | gpg --import"
)
return key_result, email_result

View File

@@ -46,13 +46,6 @@ The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitud
**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.
### Combined datasource list endpoint
Added a new combined datasource list endpoint at `GET /api/v1/datasource/` to serve datasets and semantic views in one response.
- The endpoint is available to users with at least one of `can_read` on `Dataset` or `SemanticView`.
- Semantic views are included only when the `SEMANTIC_LAYERS` feature flag is enabled.
- The endpoint enforces strict `order_column` validation and returns `400` for invalid sort columns.
### ClickHouse minimum driver version bump
The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
@@ -328,7 +321,7 @@ Note: Pillow is now a required dependency (previously optional) to support image
- [33116](https://github.com/apache/superset/pull/33116) In ECharts Series charts (e.g. Line, Area, Bar, etc.), the `x_axis_sort_series` and `x_axis_sort_series_ascending` form data items have been renamed to `x_axis_sort` and `x_axis_sort_asc`.
There's a migration added that can potentially affect a significant number of existing charts.
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
- [31590](https://github.com/apache/superset/pull/31590) Marks the beginning of intricate work around supporting dynamic Theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
- [31590](https://github.com/apache/superset/pull/31590) Marks the begining of intricate work around supporting dynamic Theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
- [32432](https://github.com/apache/superset/pull/32432) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
- [34319](https://github.com/apache/superset/pull/34319) Drill to Detail and Drill By are now supported in Embedded mode, and also with the `DASHBOARD_RBAC` FF. If you don't want to expose these features in Embedded / `DASHBOARD_RBAC`, make sure the roles used for Embedded / `DASHBOARD_RBAC` don't have the required permissions to perform D2D actions.
@@ -343,7 +336,7 @@ Note: Pillow is now a required dependency (previously optional) to support image
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration item to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts with the exception of the Event Flow and Sankey Loop charts which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the intial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat and docker build time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only Python linter. This only affects development workflows (positively), not the release itself. Ruff covers the most important rules and is much faster, but some rules that were enforced before may not be enforced in exactly the same way.
@@ -356,7 +349,7 @@ Note: Pillow is now a required dependency (previously optional) to support image
- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
- [30284](https://github.com/apache/superset/pull/30284) Deprecated GLOBAL_ASYNC_QUERIES_REDIS_CONFIG in favor of the new GLOBAL_ASYNC_QUERIES_CACHE_BACKEND configuration. To leverage Redis Sentinel, set CACHE_TYPE to RedisSentinelCache, or use RedisCache for standalone Redis
- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python environment. Most docker-based deployments will be affected, whether you derive one of the published images, or have custom bootstrap script that install python libraries (drivers)
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the python envrionment. Most docker-based deployments will be affected, whether you derive one of the published images, or have custom bootstrap script that install python libraries (drivers)
### Potential Downtime
@@ -433,7 +426,7 @@ Note: Pillow is now a required dependency (previously optional) to support image
- [26462](https://github.com/apache/superset/issues/26462): Removes the Profile feature given that it's not actively maintained and not widely used.
- [26377](https://github.com/apache/superset/pull/26377): Removes the deprecated Redirect API that supported short URLs used before the permalink feature.
- [26329](https://github.com/apache/superset/issues/26329): Removes the deprecated `DASHBOARD_NATIVE_FILTERS` feature flag. The previous value of the feature flag was `True` and now the feature is permanently enabled.
- [25510](https://github.com/apache/superset/pull/25510): Reinforces that any newly defined Python data format (other than epoch) must adhere to the ISO 8601 standard (enforced by way of validation at the API and database level) after a previous relaxation to include slashes in addition to dashes. From now on when specifying new columns, dataset owners will need to use a SQL expression instead to convert their string columns of the form %Y/%m/%d etc. to a `DATE`, `DATETIME`, etc. type.
- [25510](https://github.com/apache/superset/pull/25510): Reenforces that any newly defined Python data format (other than epoch) must adhere to the ISO 8601 standard (enforced by way of validation at the API and database level) after a previous relaxation to include slashes in addition to dashes. From now on when specifying new columns, dataset owners will need to use a SQL expression instead to convert their string columns of the form %Y/%m/%d etc. to a `DATE`, `DATETIME`, etc. type.
- [26372](https://github.com/apache/superset/issues/26372): Removes the deprecated `GENERIC_CHART_AXES` feature flag. The previous value of the feature flag was `True` and now the feature is permanently enabled.
### Potential Downtime

View File

@@ -105,13 +105,7 @@ class CeleryConfig:
CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {
"ALERT_REPORTS": True,
"DATASET_FOLDERS": True,
"ENABLE_EXTENSIONS": True,
"SEMANTIC_LAYERS": True,
}
EXTENSIONS_PATH = "/app/docker/extensions"
FEATURE_FLAGS = {"ALERT_REPORTS": True, "DATASET_FOLDERS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
# The base URL for the email report hyperlinks.

View File

@@ -31,9 +31,8 @@ You are currently in the `/docs` subdirectory of the Apache Superset repository.
├── superset-frontend/ # React/TypeScript frontend
└── docs/ # Documentation site (YOU ARE HERE)
├── docs/ # Main documentation content
├── admin_docs/ # Admin-focused guides
├── developer_docs/ # Developer guides
├── components/ # Component playground
├── developer_portal/ # Developer guides (currently disabled)
├── components/ # Component playground (currently disabled)
└── docusaurus.config.ts # Site configuration
```
@@ -47,19 +46,12 @@ yarn build # Build production site
yarn serve # Serve built site locally
# Version Management (USE THESE, NOT docusaurus commands)
# The add scripts auto-run `generate:smart` so auto-gen content (database
# pages, API reference, component pages) is fresh before snapshotting.
# For maximum-detail databases.json, drop the `database-diagnostics`
# artifact from Python-Integration CI at src/data/databases.json before
# cutting. See README.md "Before You Cut".
yarn version:add:docs <version> # Add new docs version
yarn version:add:admin_docs <version> # Add admin docs version
yarn version:add:developer_docs <version> # Add developer docs version
yarn version:add:developer_portal <version> # Add developer portal version
yarn version:add:components <version> # Add components version
yarn version:remove:docs <version> # Remove docs version
yarn version:remove:admin_docs <version> # Remove admin docs version
yarn version:remove:developer_docs <version> # Remove developer docs version
yarn version:remove:components <version> # Remove components version
yarn version:remove:developer_portal <version> # Remove developer portal version
yarn version:remove:components <version> # Remove components version
# Quality Checks
yarn typecheck # TypeScript validation
@@ -103,14 +95,15 @@ docs/
└── [security guides]
```
### Admin Docs (`/admin_docs`)
Admin-focused content: installation, configuration, security.
### Developer Portal (`/developer_portal`) - Currently Disabled
When enabled, contains developer-focused content:
- API documentation
- Architecture guides
- CLI tools
- Code examples
### Developer Docs (`/developer_docs`)
Developer-focused content: API documentation, architecture guides, CLI tools, code examples.
### Component Playground (`/components`)
Interactive component examples for UI development.
### Component Playground (`/components`) - Currently Disabled
When enabled, provides interactive component examples for UI development.
## 📝 Documentation Standards

View File

@@ -37,45 +37,23 @@ Each section maintains its own version history and can be versioned independentl
To create a new version for any section, use the Docusaurus version command with the appropriate plugin ID or use our automated scripts:
#### Before You Cut
The cut snapshots whatever's on disk into a frozen historical version, including auto-generated content (database pages from `superset/db_engine_specs/`, API reference from `static/resources/openapi.json`, component pages from Storybook stories). The cut script refreshes these via `generate:smart` before snapshotting, but the **`databases.json` diagnostics file** needs special care to capture full detail:
1. **Canonical release cut**: download the `database-diagnostics` artifact from a green `Python-Integration` run on master, place it at `docs/src/data/databases.json`, then run the cut script with `--skip-generate` to preserve it. This is what the production deploy uses and includes full Flask-context diagnostics (driver versions, feature support matrix, etc.).
2. **Local dev cut**: just run the script normally. `generate:smart` will regenerate `databases.json` using your local Flask environment — accurate to whatever drivers/extras you have installed, but typically less complete than the CI artifact.
3. **No Flask available**: also fine — the database generator falls back to AST parsing of engine spec files. The MDX pages are still correct; only the diagnostics JSON is leaner.
Also: confirm `master` CI is green, and that your local checkout matches the SHA you intend to cut from.
#### Using Automated Scripts (Required)
**⚠️ Important:** Always use these custom commands instead of the native Docusaurus commands. These scripts ensure that both the Docusaurus versioning system AND the `versions-config.json` file are updated correctly, AND that auto-generated content is refreshed before snapshotting.
**⚠️ Important:** Always use these custom commands instead of the native Docusaurus commands. These scripts ensure that both the Docusaurus versioning system AND the `versions-config.json` file are updated correctly.
```bash
# Main Documentation
yarn version:add:docs 1.2.0
# Admin Docs
yarn version:add:admin_docs 1.2.0
# Developer Portal
yarn version:add:developer_portal 1.2.0
# Developer Docs
yarn version:add:developer_docs 1.2.0
# Component Playground
# Component Playground (when enabled)
yarn version:add:components 1.2.0
```
What the script does:
1. Refreshes auto-generated content via `generate:smart` (database pages, API reference, component pages).
2. Calls `yarn docusaurus docs:version` (or the per-section equivalent) to snapshot the section.
3. Freezes any data-file imports (`@site/static/*.json`, `../../data/*.json`) into a snapshot-local `_versioned_data/` dir so the historical version doesn't silently mutate when the source files change.
4. Adjusts relative import paths (`../../src/...` → `../../../src/...`) for files now one directory deeper.
5. Updates `versions-config.json` and `<section>_versions.json`.
**Do NOT use** the native Docusaurus commands directly (`yarn docusaurus docs:version`), as they will:
- ❌ Create version files but NOT update `versions-config.json`
- ❌ Skip auto-gen refresh, freezing whatever was on disk
- ❌ Skip data-import freezing, leaving the snapshot pointed at live data
- ❌ Cause versions to not appear in dropdown menus
- ❌ Require manual fixes to synchronize the configuration
@@ -113,11 +91,8 @@ If creating versions manually, you'll need to:
# Main Documentation
yarn version:remove:docs 1.0.0
# Admin Docs
yarn version:remove:admin_docs 1.0.0
# Developer Docs
yarn version:remove:developer_docs 1.0.0
# Developer Portal
yarn version:remove:developer_portal 1.0.0
# Component Playground
yarn version:remove:components 1.0.0
@@ -128,20 +103,17 @@ To manually remove a version:
1. **Delete the version folder** from the appropriate location:
- Main docs: `versioned_docs/version-X.X.X/` (no prefix for main)
- Admin Docs: `admin_docs_versioned_docs/version-X.X.X/`
- Developer Docs: `developer_docs_versioned_docs/version-X.X.X/`
- Developer Portal: `developer_portal_versioned_docs/version-X.X.X/`
- Components: `components_versioned_docs/version-X.X.X/`
2. **Delete the version metadata file**:
- Main docs: `versioned_sidebars/version-X.X.X-sidebars.json` (no prefix)
- Admin Docs: `admin_docs_versioned_sidebars/version-X.X.X-sidebars.json`
- Developer Docs: `developer_docs_versioned_sidebars/version-X.X.X-sidebars.json`
- Developer Portal: `developer_portal_versioned_sidebars/version-X.X.X-sidebars.json`
- Components: `components_versioned_sidebars/version-X.X.X-sidebars.json`
3. **Update the versions list file**:
- Main docs: `versions.json`
- Admin Docs: `admin_docs_versions.json`
- Developer Docs: `developer_docs_versions.json`
- Developer Portal: `developer_portal_versions.json`
- Components: `components_versions.json`
4. **Update configuration**:
@@ -173,12 +145,12 @@ docs: {
}
```
#### Developer Docs & Components (custom plugins)
#### Developer Portal & Components (custom plugins)
```typescript
{
id: 'developer_docs',
path: 'developer_docs',
routeBasePath: 'developer-docs',
id: 'developer_portal',
path: 'developer_portal',
routeBasePath: 'developer_portal',
includeCurrentVersion: true,
lastVersion: '1.1.0', // Default version
onlyIncludeVersions: ['current', '1.1.0', '1.0.0'],
@@ -222,7 +194,7 @@ For other issues:
#### Broken Links in Versioned Documentation
When creating a new version, links in the documentation are preserved as-is. Common issues:
- **Cross-section links**: Links between sections (e.g., from developer_docs to docs) need to be version-aware
- **Cross-section links**: Links between sections (e.g., from developer_portal to docs) need to be version-aware
- **Absolute vs relative paths**: Use relative paths within the same section
- **Version-specific URLs**: Update hardcoded URLs to use version variables

View File

@@ -81,87 +81,6 @@ SLACK_CACHE_TIMEOUT = int(timedelta(days=2).total_seconds())
SLACK_API_RATE_LIMIT_RETRY_COUNT = 5
```
### Webhook integration
Superset can send alert and report notifications to any HTTP endpoint — useful for chat platforms, incident management tools, or custom automation.
#### Enabling Webhooks
Enable the feature flag in `superset_config.py`:
```python
FEATURE_FLAGS = {
"ALERT_REPORTS": True,
"ALERT_REPORT_WEBHOOK": True,
}
```
#### Configuring a Webhook Recipient
When creating or editing an alert or report, select **Webhook** as the notification method and enter your endpoint URL.
#### Payload Format
Superset sends an HTTP POST with `Content-Type: application/json`:
```json
{
"name": "My Alert",
"header": {
"notification_format": "JSON",
"notification_type": "Alert",
"notification_source": "Alert",
"chart_id": 42,
"dashboard_id": null
},
"text": "Alert condition met: value exceeded threshold",
"description": "Monthly revenue dropped below target",
"url": "https://your-superset-host/superset/dashboard/1/"
}
```
When a report includes file attachments (CSV, PDF, or PNG screenshots), the request is sent as `multipart/form-data` instead. In that case, each top-level payload field (`name`, `text`, `description`, `url`) becomes its own form field, and nested structures like `header` are serialized as a JSON-encoded string in their own field. Every attachment is added as a repeated form field named `files`:
```
POST /webhook HTTP/1.1
Content-Type: multipart/form-data; boundary=...
--...
Content-Disposition: form-data; name="name"
My Alert
--...
Content-Disposition: form-data; name="header"
{"notification_format": "JSON", "notification_type": "Alert", ...}
--...
Content-Disposition: form-data; name="text"
Alert condition met: value exceeded threshold
--...
Content-Disposition: form-data; name="files"; filename="report.csv"
Content-Type: text/csv
<file bytes>
--...
```
Webhook consumers should branch on `Content-Type`: parse the body as JSON when `application/json`, or read the individual form fields (decoding `header` as JSON) when `multipart/form-data`.
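A minimal receiving endpoint that handles both delivery modes might look like the sketch below. Flask is used purely for illustration; the route name and port are placeholders, not anything Superset prescribes.
```python
# Sketch of a webhook consumer that branches on Content-Type.
# Flask, the /webhook route, and the port are illustrative assumptions.
import json

from flask import Flask, request

app = Flask(__name__)


@app.route("/webhook", methods=["POST"])
def superset_webhook():
    if request.content_type and request.content_type.startswith("application/json"):
        # Plain notification: the whole payload is one JSON document.
        payload = request.get_json()
        header = payload["header"]
        attachments = []
    else:
        # Report with attachments: top-level fields arrive as individual form
        # fields, `header` is a JSON-encoded string, and each attachment is a
        # repeated `files` part.
        payload = {key: request.form.get(key) for key in ("name", "text", "description", "url")}
        header = json.loads(request.form["header"])
        attachments = request.files.getlist("files")

    print(header["notification_type"], payload["name"], f"{len(attachments)} attachment(s)")
    return "", 204


if __name__ == "__main__":
    app.run(port=8080)
```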
#### HTTPS Enforcement
To require HTTPS webhook URLs (recommended for production), set:
```python
ALERT_REPORTS_WEBHOOK_HTTPS_ONLY = True
```
When enabled, Superset rejects webhook configurations that use `http://` URLs.
#### Retry Behavior
Superset automatically retries webhook deliveries on `429 Too Many Requests` and `5xx` server errors using exponential backoff.
### Kubernetes-specific
- You must have a `celery beat` pod running. If you're using the chart included in the GitHub repository under [helm/superset](https://github.com/apache/superset/tree/master/helm/superset), you need to put `supersetCeleryBeat.enabled = true` in your values override.

View File

@@ -472,38 +472,6 @@ FEATURE_FLAGS = {
A current list of feature flags can be found in the [Feature Flags](/admin-docs/configuration/feature-flags) documentation.
## Security Configuration
### HASH_ALGORITHM
Controls the hashing algorithm used for internal checksums and cache keys (thumbnails, cache keys, etc.). The default is `sha256`, which satisfies environments with stricter compliance requirements (e.g., FedRAMP). Set it to `md5` to retain the legacy behavior from older Superset deployments:
```python
HASH_ALGORITHM = "sha256" # default; set to "md5" for legacy behavior
```
A companion `HASH_ALGORITHM_FALLBACKS` list (default: `["md5"]`) lets UUID lookups fall back to older algorithms, which enables gradual migration without breaking existing entries. Set it to `[]` for strict mode (use only `HASH_ALGORITHM`).
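For example, a deployment migrating away from legacy md5 hashes could state the defaults explicitly in `superset_config.py` and tighten them later:
```python
# superset_config.py
HASH_ALGORITHM = "sha256"            # new checksums/cache keys use sha256
HASH_ALGORITHM_FALLBACKS = ["md5"]   # UUID lookups still match legacy md5 entries

# Once existing entries have been regenerated, switch to strict mode:
# HASH_ALGORITHM_FALLBACKS = []
```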
:::note
This setting affects internal Superset operations only, not user passwords or authentication tokens. Changing it in an existing deployment may invalidate cached values but does not require a database migration.
:::
## SQL Lab Query History Pruning
SQL Lab query history is stored in the metadata database and is **not** pruned by default. To trim older rows, enable the `prune_query` Celery beat task by uncommenting it in `CELERY_BEAT_SCHEDULE` and choosing a retention window:
```python
CELERY_BEAT_SCHEDULE = {
"prune_query": {
"task": "prune_query",
"schedule": crontab(minute=0, hour=0, day_of_month=1),
"kwargs": {"retention_period_days": 180},
},
}
```
Adjust `retention_period_days` to control how long query rows are kept. Companion opt-in tasks (`prune_logs`, `prune_tasks`) exist for pruning the logs and tasks tables; see the commented-out examples in `superset/config.py`. Without enabling these tasks, the metadata database will grow unbounded over time.
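The companion tasks can be scheduled the same way. The sketch below assumes `prune_logs` accepts the same `retention_period_days` keyword as `prune_query`; confirm the exact arguments against the commented-out examples in `superset/config.py`.
```python
# Sketch only: schedule the logs-pruning companion task alongside prune_query.
# The retention_period_days kwarg is assumed to match prune_query's signature;
# check the commented-out examples in superset/config.py before relying on it.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "prune_query": {
        "task": "prune_query",
        "schedule": crontab(minute=0, hour=0, day_of_month=1),
        "kwargs": {"retention_period_days": 180},
    },
    "prune_logs": {
        "task": "prune_logs",
        "schedule": crontab(minute=0, hour=1),
        "kwargs": {"retention_period_days": 90},
    },
}
```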
:::resources
- [Blog: Feature Flags in Apache Superset](https://preset.io/blog/feature-flags-in-apache-superset-and-preset/)
:::

View File

@@ -122,17 +122,6 @@ When `ENABLE_UI_THEME_ADMINISTRATION = True`:
3. Administrators can change system themes without restarting Superset
4. Configuration file themes serve as fallbacks when no UI themes are set
### Theme Validation and Fallback
Superset validates theme JSON when it is saved, either through the UI or via configuration. If a theme contains invalid tokens or an unrecognized structure, Superset logs a warning and falls back to the built-in default theme rather than applying a broken configuration. This prevents a bad theme from rendering the application unusable.
The fallback order is:
1. **UI-configured system theme** (highest priority, if `ENABLE_UI_THEME_ADMINISTRATION = True`)
2. **`THEME_DEFAULT` / `THEME_DARK`** from `superset_config.py`
3. **Built-in Superset default theme** (always present as a safety net)
If you see unexpected styling after a config change, check the Superset server logs for theme validation warnings.
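As a rough illustration of the config-file fallback level, the theme settings in `superset_config.py` look roughly like the sketch below; the token names are illustrative and the AntD-v5-style `{"token": {...}}` shape is an assumption, not a guaranteed schema.
```python
# superset_config.py -- sketch only; token names are illustrative and the
# AntD-v5 {"token": {...}} shape is an assumption, not a guaranteed schema.
THEME_DEFAULT = {
    "token": {
        "colorPrimary": "#20a7c9",
        "fontFamilyCode": "IBM Plex Mono, monospace",
    },
}
THEME_DARK = {
    "token": {
        "colorPrimary": "#20a7c9",
    },
}
```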
### Copying Themes Between Systems
To export a theme for use in configuration files or another instance:
@@ -154,11 +143,7 @@ Superset supports custom fonts through the theme configuration, allowing you to
### Default Fonts
By default, Superset uses **Inter** for UI text and **IBM Plex Mono** for code (SQL editors, JSON fields, and other monospace contexts). Both fonts are bundled with the application via `@fontsource` packages and work offline without any external network calls.
:::note
IBM Plex Mono replaced Fira Code as the default code font in Superset 6.1. If you have an existing theme that explicitly sets `fontFamilyCode: "Fira Code, ..."`, you may want to update it.
:::
By default, Superset uses Inter and Fira Code fonts which are bundled with the application via `@fontsource` packages. These fonts work offline and require no external network calls.
### Configuring Custom Fonts

View File

@@ -205,57 +205,6 @@ FAB_ADD_SECURITY_API = True
Once configured, the documentation for additional "Security" endpoints will be visible in Swagger for you to explore.
### API Key Authentication
Superset supports long-lived API keys for service accounts, CI/CD pipelines, and programmatic integrations (including MCP clients).
#### Enabling API Key Authentication
API key authentication is **disabled by default**. To turn it on, set the Flask-AppBuilder config value in `superset_config.py` and also enable the matching feature flag so the management UI is exposed:
```python
FAB_API_KEY_ENABLED = True
FEATURE_FLAGS = {
"FAB_API_KEY_ENABLED": True,
}
```
The config value registers the `ApiKeyApi` blueprint on the backend; the feature flag controls whether the UI for managing keys appears for the user. See the [Feature Flags](/admin-docs/configuration/feature-flags) documentation for more on feature flag configuration.
#### Creating an API Key
Once enabled, each user manages their own keys from their profile page:
1. Open the user menu (top-right) and click **Info** to navigate to the User Info page
2. Expand the **API Keys** section
3. Click **+ API Key**
4. Enter a name and (optionally) an expiration date
5. Copy the generated token — it is shown only once
Only users with the `can_read` and `can_write` permissions on `ApiKey` (granted by default to Admins) can manage API keys.
#### Using an API Key
Pass the key as a Bearer token in the `Authorization` header:
```
Authorization: Bearer <your-api-key>
```
This works for all REST API endpoints and the MCP server. The request is executed with the permissions of the user who created the key.
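For instance, a service-account script might call the chart list endpoint like this (the host URL, key value, and endpoint are placeholders; any REST endpoint works the same way):
```python
# Sketch: calling the Superset REST API with an API key as a Bearer token.
# SUPERSET_URL, API_KEY, and the /api/v1/chart/ endpoint are illustrative.
import requests

SUPERSET_URL = "https://your-superset-host"
API_KEY = "your-api-key"  # the token shown once when the key was created

response = requests.get(
    f"{SUPERSET_URL}/api/v1/chart/",
    headers={"Authorization": f"Bearer {API_KEY}"},
    timeout=30,
)
response.raise_for_status()
print(response.json().get("count"))
```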
#### Use Cases
- **CI/CD pipelines** — automated chart/dashboard exports and imports
- **MCP integrations** — connect AI assistants without interactive login
- **External services** — dashboards embedded in other applications
- **Service accounts** — long-lived credentials that don't expire with session cookies
:::caution
Store API keys securely. Anyone with a valid key can make requests on behalf of the creating user. Revoke keys promptly if they are compromised by deleting them from the **API Keys** section of your User Info page.
:::
### Customizing Permissions
The permissions exposed by FAB are very granular and allow for a great level of

View File

@@ -0,0 +1 @@
[]

View File

@@ -29,10 +29,10 @@ sidebar_position: 1
## Components
- [DropdownContainer](./dropdowncontainer.mdx)
- [Flex](./flex.mdx)
- [Grid](./grid.mdx)
- [Layout](./layout.mdx)
- [MetadataBar](./metadatabar.mdx)
- [Space](./space.mdx)
- [Table](./table.mdx)
- [DropdownContainer](./dropdowncontainer)
- [Flex](./flex)
- [Grid](./grid)
- [Layout](./layout)
- [MetadataBar](./metadatabar)
- [Space](./space)
- [Table](./table)

View File

@@ -62,7 +62,7 @@ This documentation is auto-generated from Storybook stories. To add or update co
4. Run `yarn generate:superset-components` in the `docs/` directory
:::info Work in Progress
This component library is actively being documented. See the [Components TODO](./TODO.md) page for a list of components awaiting documentation.
This component library is actively being documented. See the [Components TODO](./TODO) page for a list of components awaiting documentation.
:::
---

View File

@@ -29,49 +29,49 @@ sidebar_position: 1
## Components
- [AutoComplete](./autocomplete.mdx)
- [Avatar](./avatar.mdx)
- [Badge](./badge.mdx)
- [Breadcrumb](./breadcrumb.mdx)
- [Button](./button.mdx)
- [ButtonGroup](./buttongroup.mdx)
- [CachedLabel](./cachedlabel.mdx)
- [Card](./card.mdx)
- [Checkbox](./checkbox.mdx)
- [Collapse](./collapse.mdx)
- [DatePicker](./datepicker.mdx)
- [Divider](./divider.mdx)
- [EditableTitle](./editabletitle.mdx)
- [EmptyState](./emptystate.mdx)
- [FaveStar](./favestar.mdx)
- [IconButton](./iconbutton.mdx)
- [Icons](./icons.mdx)
- [IconTooltip](./icontooltip.mdx)
- [InfoTooltip](./infotooltip.mdx)
- [Input](./input.mdx)
- [Label](./label.mdx)
- [List](./list.mdx)
- [ListViewCard](./listviewcard.mdx)
- [Loading](./loading.mdx)
- [Menu](./menu.mdx)
- [Modal](./modal.mdx)
- [ModalTrigger](./modaltrigger.mdx)
- [Popover](./popover.mdx)
- [ProgressBar](./progressbar.mdx)
- [Radio](./radio.mdx)
- [SafeMarkdown](./safemarkdown.mdx)
- [Select](./select.mdx)
- [Skeleton](./skeleton.mdx)
- [Slider](./slider.mdx)
- [Steps](./steps.mdx)
- [Switch](./switch.mdx)
- [TableCollection](./tablecollection.mdx)
- [TableView](./tableview.mdx)
- [Tabs](./tabs.mdx)
- [Timer](./timer.mdx)
- [Tooltip](./tooltip.mdx)
- [Tree](./tree.mdx)
- [TreeSelect](./treeselect.mdx)
- [Typography](./typography.mdx)
- [UnsavedChangesModal](./unsavedchangesmodal.mdx)
- [Upload](./upload.mdx)
- [AutoComplete](./autocomplete)
- [Avatar](./avatar)
- [Badge](./badge)
- [Breadcrumb](./breadcrumb)
- [Button](./button)
- [ButtonGroup](./buttongroup)
- [CachedLabel](./cachedlabel)
- [Card](./card)
- [Checkbox](./checkbox)
- [Collapse](./collapse)
- [DatePicker](./datepicker)
- [Divider](./divider)
- [EditableTitle](./editabletitle)
- [EmptyState](./emptystate)
- [FaveStar](./favestar)
- [IconButton](./iconbutton)
- [Icons](./icons)
- [IconTooltip](./icontooltip)
- [InfoTooltip](./infotooltip)
- [Input](./input)
- [Label](./label)
- [List](./list)
- [ListViewCard](./listviewcard)
- [Loading](./loading)
- [Menu](./menu)
- [Modal](./modal)
- [ModalTrigger](./modaltrigger)
- [Popover](./popover)
- [ProgressBar](./progressbar)
- [Radio](./radio)
- [SafeMarkdown](./safemarkdown)
- [Select](./select)
- [Skeleton](./skeleton)
- [Slider](./slider)
- [Steps](./steps)
- [Switch](./switch)
- [TableCollection](./tablecollection)
- [TableView](./tableview)
- [Tabs](./tabs)
- [Timer](./timer)
- [Tooltip](./tooltip)
- [Tree](./tree)
- [TreeSelect](./treeselect)
- [Typography](./typography)
- [UnsavedChangesModal](./unsavedchangesmodal)
- [Upload](./upload)

View File

@@ -327,13 +327,13 @@ stats.sort_stats('cumulative').print_stats(10)
## Resources
### Internal
- [Coding Guidelines](../guidelines/design-guidelines.md)
- [Testing Guide](../testing/overview.md)
- [Extension Architecture](../extensions/architecture.md)
- [Coding Guidelines](../guidelines/design-guidelines)
- [Testing Guide](../testing/overview)
- [Extension Architecture](../extensions/architecture)
### External
- [Google's Code Review Guide](https://google.github.io/eng-practices/review/)
- [Best Practices for Code Review](https://smartbear.com/learn/code-review/best-practices-for-peer-code-review/)
- [The Art of Readable Code](https://www.oreilly.com/library/view/the-art-of/9781449318482/)
Next: [Reporting issues effectively](./issue-reporting.md)
Next: [Reporting issues effectively](./issue-reporting)

View File

@@ -668,7 +668,7 @@ A series of checks will now run when you make a git commit.
## Linting
See [how tos](./howtos.md#linting)
See [how tos](./howtos#linting)
## GitHub Actions and `act`

View File

@@ -77,7 +77,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P
in `requirements.txt` pinned to a specific version which ensures that the application
build is deterministic.
- For TypeScript/JavaScript, include new libraries in `package.json`
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](./howtos.md#testing) for how to run tests.
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](./howtos#testing) for how to run tests.
- **Documentation:** If the pull request adds functionality, the docs should be updated as part of the same PR.
- **CI:** Reviewers will not review the code until all CI tests are passed. Sometimes there can be flaky tests. You can close and open PR to re-run CI test. Please report if the issue persists. After the CI fix has been deployed to `master`, please rebase your PR.
- **Code coverage:** Please ensure that code coverage does not decrease.

View File

@@ -282,7 +282,7 @@ You can now launch your VSCode debugger with the same config as above. VSCode wi
### Storybook
See the dedicated [Storybook documentation](../testing/storybook.md) for information on running Storybook locally and adding new stories.
See the dedicated [Storybook documentation](../testing/storybook) for information on running Storybook locally and adding new stories.
## Contributing Translations

View File

@@ -413,6 +413,6 @@ Consider:
- **Feature Request**: Use feature request template
- **Question**: Use GitHub Discussions
- **Configuration Help**: Ask in Slack
- **Development Help**: See [Contributing Guide](./overview.md)
- **Development Help**: See [Contributing Guide](./overview)
Next: [Understanding the release process](./release-process.md)
Next: [Understanding the release process](./release-process)

View File

@@ -94,7 +94,7 @@ Look through the GitHub issues. Issues tagged with
Superset could always use better documentation,
whether as part of the official Superset docs,
in docstrings, `docs/*.rst` or even on the web as blog posts or
articles. See [Documentation](./howtos.md#contributing-to-documentation) for more details.
articles. See [Documentation](./howtos#contributing-to-documentation) for more details.
### Add Translations
@@ -103,7 +103,7 @@ text strings from Superset's UI. You can jump into the existing
language dictionaries at
`superset/translations/<language_code>/LC_MESSAGES/messages.po`, or
even create a dictionary for a new language altogether.
See [Translating](./howtos.md#contributing-translations) for more details.
See [Translating](./howtos#contributing-translations) for more details.
### Ask Questions
@@ -158,9 +158,9 @@ Security team members should also follow these general expectations:
Ready to contribute? Here's how to get started:
1. **[Set up your environment](./development-setup.md)** - Get Superset running locally
1. **[Set up your environment](./development-setup)** - Get Superset running locally
2. **[Find something to work on](#types-of-contributions)** - Pick an issue or feature
3. **[Submit your contribution](./submitting-pr.md)** - Create a pull request
4. **[Follow guidelines](./guidelines.md)** - Ensure code quality
3. **[Submit your contribution](./submitting-pr)** - Create a pull request
4. **[Follow guidelines](./guidelines)** - Ensure code quality
Welcome to the Apache Superset community! 🚀

View File

@@ -466,4 +466,4 @@ Credit:
- [Release Scripts](https://github.com/apache/superset/tree/master/scripts/release)
- [Superset Repository Scripts](https://github.com/apache/superset/tree/master/scripts)
Next: Return to [Contributing Overview](./overview.md)
Next: Return to [Contributing Overview](./overview)

View File

@@ -31,11 +31,11 @@ Learn how to create and submit high-quality pull requests to Apache Superset.
### Prerequisites
- [ ] Development environment is set up
- [ ] You've forked and cloned the repository
- [ ] You've read the [contributing overview](./overview.md)
- [ ] You've read the [contributing overview](./overview)
- [ ] You've found or created an issue to work on
### PR Readiness Checklist
- [ ] Code follows [coding guidelines](../guidelines/design-guidelines.md)
- [ ] Code follows [coding guidelines](../guidelines/design-guidelines)
- [ ] Tests are passing locally
- [ ] Linting passes (`pre-commit run --all-files`)
- [ ] Documentation is updated if needed
@@ -318,4 +318,4 @@ git push origin master
- **GitHub**: Tag @apache/superset-committers for attention
- **Mailing List**: dev@superset.apache.org
Next: [Understanding code review process](./code-review.md)
Next: [Understanding code review process](./code-review)

View File

@@ -233,7 +233,7 @@ This architecture provides several key benefits:
Now that you understand the architecture, explore:
- **[Dependencies](./dependencies.md)** - Managing dependencies and understanding API stability
- **[Quick Start](./quick-start.md)** - Build your first extension
- **[Contribution Types](./contribution-types.md)** - What kinds of extensions you can build
- **[Development](./development.md)** - Project structure, APIs, and development workflow
- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
- **[Quick Start](./quick-start)** - Build your first extension
- **[Contribution Types](./contribution-types)** - What kinds of extensions you can build
- **[Development](./development)** - Project structure, APIs, and development workflow

View File

@@ -29,7 +29,7 @@ These UI components are available to Superset extension developers through the `
## Available Components
- [Alert](./alert.mdx)
- [Alert](./alert)
## Usage
@@ -90,4 +90,4 @@ InteractiveMyComponent.argTypes = {
## Interactive Documentation
For interactive examples with controls, run Storybook locally — see the [Storybook documentation](/developer-docs/testing/storybook).
For interactive examples with controls, visit the [Storybook](/storybook/?path=/docs/extension-components--docs).

View File

@@ -110,7 +110,7 @@ editors.registerEditor(
);
```
See [Editors Extension Point](./extension-points/editors.md) for implementation details.
See [Editors Extension Point](./extension-points/editors) for implementation details.
## Backend
@@ -146,7 +146,7 @@ class MyExtensionAPI(RestApi):
from .api import MyExtensionAPI
```
**Note**: The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:
**Note**: The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects context and generates appropriate paths:
- **Extension context**: `/extensions/{publisher}/{name}/` with ID prefixed as `extensions.{publisher}.{name}.{id}`
- **Host context**: `/api/v1/` with original ID
@@ -193,7 +193,7 @@ def get_summary() -> dict:
return {"status": "success", "result": {"queries_today": 42}}
```
See [MCP Integration](./mcp.md) for implementation details.
See [MCP Integration](./mcp) for implementation details.
### MCP Prompts
@@ -223,53 +223,4 @@ async def analysis_guide(ctx: Context) -> str:
"""
```
See [MCP Integration](./mcp.md) for implementation details.
### Semantic Layers
Extensions can register custom semantic layer implementations that allow Superset to connect to external data modeling frameworks. Each semantic layer defines how to authenticate, discover semantic views (tables/metrics/dimensions), and execute queries against the external system.
```python
from superset_core.semantic_layers.decorators import semantic_layer
from superset_core.semantic_layers.layer import SemanticLayer
from my_extension.config import MyConfig
from my_extension.view import MySemanticView
@semantic_layer(
id="my_platform",
name="My Data Platform",
description="Connect to My Data Platform's semantic layer",
)
class MySemanticLayer(SemanticLayer[MyConfig, MySemanticView]):
configuration_class = MyConfig
@classmethod
def from_configuration(cls, configuration: dict) -> "MySemanticLayer":
config = MyConfig.model_validate(configuration)
return cls(config)
@classmethod
def get_configuration_schema(cls, configuration=None) -> dict:
return MyConfig.model_json_schema()
@classmethod
def get_runtime_schema(cls, configuration=None, runtime_data=None) -> dict:
return {"type": "object", "properties": {}}
def get_semantic_views(self, runtime_configuration: dict) -> set[MySemanticView]:
# Return available views from the external platform
...
def get_semantic_view(self, name: str, additional_configuration: dict) -> MySemanticView:
# Return a specific view by name
...
```
**Note**: The `@semantic_layer` decorator automatically detects context and applies appropriate ID prefixing:
- **Extension context**: ID prefixed as `extensions.{publisher}.{name}.{id}`
- **Host context**: Original ID used as-is
The decorator registers the class in the semantic layers registry, making it available in the UI for users to create connections. The `configuration_class` should be a Pydantic model that defines the fields needed to connect (credentials, project, database, etc.). Superset uses the model's JSON schema to render the configuration form dynamically.
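For example, the `MyConfig` model referenced above could be a small Pydantic model like the sketch below (field names are purely illustrative assumptions):
```python
# Sketch of the configuration_class for the semantic layer above.
# Field names are illustrative; use whatever your platform needs to connect.
from pydantic import BaseModel, Field, SecretStr


class MyConfig(BaseModel):
    host: str = Field(description="Hostname of the semantic layer service")
    project: str = Field(description="Project or workspace to list views from")
    api_token: SecretStr = Field(description="Token used to authenticate requests")
    verify_ssl: bool = True
```
Superset renders the connection form from `MyConfig.model_json_schema()`, so the field descriptions surface directly in the UI.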
See [MCP Integration](./mcp) for implementation details.

View File

@@ -161,6 +161,6 @@ Until then, monitor the Superset release notes and test your extensions with eac
## Next Steps
- **[Architecture](./architecture.md)** - Understand the extension system design
- **[Development](./development.md)** - Learn about APIs and development workflow
- **[Quick Start](./quick-start.md)** - Build your first extension
- **[Architecture](./architecture)** - Understand the extension system design
- **[Development](./development)** - Learn about APIs and development workflow
- **[Quick Start](./quick-start)** - Build your first extension

View File

@@ -252,7 +252,7 @@ class DatasetReferencesAPI(RestApi):
### Automatic Context Detection
The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:
The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects whether it's being used in host or extension code:
- **Extension APIs**: Registered under `/extensions/{publisher}/{name}/` with IDs prefixed as `extensions.{publisher}.{name}.{id}`
- **Host APIs**: Registered under `/api/v1/` with original IDs

View File

@@ -217,6 +217,6 @@ const disposable = handle.registerCompletionProvider(provider);
## Next Steps
- **[SQL Lab Extension Points](./sqllab.md)** - Learn about other SQL Lab customizations
- **[Contribution Types](../contribution-types.md)** - Explore other contribution types
- **[Development](../development.md)** - Set up your development environment
- **[SQL Lab Extension Points](./sqllab)** - Learn about other SQL Lab customizations
- **[Contribution Types](../contribution-types)** - Explore other contribution types
- **[Development](../development)** - Set up your development environment

View File

@@ -51,7 +51,7 @@ SQL Lab provides 4 extension points where extensions can contribute custom UI co
| **Right Sidebar** | `sqllab.rightSidebar` | ✓ | — | Custom panels (AI assistants, query analysis) |
| **Panels** | `sqllab.panels` | ✓ | ✓ | Custom tabs + toolbar actions (data profiling) |
\*Editor views are contributed via [Editor Contributions](./editors.md), not standard view contributions.
\*Editor views are contributed via [Editor Contributions](./editors), not standard view contributions.
## Customization Types
@@ -78,7 +78,7 @@ Extensions can add toolbar actions to **Left Sidebar**, **Editor**, and **Panels
### Custom Editors
Extensions can replace the default SQL editor with custom implementations (Monaco, CodeMirror, etc.). See [Editor Contributions](./editors.md) for details.
Extensions can replace the default SQL editor with custom implementations (Monaco, CodeMirror, etc.). See [Editor Contributions](./editors) for details.
## Examples
@@ -157,6 +157,6 @@ menus.registerMenuItem(
## Next Steps
- **[Contribution Types](../contribution-types.md)** - Learn about other contribution types (commands, menus)
- **[Development](../development.md)** - Set up your development environment
- **[Quick Start](../quick-start.md)** - Build a complete extension
- **[Contribution Types](../contribution-types)** - Learn about other contribution types (commands, menus)
- **[Development](../development)** - Set up your development environment
- **[Quick Start](../quick-start)** - Build a complete extension

View File

@@ -455,5 +455,5 @@ async def metrics_guide(ctx: Context) -> str:
## Next Steps
- **[Development](./development.md)** - Project structure, APIs, and dev workflow
- **[Security](./security.md)** - Security best practices for extensions
- **[Development](./development)** - Project structure, APIs, and dev workflow
- **[Security](./security)** - Security best practices for extensions

View File

@@ -47,13 +47,13 @@ Extension developers have access to pre-built UI components via `@apache-superse
## Next Steps
- **[Quick Start](./quick-start.md)** - Build your first extension with a complete walkthrough
- **[Architecture](./architecture.md)** - Design principles and system overview
- **[Dependencies](./dependencies.md)** - Managing dependencies and understanding API stability
- **[Contribution Types](./contribution-types.md)** - Available extension points
- **[Development](./development.md)** - Project structure, APIs, and development workflow
- **[Deployment](./deployment.md)** - Packaging and deploying extensions
- **[MCP Integration](./mcp.md)** - Adding AI agent capabilities using extensions
- **[Security](./security.md)** - Security considerations and best practices
- **[Tasks](./tasks.md)** - Framework for creating and managing long running tasks
- **[Community Extensions](./registry.md)** - Browse extensions shared by the community
- **[Quick Start](./quick-start)** - Build your first extension with a complete walkthrough
- **[Architecture](./architecture)** - Design principles and system overview
- **[Dependencies](./dependencies)** - Managing dependencies and understanding API stability
- **[Contribution Types](./contribution-types)** - Available extension points
- **[Development](./development)** - Project structure, APIs, and development workflow
- **[Deployment](./deployment)** - Packaging and deploying extensions
- **[MCP Integration](./mcp)** - Adding AI agent capabilities using extensions
- **[Security](./security)** - Security considerations and best practices
- **[Tasks](./tasks)** - Framework for creating and managing long running tasks
- **[Community Extensions](./registry)** - Browse extensions shared by the community

View File

@@ -168,7 +168,7 @@ class HelloWorldAPI(RestApi):
**Key points:**
- Uses [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
- Uses [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator with automatic context detection
- Extends `RestApi` from `superset_core.rest_api.api`
- Uses Flask-AppBuilder decorators (`@expose`, `@protect`, `@safe`)
- Returns responses using `self.response(status_code, result=data)`
@@ -184,7 +184,7 @@ Replace the generated print statement with API import to trigger registration:
from .api import HelloWorldAPI # noqa: F401
```
The [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.
The [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator automatically detects extension context and registers your API with proper namespacing.
## Step 5: Create Frontend Component
@@ -225,7 +225,7 @@ The `@apache-superset/core` package must be listed in both `peerDependencies` (t
The webpack configuration requires specific settings for Module Federation. Key settings include `externalsType: "window"` and `externals` to map `@apache-superset/core` to `window.superset` at runtime, `import: false` for shared modules to use the host's React instead of bundling a separate copy, and `remoteEntry.[contenthash].js` for cache busting.
**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture.md#module-federation) for a full explanation.
**Convention**: Superset always loads extensions by requesting the `./index` module from the Module Federation container. The `exposes` entry must be exactly `'./index': './src/index.tsx'` — do not rename or add additional entries. All API registrations must be reachable from that file. See [Architecture](./architecture#module-federation) for a full explanation.
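For orientation, here is a trimmed sketch of just those settings; the container name, file paths, and shared-module list are illustrative assumptions, not the documented configuration, which the guide gives in full below.

```javascript
// Sketch only: illustrative Module Federation settings for an extension bundle.
const { ModuleFederationPlugin } = require('webpack').container;

module.exports = {
  externalsType: 'window',
  // Resolve the host package to window.superset at runtime.
  externals: { '@apache-superset/core': 'superset' },
  plugins: [
    new ModuleFederationPlugin({
      name: 'hello_world', // illustrative container name
      filename: 'remoteEntry.[contenthash].js', // cache-busted remote entry
      exposes: { './index': './src/index.tsx' }, // must stay exactly './index'
      shared: {
        // import: false makes the bundle use the host's copy of React.
        react: { import: false },
        'react-dom': { import: false },
      },
    }),
  ],
};
```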
```javascript
const path = require('path');
@@ -496,7 +496,7 @@ Superset will extract and validate the extension metadata, load the assets, regi
Here's what happens when your extension loads:
1. **Superset starts**: Reads `manifest.json` from the `.supx` bundle and loads the backend entrypoint
2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](https://github.com/apache/superset/blob/master/superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
2. **Backend registration**: `entrypoint.py` imports your API class, triggering the [`@api`](superset-core/src/superset_core/rest_api/decorators.py) decorator to register it automatically
3. **Frontend loads**: When SQL Lab opens, Superset fetches the remote entry file
4. **Module Federation**: Webpack loads your extension module and resolves `@apache-superset/core` to `window.superset`
5. **Registration**: The module executes at load time, calling `views.registerView` to register your panel
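As a concrete illustration of step 5, the exposed `./index` module typically performs the registration at import time. The exact `registerView` signature and the view id below are assumptions for illustration; the quick-start source shows the real call.

```javascript
// src/index.tsx (sketch): runs as a side effect when Superset loads the
// './index' module from the Module Federation container.
import { views } from '@apache-superset/core';
import { HelloWorldPanel } from './HelloWorldPanel';

// Assumed signature: register the panel under an extension-scoped view id.
views.registerView('hello_world.panel', HelloWorldPanel);
```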
@@ -509,9 +509,9 @@ Here's what happens when your extension loads:
Now that you have a working extension, explore:
- **[Development](./development.md)** - Project structure, APIs, and development workflow
- **[Contribution Types](./contribution-types.md)** - Other contribution points beyond panels
- **[Deployment](./deployment.md)** - Packaging and deploying your extension
- **[Security](./security.md)** - Security best practices for extensions
- **[Development](./development)** - Project structure, APIs, and development workflow
- **[Contribution Types](./contribution-types)** - Other contribution points beyond panels
- **[Deployment](./deployment)** - Packaging and deploying your extension
- **[Security](./security)** - Security best practices for extensions
For a complete real-world example, examine the query insights extension in the Superset codebase.

View File

@@ -28,7 +28,7 @@ By default, extensions are disabled and must be explicitly enabled by setting th
For external extensions, administrators are responsible for evaluating and verifying the security of any extensions they choose to install, just as they would when installing third-party NPM or PyPI packages. At this stage, all extensions run in the same context as the host application, without additional sandboxing. This means that external extensions can impact the security and performance of a Superset environment in the same way as any other installed dependency.
We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry.md) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
**Any performance or security vulnerabilities introduced by external extensions should be reported directly to the extension author, not as Superset vulnerabilities.**

View File

@@ -114,7 +114,7 @@ class CreateDashboardCommand(BaseCommand):
### Data Access Objects (DAOs)
See: [DAO Style Guidelines and Best Practices](./backend/dao-style-guidelines.md)
See: [DAO Style Guidelines and Best Practices](./backend/dao-style-guidelines)
## Testing

View File

@@ -29,16 +29,16 @@ This is a list of statements that describe how we do frontend development in Sup
- We develop using TypeScript.
- See: [SIP-36](https://github.com/apache/superset/issues/9101)
- We use React for building components, and Redux to manage app/global state.
- See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines.md)
- See: [Component Style Guidelines and Best Practices](./frontend/component-style-guidelines)
- We prefer functional components to class components and use hooks for local component state.
- We use [Ant Design](https://ant.design/) components from our component library whenever possible, only building our own custom components when it's required.
- See: [SIP-48](https://github.com/apache/superset/issues/11283)
- We use [@emotion](https://emotion.sh/docs/introduction) to provide styling for our components, co-locating styling within component files.
- See: [SIP-37](https://github.com/apache/superset/issues/9145)
- See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines.md)
- See: [Emotion Styling Guidelines and Best Practices](./frontend/emotion-styling-guidelines)
- We use Jest for unit tests, React Testing Library for component tests, and Cypress for end-to-end tests.
- See: [SIP-56](https://github.com/apache/superset/issues/11830)
- See: [Testing Guidelines and Best Practices](../testing/testing-guidelines.md)
- See: [Testing Guidelines and Best Practices](../testing/testing-guidelines)
- We add tests for every new component or file added to the frontend.
- We organize our repo so similar files live near each other, and tests are co-located with the files they test.
- See: [SIP-61](https://github.com/apache/superset/issues/12098)
@@ -46,6 +46,6 @@ This is a list of statements that describe how we do frontend development in Sup
- We use OXC (oxlint) and Prettier to automatically fix lint errors and format the code.
- We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
- If there's not a linting rule, we don't have a rule!
- See: [Linting How-Tos](../contributing/howtos.md#typescript--javascript)
- See: [Linting How-Tos](../contributing/howtos#typescript--javascript)
- We use [React Storybook](https://storybook.js.org/) to help preview/test and stabilize our components
- A public Storybook with components from the `master` branch is available [here](https://apache-superset.github.io/superset-ui/?path=/story/*)

View File

@@ -31,7 +31,7 @@ This guide is intended primarily for reusable components. Whenever possible, all
## General Guidelines
- We use [Ant Design](https://ant.design/) as our component library. Do not build a new component if Ant Design already provides one; instead, extend or customize what the library provides
- Always style your component using Emotion and always prefer the theme variables whenever applicable. See: [Emotion Styling Guidelines and Best Practices](./emotion-styling-guidelines.md)
- Always style your component using Emotion and always prefer the theme variables whenever applicable. See: [Emotion Styling Guidelines and Best Practices](./emotion-styling-guidelines)
- All components should be made to be reusable whenever possible
- All components should follow the structure and best practices as detailed below
@@ -53,7 +53,7 @@ superset-frontend/src/components
**Storybook:** Components should come with a storybook file whenever applicable, with the following naming convention `\{ComponentName\}.stories.tsx`. More details about Storybook below
**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines.md) for more details
**Unit and end-to-end tests:** All components should come with unit tests using Jest and React Testing Library. The file name should follow this naming convention `\{ComponentName\}.test.tsx`. Read the [Testing Guidelines and Best Practices](../../testing/testing-guidelines) for more details
**Reference naming:** Use `PascalCase` for React components and `camelCase` for component instances
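A minimal illustration of that naming rule (the import path and component are placeholders):

```javascript
// PascalCase for the component itself, camelCase for an instance of it.
import React from 'react';
import { Button } from 'src/components/Button';

const submitButton = React.createElement(Button, null, 'Submit');
```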

View File

@@ -37,16 +37,16 @@ Superset embraces a testing pyramid approach:
## Testing Documentation
### Frontend Testing
- **[Frontend Testing](./frontend-testing.md)** - Jest, React Testing Library, and component testing strategies
- **[Frontend Testing](./frontend-testing)** - Jest, React Testing Library, and component testing strategies
### Backend Testing
- **[Backend Testing](./backend-testing.md)** - pytest, database testing, and API testing patterns
- **[Backend Testing](./backend-testing)** - pytest, database testing, and API testing patterns
### End-to-End Testing
- **[E2E Testing](./e2e-testing.md)** - Playwright testing for complete user workflows
- **[E2E Testing](./e2e-testing)** - Playwright testing for complete user workflows
### CI/CD Integration
- **[CI/CD](./ci-cd.md)** - Continuous integration, automated testing, and deployment pipelines
- **[CI/CD](./ci-cd)** - Continuous integration, automated testing, and deployment pipelines
## Testing Tools & Frameworks

View File

@@ -0,0 +1 @@
[]

View File

@@ -0,0 +1 @@
[]

View File

@@ -1,143 +0,0 @@
---
title: Handlebars Chart
hide_title: true
sidebar_position: 10
version: 1
---
## Handlebars Chart
The Handlebars chart lets you render query results using a custom [Handlebars](https://handlebarsjs.com/) template. This gives you full control over how your data is displayed — from simple tables to rich HTML layouts.
### Basic Usage
In the chart editor, write a Handlebars template in the **Template** field. Your query results are available as `data`, an array of row objects.
```handlebars
{{#each data}}
<p>{{this.name}}: {{this.value}}</p>
{{/each}}
```
### Built-in Helpers
Superset registers several custom helpers on top of the standard Handlebars built-ins.
#### `dateFormat`
Formats a date value using [Day.js](https://day.js.org/) format strings.
```handlebars
{{dateFormat my_date format="MMMM YYYY"}}
```
| Option | Default | Description |
|--------|---------|-------------|
| `format` | `YYYY-MM-DD` | A Day.js-compatible format string |
---
#### `stringify`
Converts an object to a JSON string, or any other value to its string representation.
```handlebars
{{stringify myObj}}
```
---
#### `formatNumber`
Formats a number using locale-aware formatting.
```handlebars
{{formatNumber myNumber "en-US"}}
```
| Option | Default | Description |
|--------|---------|-------------|
| `locale` | `en-US` | A BCP 47 language tag |
---
#### `parseJson`
Parses a JSON string into an object that can be used in your template.
```handlebars
{{parseJson myJsonString}}
```
---
#### `groupBy`
Groups an array of objects by a key, powered by [handlebars-group-by](https://github.com/nicktindall/handlebars-group-by).
```handlebars
{{#groupBy data "department"}}
<h3>{{value}}</h3>
{{#each items}}
<p>{{this.name}}</p>
{{/each}}
{{/groupBy}}
```
---
### Helpers from just-handlebars-helpers
Superset also registers all helpers from the [just-handlebars-helpers](https://github.com/leapfrogtechnology/just-handlebars-helpers) library. These include a wide range of comparison, math, string, and conditional helpers. Commonly used ones include:
#### Comparison
| Helper | Description | Example |
|--------|-------------|---------|
| `eq` | Strict equality | `{{#if (eq status "active")}}` |
| `eqw` | Weak equality | `{{#if (eqw count "5")}}` |
| `neq` | Strict inequality | `{{#if (neq role "admin")}}` |
| `lt` | Less than | `{{#if (lt score 50)}}` |
| `lte` | Less than or equal | `{{#if (lte score 100)}}` |
| `gt` | Greater than | `{{#if (gt price 0)}}` |
| `gte` | Greater than or equal | `{{#if (gte age 18)}}` |
#### Logical
| Helper | Description | Example |
|--------|-------------|---------|
| `and` | Logical AND | `{{#if (and isActive isVerified)}}` |
| `or` | Logical OR | `{{#if (or isAdmin isMod)}}` |
| `not` | Logical NOT | `{{#if (not isDisabled)}}` |
| `ifx` | Inline conditional | `{{ifx isActive "Yes" "No"}}` |
| `coalesce` | Returns first non-falsy value | `{{coalesce nickname name "Anonymous"}}` |
#### String
| Helper | Description | Example |
|--------|-------------|---------|
| `capitalize` | Capitalizes first letter | `{{capitalize name}}` |
| `uppercase` | Converts to uppercase | `{{uppercase status}}` |
| `lowercase` | Converts to lowercase | `{{lowercase email}}` |
| `truncate` | Truncates a string | `{{truncate description 100}}` |
| `contains` | Checks if string contains substring | `{{#if (contains tag "urgent")}}` |
#### Math
| Helper | Description | Example |
|--------|-------------|---------|
| `add` | Addition | `{{add a b}}` |
| `subtract` | Subtraction | `{{subtract total discount}}` |
| `multiply` | Multiplication | `{{multiply price quantity}}` |
| `divide` | Division | `{{divide total count}}` |
| `ceil` | Ceiling | `{{ceil value}}` |
| `floor` | Floor | `{{floor value}}` |
| `round` | Round | `{{round value}}` |
For the full list of available helpers, see the [just-handlebars-helpers documentation](https://github.com/leapfrogtechnology/just-handlebars-helpers).
### Tips
- Use raw blocks to escape Handlebars syntax if you need to display double curly braces literally.
- Comparison helpers like `eq` must be wrapped in a subexpression when used with `#if`: `{{#if (eq myVal "foo")}}`.
- HTML output is sanitized by default based on your Superset configuration (`HTML_SANITIZATION`).
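As a standalone sketch of the subexpression tip (the chart wires up Handlebars and its helpers internally; the explicit `registerHelper` call here exists only so the snippet runs on its own):

```javascript
const Handlebars = require('handlebars');

// Stand-in for the eq helper that just-handlebars-helpers provides.
Handlebars.registerHelper('eq', (a, b) => a === b);

const render = Handlebars.compile(
  '{{#each data}}{{#if (eq this.status "active")}}<p>{{this.name}}</p>{{/if}}{{/each}}',
);

console.log(render({ data: [{ name: 'Alice', status: 'active' }, { name: 'Bob', status: 'retired' }] }));
// -> <p>Alice</p>
```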

View File

@@ -254,7 +254,7 @@ const config: Config = {
'Apache Superset is a modern data exploration and visualization platform',
url: 'https://superset.apache.org',
baseUrl: '/',
onBrokenLinks: 'throw',
onBrokenLinks: 'warn',
markdown: {
mermaid: true,
hooks: {

View File

@@ -30,26 +30,23 @@
"lint:db-metadata:report": "python3 ../superset/db_engine_specs/lint_metadata.py --markdown -o ../superset/db_engine_specs/METADATA_STATUS.md",
"update:readme-db-logos": "node scripts/generate-database-docs.mjs --update-readme",
"eslint": "eslint .",
"lint:docs-links": "node scripts/lint-docs-links.mjs",
"version:add": "node scripts/manage-versions.mjs add",
"version:remove": "node scripts/manage-versions.mjs remove",
"version:add:docs": "node scripts/manage-versions.mjs add docs",
"version:add:admin_docs": "node scripts/manage-versions.mjs add admin_docs",
"version:add:developer_docs": "node scripts/manage-versions.mjs add developer_docs",
"version:add:developer_portal": "node scripts/manage-versions.mjs add developer_portal",
"version:add:components": "node scripts/manage-versions.mjs add components",
"version:remove:docs": "node scripts/manage-versions.mjs remove docs",
"version:remove:admin_docs": "node scripts/manage-versions.mjs remove admin_docs",
"version:remove:developer_docs": "node scripts/manage-versions.mjs remove developer_docs",
"version:remove:developer_portal": "node scripts/manage-versions.mjs remove developer_portal",
"version:remove:components": "node scripts/manage-versions.mjs remove components"
},
"dependencies": {
"@ant-design/icons": "^6.2.3",
"@docusaurus/core": "^3.10.1",
"@docusaurus/faster": "^3.10.1",
"@docusaurus/plugin-client-redirects": "^3.10.1",
"@docusaurus/preset-classic": "3.10.1",
"@docusaurus/theme-live-codeblock": "^3.10.1",
"@docusaurus/theme-mermaid": "^3.10.1",
"@ant-design/icons": "^6.2.2",
"@docusaurus/core": "^3.10.0",
"@docusaurus/faster": "^3.10.0",
"@docusaurus/plugin-client-redirects": "^3.10.0",
"@docusaurus/preset-classic": "3.10.0",
"@docusaurus/theme-live-codeblock": "^3.10.0",
"@docusaurus/theme-mermaid": "^3.10.0",
"@emotion/core": "^11.0.0",
"@emotion/react": "^11.13.3",
"@emotion/styled": "^11.14.1",
@@ -70,12 +67,12 @@
"@storybook/preview-api": "^8.6.18",
"@storybook/theming": "^8.6.15",
"@superset-ui/core": "^0.20.4",
"@swc/core": "^1.15.33",
"@swc/core": "^1.15.32",
"antd": "^6.3.7",
"baseline-browser-mapping": "^2.10.29",
"caniuse-lite": "^1.0.30001792",
"docusaurus-plugin-openapi-docs": "^5.0.2",
"docusaurus-theme-openapi-docs": "^5.0.2",
"baseline-browser-mapping": "^2.10.24",
"caniuse-lite": "^1.0.30001791",
"docusaurus-plugin-openapi-docs": "^5.0.1",
"docusaurus-theme-openapi-docs": "^5.0.1",
"js-yaml": "^4.1.1",
"js-yaml-loader": "^1.2.2",
"json-bigint": "^1.0.0",
@@ -95,21 +92,21 @@
"unist-util-visit": "^5.1.0"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.10.1",
"@docusaurus/tsconfig": "^3.10.1",
"@docusaurus/module-type-aliases": "^3.10.0",
"@docusaurus/tsconfig": "^3.10.0",
"@eslint/js": "^9.39.2",
"@types/js-yaml": "^4.0.9",
"@types/react": "^19.1.8",
"@typescript-eslint/eslint-plugin": "^8.59.3",
"@typescript-eslint/parser": "^8.59.3",
"@typescript-eslint/eslint-plugin": "^8.52.0",
"@typescript-eslint/parser": "^8.59.0",
"eslint": "^9.39.2",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-prettier": "^5.5.5",
"eslint-plugin-react": "^7.37.5",
"globals": "^17.6.0",
"globals": "^17.5.0",
"prettier": "^3.8.3",
"typescript": "~6.0.3",
"typescript-eslint": "^8.59.3",
"typescript-eslint": "^8.59.1",
"webpack": "^5.106.2"
},
"browserslist": {
@@ -127,7 +124,8 @@
"resolutions": {
"react-redux": "^9.2.0",
"@reduxjs/toolkit": "^2.5.0",
"baseline-browser-mapping": "^2.9.19"
"baseline-browser-mapping": "^2.9.19",
"webpackbar": "^7.0.0"
},
"packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610"
}

View File

@@ -141,47 +141,6 @@ def eval_node(node):
return "<f-string>"
return None
def static_return_bool(func_node):
"""
Statically resolve a method's return value to a bool when possible.
Returns True/False for functions whose body is (effectively) a single
\`return True\` / \`return False\` — allowing a leading docstring and
ignoring pure-comment/pass statements. Returns None for anything more
complex (conditional returns, computed values, no return, etc.).
Used by \`has_implicit_cancel\` handling: \`diagnose()\` in lib.py calls
the method and checks the return value, so an override that explicitly
returns False must NOT be treated as enabling query cancelation.
"""
returns = []
other_logic = False
docstring_skipped = False
for stmt in func_node.body:
# Skip docstring (only the FIRST expression statement that is a
# string constant — later bare string literals are not docstrings
# and should count as non-trivial logic).
if (not docstring_skipped
and isinstance(stmt, ast.Expr)
and isinstance(stmt.value, ast.Constant)
and isinstance(stmt.value.value, str)):
docstring_skipped = True
continue
if isinstance(stmt, ast.Pass):
continue
if isinstance(stmt, ast.Return):
returns.append(stmt)
continue
# Any other statement (if/for/assign/etc.) means control flow is
# non-trivial; bail out to be conservative.
other_logic = True
break
if other_logic or len(returns) != 1:
return None
val = eval_node(returns[0].value)
return val if isinstance(val, bool) else None
def deep_merge(base, override):
"""Deep merge two dictionaries. Override values take precedence."""
if base is None:
@@ -227,55 +186,8 @@ if not os.path.isdir(specs_dir):
print(json.dumps({"error": f"Directory not found: {specs_dir}", "cwd": os.getcwd()}))
sys.exit(1)
# Capability flag attributes with their defaults from BaseEngineSpec
CAP_ATTR_DEFAULTS = {
'supports_dynamic_schema': False,
'supports_catalog': False,
'supports_dynamic_catalog': False,
'disable_ssh_tunneling': False,
'supports_file_upload': True,
'allows_joins': True,
'allows_subqueries': True,
}
# Maps source capability attribute -> output field name used in databases.json.
# When a cap attr is assigned an unevaluable expression (e.g.
# allows_joins = is_feature_enabled("DRUID_JOINS")), the JS layer uses this
# mapping to preserve the corresponding field from the previously-generated
# JSON rather than silently inheriting an incorrect parent default.
CAP_ATTR_TO_OUTPUT_FIELD = {
'allows_joins': 'joins',
'allows_subqueries': 'subqueries',
'supports_dynamic_schema': 'supports_dynamic_schema',
'supports_catalog': 'supports_catalog',
'supports_dynamic_catalog': 'supports_dynamic_catalog',
'disable_ssh_tunneling': 'ssh_tunneling',
'supports_file_upload': 'supports_file_upload',
}
# Methods that indicate a capability when overridden by a non-BaseEngineSpec class.
# Mirrors the has_custom_method checks in superset/db_engine_specs/lib.py.
# cancel_query / has_implicit_cancel -> query_cancelation
# (diagnose() checks cancel_query override OR has_implicit_cancel() == True;
# base has_implicit_cancel returns False, so overriding it is the static
# equivalent of that method returning True. get_cancel_query_id is NOT
# part of the diagnose() heuristic and is intentionally excluded.)
# estimate_statement_cost / estimate_query_cost -> query_cost_estimation
# impersonate_user / update_impersonation_config / get_url_for_impersonation -> user_impersonation
# validate_sql -> sql_validation (not used yet; validation is engine-based)
CAP_METHODS = {
'cancel_query', 'has_implicit_cancel',
'estimate_statement_cost', 'estimate_query_cost',
'impersonate_user', 'update_impersonation_config', 'get_url_for_impersonation',
'validate_sql',
}
# Only the literal BaseEngineSpec is excluded from method-override tracking.
# Intermediate base classes (e.g. PrestoBaseEngineSpec) do count as overrides.
TRUE_BASE_CLASS = 'BaseEngineSpec'
# First pass: collect all class info (name, bases, metadata, cap_attrs, direct_methods)
class_info = {} # class_name -> {bases: [], metadata: {}, engine_name: str, filename: str, ...}
# First pass: collect all class info (name, bases, metadata)
class_info = {} # class_name -> {bases: [], metadata: {}, engine_name: str, filename: str}
for filename in sorted(os.listdir(specs_dir)):
if not filename.endswith('.py') or filename in ('__init__.py', 'lib.py', 'lint_metadata.py'):
@@ -306,54 +218,30 @@ for filename in sorted(os.listdir(specs_dir)):
# Extract class attributes
engine_name = None
engine_attr = None
metadata = None
cap_attrs = {} # capability flag attributes defined directly in this class
# Cap attrs assigned via expressions we can't statically resolve
# (e.g. is_feature_enabled("FLAG")). Tracked so the JS layer can
# fall back to the previously-generated databases.json value
# rather than inherit a parent default that would be wrong.
unresolved_cap_attrs = set()
direct_methods = set() # capability methods defined directly in this class
for item in node.body:
if isinstance(item, ast.Assign):
for target in item.targets:
if not isinstance(target, ast.Name):
continue
if target.id == 'engine_name':
val = eval_node(item.value)
if isinstance(val, str):
engine_name = val
elif target.id == 'engine':
val = eval_node(item.value)
if isinstance(val, str):
engine_attr = val
elif target.id == 'metadata':
metadata = eval_node(item.value)
elif target.id in CAP_ATTR_DEFAULTS:
val = eval_node(item.value)
if isinstance(val, bool):
cap_attrs[target.id] = val
else:
# Unevaluable expression — defer to JS fallback.
unresolved_cap_attrs.add(target.id)
elif isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
if item.name in CAP_METHODS:
# has_implicit_cancel is special: diagnose() uses the
# method's RETURN VALUE, not just its presence. If the
# override statically returns False, treat it as if
# the method weren't overridden so query_cancelation
# matches diagnose(). Unresolvable / True / anything
# else falls through as an override (conservative).
if item.name == 'has_implicit_cancel':
if static_return_bool(item) is False:
continue
direct_methods.add(item.name)
if isinstance(target, ast.Name):
if target.id == 'engine_name':
val = eval_node(item.value)
if isinstance(val, str):
engine_name = val
elif target.id == 'metadata':
metadata = eval_node(item.value)
# Check for engine attribute with non-empty value to distinguish
# true base classes from product classes like OceanBaseEngineSpec
has_non_empty_engine = engine_attr is not None and bool(engine_attr)
has_non_empty_engine = False
for item in node.body:
if isinstance(item, ast.Assign):
for target in item.targets:
if isinstance(target, ast.Name) and target.id == 'engine':
# Check if engine value is non-empty string
if isinstance(item.value, ast.Constant):
has_non_empty_engine = bool(item.value.value)
break
# True base classes: end with BaseEngineSpec AND don't define engine
# or have empty engine (like PostgresBaseEngineSpec with engine = "")
@@ -366,18 +254,13 @@ for filename in sorted(os.listdir(specs_dir)):
'bases': base_names,
'metadata': metadata,
'engine_name': engine_name,
'engine': engine_attr,
'filename': filename,
'is_base_or_mixin': is_true_base,
'cap_attrs': cap_attrs,
'unresolved_cap_attrs': unresolved_cap_attrs,
'direct_methods': direct_methods,
}
except Exception as e:
errors.append(f"{filename}: {str(e)}")
# Second pass: resolve inheritance and build final metadata + capability flags
# Second pass: resolve inheritance and build final metadata
def get_inherited_metadata(class_name, visited=None):
"""Recursively get metadata from parent classes."""
if visited is None:
@@ -403,64 +286,6 @@ def get_inherited_metadata(class_name, visited=None):
return inherited
def get_resolved_caps(class_name, visited=None):
"""
Resolve capability flags and method overrides with inheritance.
Returns (attr_values, unresolved, methods):
- attr_values: {attr: bool} for attrs where the nearest MRO assignment
was a literal bool. Defaults are applied at the call site.
- unresolved: attrs where the nearest MRO assignment was an unevaluable
expression (e.g. is_feature_enabled("FLAG")). The JS layer falls
back to the previously-generated JSON value for these.
- methods: capability methods defined directly in some non-base ancestor,
matching the has_custom_method() logic in db_engine_specs/lib.py.
attr_values and unresolved are disjoint — an attr is in at most one.
"""
if visited is None:
visited = set()
if class_name in visited:
return {}, set(), set()
visited.add(class_name)
info = class_info.get(class_name)
if not info:
return {}, set(), set()
attr_values = {}
unresolved = set()
resolved_methods = set()
# Collect from parents, iterating right-to-left so leftmost bases win
# (matches Python MRO: for class C(A, B), A's attributes take precedence).
for base_name in reversed(info['bases']):
p_vals, p_unres, p_meth = get_resolved_caps(base_name, visited.copy())
# A parent's literal assignments overwrite whatever we inherited so far.
for attr, val in p_vals.items():
attr_values[attr] = val
unresolved.discard(attr)
# A parent's unresolved assignments likewise take precedence.
for attr in p_unres:
unresolved.add(attr)
attr_values.pop(attr, None)
resolved_methods.update(p_meth)
# Apply this class's own assignments (override parents).
for attr, val in info['cap_attrs'].items():
attr_values[attr] = val
unresolved.discard(attr)
for attr in info['unresolved_cap_attrs']:
unresolved.add(attr)
attr_values.pop(attr, None)
# Accumulate method overrides, but skip the literal BaseEngineSpec
# (its implementations are stubs; only non-base overrides count).
if class_name != TRUE_BASE_CLASS:
resolved_methods.update(info['direct_methods'])
return attr_values, unresolved, resolved_methods
for class_name, info in class_info.items():
# Skip base classes and mixins
if info['is_base_or_mixin']:
@@ -485,14 +310,7 @@ for class_name, info in class_info.items():
if final_metadata and isinstance(final_metadata, dict) and display_name:
debug_info["classes_with_metadata"] += 1
# Resolve capability flags from Python source
attr_values, unresolved_caps, cap_methods = get_resolved_caps(class_name)
cap_attrs = dict(CAP_ATTR_DEFAULTS)
cap_attrs.update(attr_values)
engine_attr = info.get('engine') or ''
entry = {
databases[display_name] = {
'engine': display_name.lower().replace(' ', '_'),
'engine_name': display_name,
'module': info['filename'][:-3], # Remove .py extension
@@ -500,40 +318,19 @@ for class_name, info in class_info.items():
'time_grains': {},
'score': 0,
'max_score': 0,
# Capability flags read from engine spec class attributes/methods
'joins': cap_attrs['allows_joins'],
'subqueries': cap_attrs['allows_subqueries'],
'supports_dynamic_schema': cap_attrs['supports_dynamic_schema'],
'supports_catalog': cap_attrs['supports_catalog'],
'supports_dynamic_catalog': cap_attrs['supports_dynamic_catalog'],
'ssh_tunneling': not cap_attrs['disable_ssh_tunneling'],
'supports_file_upload': cap_attrs['supports_file_upload'],
# Method-based flags: True only when a non-base class overrides them.
# Matches diagnose() in lib.py: cancel_query override OR
# has_implicit_cancel() returning True (which, given the base
# returns False, is equivalent to overriding has_implicit_cancel).
'query_cancelation': bool({'cancel_query', 'has_implicit_cancel'} & cap_methods),
'query_cost_estimation': bool({'estimate_statement_cost', 'estimate_query_cost'} & cap_methods),
# SQL validation is implemented in external validator classes keyed by engine name
'sql_validation': engine_attr in {'presto', 'postgresql'},
'user_impersonation': bool(
{'impersonate_user', 'update_impersonation_config', 'get_url_for_impersonation'} & cap_methods
),
'joins': True,
'subqueries': True,
'supports_dynamic_schema': False,
'supports_catalog': False,
'supports_dynamic_catalog': False,
'ssh_tunneling': False,
'query_cancelation': False,
'supports_file_upload': False,
'user_impersonation': False,
'query_cost_estimation': False,
'sql_validation': False,
}
# Tell the JS layer which output fields were populated from the
# BaseEngineSpec default because the source assignment was an
# unevaluable expression; those get overridden from existing JSON.
unresolved_fields = sorted(
CAP_ATTR_TO_OUTPUT_FIELD[attr]
for attr in unresolved_caps
if attr in CAP_ATTR_TO_OUTPUT_FIELD
)
if unresolved_fields:
entry['_unresolved_cap_fields'] = unresolved_fields
databases[display_name] = entry
if errors and not databases:
print(json.dumps({"error": "Parse errors", "details": errors, "debug": debug_info}), file=sys.stderr)
@@ -1054,52 +851,24 @@ function loadExistingData() {
}
}
/**
* Fall back to the previously-generated databases.json for capability flags
* whose source assignment couldn't be statically resolved (e.g.
* `allows_joins = is_feature_enabled("DRUID_JOINS")`). The Python extractor
* flags these via the internal `_unresolved_cap_fields` marker; without this
* fallback those fields would silently inherit the BaseEngineSpec default
* and disagree with runtime behavior. The marker is stripped before output.
*/
function fallbackUnresolvedCaps(newDatabases, existingData) {
for (const [name, db] of Object.entries(newDatabases)) {
const unresolved = db._unresolved_cap_fields;
if (!unresolved || unresolved.length === 0) {
delete db._unresolved_cap_fields;
continue;
}
const existingDb = existingData?.databases?.[name];
if (existingDb) {
for (const field of unresolved) {
if (existingDb[field] !== undefined) {
db[field] = existingDb[field];
}
}
}
delete db._unresolved_cap_fields;
}
return newDatabases;
}
/**
* Merge new documentation with existing diagnostics
* Preserves score, max_score, and time_grains from existing data (these require
* Flask context to generate and cannot be derived from static source analysis).
* Capability flags (joins, supports_catalog, etc.) are NOT preserved here — they
* are read fresh from the Python engine spec source by extractEngineSpecMetadata(),
* with a separate fallback for expression-based assignments (see fallbackUnresolvedCaps).
* Preserves score, time_grains, and feature flags from existing data
*/
function mergeWithExistingDiagnostics(newDatabases, existingData) {
if (!existingData?.databases) return newDatabases;
// Only preserve fields that require Flask/runtime context to generate
const diagnosticFields = ['score', 'max_score', 'time_grains'];
const diagnosticFields = [
'score', 'max_score', 'time_grains', 'joins', 'subqueries',
'supports_dynamic_schema', 'supports_catalog', 'supports_dynamic_catalog',
'ssh_tunneling', 'query_cancelation', 'supports_file_upload',
'user_impersonation', 'query_cost_estimation', 'sql_validation'
];
for (const [name, db] of Object.entries(newDatabases)) {
const existingDb = existingData.databases[name];
if (existingDb && existingDb.score > 0) {
// Preserve score/time_grain diagnostics from existing data
// Preserve diagnostics from existing data
for (const field of diagnosticFields) {
if (existingDb[field] !== undefined) {
db[field] = existingDb[field];
@@ -1110,7 +879,7 @@ function mergeWithExistingDiagnostics(newDatabases, existingData) {
const preserved = Object.values(newDatabases).filter(d => d.score > 0).length;
if (preserved > 0) {
console.log(`Preserved score/time_grains for ${preserved} databases from existing data`);
console.log(`Preserved diagnostics for ${preserved} databases from existing data`);
}
return newDatabases;
@@ -1158,12 +927,6 @@ async function main() {
databases = mergeWithExistingDiagnostics(databases, existingData);
}
// For cap flags assigned via unevaluable expressions (e.g.
// `is_feature_enabled(...)`), prefer the value from a previously-generated
// JSON. Runs regardless of scores since it addresses static-analysis gaps,
// not missing Flask diagnostics. Always strips the internal marker.
databases = fallbackUnresolvedCaps(databases, existingData);
// Extract and merge custom_errors for troubleshooting documentation
const customErrors = extractCustomErrors();
mergeCustomErrors(databases, customErrors);

View File

@@ -1260,15 +1260,7 @@ function generateCategoryIndex(category, components) {
};
const componentList = components
.sort((a, b) => a.componentName.localeCompare(b.componentName))
// `.mdx` suffix matches the actual component page files emitted
// by this generator (see the MDX wrappers below). The extension
// is required: Docusaurus only validates and rewrites *file-based*
// references (.md/.mdx). Bare relative paths bypass the file
// resolver and get emitted as raw HTML hrefs that the browser
// resolves against the current URL — which gives the wrong
// directory for trailing-slash routes and breaks SPA navigation.
// See docs/scripts/lint-docs-links.mjs.
.map(c => `- [${c.componentName}](./${c.componentName.toLowerCase()}.mdx)`)
.map(c => `- [${c.componentName}](./${c.componentName.toLowerCase()})`)
.join('\n');
return `---
@@ -1374,7 +1366,7 @@ This documentation is auto-generated from Storybook stories. To add or update co
4. Run \`yarn generate:superset-components\` in the \`docs/\` directory
:::info Work in Progress
This component library is actively being documented. See the [Components TODO](./TODO.md) page for a list of components awaiting documentation.
This component library is actively being documented. See the [Components TODO](./TODO) page for a list of components awaiting documentation.
:::
---

View File

@@ -1,230 +0,0 @@
#!/usr/bin/env node
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* lint-docs-links — source-level checks for internal markdown links.
*
* Catches three failure modes that combine to break SPA navigation in
* a Docusaurus build:
*
* 1. BARE — `[X](../foo)` with no extension. Skips
* Docusaurus's file resolver entirely. Emitted
* as a raw href and resolved by the browser
* against the current page URL — usually the
* wrong directory for trailing-slash routes.
* `onBrokenLinks: 'throw'` cannot catch this.
*
* 2. MISSING_TARGET — `[X](./gone.md)` with an extension, but no
* file at that path. The Docusaurus build
* catches this too (via
* `onBrokenMarkdownLinks: 'throw'`) but only
* after a multi-minute build. This script
* flags it in ~1s.
*
* 3. WRONG_EXTENSION — `[X](./foo.md)` where the file is actually
* `foo.mdx` (or vice versa). Same end result
* as MISSING_TARGET, but the fix is one
* character — so we report it as its own
* category with the actual extension on disk.
*
* Skips: fenced code blocks, asset-style targets (.png/.json/etc.),
* external URLs, in-page anchors, and the `versioned_docs/`
* snapshots (those are frozen historical content).
*
* Run from `docs/`:
* node scripts/lint-docs-links.mjs
*
* Exits 0 on clean, 1 on any finding.
*/
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const docsRoot = path.join(__dirname, '..');
const ROOTS = ['docs', 'admin_docs', 'developer_docs', 'components'];
const NON_DOC_EXTENSIONS = new Set([
'.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg', '.ico',
'.json', '.yaml', '.yml', '.txt', '.csv',
'.zip', '.tar', '.gz',
'.pdf',
'.mp4', '.webm', '.mov',
]);
const LINK_RE = /\[[^\]\n]+?\]\((?<url>\.{1,2}\/[^)\s]+?)\)/g;
/**
* Classify a single markdown link from a source file.
* Returns one of: ok / bare / asset / missing-target / wrong-extension.
*/
function classifyLink(sourceFile, url) {
const stripped = url.split('#', 1)[0].split('?', 1)[0];
const ext = path.extname(stripped).toLowerCase();
// Non-doc assets — legit bare extensions, leave alone.
if (ext && NON_DOC_EXTENSIONS.has(ext)) {
return { kind: 'asset' };
}
// Anything that doesn't end in .md/.mdx is a bare relative URL.
if (ext !== '.md' && ext !== '.mdx') {
return { kind: 'bare' };
}
// Has a .md/.mdx extension — make sure the target exists.
const target = path.normalize(path.join(path.dirname(sourceFile), stripped));
if (fs.existsSync(target)) {
return { kind: 'ok' };
}
// Target doesn't exist — check if the OTHER extension does.
const otherExt = ext === '.md' ? '.mdx' : '.md';
const otherTarget = target.slice(0, -ext.length) + otherExt;
if (fs.existsSync(otherTarget)) {
return { kind: 'wrong-extension', actualExt: otherExt };
}
return { kind: 'missing-target' };
}
function* walk(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
const full = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (
entry.name.startsWith('.') ||
entry.name === 'node_modules' ||
entry.name.endsWith('_versioned_docs') ||
entry.name === 'versioned_docs'
) {
continue;
}
yield* walk(full);
} else if (entry.isFile()) {
if (entry.name.endsWith('.md') || entry.name.endsWith('.mdx')) {
yield full;
}
}
}
}
function lintFile(file) {
const src = fs.readFileSync(file, 'utf8');
const findings = [];
let inFence = false;
const lines = src.split('\n');
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (line.trimStart().startsWith('```')) {
inFence = !inFence;
continue;
}
if (inFence) continue;
for (const m of line.matchAll(LINK_RE)) {
const url = m.groups.url;
const result = classifyLink(file, url);
if (result.kind !== 'ok' && result.kind !== 'asset') {
findings.push({ line: i + 1, url, ...result });
}
}
}
return findings;
}
const findings = [];
for (const root of ROOTS) {
const abs = path.join(docsRoot, root);
if (!fs.existsSync(abs)) continue;
for (const file of walk(abs)) {
for (const f of lintFile(file)) {
findings.push({ file: path.relative(docsRoot, file), ...f });
}
}
}
if (findings.length === 0) {
console.log('✓ lint-docs-links: no broken internal links found');
process.exit(0);
}
// Group by kind for readable output.
const groups = {
bare: [],
'wrong-extension': [],
'missing-target': [],
};
for (const f of findings) {
groups[f.kind].push(f);
}
console.error(
`✗ lint-docs-links: found ${findings.length} broken internal link(s)`
);
console.error('');
if (groups.bare.length) {
console.error(
` ${groups.bare.length} bare relative link(s) (no .md/.mdx extension)`
);
console.error(
" Docusaurus's file resolver skips these; the browser resolves them"
);
console.error(
' against the current page URL — wrong directory for trailing-slash routes.'
);
console.error(' Add the extension so the file resolver picks them up.');
console.error('');
for (const f of groups.bare) {
console.error(` ${f.file}:${f.line} ${f.url}`);
}
console.error('');
}
if (groups['wrong-extension'].length) {
console.error(
` ${groups['wrong-extension'].length} wrong-extension link(s) (.md vs .mdx mismatch)`
);
console.error(' The target file exists with the other extension on disk.');
console.error('');
for (const f of groups['wrong-extension']) {
console.error(
` ${f.file}:${f.line} ${f.url} → use ${f.actualExt}`
);
}
console.error('');
}
if (groups['missing-target'].length) {
console.error(
` ${groups['missing-target'].length} missing-target link(s) (file doesn't exist)`
);
console.error('');
for (const f of groups['missing-target']) {
console.error(` ${f.file}:${f.line} ${f.url}`);
}
console.error('');
}
process.exit(1);

View File

@@ -30,11 +30,9 @@ const __dirname = path.dirname(__filename);
const CONFIG_FILE = path.join(__dirname, '..', 'versions-config.json');
// Parse command line arguments
const rawArgs = process.argv.slice(2);
const skipGenerate = rawArgs.includes('--skip-generate');
const args = rawArgs.filter((a) => a !== '--skip-generate');
const args = process.argv.slice(2);
const command = args[0]; // 'add' or 'remove'
const section = args[1]; // 'docs', 'admin_docs', 'developer_docs', or 'components'
const section = args[1]; // 'docs', 'developer_portal', or 'components'
const version = args[2]; // version string like '1.2.0'
function loadConfig() {
@@ -45,158 +43,36 @@ function saveConfig(config) {
fs.writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2) + '\n');
}
function freezeDataImports(section, version) {
// MDX files can `import` JSON/YAML data from outside the section, either
// via escaping relative paths (e.g. country-map-tools.mdx imports
// `../../data/countries.json`) or via the `@site/` alias (e.g.
// feature-flags.mdx imports `@site/static/feature-flags.json`). Without
// intervention the snapshot keeps reading the live file, so the
// historical version's content silently changes whenever the data file
// is updated. Copy each escaping data import into a snapshot-local
// `_versioned_data/` dir and rewrite the import to point there.
const sectionRoot = section === 'docs'
? path.join(__dirname, '..', 'docs')
: path.join(__dirname, '..', section);
const docsRoot = path.join(__dirname, '..');
const versionedDocsDir = section === 'docs'
? `versioned_docs/version-${version}`
: `${section}_versioned_docs/version-${version}`;
const versionedDocsPath = path.join(__dirname, '..', versionedDocsDir);
const frozenDataDir = path.join(versionedDocsPath, '_versioned_data');
function fixVersionedImports(version) {
const versionedDocsPath = path.join(__dirname, '..', 'versioned_docs', `version-${version}`);
if (!fs.existsSync(versionedDocsPath)) {
return;
}
// Files that need import path fixes
const filesToFix = [
'contributing/resources.mdx',
'configuration/country-map-tools.mdx'
];
console.log(` Freezing data imports in ${versionedDocsDir}...`);
console.log(` Fixing relative imports in versioned docs...`);
// Matches data file imports in two flavors:
// `from '../../foo/bar.json'` (relative, must escape one or more dirs)
// `from '@site/static/foo.json'` (Docusaurus site-root alias)
const dataImportRe = /(from\s+['"])((?:\.\.\/)+|@site\/)([^'"\s]+\.(?:json|ya?ml))(['"])/g;
filesToFix.forEach(filePath => {
const fullPath = path.join(versionedDocsPath, filePath);
if (fs.existsSync(fullPath)) {
let content = fs.readFileSync(fullPath, 'utf8');
function freezeOne(fullPath, depth, prefix, pathSpec, importPath, suffix) {
let resolvedSource;
if (pathSpec === '@site/') {
// `@site/...` always resolves relative to the docs root.
resolvedSource = path.join(docsRoot, importPath);
} else {
// Relative path — must escape the file's depth within the section
// to point at content outside the section. Imports that stay inside
// are copied wholesale by Docusaurus, so we leave them alone.
const upCount = pathSpec.match(/\.\.\//g).length;
if (upCount <= depth) return null;
const relativeFromVersioned = path.relative(versionedDocsPath, fullPath);
const originalDir = path.dirname(path.join(sectionRoot, relativeFromVersioned));
resolvedSource = path.resolve(originalDir, pathSpec + importPath);
// Fix imports that go up two directories to go up three instead
content = content.replace(
/from ['"]\.\.\/\.\.\/src\//g,
"from '../../../src/"
);
content = content.replace(
/from ['"]\.\.\/\.\.\/data\//g,
"from '../../../data/"
);
fs.writeFileSync(fullPath, content);
console.log(` Fixed imports in ${filePath}`);
}
// Skip imports that land inside the section root — those get copied
// with the section snapshot already.
const relFromSection = path.relative(sectionRoot, resolvedSource);
if (!relFromSection.startsWith('..')) return null;
const relFromDocsRoot = path.relative(docsRoot, resolvedSource);
if (relFromDocsRoot.startsWith('..') || !fs.existsSync(resolvedSource)) {
return null;
}
const destPath = path.join(frozenDataDir, relFromDocsRoot);
fs.mkdirSync(path.dirname(destPath), { recursive: true });
fs.copyFileSync(resolvedSource, destPath);
const rewritten = path
.relative(path.dirname(fullPath), destPath)
.split(path.sep)
.join('/');
const finalImport = rewritten.startsWith('.') ? rewritten : `./${rewritten}`;
return `${prefix}${finalImport}${suffix}`;
}
function walk(dir, depth) {
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (entry.name.startsWith('_')) continue;
walk(fullPath, depth + 1);
} else if (entry.isFile() && /\.(md|mdx)$/.test(entry.name)) {
const original = fs.readFileSync(fullPath, 'utf8');
let inFence = false;
let mutated = false;
const updated = original.split('\n').map(line => {
if (/^\s*(```|~~~)/.test(line)) {
inFence = !inFence;
return line;
}
if (inFence) return line;
return line.replace(dataImportRe, (match, prefix, pathSpec, importPath, suffix) => {
const rewritten = freezeOne(fullPath, depth, prefix, pathSpec, importPath, suffix);
if (rewritten === null) return match;
mutated = true;
return rewritten;
});
}).join('\n');
if (mutated) {
fs.writeFileSync(fullPath, updated);
const rel = path.relative(versionedDocsPath, fullPath);
console.log(` Froze data imports in ${rel}`);
}
}
}
}
walk(versionedDocsPath, 0);
}
function fixVersionedImports(section, version) {
// Versioned content lands one directory deeper than the source content,
// so any `../../src/` or `../../data/` imports in .md/.mdx files need
// an extra `../` to keep reaching docs/src and docs/data.
const versionedDocsDir = section === 'docs'
? `versioned_docs/version-${version}`
: `${section}_versioned_docs/version-${version}`;
const versionedDocsPath = path.join(__dirname, '..', versionedDocsDir);
if (!fs.existsSync(versionedDocsPath)) {
return;
}
console.log(` Fixing relative imports in ${versionedDocsDir}...`);
// Imports whose `../` count exceeds the file's depth within the section
// escape the section root, so they need one extra `../` once the file
// lives one level deeper inside the snapshot dir. Imports that stay
// inside the section are unaffected (the section copies wholesale).
function walk(dir, depth) {
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
walk(fullPath, depth + 1);
} else if (entry.isFile() && /\.(md|mdx)$/.test(entry.name)) {
const original = fs.readFileSync(fullPath, 'utf8');
// Track fenced code blocks so we don't rewrite import samples inside
// ```ts / ```js (etc.) blocks that are documentation, not real imports.
let inFence = false;
const updated = original.split('\n').map(line => {
if (/^\s*(```|~~~)/.test(line)) {
inFence = !inFence;
return line;
}
if (inFence) return line;
return line.replace(
/(from\s+['"])((?:\.\.\/)+)/g,
(match, prefix, dots) => {
const upCount = dots.match(/\.\.\//g).length;
return upCount > depth ? `${prefix}../${dots}` : match;
},
);
}).join('\n');
if (updated !== original) {
fs.writeFileSync(fullPath, updated);
const rel = path.relative(versionedDocsPath, fullPath);
console.log(` Fixed imports in ${rel}`);
}
}
}
}
walk(versionedDocsPath, 0);
});
}
function addVersion(section, version) {
@@ -215,28 +91,6 @@ function addVersion(section, version) {
console.log(`Creating version ${version} for ${section}...`);
// Refresh auto-generated content (database pages, API reference,
// component playground) so the snapshot captures the current state of
// master rather than whatever happened to be on disk. `generate:smart`
// hashes its inputs and skips unchanged generators, so this is cheap
// when the dev already has fresh output.
//
// Use --skip-generate if you've placed a CI-artifact databases.json
// (the `database-diagnostics` artifact from Python-Integration) and
// want to preserve it instead of letting the local env regenerate it.
// See docs/README.md "Before You Cut" for the canonical release flow.
if (skipGenerate) {
console.log(` Skipping auto-gen refresh (--skip-generate set)`);
} else {
console.log(` Refreshing auto-generated docs...`);
try {
execSync('yarn run generate:smart', { stdio: 'inherit' });
} catch (error) {
console.error(`Failed to refresh auto-generated docs: ${error.message}`);
process.exit(1);
}
}
// Run Docusaurus version command
const docusaurusCommand = section === 'docs'
? `yarn docusaurus docs:version ${version}`
@@ -249,12 +103,10 @@ function addVersion(section, version) {
process.exit(1);
}
// Freeze data imports BEFORE adjusting paths, so the depth-aware rewriter
// doesn't process the now-local imports we just rewrote.
freezeDataImports(section, version);
// Fix relative imports in versioned content
fixVersionedImports(section, version);
// Fix relative imports in versioned docs (for main docs section only)
if (section === 'docs') {
fixVersionedImports(version);
}
// Update config
// Add to onlyIncludeVersions array (after 'current')
@@ -269,15 +121,10 @@ function addVersion(section, version) {
banner: 'none'
};
// Note: we deliberately do NOT auto-bump `lastVersion` to the new
// version. Superset's docs site keeps `lastVersion: 'current'` so
// the canonical URLs (`/user-docs/...`, `/admin-docs/...`,
// `/developer-docs/...`, `/components/...`) always render master
// content; cut versions are accessed only via their explicit version
// segment. (`/docs/...` paths are legacy and handled via per-page
// redirects in docusaurus.config.ts — not a current canonical
// form.) If you want a different policy, edit versions-config.json
// after cutting.
// Optionally update lastVersion if this is the first non-current version
if (config[section].onlyIncludeVersions.length === 2) {
config[section].lastVersion = version;
}
saveConfig(config);
console.log(`✅ Version ${version} added successfully to ${section}`);
@@ -338,17 +185,8 @@ function removeVersion(section, version) {
const versionIndex = versions.indexOf(version);
if (versionIndex > -1) {
versions.splice(versionIndex, 1);
if (versions.length === 0) {
// Sections with no versions shouldn't carry an empty versions file
// on disk — Docusaurus doesn't require it, and an empty `[]` file
// gets picked up by `docusaurus version` and snapshotted into the
// next cut.
fs.unlinkSync(versionsJsonPath);
console.log(` Removed empty ${versionsJsonFile}`);
} else {
fs.writeFileSync(versionsJsonPath, JSON.stringify(versions, null, 2) + '\n');
console.log(` Updated ${versionsJsonFile}`);
}
fs.writeFileSync(versionsJsonPath, JSON.stringify(versions, null, 2) + '\n');
console.log(` Updated ${versionsJsonFile}`);
}
}
@@ -373,20 +211,17 @@ function removeVersion(section, version) {
function printUsage() {
console.log(`
Usage:
node scripts/manage-versions.mjs add <section> <version> [--skip-generate]
node scripts/manage-versions.mjs remove <section> <version>
node scripts/manage-versions.js add <section> <version>
node scripts/manage-versions.js remove <section> <version>
Where:
- section: 'docs', 'developer_docs', 'admin_docs', or 'components'
- section: 'docs', 'developer_portal', or 'components'
- version: version string (e.g., '1.2.0', '2.0.0')
- --skip-generate: skip refreshing auto-generated docs before snapshotting
(use when you've already placed a fresh databases.json
from CI and want to preserve it)
Examples:
node scripts/manage-versions.mjs add docs 2.0.0
node scripts/manage-versions.mjs add developer_docs 1.3.0
node scripts/manage-versions.mjs remove components 1.0.0
node scripts/manage-versions.js add docs 2.0.0
node scripts/manage-versions.js add developer_portal 1.3.0
node scripts/manage-versions.js remove components 1.0.0
`);
}

File diff suppressed because it is too large

View File

@@ -30,30 +30,19 @@ import { DownOutlined } from '@ant-design/icons';
import styles from './styles.module.css';
// Map each versioned plugin id to the URL prefix it actually serves
// content from. Three of the four routeBasePath values differ from
// their pluginId — the default preset-classic docs plugin lives at
// `/user-docs`, and admin_docs / developer_docs use hyphens in their
// URLs even though the plugin ids use underscores. Without this map
// the basePath derivation below would mis-split the pathname for
// those sections and the version dropdown would jump to the section
// root instead of preserving the current page.
//
// Keep in sync with the `routeBasePath` values in docusaurus.config.ts.
const PLUGIN_ID_TO_BASE_PATH = {
default: '/user-docs',
components: '/components',
admin_docs: '/admin-docs',
developer_docs: '/developer-docs',
};
export default function DocVersionBadge() {
const activePlugin = useActivePlugin();
const { pathname } = useLocation();
const pluginId = activePlugin?.pluginId;
const [versionedPath, setVersionedPath] = React.useState('');
const isVersioned = pluginId && pluginId in PLUGIN_ID_TO_BASE_PATH;
// Show version selector for all versioned sections
const isVersioned = [
'default', // main docs
'components',
'tutorials',
'developer_portal',
].includes(pluginId);
const { preferredVersion } = useDocsPreferredVersion(pluginId);
const versions = useVersions(pluginId);
@@ -64,8 +53,7 @@ export default function DocVersionBadge() {
if (!pathname || !version || !pluginId) return;
let relativePath = '';
const basePath = PLUGIN_ID_TO_BASE_PATH[pluginId];
if (!basePath) return;
const basePath = pluginId === 'default' ? '/docs' : `/${pluginId}`;
// Handle different version path patterns
if (pathname.includes(basePath)) {

View File

@@ -0,0 +1,121 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React, { useState, useEffect } from 'react';
import DocVersionBanner from '@theme-original/DocVersionBanner';
import {
useActivePlugin,
useDocsVersion,
useVersions,
} from '@docusaurus/plugin-content-docs/client';
import { useLocation } from '@docusaurus/router';
import { useDocsPreferredVersion } from '@docusaurus/theme-common';
import { Dropdown } from 'antd';
import { DownOutlined } from '@ant-design/icons';
import styles from './styles.module.css';
export default function DocVersionBannerWrapper(props) {
const activePlugin = useActivePlugin();
const { pathname } = useLocation();
const pluginId = activePlugin?.pluginId;
const [versionedPath, setVersionedPath] = useState('');
// Only show version selector for tutorials
// Main docs, components, and developer_portal use the DocVersionBadge component instead
const isVersioned = pluginId && ['tutorials'].includes(pluginId);
const { preferredVersion } = useDocsPreferredVersion(pluginId);
const versions = useVersions(pluginId);
const version = useDocsVersion();
// Early return if required data is not available
if (!isVersioned || !versions || !version) {
return <DocVersionBanner {...props} />;
}
// Extract the current page path relative to the version
useEffect(() => {
if (!pathname || !version || !pluginId) return;
let relativePath = '';
// Handle different version path patterns
if (pathname.includes(`/${pluginId}/`)) {
// Extract the part after the version
// Example: /components/1.1.0/ui-components/button -> /ui-components/button
const parts = pathname.split(`/${pluginId}/`);
if (parts.length > 1) {
const afterPluginId = parts[1];
// Find where the version part ends
const versionParts = afterPluginId.split('/');
if (versionParts.length > 1) {
// Remove the version part and join the rest
relativePath = '/' + versionParts.slice(1).join('/');
}
}
}
setVersionedPath(relativePath);
}, [pathname, version, pluginId]);
// Create dropdown items for version selection
const items = versions.map(v => {
// Construct the URL for this version, preserving the current page
// v.path is the version-specific path like "1.0.0" or "next"
let versionUrl = v.path;
if (versionedPath) {
// Construct the full URL with the version and the current page path
versionUrl = v.path + versionedPath;
}
return {
key: v.name,
label: (
<a href={versionUrl}>
{v.label}
{v.name === version.name && ' (current)'}
{v.name === preferredVersion?.name && ' (preferred)'}
</a>
),
};
});
return (
<>
<DocVersionBanner {...props} />
{isVersioned && (
<div className={styles.versionBanner}>
<div className={styles.versionContainer}>
<span className={styles.versionLabel}>Version:</span>
<Dropdown menu={{ items }} trigger={['click']}>
<a
onClick={e => e.preventDefault()}
className={styles.versionSelector}
>
{version.label} <DownOutlined />
</a>
</Dropdown>
</div>
</div>
)}
</>
);
}

View File

@@ -17,4 +17,33 @@
* under the License.
*/
/// <reference types="@emotion/jest" />
.versionBanner {
background-color: var(--ifm-color-emphasis-100);
padding: 0.5rem 1rem;
margin-bottom: 1rem;
border-bottom: 1px solid var(--ifm-color-emphasis-200);
}
.versionContainer {
display: flex;
align-items: center;
max-width: var(--ifm-container-width);
margin: 0 auto;
padding: 0 var(--ifm-spacing-horizontal);
}
.versionLabel {
font-weight: bold;
margin-right: 0.5rem;
}
.versionSelector {
cursor: pointer;
color: var(--ifm-color-primary);
font-weight: 500;
}
.versionSelector:hover {
text-decoration: none;
color: var(--ifm-color-primary-darker);
}

View File

@@ -81,12 +81,6 @@
"lifecycle": "development",
"description": "Expand nested types in Presto into extra columns/arrays. Experimental, doesn't work with all nested types."
},
{
"name": "SEMANTIC_LAYERS",
"default": false,
"lifecycle": "development",
"description": "Enable semantic layers and show semantic views alongside datasets"
},
{
"name": "TABLE_V2_TIME_COMPARISON_ENABLED",
"default": false,
@@ -174,7 +168,7 @@
"default": false,
"lifecycle": "testing",
"description": "Allows users to add a superset:// DB that can query across databases. Experimental with potential security/performance risks. See SUPERSET_META_DB_LIMIT.",
"docs": "https://superset.apache.org/user-docs/databases/supported/superset-meta-database"
"docs": "https://superset.apache.org/docs/configuration/databases/#querying-across-databases"
},
{
"name": "ESTIMATE_QUERY_COST",

Binary file not shown.

Before

Width:  |  Height:  |  Size: 132 KiB

After

Width:  |  Height:  |  Size: 134 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 116 KiB

After

Width:  |  Height:  |  Size: 104 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 118 KiB

After

Width:  |  Height:  |  Size: 118 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 97 KiB

After

Width:  |  Height:  |  Size: 99 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 240 KiB

After

Width:  |  Height:  |  Size: 79 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 39 KiB

After

Width:  |  Height:  |  Size: 84 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 131 KiB

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 85 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.1 KiB

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

After

Width:  |  Height:  |  Size: 97 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 141 KiB

View File

@@ -0,0 +1,3 @@
[
"1.0.0"
]

View File

@@ -20,12 +20,12 @@ Alerts and reports are disabled by default. To turn them on, you need to do some
#### In your `superset_config.py` or `superset_config_docker.py`
- `"ALERT_REPORTS"` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) must be turned to True.
- `"ALERT_REPORTS"` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) must be turned to True.
- `beat_schedule` in CeleryConfig must contain schedule for `reports.scheduler`.
- At least one of those must be configured, depending on what you want to use:
- emails: `SMTP_*` settings
- Slack messages: `SLACK_API_TOKEN`
- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
- Users can customize the email subject by including date code placeholders, which will automatically be replaced with the corresponding UTC date when the email is sent. To enable this functionality, activate the `"DATE_FORMAT_IN_EMAIL_SUBJECT"` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags). This enables date formatting in email subjects, preventing all reporting emails from being grouped into the same thread (optional for the reporting feature).
- Use date codes from [strftime.org](https://strftime.org/) to create the email subject.
- If no date code is provided, the original string will be used as the email subject.
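For orientation, here is a minimal `superset_config.py` sketch that combines the settings listed above. It is illustrative only; the broker URLs, SMTP host, and token are placeholders, not values taken from this change:
```python
# Illustrative only: enable Alerts & Reports with a Celery beat schedule.
from celery.schedules import crontab

FEATURE_FLAGS = {
    "ALERT_REPORTS": True,
    "DATE_FORMAT_IN_EMAIL_SUBJECT": True,  # optional, see note above
}

class CeleryConfig:
    broker_url = "redis://redis:6379/0"      # placeholder broker
    result_backend = "redis://redis:6379/0"  # placeholder backend
    beat_schedule = {
        "reports.scheduler": {
            "task": "reports.scheduler",
            "schedule": crontab(minute="*", hour="*"),
        },
    }

CELERY_CONFIG = CeleryConfig

# Configure at least one delivery channel:
SMTP_HOST = "smtp.example.com"   # emails
SLACK_API_TOKEN = "xoxb-placeholder"  # Slack messages
```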
@@ -38,7 +38,7 @@ Screenshots will be taken but no messages actually sent as long as `ALERT_REPORT
- You must install a headless browser, for taking screenshots of the charts and dashboards. Only Firefox and Chrome are currently supported.
> If you choose Chrome, you must also change the value of `WEBDRIVER_TYPE` to `"chrome"` in your `superset_config.py`.
Note: All the components required (Firefox headless browser, Redis, Postgres db, celery worker and celery beat) are present in the *dev* docker image if you are following [Installing Superset Locally](/user-docs/6.0.0/installation/docker-compose/).
Note: All the components required (Firefox headless browser, Redis, Postgres db, celery worker and celery beat) are present in the *dev* docker image if you are following [Installing Superset Locally](/docs/6.0.0/installation/docker-compose/).
All you need to do is add the required config variables described in this guide (See `Detailed Config`).
If you are running a non-dev docker image, e.g., a stable release like `apache/superset:3.1.0`, that image does not include a headless browser. Only the `superset_worker` container needs this headless browser to browse to the target chart or dashboard.
@@ -70,7 +70,7 @@ Note: when you configure an alert or a report, the Slack channel list takes chan
### Kubernetes-specific
- You must have a `celery beat` pod running. If you're using the chart included in the GitHub repository under [helm/superset](https://github.com/apache/superset/tree/master/helm/superset), you need to put `supersetCeleryBeat.enabled = true` in your values override.
- You can see the dedicated docs about [Kubernetes installation](/user-docs/6.0.0/installation/kubernetes) for more details.
- You can see the dedicated docs about [Kubernetes installation](/docs/6.0.0/installation/kubernetes) for more details.
### Docker Compose specific

View File

@@ -78,11 +78,11 @@ Caching for SQL Lab query results is used when async queries are enabled and is
Note that this configuration does not use a flask-caching dictionary for its configuration, but
instead requires a cachelib object.
See [Async Queries via Celery](/user-docs/6.0.0/configuration/async-queries-celery) for details.
See [Async Queries via Celery](/docs/6.0.0/configuration/async-queries-celery) for details.
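As a hedged illustration of the cachelib requirement (values here are placeholders, not taken from the docs being changed), a Redis-backed results cache might be declared like this:
```python
# Sketch: the SQL Lab results cache must be a cachelib object,
# not a flask-caching configuration dictionary.
from cachelib.redis import RedisCache

RESULTS_BACKEND = RedisCache(
    host="localhost",
    port=6379,
    key_prefix="superset_results",
)
```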
## Caching Thumbnails
This is an optional feature that can be turned on by activating its [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) on config:
This is an optional feature that can be turned on by activating its [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) on config:
```
FEATURE_FLAGS = {

View File

@@ -37,7 +37,7 @@ ENV SUPERSET_CONFIG_PATH /app/superset_config.py
```
Docker compose deployments handle application configuration differently using specific conventions.
Refer to the [docker compose tips & configuration](/user-docs/6.0.0/installation/docker-compose#docker-compose-tips--configuration)
Refer to the [docker compose tips & configuration](/docs/6.0.0/installation/docker-compose#docker-compose-tips--configuration)
for details.
The following is an example of just a few of the parameters you can set in your `superset_config.py` file:
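The original snippet is not reproduced in this hunk; purely as an illustration, a few commonly set parameters might look like this (all values below are placeholders):
```python
# Illustrative superset_config.py values; adjust for your deployment.
ROW_LIMIT = 5000                      # default row limit for chart data requests
SUPERSET_WEBSERVER_PORT = 8088        # port the web server listens on
SECRET_KEY = "CHANGE_ME_TO_A_LONG_RANDOM_VALUE"
SQLALCHEMY_DATABASE_URI = "postgresql://user:pass@localhost/superset_meta"
```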
@@ -254,7 +254,7 @@ flask --app "superset.app:create_app(superset_app_root='/analytics')"
### Docker builds
The [docker compose](/user-docs/6.0.0/installation/docker-compose#configuring-further) developer
The [docker compose](/docs/6.0.0/installation/docker-compose#configuring-further) developer
configuration includes an additional environmental variable,
[`SUPERSET_APP_ROOT`](https://github.com/apache/superset/blob/master/docker/.env),
to simplify the process of setting up a non-default root path across the services.
@@ -449,4 +449,4 @@ FEATURE_FLAGS = {
}
```
A current list of feature flags can be found in the [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) documentation.
A current list of feature flags can be found in the [Feature Flags](/docs/6.0.0/configuration/feature-flags) documentation.

View File

@@ -14,7 +14,7 @@ in your environment.
You'll need to install the required packages for the database you want to use as your metadata database
as well as the packages needed to connect to the databases you want to access through Superset.
For information about setting up Superset's metadata database, please refer to
the installation documentation ([Docker Compose](/user-docs/6.0.0/installation/docker-compose), [Kubernetes](/user-docs/6.0.0/installation/kubernetes))
the installation documentation ([Docker Compose](/docs/6.0.0/installation/docker-compose), [Kubernetes](/docs/6.0.0/installation/kubernetes))
:::
This documentation tries to keep pointers to the different drivers for commonly used database
@@ -26,7 +26,7 @@ Superset requires a Python [DB-API database driver](https://peps.python.org/pep-
and a [SQLAlchemy dialect](https://docs.sqlalchemy.org/en/20/dialects/) to be installed for
each database engine you want to connect to.
You can read more [here](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
You can read more [here](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
install new database drivers into your Superset configuration.
### Supported Databases and Dependencies
@@ -37,53 +37,53 @@ are compatible with Superset.
| <div style={{width: '150px'}}>Database</div> | PyPI package | Connection String |
| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ |
| [AWS Athena](/user-docs/6.0.0/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
| [AWS DynamoDB](/user-docs/6.0.0/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
| [AWS Redshift](/user-docs/6.0.0/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [Apache Doris](/user-docs/6.0.0/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Apache Drill](/user-docs/6.0.0/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
| [Apache Druid](/user-docs/6.0.0/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
| [Apache Hive](/user-docs/6.0.0/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Apache Impala](/user-docs/6.0.0/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
| [Apache Kylin](/user-docs/6.0.0/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` |
| [Apache Pinot](/user-docs/6.0.0/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` |
| [Apache Solr](/user-docs/6.0.0/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` |
| [Apache Spark SQL](/user-docs/6.0.0/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Ascend.io](/user-docs/6.0.0/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
| [Azure MS SQL](/user-docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
| [ClickHouse](/user-docs/6.0.0/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/user-docs/6.0.0/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/user-docs/6.0.0/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [CrateDB](/user-docs/6.0.0/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
| [Denodo](/user-docs/6.0.0/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
| [Dremio](/user-docs/6.0.0/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
| [Elasticsearch](/user-docs/6.0.0/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/user-docs/6.0.0/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/user-docs/6.0.0/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
| [Google Sheets](/user-docs/6.0.0/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` |
| [Firebolt](/user-docs/6.0.0/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` |
| [Hologres](/user-docs/6.0.0/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [IBM Db2](/user-docs/6.0.0/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
| [IBM Netezza Performance Server](/user-docs/6.0.0/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [MySQL](/user-docs/6.0.0/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [OceanBase](/user-docs/6.0.0/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Oracle](/user-docs/6.0.0/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
| [Parseable](/user-docs/6.0.0/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
| [PostgreSQL](/user-docs/6.0.0/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/user-docs/6.0.0/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
| [SAP Hana](/user-docs/6.0.0/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [SingleStore](/user-docs/6.0.0/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` |
| [StarRocks](/user-docs/6.0.0/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Snowflake](/user-docs/6.0.0/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
| [AWS Athena](/docs/6.0.0/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` |
| [AWS DynamoDB](/docs/6.0.0/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` |
| [AWS Redshift](/docs/6.0.0/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://<userName>:<DBPassword>@<AWS End Point>:5439/<Database Name>` |
| [Apache Doris](/docs/6.0.0/configuration/databases#apache-doris) | `pip install pydoris` | `doris://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Apache Drill](/docs/6.0.0/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://<username>:<password>@<host>:<port>/<storage_plugin>`, often useful: `?use_ssl=True/False` |
| [Apache Druid](/docs/6.0.0/configuration/databases#apache-druid) | `pip install pydruid` | `druid://<User>:<password>@<Host>:<Port-default-9088>/druid/v2/sql` |
| [Apache Hive](/docs/6.0.0/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Apache Impala](/docs/6.0.0/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` |
| [Apache Kylin](/docs/6.0.0/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>` |
| [Apache Pinot](/docs/6.0.0/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` |
| [Apache Solr](/docs/6.0.0/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` |
| [Apache Spark SQL](/docs/6.0.0/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` |
| [Ascend.io](/docs/6.0.0/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` |
| [Azure MS SQL](/docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` |
| [ClickHouse](/docs/6.0.0/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` |
| [CockroachDB](/docs/6.0.0/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` |
| [Couchbase](/docs/6.0.0/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` |
| [CrateDB](/docs/6.0.0/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. |
| [Denodo](/docs/6.0.0/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` |
| [Dremio](/docs/6.0.0/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` |
| [Elasticsearch](/docs/6.0.0/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` |
| [Exasol](/docs/6.0.0/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` |
| [Google BigQuery](/docs/6.0.0/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` |
| [Google Sheets](/docs/6.0.0/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` |
| [Firebolt](/docs/6.0.0/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` |
| [Hologres](/docs/6.0.0/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [IBM Db2](/docs/6.0.0/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` |
| [IBM Netezza Performance Server](/docs/6.0.0/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [MySQL](/docs/6.0.0/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [OceanBase](/docs/6.0.0/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Oracle](/docs/6.0.0/configuration/databases#oracle) | `pip install cx_Oracle` | `oracle://<username>:<password>@<hostname>:<port>` |
| [Parseable](/docs/6.0.0/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://<UserName>:<DBPassword>@<Database Host>/<Stream Name>` |
| [PostgreSQL](/docs/6.0.0/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [Presto](/docs/6.0.0/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` |
| [SAP Hana](/docs/6.0.0/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` |
| [SingleStore](/docs/6.0.0/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` |
| [StarRocks](/docs/6.0.0/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` |
| [Snowflake](/docs/6.0.0/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` |
| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` |
| [SQL Server](/user-docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
| [TDengine](/user-docs/6.0.0/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
| [Teradata](/user-docs/6.0.0/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
| [TimescaleDB](/user-docs/6.0.0/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/user-docs/6.0.0/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/user-docs/6.0.0/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/user-docs/6.0.0/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/user-docs/6.0.0/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [SQL Server](/docs/6.0.0/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>` |
| [TDengine](/docs/6.0.0/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://<user>:<password>@<host>:<port>` |
| [Teradata](/docs/6.0.0/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` |
| [TimescaleDB](/docs/6.0.0/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/6.0.0/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/6.0.0/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/docs/6.0.0/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/6.0.0/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
---
@@ -109,7 +109,7 @@ The connector library installation process is the same for all additional librar
#### 1. Determine the driver you need
Consult the [list of database drivers](/user-docs/6.0.0/configuration/databases)
Consult the [list of database drivers](/docs/6.0.0/configuration/databases)
and find the PyPI package needed to connect to your database. In this example, we're connecting
to a MySQL database, so we'll need the `mysqlclient` connector library.
@@ -165,11 +165,11 @@ to your database via the Superset web UI.
As an admin user, go to Settings -> Data: Database Connections and click the +DATABASE button.
From there, follow the steps on the
[Using Database Connection UI page](/user-docs/6.0.0/configuration/databases#connecting-through-the-ui).
[Using Database Connection UI page](/docs/6.0.0/configuration/databases#connecting-through-the-ui).
Consult the page for your specific database type in the Superset documentation to determine
the connection string and any other parameters you need to input. For instance,
on the [MySQL page](/user-docs/6.0.0/configuration/databases#mysql), we see that the connection string
on the [MySQL page](/docs/6.0.0/configuration/databases#mysql), we see that the connection string
to a local MySQL database differs depending on whether the setup is running on Linux or Mac.
Click the “Test Connection” button, which should result in a popup message saying,
@@ -407,7 +407,7 @@ this:
crate://<username>:<password>@<clustername>.cratedb.net:4200/?ssl=true
```
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-database-drivers)
Follow the steps [here](/docs/6.0.0/configuration/databases#installing-database-drivers)
to install the CrateDB connector package when setting up Superset locally using
Docker Compose.
@@ -782,7 +782,7 @@ The recommended connector library for BigQuery is
##### Install BigQuery Driver
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
Follow the steps [here](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) about how to
install new database drivers when setting up Superset locally via docker compose.
```bash
@@ -1177,7 +1177,7 @@ risingwave://root@{hostname}:{port}/{database}?sslmode=disable
##### Install Snowflake Driver
Follow the steps [here](/user-docs/6.0.0/configuration/databases#installing-database-drivers) about how to
Follow the steps [here](/docs/6.0.0/configuration/databases#installing-database-drivers) about how to
install new database drivers when setting up Superset locally via docker compose.
```bash

View File

@@ -51,7 +51,7 @@ Restart Superset for this configuration change to take effect.
#### Making a Dashboard Public
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) to `superset_config.py`
1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/docs/6.0.0/configuration/feature-flags) to `superset_config.py`
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)
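A minimal sketch of step 1 in `superset_config.py` (assuming no other flags are being set) could be:
```python
# Enable dashboard role-based access control.
FEATURE_FLAGS = {
    "DASHBOARD_RBAC": True,
}
```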
#### Embedding a Public Dashboard

View File

@@ -10,7 +10,7 @@ version: 1
## Jinja Templates
SQL Lab and Explore support [Jinja templating](https://jinja.palletsprojects.com/en/2.11.x/) in queries.
To enable templating, the `ENABLE_TEMPLATE_PROCESSING` [feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) needs to be enabled in
To enable templating, the `ENABLE_TEMPLATE_PROCESSING` [feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) needs to be enabled in
`superset_config.py`. When templating is enabled, python code can be embedded in virtual datasets and
in Custom SQL in the filter and metric controls in Explore. By default, the following variables are
made available in the Jinja context:

View File

@@ -20,7 +20,7 @@ To help make the problem somewhat tractable—given that Apache Superset has no
To strive for data consistency (regardless of the timezone of the client) the Apache Superset backend tries to ensure that any timestamp sent to the client has an explicit (or semi-explicit as in the case with [Epoch time](https://en.wikipedia.org/wiki/Unix_time) which is always in reference to UTC) timezone encoded within.
The challenge, however, lies with the slew of [database engines](/user-docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects) relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
The challenge, however, lies with the slew of [database engines](/docs/6.0.0/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects) relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
For example the following is a comparison of MySQL and Presto,

View File

@@ -77,7 +77,7 @@ Look through the GitHub issues. Issues tagged with
Superset could always use better documentation,
whether as part of the official Superset docs,
in docstrings, `docs/*.rst` or even on the web as blog posts or
articles. See [Documentation](/user-docs/6.0.0/contributing/howtos#contributing-to-documentation) for more details.
articles. See [Documentation](/docs/6.0.0/contributing/howtos#contributing-to-documentation) for more details.
### Add Translations

View File

@@ -599,7 +599,7 @@ export enum FeatureFlag {
those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.
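A small Python sketch (not from the contributing guide itself) that mirrors the merge behavior described above:
```python
# Shallow merge: keys from superset_config.py override superset/config.py defaults.
DEFAULT_FEATURE_FLAGS = {"FOO": True, "BAR": False}   # superset/config.py
FEATURE_FLAGS = {"BAR": True, "BAZ": True}            # superset_config.py

combined = {**DEFAULT_FEATURE_FLAGS, **FEATURE_FLAGS}
assert combined == {"FOO": True, "BAR": True, "BAZ": True}
```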
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) documentation.
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/6.0.0/configuration/feature-flags) documentation.
## Git Hooks
@@ -614,7 +614,7 @@ A series of checks will now run when you make a git commit.
## Linting
See [how tos](/user-docs/6.0.0/contributing/howtos#linting)
See [how tos](/docs/6.0.0/contributing/howtos#linting)
## GitHub Actions and `act`

View File

@@ -57,7 +57,7 @@ Finally, never submit a PR that will put master branch in broken state. If the P
in `requirements.txt` pinned to a specific version which ensures that the application
build is deterministic.
- For TypeScript/JavaScript, include new libraries in `package.json`
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](/user-docs/6.0.0/contributing/howtos#testing) for how to run tests.
- **Tests:** The pull request should include tests, either as doctests, unit tests, or both. Make sure to resolve all errors and test failures. See [Testing](/docs/6.0.0/contributing/howtos#testing) for how to run tests.
- **Documentation:** If the pull request adds functionality, the docs should be updated as part of the same PR.
- **CI:** Reviewers will not review the code until all CI tests are passed. Sometimes there can be flaky tests. You can close and open PR to re-run CI test. Please report if the issue persists. After the CI fix has been deployed to `master`, please rebase your PR.
- **Code coverage:** Please ensure that code coverage does not decrease.

View File

@@ -51,11 +51,11 @@ multiple tables as long as your database account has access to the tables.
## How do I create my own visualization?
We recommend reading the instructions in
[Creating Visualization Plugins](/user-docs/6.0.0/contributing/howtos#creating-visualization-plugins).
[Creating Visualization Plugins](/docs/6.0.0/contributing/howtos#creating-visualization-plugins).
## Can I upload and visualize CSV data?
Absolutely! Read the instructions [here](/user-docs/using-superset/exploring-data) to learn
Absolutely! Read the instructions [here](/docs/using-superset/exploring-data) to learn
how to enable and use CSV upload.
## Why are my queries timing out?
@@ -142,7 +142,7 @@ SQLALCHEMY_DATABASE_URI = 'sqlite:////new/location/superset.db?check_same_thread
```
You can read more about customizing Superset using the configuration file
[here](/user-docs/6.0.0/configuration/configuring-superset).
[here](/docs/6.0.0/configuration/configuring-superset).
## What if the table schema changed?
@@ -157,7 +157,7 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s
To clarify, the database backend is an OLTP database used by Superset to store its internal
information like your list of users and dashboard definitions. While Superset supports a
[variety of databases as data _sources_](/user-docs/6.0.0/configuration/databases#installing-database-drivers),
[variety of databases as data _sources_](/docs/6.0.0/configuration/databases#installing-database-drivers),
only a few database engines are supported for use as the OLTP backend / metadata store.
Superset is tested using MySQL, PostgreSQL, and SQLite backends. It's recommended you install
@@ -190,7 +190,7 @@ second etc). Example:
## Does Superset work with [insert database engine here]?
The [Connecting to Databases section](/user-docs/6.0.0/configuration/databases) provides the best
The [Connecting to Databases section](/docs/6.0.0/configuration/databases) provides the best
overview for supported databases. Database engines not listed on that page may work too. We rely on
the community to contribute to this knowledge base.
@@ -226,7 +226,7 @@ are typical in basic SQL:
## Does Superset offer a public API?
Yes, a public REST API, and the surface of that formal API is expanding steadily. You can read more about this API and
interact with it using Swagger [here](/developer-docs/api).
interact with it using Swagger [here](/docs/api).
Some of the
original vision for the collection of endpoints under **/api/v1** was specified in
@@ -266,7 +266,7 @@ Superset uses [Scarf](https://about.scarf.sh/) by default to collect basic telem
We use the [Scarf Gateway](https://docs.scarf.sh/gateway/) to sit in front of container registries, the [scarf-js](https://about.scarf.sh/package-sdks) package to track `npm` installations, and a Scarf pixel to gather anonymous analytics on Superset page views.
Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track) and [here](https://docs.scarf.sh/package-analytics/#as-a-user-of-a-package-using-scarf-js-how-can-i-opt-out-of-analytics).
Superset maintainers can also opt out of telemetry data collection by setting the `SCARF_ANALYTICS` environment variable to `false` in the Superset container (or anywhere Superset/webpack are run).
Additional opt-out instructions for Docker users are available on the [Docker Installation](/user-docs/6.0.0/installation/docker-compose) page.
Additional opt-out instructions for Docker users are available on the [Docker Installation](/docs/6.0.0/installation/docker-compose) page.
## Does Superset have an archive panel or trash bin from which a user can recover deleted assets?

View File

@@ -24,10 +24,10 @@ A Superset installation is made up of these components:
The optional components above are necessary to enable these features:
- [Alerts and Reports](/user-docs/6.0.0/configuration/alerts-reports)
- [Caching](/user-docs/6.0.0/configuration/cache)
- [Async Queries](/user-docs/6.0.0/configuration/async-queries-celery/)
- [Dashboard Thumbnails](/user-docs/6.0.0/configuration/cache/#caching-thumbnails)
- [Alerts and Reports](/docs/6.0.0/configuration/alerts-reports)
- [Caching](/docs/6.0.0/configuration/cache)
- [Async Queries](/docs/6.0.0/configuration/async-queries-celery/)
- [Dashboard Thumbnails](/docs/6.0.0/configuration/cache/#caching-thumbnails)
If you install with Kubernetes or Docker Compose, all of these components will be created.
@@ -59,7 +59,7 @@ The caching layer serves two main functions:
- Store the results of queries to your data warehouse so that when a chart is loaded twice, it pulls from the cache the second time, speeding up the application and reducing load on your data warehouse.
- Act as a message broker for the worker, enabling the Alerts & Reports, async queries, and thumbnail caching features.
Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/user-docs/6.0.0/configuration/cache/) for more.
Most people use Redis for their cache, but Superset supports other options too. See the [cache docs](/docs/6.0.0/configuration/cache/) for more.
### Worker and Beat
@@ -67,6 +67,6 @@ This is one or more workers who execute tasks like run async queries or take sna
## Other components
Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/user-docs/6.0.0/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.
Other components can be incorporated into Superset. The best place to learn about additional configurations is the [Configuration page](/docs/6.0.0/configuration/configuring-superset). For instance, you could set up a load balancer or reverse proxy to implement HTTPS in front of your Superset application, or specify a Mapbox URL to enable geospatial charts, etc.
Superset won't even start without certain configuration settings established, so it's essential to review that page.

View File

@@ -21,7 +21,7 @@ with our [installing on k8s](https://superset.apache.org/docs/installation/runni
documentation.
:::
As mentioned in our [quickstart guide](/user-docs/quickstart), the fastest way to try
As mentioned in our [quickstart guide](/docs/quickstart), the fastest way to try
Superset locally is using Docker Compose on a Linux or Mac OSX
computer. Superset does not have official support for Windows. It's also the easiest
way to launch a fully functioning **development environment** quickly.

View File

@@ -9,11 +9,11 @@ import useBaseUrl from "@docusaurus/useBaseUrl";
# Installation Methods
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/user-docs/6.0.0/installation/architecture) page to understand Superset's different components.
How should you install Superset? Here's a comparison of the different options. It will help if you've first read the [Architecture](/docs/6.0.0/installation/architecture) page to understand Superset's different components.
The fundamental trade-off is between you needing to do more of the detail work yourself vs. using a more complex deployment route that handles those details.
## [Docker Compose](/user-docs/6.0.0/installation/docker-compose)
## [Docker Compose](/docs/6.0.0/installation/docker-compose)
**Summary:** This takes advantage of containerization while remaining simpler than Kubernetes. This is the best way to try out Superset; it's also useful for developing & contributing back to Superset.
@@ -27,9 +27,9 @@ You will need to back up your metadata DB. That could mean backing up the servic
You will also need to extend the Superset docker image. The default `lean` images do not contain drivers needed to access your metadata database (Postgres or MySQL), nor to access your data warehouse, nor the headless browser needed for Alerts & Reports. You could run a `-dev` image while demoing Superset, which has some of this, but you'll still need to install the driver for your data warehouse. The `-dev` images run as root, which is not recommended for production.
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs. See [Building your own production Docker image](/user-docs/6.0.0/installation/docker-builds/#building-your-own-production-docker-image).
Ideally you will build your own image of Superset that extends `lean`, adding what your deployment needs. See [Building your own production Docker image](/docs/6.0.0/installation/docker-builds/#building-your-own-production-docker-image).
## [Kubernetes (K8s)](/user-docs/6.0.0/installation/kubernetes)
## [Kubernetes (K8s)](/docs/6.0.0/installation/kubernetes)
**Summary:** This is the best-practice way to deploy a production instance of Superset, but has the steepest skill requirement - someone who knows Kubernetes.
@@ -41,7 +41,7 @@ A K8s deployment can scale up and down based on usage and deploy rolling updates
You will need to build your own Docker image, and back up your metadata DB, both as described in Docker Compose above. You'll also need to customize your Helm chart values and deploy and maintain your Kubernetes cluster.
## [PyPI (Python)](/user-docs/6.0.0/installation/pypi)
## [PyPI (Python)](/docs/6.0.0/installation/pypi)
**Summary:** This is the only method that requires no knowledge of containers. It requires the most hands-on work to deploy, connect, and maintain each component.

View File

@@ -149,7 +149,7 @@ For production clusters it's recommended to build own image with this step done
Superset requires a Python DB-API database driver and a SQLAlchemy
dialect to be installed for each datastore you want to connect to.
See [Install Database Drivers](/user-docs/6.0.0/configuration/databases) for more information.
See [Install Database Drivers](/docs/6.0.0/configuration/databases) for more information.
It is recommended that you refer to versions listed in
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
instead of hard-coding them in your bootstrap script, as seen below.
@@ -310,7 +310,7 @@ configOverrides:
### Enable Alerts and Reports
For this, as per the [Alerts and Reports doc](/user-docs/6.0.0/configuration/alerts-reports), you will need to:
For this, as per the [Alerts and Reports doc](/docs/6.0.0/configuration/alerts-reports), you will need to:
#### Install a supported webdriver in the Celery worker

View File

@@ -172,7 +172,7 @@ how to set up a development environment.
## Resources
- [Superset "In the Wild"](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md) - open a PR to add your org to the list!
- [Feature Flags](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) - the status of Superset's Feature Flags.
- [Feature Flags](/docs/6.0.0/configuration/feature-flags) - the status of Superset's Feature Flags.
- [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
- [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
- [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.

View File

@@ -15,7 +15,7 @@ Although we recommend using `Docker Compose` for a quick start in a sandbox-type
environment and for other development-type use cases, **we
do not recommend this setup for production**. For this purpose please
refer to our
[Installing on Kubernetes](/user-docs/6.0.0/installation/kubernetes/)
[Installing on Kubernetes](/docs/6.0.0/installation/kubernetes/)
page.
:::
@@ -73,10 +73,10 @@ processes by running Docker Compose `stop` command. By doing so, you can avoid d
From this point on, you can head on to:
- [Create your first Dashboard](/user-docs/6.0.0/using-superset/creating-your-first-dashboard)
- [Connect to a Database](/user-docs/6.0.0/configuration/databases)
- [Using Docker Compose](/user-docs/6.0.0/installation/docker-compose)
- [Configure Superset](/user-docs/6.0.0/configuration/configuring-superset/)
- [Installing on Kubernetes](/user-docs/6.0.0/installation/kubernetes/)
- [Create your first Dashboard](/docs/6.0.0/using-superset/creating-your-first-dashboard)
- [Connect to a Database](/docs/6.0.0/configuration/databases)
- [Using Docker Compose](/docs/6.0.0/installation/docker-compose)
- [Configure Superset](/docs/6.0.0/configuration/configuring-superset/)
- [Installing on Kubernetes](/docs/6.0.0/installation/kubernetes/)
Or just explore our [Documentation](https://superset.apache.org/docs/intro)!

View File

@@ -31,7 +31,7 @@ your existing SQL-speaking database or data store.
First things first, we need to add the connection credentials to your database to be able
to query and visualize data from it. If you're using Superset locally via
[Docker compose](/user-docs/6.0.0/installation/docker-compose), you can
[Docker compose](/docs/6.0.0/installation/docker-compose), you can
skip this step because a Postgres database, named **examples**, is included and
pre-configured in Superset for you.
@@ -188,7 +188,7 @@ Access to dashboards is managed via owners (users that have edit permissions to
Access for non-owner users can be managed in two different ways. The dashboard needs to be published to be visible to other users.
1. Dataset permissions - if you add to the relevant role permissions to datasets it automatically grants implicit access to all dashboards that uses those permitted datasets.
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/user-docs/6.0.0/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
2. Dashboard roles - if you enable [**DASHBOARD_RBAC** feature flag](/docs/6.0.0/configuration/configuring-superset#feature-flags) then you will be able to manage which roles can access the dashboard
- Granting a role access to a dashboard will bypass dataset level checks. Having dashboard access implicitly grants read access to all the featured charts in the dashboard, and thereby also all the associated datasets.
- If no roles are specified for a dashboard, regular **Dataset permissions** will apply.

File diff suppressed because it is too large

View File

@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.15.5 # See [README](https://github.com/apache/superset/blob/master/helm/superset/README.md#versioning) for version details.
version: 0.15.4 # See [README](https://github.com/apache/superset/blob/master/helm/superset/README.md#versioning) for version details.
dependencies:
- name: postgresql
version: 16.7.27

View File

@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.15.5](https://img.shields.io/badge/Version-0.15.5-informational?style=flat-square)
![Version: 0.15.4](https://img.shields.io/badge/Version-0.15.4-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application

View File

@@ -844,8 +844,6 @@ postgresql:
database: superset
image:
registry: docker.io
repository: bitnamilegacy/postgresql
tag: "14.17.0-debian-12-r3"
## PostgreSQL Primary parameters
@@ -920,11 +918,6 @@ redis:
accessModes:
- ReadWriteOnce
image:
registry: docker.io
repository: bitnamilegacy/redis
tag: 7.0.10-debian-11-r4
nodeSelector: {}
tolerations: []

View File

@@ -71,7 +71,7 @@ dependencies = [
"marshmallow>=3.0, <4",
"marshmallow-union>=0.1",
"msgpack>=1.0.0, <1.2",
"nh3>=0.2.11, <0.4",
"nh3>=0.2.11, <0.3",
"numpy>1.23.5, <2.3",
"packaging",
# --------------------------
@@ -95,7 +95,7 @@ dependencies = [
"redis>=5.0.0, <6.0",
"rison>=2.0.0, <3.0",
"selenium>=4.14.0, <5.0",
"shillelagh[gsheetsapi]>=1.4.4, <2.0",
"shillelagh[gsheetsapi]>=1.4.3, <2.0",
"sshtunnel>=0.4.0, <0.5",
"simplejson>=3.15.0",
"slack_sdk>=3.19.0, <4",
@@ -114,7 +114,7 @@ dependencies = [
[project.optional-dependencies]
athena = ["pyathena[pandas]>=2, <4"]
athena = ["pyathena[pandas]>=2, <3"]
aurora-data-api = ["preset-sqlalchemy-aurora-data-api>=0.2.8,<0.3"]
bigquery = [
"pandas-gbq>=0.19.1",
@@ -131,31 +131,25 @@ d1 = [
]
databend = ["databend-sqlalchemy>=0.3.2, <1.0"]
databricks = [
"databricks-sql-connector==4.2.6",
"databricks-sql-connector==4.1.2",
"databricks-sqlalchemy==1.0.5",
]
db2 = ["ibm-db-sa>0.3.8, <=0.4.4"]
denodo = ["denodo-sqlalchemy>=1.0.6,<2.1.0"]
db2 = ["ibm-db-sa>0.3.8, <=0.4.0"]
denodo = ["denodo-sqlalchemy~=1.0.6"]
dremio = ["sqlalchemy-dremio>=1.2.1, <4"]
drill = ["sqlalchemy-drill>=1.1.4, <2"]
druid = ["pydruid>=0.6.5,<0.7"]
duckdb = ["duckdb>=1.4.2,<2", "duckdb-engine>=0.17.0"]
dynamodb = ["pydynamodb>=0.4.2"]
solr = ["sqlalchemy-solr >= 0.2.0"]
elasticsearch = ["elasticsearch-dbapi>=0.2.13, <0.3.0"]
elasticsearch = ["elasticsearch-dbapi>=0.2.12, <0.3.0"]
exasol = ["sqlalchemy-exasol >= 2.4.0, <3.0"]
excel = ["xlrd>=1.2.0, <1.3"]
fastmcp = [
"fastmcp>=3.2.4,<4.0",
# tiktoken backs the response-size-guard token estimator. Without
# it, the middleware falls back to a coarser character-based
# heuristic that under-counts JSON-heavy MCP responses.
"tiktoken>=0.7.0,<1.0",
]
firebird = ["sqlalchemy-firebird>=0.7.0, <2.2"]
fastmcp = ["fastmcp>=3.2.4,<4.0"]
firebird = ["sqlalchemy-firebird>=0.7.0, <0.8"]
firebolt = ["firebolt-sqlalchemy>=1.0.0, <2"]
gevent = ["gevent>=23.9.1"]
gsheets = ["shillelagh[gsheetsapi]>=1.4.4, <2"]
gsheets = ["shillelagh[gsheetsapi]>=1.4.3, <2"]
hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"]
hive = [
"pyhive[hive]>=0.6.5;python_version<'3.11'",
@@ -164,7 +158,7 @@ hive = [
"thrift>=0.14.1, <1.0.0",
"thrift_sasl>=0.4.3, < 1.0.0",
]
impala = ["impyla>0.16.2, <0.23"]
impala = ["impyla>0.16.2, <0.17"]
kusto = ["sqlalchemy-kusto>=3.0.0, <4"]
kylin = ["kylinpy>=2.8.1, <2.9"]
mssql = ["pymssql>=2.2.8, <3"]
@@ -177,9 +171,9 @@ ocient = [
"shapely",
"geojson",
]
oracle = ["cx-Oracle>8.0.0, <8.4"]
oracle = ["cx-Oracle>8.0.0, <8.1"]
parseable = ["sqlalchemy-parseable>=0.1.3,<0.2.0"]
pinot = ["pinotdb>=5.0.0, <10.0.0"]
pinot = ["pinotdb>=5.0.0, <6.0.0"]
playwright = ["playwright>=1.37.0, <2"]
postgres = ["psycopg2-binary==2.9.12"]
presto = ["pyhive[presto]>=0.6.5"]
@@ -187,7 +181,7 @@ trino = ["trino>=0.328.0"]
prophet = ["prophet>=1.1.6, <2"]
redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"]
risingwave = ["sqlalchemy-risingwave"]
shillelagh = ["shillelagh[all]>=1.4.4, <2"]
shillelagh = ["shillelagh[all]>=1.4.3, <2"]
singlestore = ["sqlalchemy-singlestoredb>=1.1.1, <2"]
snowflake = ["snowflake-sqlalchemy>=1.2.4, <2"]
sqlite = ["syntaqlite>=0.1.0"]
@@ -203,7 +197,7 @@ tdengine = [
]
teradata = ["teradatasql>=16.20.0.23"]
thumbnails = [] # deprecated, will be removed in 7.0
vertica = ["sqlalchemy-vertica-python>= 0.5.9, < 0.7"]
vertica = ["sqlalchemy-vertica-python>=0.5.9, < 0.6"]
netezza = ["nzalchemy>=11.0.2"]
starrocks = ["starrocks>=1.0.0"]
doris = ["pydoris>=1.0.0, <2.0.0"]
@@ -224,7 +218,7 @@ development = [
"progress>=1.5,<2",
"psutil",
"pyfakefs",
"pyinstrument>=4.0.2,<6",
"pyinstrument>=4.0.2,<5",
"pylint",
"pytest<8.0.0", # hairy issue with pytest >=8 where current_app proxies are not set in time
"pytest-asyncio",
@@ -294,7 +288,6 @@ module = [
"superset.tags.filters",
"superset.commands.security.update",
"superset.commands.security.create",
"superset.semantic_layers.api",
]
warn_unused_ignores = false
@@ -383,7 +376,6 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
[tool.ruff.lint.per-file-ignores]
"superset/mcp_service/app.py" = ["S608", "E501"] # LLM instruction text: SQL examples (S608) and long lines in multiline string (E501)
"superset/mcp_service/*/tool/list_*.py" = ["E501"] # LLM docstring examples show full request shapes which exceed line length
"scripts/*" = ["TID251"]
"setup.py" = ["TID251"]
"superset/config.py" = ["TID251"]

View File

@@ -183,9 +183,7 @@ idna==3.10
# trio
# url-normalize
isodate==0.7.2
# via
# apache-superset (pyproject.toml)
# apache-superset-core
# via apache-superset (pyproject.toml)
itsdangerous==2.2.0
# via
# flask
@@ -298,7 +296,6 @@ pyarrow==20.0.0
# via
# -r requirements/base.in
# apache-superset (pyproject.toml)
# apache-superset-core
pyasn1==0.6.3
# via
# pyasn1-modules
@@ -384,7 +381,7 @@ selenium==4.32.0
# via apache-superset (pyproject.toml)
setuptools==80.9.0
# via -r requirements/base.in
shillelagh==1.4.4
shillelagh==1.4.3
# via apache-superset (pyproject.toml)
simplejson==3.20.1
# via apache-superset (pyproject.toml)

View File

@@ -442,7 +442,6 @@ isodate==0.7.2
# via
# -c requirements/base-constraint.txt
# apache-superset
# apache-superset-core
isort==6.0.1
# via pylint
itsdangerous==2.2.0
@@ -708,7 +707,7 @@ protobuf==4.25.8
# proto-plus
psutil==6.1.0
# via apache-superset
psycopg2-binary==2.9.12
psycopg2-binary==2.9.9
# via apache-superset
py-key-value-aio==0.4.4
# via fastmcp
@@ -716,7 +715,6 @@ pyarrow==20.0.0
# via
# -c requirements/base-constraint.txt
# apache-superset
# apache-superset-core
# db-dtypes
# pandas-gbq
pyasn1==0.6.3
@@ -868,8 +866,6 @@ referencing==0.36.2
# jsonschema
# jsonschema-path
# jsonschema-specifications
regex==2026.4.4
# via tiktoken
requests==2.33.0
# via
# -c requirements/base-constraint.txt
@@ -882,7 +878,6 @@ requests==2.33.0
# requests-cache
# requests-oauthlib
# shillelagh
# tiktoken
# trino
requests-cache==1.2.1
# via
@@ -936,7 +931,7 @@ setuptools==80.9.0
# pydata-google-auth
# zope-event
# zope-interface
shillelagh==1.4.4
shillelagh==1.4.3
# via
# -c requirements/base-constraint.txt
# apache-superset
@@ -1008,8 +1003,6 @@ tabulate==0.9.0
# via
# -c requirements/base-constraint.txt
# apache-superset
tiktoken==0.12.0
# via apache-superset
tomli-w==1.2.0
# via apache-superset-extensions-cli
tomlkit==0.13.3

View File

@@ -18,7 +18,7 @@
[project]
name = "apache-superset-core"
version = "0.1.0"
version = "0.1.0rc2"
description = "Core Python package for building Apache Superset backend extensions and integrations"
readme = "README.md"
authors = [
@@ -43,8 +43,6 @@ classifiers = [
]
dependencies = [
"flask-appbuilder>=5.0.2,<6",
"isodate>=0.7.0",
"pyarrow>=16.0.0",
"pydantic>=2.8.0",
"sqlalchemy>=1.4.0,<2.0",
"sqlalchemy-utils>=0.38.0, <0.43", # expanding lowerbound to work with pydoris

View File

@@ -1,73 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

from typing import Any

from pydantic import BaseModel


def build_configuration_schema(
    config_class: type[BaseModel],
    configuration: BaseModel | None = None,
) -> dict[str, Any]:
    """
    Build a JSON schema from a Pydantic configuration class.

    Handles generic boilerplate that any semantic layer with dynamic fields needs:

    - Reorders properties to match model field order (Pydantic sorts alphabetically)
    - When ``configuration`` is None, sets ``enum: []`` on all ``x-dynamic``
      properties so the frontend renders them as empty dropdowns

    Semantic layer implementations call this instead of ``model_json_schema()``
    directly, then only need to add their own dynamic population logic.
    """
    schema = config_class.model_json_schema()

    # Pydantic sorts properties alphabetically; restore model field order
    field_order = [
        field.alias or name for name, field in config_class.model_fields.items()
    ]
    schema["properties"] = {
        key: schema["properties"][key]
        for key in field_order
        if key in schema["properties"]
    }

    if configuration is None:
        for prop_schema in schema["properties"].values():
            if prop_schema.get("x-dynamic"):
                prop_schema["enum"] = []

    return schema


def check_dependencies(
    prop_schema: dict[str, Any],
    configuration: BaseModel,
) -> bool:
    """
    Check whether a dynamic property's dependencies are satisfied.

    Reads the ``x-dependsOn`` list from the property schema and returns ``True``
    when every referenced attribute on ``configuration`` is truthy.
    """
    dependencies = prop_schema.get("x-dependsOn", [])
    return all(getattr(configuration, dep, None) for dep in dependencies)
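
For context on the deleted helpers above, here is a small usage sketch based only on their docstrings: a Pydantic configuration model marks dynamic fields with `x-dynamic` and `x-dependsOn` via `json_schema_extra`, and the helpers reorder the schema properties and gate dynamic options. The `WarehouseConfig` model is hypothetical and assumes the two functions above are importable.

```python
from pydantic import BaseModel, Field

# Hypothetical configuration model; field names and extras are illustrative only.
class WarehouseConfig(BaseModel):
    host: str = Field(description="Warehouse host")
    database: str = Field(
        default="",
        json_schema_extra={"x-dynamic": True, "x-dependsOn": ["host"]},
    )

# With no saved configuration, dynamic properties get an empty enum so the
# frontend can render them as empty dropdowns.
schema = build_configuration_schema(WarehouseConfig)
assert schema["properties"]["database"]["enum"] == []

# With a configuration present, check_dependencies gates whether a dynamic
# property's options may be populated (here: only once host is set).
config = WarehouseConfig(host="warehouse.example.com")
assert check_dependencies(schema["properties"]["database"], config) is True
```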

Some files were not shown because too many files have changed in this diff.