Compare commits: docker-up...extensions (60 commits)

| SHA1 |
|---|
| 4ae0bc9ade |
| 87bbd54d0a |
| b630830841 |
| 9fabd7f997 |
| fadab21493 |
| cc972cad5a |
| de6ac2a444 |
| 2b647d2352 |
| 7888da9e30 |
| b576665f9a |
| 7f4c260cbe |
| febc5d54d5 |
| aa37e96a02 |
| 3fa5bb4138 |
| 0289028313 |
| 5e7fe81cfa |
| 02495a130f |
| d4723ef116 |
| c3d5edbae9 |
| bb3452b43c |
| 996e0e1e7a |
| daec330127 |
| d2907b2577 |
| 17d6f4ebc4 |
| dee063a4c5 |
| ec36791551 |
| 0fedfe03d5 |
| 23fec55e3d |
| 212559dab2 |
| c564655f39 |
| dc15feb83d |
| 95169807d3 |
| 10ed60b4c1 |
| a33f96b2fc |
| 39d5511b29 |
| b460ca94c6 |
| 2c1a33fd32 |
| 13013bbd64 |
| a1d24f1e4a |
| 3fa7dba094 |
| 801c84f0ef |
| 238bebebec |
| 281c0c9672 |
| 807ff513ef |
| 445bc403b8 |
| 2267b78a10 |
| d0e80d2079 |
| 25647942fd |
| 3fba967856 |
| e1fa374517 |
| 50d0508a92 |
| 2187fb4ab4 |
| fe16c828cf |
| 6e1718910f |
| 2d20079a88 |
| 1f19ef92cb |
| f4597be341 |
| 4393db57d9 |
| 409cdad264 |
| c0cbbe393a |
**`.claude/settings.json`** (new file, +15)

```json
{
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "jq -r '.tool_input.command // \"\"' | grep -qE '^git commit' && cd \"$CLAUDE_PROJECT_DIR\" && echo '🔍 Running pre-commit before commit...' && pre-commit run || true"
          }
        ]
      }
    ]
  }
}
```
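The `command` above pipes the tool-call event through `jq` and only runs `pre-commit` when a `git commit` is about to execute. For clarity, here is a rough Python equivalent of that guard; the payload shape is inferred from the `.tool_input.command` jq filter, and this standalone script is illustrative, not part of the settings file:

```python
import json
import re
import subprocess
import sys

# The hook receives the tool call as JSON on stdin, e.g.
# {"tool_input": {"command": "git commit -m 'msg'"}}
# (shape inferred from the jq filter above).
event = json.load(sys.stdin)
command = event.get("tool_input", {}).get("command", "")

if re.match(r"^git commit", command):
    print("🔍 Running pre-commit before commit...")
    # Mirrors the trailing `|| true`: a failing pre-commit run
    # reports problems but never blocks the tool call itself.
    subprocess.run(["pre-commit", "run"], check=False)
```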
**`.github/dependabot.yml`** (+3)

```diff
@@ -12,6 +12,9 @@ updates:
       # not until React >= 18.0.0
       - dependency-name: "storybook"
       - dependency-name: "@storybook*"
+      # remark-gfm v4+ requires react-markdown v9+, which needs React 18
+      - dependency-name: "remark-gfm"
+      - dependency-name: "react-markdown"
       # JSDOM v30 doesn't play well with Jest v30
       # Source: https://jestjs.io/blog#known-issues
       # GH thread: https://github.com/jsdom/jsdom/issues/3492
```
```diff
@@ -69,7 +69,7 @@ jobs:
               `❗ @${pull.user.login} Your base branch \`${currentBranch}\` has ` +
               'also updated `superset/migrations`.\n' +
               '\n' +
-              '**Please consider rebasing your branch and [resolving potential db migration conflicts](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#merging-db-migrations).**',
+              '**Please consider rebasing your branch and [resolving potential db migration conflicts](https://superset.apache.org/docs/contributing/development#merging-db-migrations).**',
             });
           }
         }
```
**`.github/workflows/superset-docs-deploy.yml`** (41 changed lines)

```diff
@@ -1,6 +1,13 @@
 name: Docs Deployment

 on:
+  # Deploy after integration tests complete on master
+  workflow_run:
+    workflows: ["Python-Integration"]
+    types: [completed]
+    branches: [master]
+
+  # Also allow manual trigger and direct pushes to docs
   push:
     paths:
       - "docs/**"
@@ -30,9 +37,10 @@ jobs:
     name: Build & Deploy
     runs-on: ubuntu-24.04
     steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+      - name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
        uses: actions/checkout@v6
        with:
+         ref: ${{ github.event.workflow_run.head_sha || github.sha }}
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
@@ -58,6 +66,35 @@ jobs:
        working-directory: docs
        run: |
          yarn install --check-cache
+     - name: Download database diagnostics (if triggered by integration tests)
+       if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
+       uses: dawidd6/action-download-artifact@v6
+       continue-on-error: true
+       with:
+         workflow: superset-python-integrationtest.yml
+         run_id: ${{ github.event.workflow_run.id }}
+         name: database-diagnostics
+         path: docs/src/data/
+     - name: Try to download latest diagnostics (for push/dispatch triggers)
+       if: github.event_name != 'workflow_run'
+       uses: dawidd6/action-download-artifact@v6
+       continue-on-error: true
+       with:
+         workflow: superset-python-integrationtest.yml
+         name: database-diagnostics
+         path: docs/src/data/
+         branch: master
+         search_artifacts: true
+         if_no_artifact_found: warn
+     - name: Use diagnostics artifact if available
+       working-directory: docs
+       run: |
+         if [ -f "src/data/databases-diagnostics.json" ]; then
+           echo "Using fresh diagnostics from integration tests"
+           mv src/data/databases-diagnostics.json src/data/databases.json
+         else
+           echo "Using committed databases.json (no artifact found)"
+         fi
      - name: yarn build
        working-directory: docs
        run: |
@@ -71,5 +108,5 @@ jobs:
          destination-github-username: "apache"
          destination-repository-name: "superset-site"
          target-branch: "asf-site"
-         commit-message: "deploying docs: ${{ github.event.head_commit.message }} (apache/superset@${{ github.sha }})"
+         commit-message: "deploying docs: ${{ github.event.head_commit.message || 'triggered by integration tests' }} (apache/superset@${{ github.event.workflow_run.head_sha || github.sha }})"
          user-email: dev@superset.apache.org
```
**`.github/workflows/superset-docs-verify.yml`** (62 changed lines)

```diff
@@ -4,17 +4,23 @@ on:
   pull_request:
     paths:
       - "docs/**"
+      - "superset/db_engine_specs/**"
       - ".github/workflows/superset-docs-verify.yml"
     types: [synchronize, opened, reopened, ready_for_review]
+  workflow_run:
+    workflows: ["Python-Integration"]
+    types: [completed]

 # cancel previous workflow jobs for PRs
 concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.workflow_run.head_sha || github.run_id }}
   cancel-in-progress: true

 jobs:
   linkinator:
     # See docs here: https://github.com/marketplace/actions/linkinator
+    # Only run on pull_request, not workflow_run
+    if: github.event_name == 'pull_request'
     name: Link Checking
     runs-on: ubuntu-latest
     steps:
@@ -50,8 +56,11 @@ jobs:
           https://timbr.ai/,
           https://opensource.org/license/apache-2-0,
           https://www.plaidcloud.com/
-  build-deploy:
-    name: Build & Deploy
+
+  build-on-pr:
+    # Build docs when PR changes docs/** (uses committed databases.json)
+    if: github.event_name == 'pull_request'
+    name: Build (PR trigger)
     runs-on: ubuntu-24.04
     defaults:
       run:
@@ -75,3 +84,50 @@ jobs:
       - name: yarn build
         run: |
           yarn build
+
+  build-after-tests:
+    # Build docs after integration tests complete (uses fresh diagnostics)
+    # Only runs if integration tests succeeded
+    if: >
+      github.event_name == 'workflow_run' &&
+      github.event.workflow_run.conclusion == 'success'
+    name: Build (after integration tests)
+    runs-on: ubuntu-24.04
+    defaults:
+      run:
+        working-directory: docs
+    steps:
+      - name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}"
+        uses: actions/checkout@v6
+        with:
+          ref: ${{ github.event.workflow_run.head_sha }}
+          persist-credentials: false
+          submodules: recursive
+      - name: Set up Node.js
+        uses: actions/setup-node@v6
+        with:
+          node-version-file: './docs/.nvmrc'
+      - name: yarn install
+        run: |
+          yarn install --check-cache
+      - name: Download database diagnostics from integration tests
+        uses: dawidd6/action-download-artifact@v6
+        with:
+          workflow: superset-python-integrationtest.yml
+          run_id: ${{ github.event.workflow_run.id }}
+          name: database-diagnostics
+          path: docs/src/data/
+      - name: Use fresh diagnostics
+        run: |
+          if [ -f "src/data/databases-diagnostics.json" ]; then
+            echo "Using fresh diagnostics from integration tests"
+            mv src/data/databases-diagnostics.json src/data/databases.json
+          else
+            echo "Warning: No diagnostics artifact found, using committed data"
+          fi
+      - name: yarn typecheck
+        run: |
+          yarn typecheck
+      - name: yarn build
+        run: |
+          yarn build
```
**`.github/workflows/superset-python-integrationtest.yml`** (+30)

```diff
@@ -73,6 +73,36 @@ jobs:
           flags: python,mysql
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
+      - name: Generate database diagnostics for docs
+        if: steps.check.outputs.python
+        env:
+          SUPERSET_CONFIG: tests.integration_tests.superset_test_config
+          SUPERSET__SQLALCHEMY_DATABASE_URI: |
+            mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
+        run: |
+          python -c "
+          import json
+          from superset.app import create_app
+          from superset.db_engine_specs.lib import generate_yaml_docs
+          app = create_app()
+          with app.app_context():
+              docs = generate_yaml_docs()
+              # Wrap in the expected format
+              output = {
+                  'generated': '$(date -Iseconds)',
+                  'databases': docs
+              }
+              with open('databases-diagnostics.json', 'w') as f:
+                  json.dump(output, f, indent=2, default=str)
+              print(f'Generated diagnostics for {len(docs)} databases')
+          "
+      - name: Upload database diagnostics artifact
+        if: steps.check.outputs.python
+        uses: actions/upload-artifact@v4
+        with:
+          name: database-diagnostics
+          path: databases-diagnostics.json
+          retention-days: 7
   test-postgres:
     runs-on: ubuntu-24.04
     strategy:
```
**`.gitignore`** (+1)

```diff
@@ -139,3 +139,4 @@ PROJECT.md
 .env.local
 oxc-custom-build/
 *.code-workspace
+*.duckdb
```
**`.pre-commit-config.yaml`**

```diff
@@ -49,7 +49,7 @@ repos:
     hooks:
       - id: check-docstring-first
       - id: check-added-large-files
-        exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
+        exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$|^superset/examples/.*/data\.parquet$
       - id: check-yaml
         exclude: ^helm/superset/templates/
       - id: debug-statements
@@ -142,3 +142,18 @@ repos:
           else
             echo "No Python files to lint."
           fi
+      - id: db-engine-spec-metadata
+        name: database engine spec metadata validation
+        entry: python superset/db_engine_specs/lint_metadata.py --strict
+        language: system
+        files: ^superset/db_engine_specs/.*\.py$
+        exclude: ^superset/db_engine_specs/(base|lib|lint_metadata|__init__)\.py$
+        pass_filenames: false
+  - repo: local
+    hooks:
+      - id: feature-flags-sync
+        name: feature flags documentation sync
+        entry: bash -c 'python scripts/extract_feature_flags.py > docs/static/feature-flags.json.tmp && if ! diff -q docs/static/feature-flags.json docs/static/feature-flags.json.tmp > /dev/null 2>&1; then mv docs/static/feature-flags.json.tmp docs/static/feature-flags.json && echo "Updated docs/static/feature-flags.json" && exit 1; else rm docs/static/feature-flags.json.tmp; fi'
+        language: system
+        files: ^superset/config\.py$
+        pass_filenames: false
```
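The `feature-flags-sync` entry above packs a regenerate-and-compare check into a bash one-liner. A Python sketch of the same idempotency logic may make the flow easier to follow (illustrative only; the hook itself runs the bash version):

```python
import filecmp
import os
import subprocess
import sys

CURRENT = "docs/static/feature-flags.json"
TMP = CURRENT + ".tmp"

# Regenerate the flag listing from superset/config.py.
with open(TMP, "w") as out:
    subprocess.run(
        [sys.executable, "scripts/extract_feature_flags.py"],
        stdout=out,
        check=True,
    )

if os.path.exists(CURRENT) and filecmp.cmp(CURRENT, TMP, shallow=False):
    os.remove(TMP)  # Already in sync: the hook passes.
    sys.exit(0)

# Out of sync: update the committed file and fail the hook so the
# developer stages the refreshed JSON before committing again.
os.replace(TMP, CURRENT)
print(f"Updated {CURRENT}")
sys.exit(1)
```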
```diff
@@ -75,6 +75,9 @@ postgresql.svg
 snowflake.svg
 ydb.svg
 loading.svg
+apache-solr.svg
+azure.svg
+superset.svg

 # docs third-party logos, i.e. docs/static/img/logos/*
 logos/*
```
**`AGENTS.md`** (+21)

````diff
@@ -2,6 +2,27 @@

 Apache Superset is a data visualization platform with Flask/Python backend and React/TypeScript frontend.

+## ⚠️ CRITICAL: Always Run Pre-commit Before Pushing
+
+**ALWAYS run `pre-commit run --all-files` before pushing commits.** CI will fail if pre-commit checks don't pass. This is non-negotiable.
+
+```bash
+# Stage your changes first
+git add .
+
+# Run pre-commit on all files
+pre-commit run --all-files
+
+# If there are auto-fixes, stage them and commit
+git add .
+git commit --amend  # or new commit
+```
+
+Common pre-commit failures:
+- **Formatting** - black, prettier, eslint will auto-fix
+- **Type errors** - mypy failures need manual fixes
+- **Linting** - ruff, pylint issues need manual fixes
+
 ## ⚠️ CRITICAL: Ongoing Refactors (What NOT to Do)

 **These migrations are actively happening - avoid deprecated patterns:**
````
**`Dockerfile`** (20 changed lines)

```diff
@@ -26,9 +26,6 @@ ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
 # Include translations in the final build
 ARG BUILD_TRANSLATIONS="false"

-# Build arg to pre-populate examples DuckDB file
-ARG LOAD_EXAMPLES_DUCKDB="false"
-
 ######################################################################
 # superset-node-ci used as a base for building frontend assets and CI
 ######################################################################
@@ -146,9 +143,6 @@ RUN if [ "${BUILD_TRANSLATIONS}" = "true" ]; then \
 ######################################################################
 FROM python-base AS python-common

-# Re-declare build arg to receive it in this stage
-ARG LOAD_EXAMPLES_DUCKDB
-
 ENV SUPERSET_HOME="/app/superset_home" \
     HOME="/app/superset_home" \
     SUPERSET_ENV="production" \
@@ -202,17 +196,9 @@ RUN /app/docker/apt-install.sh \
     libecpg-dev \
     libldap2-dev

-# Pre-load examples DuckDB file if requested
-RUN if [ "$LOAD_EXAMPLES_DUCKDB" = "true" ]; then \
-    mkdir -p /app/data && \
-    echo "Downloading pre-built examples.duckdb..." && \
-    curl -L -o /app/data/examples.duckdb \
-    "https://raw.githubusercontent.com/apache-superset/examples-data/master/examples.duckdb" && \
-    chown -R superset:superset /app/data; \
-    else \
-    mkdir -p /app/data && \
-    chown -R superset:superset /app/data; \
-    fi
+# Create data directory for DuckDB examples database
+# The database file will be created at runtime when examples are loaded from Parquet files
+RUN mkdir -p /app/data && chown -R superset:superset /app/data

 # Copy compiled things from previous stages
 COPY --from=superset-node /app/superset/static/assets superset/static/assets
```
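With the pre-download path removed, the examples database is created at runtime from Parquet files instead of being baked into the image. A minimal sketch of what such a load looks like with the `duckdb` Python API (the table name is one of the bundled examples; Superset's actual loader code may differ):

```python
import duckdb

# Open (or create) the database that the containers point at via
# SUPERSET__SQLALCHEMY_EXAMPLES_URI: duckdb:////app/data/examples.duckdb
con = duckdb.connect("/app/data/examples.duckdb")

# DuckDB can ingest a Parquet file directly into a table.
con.execute(
    "CREATE TABLE IF NOT EXISTS birth_names AS "
    "SELECT * FROM read_parquet('superset/examples/birth_names/data.parquet')"
)
con.close()
```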
**`INSTALL.md`** (20 changed lines)

```diff
@@ -16,8 +16,20 @@ KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License.
 -->
-# INSTALL / BUILD instructions for Apache Superset
+# Installing Apache Superset

-At this time, the docker file at RELEASING/Dockerfile.from_local_tarball
-constitutes the recipe on how to get to a working release from a source
-release tarball.
+For comprehensive installation instructions, please see the Apache Superset documentation:
+
+**[📚 Installation Guide →](https://superset.apache.org/docs/installation/installation-methods)**
+
+The documentation covers:
+- [Docker Compose](https://superset.apache.org/docs/installation/docker-compose) (recommended for development)
+- [Kubernetes / Helm](https://superset.apache.org/docs/installation/kubernetes)
+- [PyPI](https://superset.apache.org/docs/installation/pypi)
+- [Docker Builds](https://superset.apache.org/docs/installation/docker-builds)
+- [Architecture Overview](https://superset.apache.org/docs/installation/architecture)
+
+## Building from Source
+
+For building from a source release tarball, see the Dockerfile at:
+`RELEASING/Dockerfile.from_local_tarball`
```
````diff
@@ -1,121 +0,0 @@
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-# Superset Frontend Linting Architecture
-
-## Overview
-
-We use a hybrid linting approach combining OXC (fast, standard rules) with custom AST-based checks for Superset-specific patterns.
-
-## Components
-
-### 1. Primary Linter: OXC
-
-- **What**: Oxidation Compiler's linter (oxlint)
-- **Handles**: 95% of linting rules (standard ESLint rules, TypeScript, React, etc.)
-- **Speed**: ~50-100x faster than ESLint
-- **Config**: `oxlint.json`
-
-### 2. Custom Rule Checker
-
-- **What**: Node.js AST-based script
-- **Handles**: Superset-specific rules:
-  - No literal colors (use theme)
-  - No FontAwesome icons (use Icons component)
-  - No template vars in i18n
-- **Speed**: Fast enough for pre-commit
-- **Script**: `scripts/check-custom-rules.js`
-
-## Developer Workflow
-
-### Local Development
-
-```bash
-# Fast linting (OXC only)
-npm run lint
-
-# Full linting (OXC + custom rules)
-npm run lint:full
-
-# Auto-fix what's possible
-npm run lint-fix
-```
-
-### Pre-commit
-
-1. OXC runs first (via `scripts/oxlint.sh`)
-2. Custom rules check runs second (lightweight, AST-based)
-3. Both must pass for commit to succeed
-
-### CI Pipeline
-
-```yaml
-- name: Lint with OXC
-  run: npm run lint
-
-- name: Check custom rules
-  run: npm run check:custom-rules
-```
-
-## Why This Architecture?
-
-### ✅ Pros
-
-1. **No binary distribution issues** - ASF compatible
-2. **Fast performance** - OXC for bulk, lightweight script for custom
-3. **Maintainable** - Custom rules in JavaScript, not Rust
-4. **Flexible** - Can evolve as OXC adds plugin support
-5. **Cacheable** - Both OXC and Node.js are standard tools
-
-### ❌ Cons
-
-1. **Two tools** - Slightly more complex than single linter
-2. **Duplicate parsing** - Files parsed twice (once by each tool)
-
-### 🔄 Migration Path
-
-When OXC supports JavaScript plugins:
-
-1. Convert `check-custom-rules.js` to OXC plugin format
-2. Consolidate back to single tool
-3. Keep same rules and developer experience
-
-## Implementation Checklist
-
-- [x] OXC for standard linting
-- [x] Pre-commit integration
-- [ ] Custom rules script
-- [ ] Combine in npm scripts
-- [ ] Update CI pipeline
-- [ ] Developer documentation
-
-## Performance Targets
-
-| Operation | Target Time | Current |
-|-----------|------------|---------|
-| Pre-commit (changed files) | <2s | ✅ 1.5s |
-| Full lint (all files) | <10s | ✅ 8s |
-| Custom rules check | <5s | 🔄 TBD |
-
-## Caching Strategy
-
-### Local Development
-
-- OXC: Built-in incremental checking
-- Custom rules: Use file hash cache (similar to pytest cache)
-
-### CI
-
-- Cache `node_modules` (includes oxlint binary)
-- Cache custom rules results by commit hash
-- Skip unchanged files using git diff
-
-## Future Improvements
-
-1. **When OXC adds plugin support**: Migrate custom rules to OXC plugins
-2. **Consider Biome**: Another Rust-based linter with plugin support
-3. **AST sharing**: Investigate sharing AST between tools to avoid double parsing
````
**`README.md`** (85 changed lines)

```diff
@@ -101,51 +101,54 @@ Superset provides:

 ## Supported Databases

-Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/configuration/databases)) that has a Python DB-API driver and a SQLAlchemy dialect.
+Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/databases)) that has a Python DB-API driver and a SQLAlchemy dialect.

 Here are some of the major database solutions that are supported:

+<!-- SUPPORTED_DATABASES_START -->
 <p align="center">
-  <img src="https://superset.apache.org/img/databases/redshift.png" alt="redshift" border="0" width="200"/>
-  <img src="https://superset.apache.org/img/databases/google-biquery.png" alt="google-bigquery" border="0" width="200"/>
-  <img src="https://superset.apache.org/img/databases/snowflake.png" alt="snowflake" border="0" width="200"/>
-  <img src="https://superset.apache.org/img/databases/trino.png" alt="trino" border="0" width="150" />
-  <img src="https://superset.apache.org/img/databases/presto.png" alt="presto" border="0" width="200"/>
-  <img src="https://superset.apache.org/img/databases/databricks.png" alt="databricks" border="0" width="160" />
-  <img src="https://superset.apache.org/img/databases/druid.png" alt="druid" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/firebolt.png" alt="firebolt" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/timescale.png" alt="timescale" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/postgresql.png" alt="postgresql" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/mysql.png" alt="mysql" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/mssql-server.png" alt="mssql-server" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/ibm-db2.svg" alt="db2" border="0" width="220" />
-  <img src="https://superset.apache.org/img/databases/sqlite.png" alt="sqlite" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/sybase.png" alt="sybase" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/mariadb.png" alt="mariadb" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/vertica.png" alt="vertica" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/oracle.png" alt="oracle" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/firebird.png" alt="firebird" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/greenplum.png" alt="greenplum" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/clickhouse.png" alt="clickhouse" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/exasol.png" alt="exasol" border="0" width="160" />
-  <img src="https://superset.apache.org/img/databases/monet-db.png" alt="monet-db" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/apache-kylin.png" alt="apache-kylin" border="0" width="80"/>
-  <img src="https://superset.apache.org/img/databases/hologres.png" alt="hologres" border="0" width="80"/>
-  <img src="https://superset.apache.org/img/databases/netezza.png" alt="netezza" border="0" width="80"/>
-  <img src="https://superset.apache.org/img/databases/pinot.png" alt="pinot" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/teradata.png" alt="teradata" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/yugabyte.png" alt="yugabyte" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/databend.png" alt="databend" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
-  <img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
-  <img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
-  <img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
+  <img src="https://superset.apache.org/img/databases/doris.png" alt="apache-doris" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/apache-drill.png" alt="apache-drill" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/druid.png" alt="apache-druid" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/apache-hive.svg" alt="apache-hive" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/apache-impala.png" alt="apache-impala" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/apache-kylin.png" alt="apache-kylin" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/apache-pinot.svg" alt="apache-pinot" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/amazon-athena.jpg" alt="aws-athena" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/redshift.png" alt="aws-redshift" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/clickhouse.png" alt="clickhouse" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/couchbase.svg" alt="couchbase" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/databend.png" alt="databend" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/databricks.png" alt="databricks" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/dremio.png" alt="dremio" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/exasol.png" alt="exasol" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/firebird.png" alt="firebird" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/firebolt.png" alt="firebolt" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/google-big-query.svg" alt="google-bigquery" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/google-sheets.svg" alt="google-sheets" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/ibm-db2.svg" alt="ibm-db2" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/netezza.png" alt="ibm-netezza" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/mariadb.png" alt="mariadb" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/msql.png" alt="microsoft-sql-server" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/mysql.png" alt="mysql" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/oraclelogo.png" alt="oracle" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/postgresql.svg" alt="postgresql" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/presto-og.png" alt="presto" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/snowflake.svg" alt="snowflake" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/sqlite.png" alt="sqlite" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/tdengine.png" alt="tdengine" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/teradata.png" alt="teradata" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/trino.png" alt="trino" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/vertica.png" alt="vertica" border="0" width="120" height="60" class="database-logo" />
+  <img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="120" height="60" class="database-logo" />
 </p>
+<!-- SUPPORTED_DATABASES_END -->

-**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).
+**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/databases).

 Want to add support for your datastore or data engine? Read more [here](https://superset.apache.org/docs/frequently-asked-questions#does-superset-work-with-insert-database-engine-here) about the technical requirements.

@@ -165,14 +168,14 @@ Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) gu
 ## Contributor Guide

 Interested in contributing? Check out our
-[CONTRIBUTING.md](https://github.com/apache/superset/blob/master/CONTRIBUTING.md)
+[Developer Portal](https://superset.apache.org/developer_portal/)
 to find resources around contributing along with a detailed guide on
 how to set up a development environment.

 ## Resources

 - [Superset "In the Wild"](https://superset.apache.org/inTheWild) - see who's using Superset, and [add your organization](https://github.com/apache/superset/edit/master/RESOURCES/INTHEWILD.yaml) to the list!
-- [Feature Flags](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) - the status of Superset's Feature Flags.
+- [Feature Flags](https://superset.apache.org/docs/configuration/feature-flags) - the status of Superset's Feature Flags.
 - [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
 - [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
 - [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.
```
```diff
@@ -92,7 +92,7 @@ Some of the new features in this release are disabled by default. Each has a fea

 | Feature | Feature Flag | Dependencies | Documentation
 | --- | --- | --- | --- |
-| Global Async Queries | `GLOBAL_ASYNC_QUERIES: True` | Redis 5.0+, celery workers configured and running | [Extra documentation](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries )
+| Global Async Queries | `GLOBAL_ASYNC_QUERIES: True` | Redis 5.0+, celery workers configured and running | [Extra documentation](https://superset.apache.org/docs/contributing/misc#async-chart-queries)
 | Dashboard Native Filters | `DASHBOARD_NATIVE_FILTERS: True` | |
 | Alerts & Reporting | `ALERT_REPORTS: True` | [Celery workers configured & celery beat process](https://superset.apache.org/docs/installation/async-queries-celery) |
 | Homescreen Thumbnails | `THUMBNAILS: TRUE, THUMBNAIL_CACHE_CONFIG: CacheConfig = { "CACHE_TYPE": "null", "CACHE_NO_NULL_WARNING": True}`| selenium, pillow 7, celery |
```
**`RESOURCES/FEATURE_FLAGS.md`** (deleted)

```diff
@@ -1,103 +0,0 @@
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-# Superset Feature Flags
-
-This is a list of the current Superset optional features. See config.py for default values. These features can be turned on/off by setting your preferred values in superset_config.py to True/False respectively
-
-## In Development
-
-These features are considered **unfinished** and should only be used on development environments.
-
-[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
-
-- ALERT_REPORT_TABS
-- DATE_RANGE_TIMESHIFTS_ENABLED
-- ENABLE_ADVANCED_DATA_TYPES
-- PRESTO_EXPAND_DATA
-- SHARE_QUERIES_VIA_KV_STORE
-- TAGGING_SYSTEM
-- CHART_PLUGINS_EXPERIMENTAL
-
-## In Testing
-
-These features are **finished** but currently being tested. They are usable, but may still contain some bugs.
-
-[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
-
-- ALERT_REPORTS: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
-- ALLOW_FULL_CSV_EXPORT
-- CACHE_IMPERSONATION
-- CONFIRM_DASHBOARD_DIFF
-- DYNAMIC_PLUGINS
-- DATE_FORMAT_IN_EMAIL_SUBJECT: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports#commons)
-- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
-- ESTIMATE_QUERY_COST
-- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
-- IMPERSONATE_WITH_EMAIL_PREFIX
-- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
-- RLS_IN_SQLLAB
-- SSH_TUNNELING [(docs)](https://superset.apache.org/docs/configuration/setup-ssh-tunneling)
-- USE_ANALAGOUS_COLORS
-
-## Stable
-
-These features flags are **safe for production**. They have been tested and will be supported for the at least the current major version cycle.
-
-[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"
-
-### Flags on the path to feature launch and flag deprecation/removal
-
-- DASHBOARD_VIRTUALIZATION
-
-### Flags retained for runtime configuration
-
-Currently some of our feature flags act as dynamic configurations that can change
-on the fly. This acts in contradiction with the typical ephemeral feature flag use case,
-where the flag is used to mature a feature, and eventually deprecated once the feature is
-solid. Eventually we'll likely refactor these under a more formal "dynamic configurations" managed
-independently. This new framework will also allow for non-boolean configurations.
-
-- ALERTS_ATTACH_REPORTS
-- ALLOW_ADHOC_SUBQUERY
-- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
-- DATAPANEL_CLOSED_BY_DEFAULT
-- DRILL_BY
-- DRUID_JOINS
-- EMBEDDABLE_CHARTS
-- EMBEDDED_SUPERSET
-- ENABLE_TEMPLATE_PROCESSING
-- ESCAPE_MARKDOWN_HTML
-- LISTVIEWS_DEFAULT_CARD_VIEW
-- SCHEDULED_QUERIES [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
-- SLACK_ENABLE_AVATARS (see `superset/config.py` for more information)
-- SQLLAB_BACKEND_PERSISTENCE
-- SQL_VALIDATORS_BY_ENGINE [(docs)](https://superset.apache.org/docs/configuration/sql-templating)
-- THUMBNAILS [(docs)](https://superset.apache.org/docs/configuration/cache)
-
-## Deprecated Flags
-
-These features flags currently default to True and **will be removed in a future major release**. For this current release you can turn them off by setting your config to False, but it is advised to remove or set these flags in your local configuration to **True** so that you do not experience any unexpected changes in a future release.
-
-[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"
-
-- AVOID_COLORS_COLLISION
-- DRILL_TO_DETAIL
-- ENABLE_JAVASCRIPT_CONTROLS
-- KV_STORE
```
**`UPDATING.md`** (+35)

````diff
@@ -24,6 +24,41 @@ assists people when migrating to a new version.

 ## Next

+### Example Data Loading Improvements
+
+#### New Directory Structure
+Examples are now organized by name with data and configs co-located:
+```
+superset/examples/
+├── _shared/              # Shared database & metadata configs
+├── birth_names/          # Each example is self-contained
+│   ├── data.parquet      # Dataset (Parquet format)
+│   ├── dataset.yaml      # Dataset metadata
+│   ├── dashboard.yaml    # Dashboard config (optional)
+│   └── charts/           # Chart configs (optional)
+└── ...
+```
+
+#### Simplified Parquet-based Loading
+- Auto-discovery: create `superset/examples/my_dataset/data.parquet` to add a new example
+- Parquet is an Apache project format: compressed (~27% smaller), self-describing schema
+- YAML configs define datasets, charts, and dashboards declaratively
+- Removed Python-based data generation from individual example files
+
+#### Test Data Reorganization
+- Moved `big_data.py` to `superset/cli/test_loaders.py` - better reflects its purpose as a test utility
+- Fixed inverted logic for `--load-test-data` flag (now correctly includes .test.yaml files when flag is set)
+- Clarified CLI flags:
+  - `--force` / `-f`: Force reload even if tables exist
+  - `--only-metadata` / `-m`: Create table metadata without loading data
+  - `--load-test-data` / `-t`: Include test dashboards and .test.yaml configs
+  - `--load-big-data` / `-b`: Generate synthetic stress-test data
+
+#### Bug Fixes
+- Fixed numpy array serialization for PostgreSQL (converts complex types to JSON strings)
+- Fixed KeyError for `allow_csv_upload` field in database configs (now optional with default)
+- Fixed test data loading logic that was incorrectly filtering files
+
 ### MCP Service

 The MCP (Model Context Protocol) service enables AI assistants and automation tools to interact programmatically with Superset.
````
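Given the auto-discovery rule described above, adding a new example mostly means writing a Parquet file into a new directory. A sketch with pandas (requires `pyarrow`; the dataset name and columns here are made up for illustration):

```python
import pandas as pd

df = pd.DataFrame(
    {
        "name": ["Ada", "Grace", "Edsger"],
        "num": [120, 95, 87],
        "ds": pd.to_datetime(["2024-01-01", "2024-01-02", "2024-01-03"]),
    }
)

# Dropping this file into superset/examples/my_dataset/ is what triggers
# auto-discovery; a dataset.yaml alongside it supplies the metadata.
df.to_parquet("superset/examples/my_dataset/data.parquet", index=False)
```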
```diff
@@ -77,7 +77,6 @@ x-common-build: &common-build
       INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
       INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
       BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
-      LOAD_EXAMPLES_DUCKDB: ${LOAD_EXAMPLES_DUCKDB:-true}

 services:
   db-light:
@@ -116,7 +115,6 @@ services:
       DATABASE_HOST: db-light
       DATABASE_DB: superset_light
       POSTGRES_DB: superset_light
-      SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
       SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
       GITHUB_HEAD_REF: ${GITHUB_HEAD_REF:-}
       GITHUB_SHA: ${GITHUB_SHA:-}
@@ -139,7 +137,6 @@ services:
       DATABASE_HOST: db-light
       DATABASE_DB: superset_light
       POSTGRES_DB: superset_light
-      SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
       SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
     healthcheck:
       disable: true
@@ -196,7 +193,6 @@ services:
       DATABASE_DB: test
       POSTGRES_DB: test
       SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@db-light:5432/test
-      SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
       SUPERSET_CONFIG: superset_test_config_light
       PYTHONPATH: /app/pythonpath:/app/docker/pythonpath_dev:/app
```
```diff
@@ -44,7 +44,6 @@ x-common-build: &common-build
       INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
       INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
       BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
-      LOAD_EXAMPLES_DUCKDB: ${LOAD_EXAMPLES_DUCKDB:-true}

 services:
   nginx:
@@ -106,8 +105,6 @@ services:
       superset-init:
         condition: service_completed_successfully
     volumes: *superset-volumes
-    environment:
-      SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"

   superset-websocket:
     build: ./superset-websocket
@@ -157,8 +154,6 @@ services:
         condition: service_started
     user: *superset-user
     volumes: *superset-volumes
-    environment:
-      SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
     healthcheck:
       disable: true
```
**`docs/.gitignore`** (+7)

```diff
@@ -26,3 +26,10 @@ docs/intro.md

 # Generated badge images (downloaded at build time by remark-localize-badges plugin)
 static/badges/
+
+# Generated database documentation MDX files (regenerated at build time)
+# Source of truth is in superset/db_engine_specs/*.py metadata attributes
+docs/databases/
+
+# Note: src/data/databases.json is COMMITTED (not ignored) to preserve feature diagnostics
+# that require Flask context to generate. Update it locally with: npm run gen-db-docs
```
```diff
@@ -416,7 +416,7 @@ If versions don't appear in dropdown:

 - [Docusaurus Documentation](https://docusaurus.io/docs)
 - [MDX Documentation](https://mdxjs.com/)
-- [Superset Contributing Guide](../CONTRIBUTING.md)
+- [Superset Developer Portal](https://superset.apache.org/developer_portal/)
 - [Main Superset Documentation](https://superset.apache.org/docs/intro)

 ## 📖 Real Examples and Patterns
```
```diff
@@ -18,9 +18,9 @@ under the License.
 -->

 This is the public documentation site for Superset, built using
-[Docusaurus 3](https://docusaurus.io/). See
-[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
-contributing to documentation.
+[Docusaurus 3](https://docusaurus.io/). See the
+[Developer Portal](https://superset.apache.org/developer_portal/contributing/development-setup#documentation)
+for documentation on contributing to documentation.

 ## Version Management
```
```diff
@@ -653,7 +653,7 @@ export enum FeatureFlag {
 those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
 in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.

-The current status of the usability of each flag (stable vs testing, etc) can be found in `RESOURCES/FEATURE_FLAGS.md`.
+The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.

 ## Git Hooks
```
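The merge semantics described in this hunk boil down to a plain dict merge where `superset_config.py` wins; a quick sketch using the example values from the text:

```python
# Defaults shipped in superset/config.py.
DEFAULT_FEATURE_FLAGS = {"FOO": True, "BAR": False}

# Overrides from superset_config.py.
FEATURE_FLAGS = {"BAR": True, "BAZ": True}

# Later entries win, yielding the combined flags from the example above.
effective = {**DEFAULT_FEATURE_FLAGS, **FEATURE_FLAGS}
assert effective == {"FOO": True, "BAR": True, "BAZ": True}
```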
````diff
@@ -342,26 +342,79 @@ ruff check --fix .

 Pre-commit hooks run automatically on `git commit` if installed.

-### TypeScript
+### TypeScript / JavaScript

-We use ESLint and Prettier for TypeScript:
+We use a hybrid linting approach combining OXC (Oxidation Compiler) for standard rules and a custom AST-based checker for Superset-specific patterns.
+
+#### Quick Commands

 ```bash
 cd superset-frontend

-# Run eslint checks
+# Run both OXC and custom rules
+npm run lint:full
+
+# Run OXC linter only (faster for most checks)
 npm run lint

+# Fix auto-fixable issues with OXC
+npm run lint-fix
+
+# Run custom rules checker only
+npm run check:custom-rules
+
 # Run tsc (typescript) checks
 npm run type

-# Fix lint issues
-npm run lint-fix
-
 # Format with Prettier
 npm run prettier
 ```
+
+#### Architecture
+
+The linting system consists of two components:
+
+1. **OXC Linter** (`oxlint`) - A Rust-based linter that's 50-100x faster than ESLint
+   - Handles all standard JavaScript/TypeScript rules
+   - Configured via `oxlint.json`
+   - Runs via `npm run lint` or `npm run lint-fix`
+
+2. **Custom Rules Checker** - A Node.js AST-based checker for Superset-specific patterns
+   - Enforces no literal colors (use theme colors)
+   - Prevents FontAwesome usage (use @superset-ui/core Icons)
+   - Validates i18n template usage (no template variables)
+   - Runs via `npm run check:custom-rules`
+
+#### Why This Approach?
+
+- **50-100x faster linting** compared to ESLint for standard rules via OXC
+- **Apache-compatible** - No custom binaries, ASF-friendly
+- **Maintainable** - Custom rules in JavaScript, not Rust
+- **Flexible** - Can evolve as OXC adds plugin support
+
+#### Troubleshooting
+
+**"Plugin 'basic-custom-plugin' not found" Error**
+
+Ensure you're using the explicit config:
+```bash
+npx oxlint --config oxlint.json
+```
+
+**Custom Rules Not Running**
+
+Verify the AST parsing dependencies are installed:
+```bash
+npm ls @babel/parser @babel/traverse glob
+```
+
+#### Adding New Custom Rules
+
+1. Edit `scripts/check-custom-rules.js`
+2. Add a new check function following the AST visitor pattern
+3. Call the function in `processFile()`
+4. Test with `npm run check:custom-rules`

 ## GitHub Ephemeral Environments

 For every PR, an ephemeral environment is automatically deployed for testing.
````
**`docs/developer_portal/extensions/admin-configuration.md`** (new file, +248)

---
title: Administrator Configuration
sidebar_position: 12
---

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

# Extension Administrator Configuration

This guide covers how to configure extension security for production deployments. As an administrator, you control which extensions can run and at what trust level.

## Trust Configuration

Configure extension trust in `superset_config.py`:

```python
EXTENSIONS_TRUST_CONFIG = {
    # Extensions that can run with full privileges ('core' trust level)
    "trusted_extensions": [
        "official-parquet-export",
        "enterprise-sso-plugin",
    ],

    # Allow any extension to run as 'core' without signature verification
    # WARNING: NEVER enable in production - development use only!
    "allow_unsigned_core": False,

    # Default sandbox for extensions without explicit trust configuration
    # Options: 'core', 'iframe', 'worker', 'wasm'
    "default_trust_level": "iframe",

    # Require valid signatures for extensions requesting 'core' trust
    # Recommended for production deployments
    "require_core_signatures": True,

    # Public keys for verified publishers (file paths or PEM strings)
    "trusted_signers": [
        "/etc/superset/keys/apache-official.pub",
        "/etc/superset/keys/enterprise-team.pub",
    ],
}
```

## Configuration Options

### `trusted_extensions`

A list of extension IDs that are allowed to run at the `core` trust level without signature verification. Use this for extensions you've reviewed and trust completely.

```python
"trusted_extensions": [
    "my-company-plugin",
    "approved-community-extension",
],
```

### `allow_unsigned_core`

When `True`, allows any extension to run at the `core` trust level regardless of signatures or the trusted list. **Never enable this in production** - it's intended only for development environments.

```python
# Development only!
"allow_unsigned_core": True,
```

### `default_trust_level`

The trust level assigned to extensions that don't specify one in their manifest. The safest option is `iframe`, which provides browser-enforced isolation.

| Level | Description |
|-------|-------------|
| `iframe` | Browser-sandboxed iframe with controlled API access (recommended default) |
| `worker` | Web Worker sandbox for command-only extensions |
| `wasm` | WASM sandbox with no DOM access (most restrictive) |
| `core` | Full access to the main context (not recommended as a default) |

```python
"default_trust_level": "iframe",
```

### `require_core_signatures`

When `True`, extensions requesting the `core` trust level must have a valid signature from a trusted signer. Extensions without valid signatures are downgraded to `default_trust_level`.

```python
"require_core_signatures": True,
```

### `trusted_signers`

A list of public keys authorized to sign extensions. Keys can be specified as file paths or inline PEM strings.

```python
"trusted_signers": [
    # File path to public key
    "/etc/superset/keys/publisher.pub",

    # Inline PEM string
    """-----BEGIN PUBLIC KEY-----
MCowBQYDK2VwAyEA...
-----END PUBLIC KEY-----""",
],
```

## Signature Verification

### How It Works

1. Extension developers generate a signing keypair using the CLI
2. They sign their extension's manifest during the build process
3. The signed bundle includes `manifest.sig` alongside `manifest.json`
4. When Superset loads the extension, it verifies the signature against `trusted_signers`
5. If verification passes, the extension can run at its requested trust level
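Conceptually, the check in steps 4-5 is a standard Ed25519 signature verification. Here is a sketch using the `cryptography` package; the function and file layout are illustrative, not Superset's actual implementation:

```python
from pathlib import Path

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.serialization import load_pem_public_key


def manifest_is_trusted(bundle_dir: str, trusted_signer_pems: list[bytes]) -> bool:
    """Return True if manifest.sig matches manifest.json for any trusted signer."""
    manifest = Path(bundle_dir, "manifest.json").read_bytes()
    signature = Path(bundle_dir, "manifest.sig").read_bytes()
    for pem in trusted_signer_pems:
        public_key = load_pem_public_key(pem)
        try:
            # Ed25519 verify() raises on mismatch rather than returning False.
            public_key.verify(signature, manifest)
            return True
        except InvalidSignature:
            continue
    return False
```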
### Configuring Trusted Signers

1. Obtain the publisher's public key file (`.pub` extension)
2. Place it in a secure location on your server (e.g., `/etc/superset/keys/`)
3. Add the path to `trusted_signers` in your configuration

```python
EXTENSIONS_TRUST_CONFIG = {
    "trusted_signers": [
        "/etc/superset/keys/acme-corp.pub",
    ],
    "require_core_signatures": True,
}
```

### Verifying a Key Fingerprint

Before adding a public key to your trusted signers, verify its fingerprint with the publisher:

```bash
# On the publisher's machine
superset-extensions generate-keys --output my-key.pem
# Output: Fingerprint: MCowBQYDK2Vw...
```

Compare this fingerprint with what you receive to ensure authenticity.
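The `MCowBQYDK2Vw` prefix in the CLI output is the base64 form of a DER-encoded Ed25519 public key, so one plausible way to recompute a comparable fingerprint yourself is to re-encode the PEM key. This is an assumption about the CLI's fingerprint format; confirm against its documentation. The filename below is hypothetical:

```python
import base64
from pathlib import Path

from cryptography.hazmat.primitives import serialization

# The public half of the keypair generated above (assumed filename).
public_key = serialization.load_pem_public_key(Path("my-key.pub").read_bytes())

# Re-encode as DER SubjectPublicKeyInfo and base64 it; for Ed25519 keys
# this yields the "MCowBQYDK2Vw..." form shown in the CLI output.
der = public_key.public_bytes(
    encoding=serialization.Encoding.DER,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
print("Fingerprint:", base64.b64encode(der).decode())
```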
|
||||
|
||||
## Security Recommendations
|
||||
|
||||
### Production Deployments
|
||||
|
||||
1. **Set `require_core_signatures: True`** - Ensures core extensions are verified
|
||||
2. **Set `allow_unsigned_core: False`** - Never allow unsigned core extensions
|
||||
3. **Use `iframe` as default** - Provides strong browser isolation
|
||||
4. **Limit `trusted_extensions`** - Only add extensions you've thoroughly reviewed
|
||||
5. **Secure key storage** - Store public keys in protected directories
|
||||
|
||||
### Development Environments
|
||||
|
||||
For local development, you may relax some restrictions:
|
||||
|
||||
```python
|
||||
# Development configuration
|
||||
EXTENSIONS_TRUST_CONFIG = {
|
||||
"trusted_extensions": [],
|
||||
"allow_unsigned_core": True, # OK for development
|
||||
"default_trust_level": "core", # Easier debugging
|
||||
"require_core_signatures": False,
|
||||
"trusted_signers": [],
|
||||
}
|
||||
```
|
||||
|
||||
## Extension Installation
|
||||
|
||||
### From Trusted Sources
|
||||
|
||||
1. Download the `.supx` bundle from a trusted source
|
||||
2. Verify any checksums or signatures provided by the publisher
|
||||
3. Place the bundle in your `EXTENSIONS_PATH` directory
|
||||
4. If the extension requires `core` trust, add it to `trusted_extensions` or configure signature verification
|

### From Community Registry

Extensions from the community registry should be treated as semi-trusted at best. Consider:

1. Using `iframe` sandbox for community extensions
2. Reviewing the extension's source code before installation
3. Testing in a staging environment first

## Monitoring Extensions

### Logging

Extension trust decisions are logged at the INFO level:

```
INFO: Extension my-extension granted core trust (trusted + valid signature)
WARNING: Extension unknown-ext trust downgraded from core to iframe: Extension not in trusted list
```

Review these logs to monitor extension behavior and identify potential issues.
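
If these messages are not appearing, make sure your deployment emits INFO-level logs. A minimal sketch for `superset_config.py` — the logger name here is an assumption; adjust it to whatever your deployment actually emits:

```python
# Illustrative: ensure INFO-level extension trust logs are captured.
# The logger name below is a guess, not a documented Superset constant.
import logging

logging.getLogger("superset").setLevel(logging.INFO)
```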

### Trust Downgrades

If an extension's trust is downgraded, you'll see a warning in the logs. Common reasons:

| Reason | Meaning |
|--------|---------|
| "Extension not in trusted list" | Extension requests core but isn't in `trusted_extensions` |
| "Core trust requires a valid signature" | `require_core_signatures` is enabled but signature is missing |
| "Signature verification failed" | Signature doesn't match any trusted signer |

## Troubleshooting

### Extension Not Loading as Core

1. Check if the extension ID is in `trusted_extensions`
2. If using signatures, verify the public key is in `trusted_signers`
3. Check logs for trust downgrade messages
4. Verify the extension bundle contains `manifest.sig`

### Signature Verification Failing

1. Ensure the public key file is readable by the Superset process
2. Verify the key is in PEM format with the correct Ed25519 type
3. Check that the manifest wasn't modified after signing
4. Confirm the signature was created with the matching private key

### Permission Denied Errors

Sandboxed extensions may encounter permission errors if:

1. The extension's declared permissions don't match its API calls
2. The sandbox is blocking access correctly (working as intended)
3. The extension was downgraded to a more restrictive sandbox

Check the extension's `sandbox.permissions` configuration against its actual needs.
416
docs/developer_portal/extensions/sandbox.md
Normal file
@@ -0,0 +1,416 @@

---
title: Extension Sandboxing
sidebar_position: 10
---

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

# Extension Sandboxing

Superset provides a tiered sandbox architecture for running extensions with varying levels of trust and isolation. This system balances security with functionality, allowing extensions to be safely executed based on their trust level and requirements.

## Overview

The sandbox system supports three tiers of trust:

| Tier | Trust Level | Isolation | Use Case |
|------|-------------|-----------|----------|
| **Tier 1** | `core` | None (main context) | Official/signed extensions |
| **Tier 2** | `iframe` | Browser sandbox | Community UI extensions |
| **Tier 3** | `wasm` | WASM sandbox | Logic-only extensions |

## Trust Levels

### Tier 1: Core (Trusted)

Core extensions run in the main JavaScript context with full access to Superset APIs, DOM, and browser capabilities. This is the same behavior as legacy extensions.

**Requirements:**
- Must be in the trusted extensions list, OR
- `allowUnsignedCore` configuration must be enabled

**Use cases:**
- Official Apache Superset extensions
- Enterprise-verified plugins
- Extensions from trusted sources

```json
{
  "id": "official-extension",
  "sandbox": {
    "trustLevel": "core",
    "requiresSignature": true
  }
}
```

### Tier 2: Iframe (Semi-Trusted)

Iframe-sandboxed extensions run in isolated browser sandboxes with controlled API access via postMessage. This provides strong browser-enforced isolation while still allowing full UI rendering.

**Security features:**
- Browser-enforced same-origin isolation
- Content Security Policy (CSP) restrictions
- Permission-based API access
- No access to parent window's cookies, localStorage, or DOM

**Use cases:**
- Community-contributed extensions
- Third-party plugins
- Extensions that render custom UI

```json
{
  "id": "community-extension",
  "sandbox": {
    "trustLevel": "iframe",
    "permissions": ["sqllab:read", "notification:show"],
    "csp": {
      "connectSrc": ["https://api.example.com"]
    }
  }
}
```

### Tier 3: WASM (Untrusted)

WASM-sandboxed extensions run in a QuickJS WebAssembly sandbox with no DOM access. Only explicitly injected APIs are available. This provides the highest level of isolation.

**Security features:**
- Complete isolation from browser APIs
- Memory limits to prevent DoS
- Execution time limits
- No network or DOM access

**Use cases:**
- Custom data transformations
- Calculated fields and formatters
- Data validation rules
- Custom aggregation functions

```json
{
  "id": "formatter-extension",
  "sandbox": {
    "trustLevel": "wasm",
    "resourceLimits": {
      "maxMemory": 10485760,
      "maxExecutionTime": 5000
    }
  }
}
```

## Permissions

Sandboxed extensions (Tier 2 and 3) must declare the permissions they need. Permissions follow a least-privilege model.

### Available Permissions

| Permission | Description |
|------------|-------------|
| `api:read` | Read-only access to Superset APIs |
| `api:write` | Write access to Superset APIs |
| `sqllab:read` | Read SQL Lab state (queries, results) |
| `sqllab:execute` | Execute SQL queries |
| `dashboard:read` | Read dashboard data |
| `dashboard:write` | Modify dashboards |
| `chart:read` | Read chart data |
| `chart:write` | Modify charts |
| `user:read` | Read current user info |
| `notification:show` | Show notifications to user |
| `modal:open` | Open modal dialogs |
| `navigation:redirect` | Navigate to other pages |
| `clipboard:write` | Write to clipboard |
| `download:file` | Trigger file downloads |

### Example Permission Declaration

```json
{
  "sandbox": {
    "trustLevel": "iframe",
    "permissions": [
      "sqllab:read",
      "notification:show",
      "download:file"
    ]
  }
}
```

## Sandboxed Extension API

Extensions running in iframe sandboxes have access to a controlled API through the `window.superset` object.

### SQL Lab API

```typescript
// Get the current SQL Lab tab (requires sqllab:read)
const tab = await window.superset.sqlLab.getCurrentTab();

// Get query results (requires sqllab:read)
const results = await window.superset.sqlLab.getQueryResults(queryId);
```

### Dashboard API

```typescript
// Get dashboard context (requires dashboard:read)
const context = await window.superset.dashboard.getContext();

// Get dashboard filters (requires dashboard:read)
const filters = await window.superset.dashboard.getFilters();
```

### Chart API

```typescript
// Get chart data (requires chart:read)
const chartData = await window.superset.chart.getData(chartId);
```

### User API

```typescript
// Get current user (requires user:read)
const user = await window.superset.user.getCurrentUser();
```

### UI API

```typescript
// Show notification (requires notification:show)
window.superset.ui.showNotification('Success!', 'success');

// Open modal (requires modal:open)
const result = await window.superset.ui.openModal({
  title: 'Confirm',
  content: 'Are you sure?',
  type: 'confirm'
});

// Navigate (requires navigation:redirect)
window.superset.ui.navigateTo('/dashboard/1');
```

### Utility API

```typescript
// Copy to clipboard (requires clipboard:write)
await window.superset.utils.copyToClipboard('text');

// Download file (requires download:file)
window.superset.utils.downloadFile(blob, 'filename.csv');

// Get CSRF token (no permission required)
const token = await window.superset.utils.getCSRFToken();
```

### Event Subscriptions

```typescript
// Subscribe to events
const unsubscribe = window.superset.on('dashboard:filterChange', (filters) => {
  console.log('Filters changed:', filters);
});

// Later, unsubscribe
unsubscribe();
```

## Content Security Policy

Iframe-sandboxed extensions can customize their Content Security Policy through the `csp` configuration:

```json
{
  "sandbox": {
    "trustLevel": "iframe",
    "csp": {
      "defaultSrc": ["'none'"],
      "scriptSrc": ["'unsafe-inline'"],
      "styleSrc": ["'unsafe-inline'"],
      "imgSrc": ["data:", "blob:", "https://cdn.example.com"],
      "connectSrc": ["https://api.example.com"],
      "fontSrc": ["data:"]
    }
  }
}
```

### Default CSP

By default, iframe sandboxes use a restrictive CSP:

```
default-src 'none';
script-src 'unsafe-inline';
style-src 'unsafe-inline';
img-src data: blob:;
font-src data:;
connect-src 'none';
frame-src 'none';
```

## WASM Resource Limits

WASM-sandboxed extensions can configure resource limits:

```json
{
  "sandbox": {
    "trustLevel": "wasm",
    "resourceLimits": {
      "maxMemory": 10485760,      // 10MB max memory
      "maxExecutionTime": 5000,   // 5 second timeout
      "maxStackSize": 1000        // Max call stack depth
    }
  }
}
```

### Defaults

- **maxMemory**: 10MB
- **maxExecutionTime**: 5000ms (5 seconds)
- **maxStackSize**: 1000 calls

## Migration Guide

### Migrating from Legacy Extensions

Existing extensions that don't specify a `sandbox` configuration will continue to run as `core` extensions for backward compatibility. To migrate to a sandboxed model:

1. **Assess your extension's requirements**:
   - Does it need to render UI? Use `iframe`
   - Is it logic-only (formatters, validators)? Use `wasm`
   - Does it need full access? Keep as `core` (requires trust)

2. **Add sandbox configuration to extension.json**:

   ```json
   {
     "sandbox": {
       "trustLevel": "iframe",
       "permissions": ["sqllab:read"]
     }
   }
   ```

3. **Update your code to use the sandboxed API**:

   Before (core extension):
   ```typescript
   import { sqlLab } from '@apache-superset/core';
   const tab = sqlLab.getCurrentTab();
   ```

   After (sandboxed extension):
   ```typescript
   const tab = await window.superset.sqlLab.getCurrentTab();
   ```

4. **Test thoroughly** to ensure all functionality works within the sandbox

## Security Comparison

| Aspect | Core | Iframe | WASM |
|--------|------|--------|------|
| DOM Access | Full | Own iframe only | None |
| Network | Full | Restricted (CSP) | None |
| Cookies | Full | None | None |
| localStorage | Full | None | None |
| Superset APIs | Full | Controlled bridge | Injected only |
| Performance | Native | Near-native | ~40% slower |
| React rendering | Full | Own instance | Via descriptors |

## Administrator Configuration

Administrators can configure trust settings for their Superset deployment:

```python
# In superset_config.py
EXTENSIONS_TRUST_CONFIG = {
    # Extensions allowed to run as 'core'
    "trusted_extensions": [
        "official-extension-1",
        "enterprise-plugin",
    ],

    # Allow unsigned extensions to run as core (not recommended for production)
    "allow_unsigned_core": False,

    # Default trust level for extensions without sandbox config
    "default_trust_level": "iframe",
}
```

## Best Practices

1. **Request minimal permissions** - Only request the permissions your extension actually needs

2. **Prefer iframe over core** - Unless your extension requires deep integration, use iframe sandboxing

3. **Use WASM for pure logic** - If your extension doesn't need UI, WASM provides the best isolation

4. **Handle permission denials gracefully** - Your extension should degrade gracefully if a permission is not granted (see the sketch after this list)

5. **Don't store sensitive data** - Sandboxed extensions should not store sensitive user data

6. **Test in sandboxed mode** - Always test your extension in its intended sandbox environment
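
A hedged sketch of point 4 — the `copyToClipboard` and `showNotification` calls come from the API sections above, while the assumption that a denied permission surfaces as a thrown error (and the `'info'` notification type) is illustrative:

```typescript
// Illustrative only: assumes the bridge rejects with an error when the
// clipboard:write permission is missing; adjust to the actual failure mode.
const superset = (window as any).superset;

export async function copyShareLink(url: string): Promise<void> {
  try {
    await superset.utils.copyToClipboard(url); // requires clipboard:write
    superset.ui.showNotification('Link copied!', 'success');
  } catch {
    // Degrade gracefully: surface the link instead of failing silently
    superset.ui.showNotification(`Copy this link: ${url}`, 'info');
  }
}
```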

## Troubleshooting

### Permission Denied Errors

If you see "Permission denied" errors, verify that:
1. The permission is declared in your extension.json
2. The permission was granted by the administrator
3. You're calling the correct API method for that permission

### Timeout Errors (WASM)

If your WASM extension times out:
1. Optimize your code for faster execution
2. Request a higher `maxExecutionTime` limit
3. Break large operations into smaller chunks

### CSP Violations (Iframe)

If resources fail to load due to CSP:
1. Add the required domains to your CSP configuration
2. Ensure you're using HTTPS for external resources
3. Avoid inline scripts and styles where possible

### Core Trust Denied

If your extension is downgraded from `core` to another trust level:
1. Check if the extension ID is in the administrator's `trusted_extensions` list
2. If signature verification is required, ensure the extension is signed
3. Verify the signing key is in the administrator's `trusted_signers`

See [Extension Signing](./signing) for how to sign your extension.

## Related Documentation

- [Security Overview](./security) - Extension security fundamentals
- [Extension Signing](./signing) - How to sign extensions for core trust
- [Administrator Configuration](./admin-configuration) - Trust configuration for admins
@@ -26,9 +26,44 @@ under the License.

By default, extensions are disabled and must be explicitly enabled by setting the `ENABLE_EXTENSIONS` feature flag. Built-in extensions are included as part of the Superset codebase and are held to the same security standards and review processes as the rest of the application.

For external extensions, administrators are responsible for evaluating and verifying the security of any extensions they choose to install, just as they would when installing third-party NPM or PyPI packages. At this stage, all extensions run in the same context as the host application, without additional sandboxing. This means that external extensions can impact the security and performance of a Superset environment in the same way as any other installed dependency.
## Extension Sandboxing

We plan to introduce an optional sandboxed execution model for extensions in the future (as part of an additional SIP). Until then, administrators should exercise caution and follow best practices when selecting and deploying third-party extensions. A directory of community extensions is available in the [Community Extensions](./registry) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.
Superset provides a tiered sandbox architecture for running extensions with varying levels of trust and isolation. Extensions can declare their trust level and permissions in their manifest, and Superset will load them in the appropriate sandbox:

- **Core (Tier 1)**: Trusted extensions run in the main context with full access
- **Iframe (Tier 2)**: Semi-trusted extensions run in browser-sandboxed iframes
- **WASM (Tier 3)**: Untrusted logic runs in WebAssembly sandboxes

For detailed information about the sandbox system, see [Extension Sandboxing](./sandbox).

## Trust Model

Administrators are responsible for evaluating and verifying the security of any extensions they choose to install. Superset's sandbox system provides defense-in-depth:

1. **Core extensions** require explicit trust configuration and optionally signature verification
2. **Iframe-sandboxed extensions** are isolated by the browser's same-origin policy
3. **WASM-sandboxed extensions** have no access to browser APIs

A directory of community extensions is available in the [Community Extensions](./registry) page. Note that these extensions are not vetted by the Apache Superset project—administrators must evaluate each extension before installation.

## Extension Signing

Extensions can be cryptographically signed to verify their authenticity and integrity. This is required for extensions that need `core` trust level in production environments with signature verification enabled.

- **Developers**: See [Extension Signing](./signing) to learn how to sign your extensions
- **Administrators**: See [Administrator Configuration](./admin-configuration) to configure trusted signers

## Administrator Configuration

Superset provides extensive configuration options for controlling extension trust levels, signature verification, and security policies. Key settings include:

- **Trusted extensions list**: Extensions allowed to run as `core`
- **Signature verification**: Require valid signatures for core trust
- **Default trust level**: Sandbox level for unlisted extensions

For complete configuration details, see [Administrator Configuration](./admin-configuration).

## Security Reporting

**Any performance or security vulnerabilities introduced by external extensions should be reported directly to the extension author, not as Superset vulnerabilities.**

236
docs/developer_portal/extensions/signing.md
Normal file
@@ -0,0 +1,236 @@

---
title: Extension Signing
sidebar_position: 11
---

<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

# Extension Signing

Signing your extension allows administrators to verify its authenticity and integrity. Signed extensions can run as `core` trust level in production environments where signature verification is required.

## Why Sign Extensions?

- **Trust**: Administrators can verify your extension comes from a known source
- **Integrity**: Ensures the extension hasn't been modified since you signed it
- **Core Access**: Required for extensions needing `core` trust level in secured deployments
- **Distribution**: Makes your extension suitable for enterprise environments

## Generating Signing Keys

Generate a new Ed25519 keypair for signing your extensions:

```bash
superset-extensions generate-keys --output my-signing-key.pem
```

This creates two files:

| File | Purpose | Share? |
|------|---------|--------|
| `my-signing-key.pem` | Private key for signing | **Never share!** |
| `my-signing-key.pub` | Public key for verification | Share with administrators |

**Output example:**

```
✅ Private key: my-signing-key.pem
✅ Public key: my-signing-key.pub
Fingerprint: MCowBQYDK2Vw...

⚠️ Keep the private key secure! Only share the public key with administrators.

Usage:
  Sign an extension: superset-extensions bundle --sign my-signing-key.pem
  Share with admins: my-signing-key.pub
```

## Signing an Extension

### During Bundle

The easiest way to sign is during the bundle step:

```bash
superset-extensions bundle --sign my-signing-key.pem
```

This builds, signs the manifest, and creates the `.supx` bundle in one command.

**Output:**

```
✅ Full build completed in dist/
✅ Manifest signed
✅ Bundle created (signed): my-extension-1.0.0.supx
```

### Signing Existing Manifest

To sign an already-built manifest:

```bash
superset-extensions sign --key my-signing-key.pem --manifest dist/manifest.json
```

This creates `dist/manifest.sig` containing the signature.

## Bundle Structure

A signed extension bundle contains:

```
my-extension-1.0.0.supx
├── manifest.json      # Extension manifest
├── manifest.sig       # Ed25519 signature (base64-encoded)
├── frontend/dist/     # Frontend assets
└── backend/src/       # Backend code (if applicable)
```

The signature file (`manifest.sig`) contains a base64-encoded Ed25519 signature of the manifest content.

## Distributing Your Public Key

Share your public key (`.pub` file) with administrators who want to trust your extensions:

1. **Direct sharing**: Send the `.pub` file via secure channels
2. **Documentation**: Include it in your extension's README
3. **Website**: Host it on your organization's website with HTTPS

Administrators will add your public key to their `EXTENSIONS_TRUST_CONFIG.trusted_signers` configuration.

### Key Fingerprint

The fingerprint helps administrators verify they have the correct key. Include it in your documentation:

```
Public Key Fingerprint: MCowBQYDK2Vw...
```

Administrators should verify this fingerprint matches when adding your key.

## Security Best Practices

### Protect Your Private Key

- **Never commit** private keys to version control
- **Use secure storage** like hardware security modules (HSM) for production keys
- **Limit access** to the private key to authorized personnel only
- **Back up securely** in case of key loss

### Key Rotation

Consider rotating keys periodically:

1. Generate a new keypair
2. Notify administrators of the new public key
3. Sign new releases with the new key
4. Keep the old key available for verifying existing releases

### Multiple Keys

For organizations, consider separate keys for:

- Development/testing releases
- Production releases
- Different product teams

## Requesting Core Trust

If your extension needs `core` trust level:

1. **Sign your extension** using the process above
2. **Document your public key** with its fingerprint
3. **Explain why core is needed** in your extension documentation
4. **Provide your public key** to administrators

Administrators will then:
1. Add your public key to `trusted_signers`
2. Enable `require_core_signatures: True`

Your signed extension can then run as `core`.

## Verification Process

When Superset loads your extension:

1. Reads `manifest.json` and `manifest.sig` from the bundle
2. Checks if the extension requests `core` trust level
3. If `require_core_signatures` is enabled, verifies the signature
4. Checks the signature against all keys in `trusted_signers`
5. If verification passes, grants the requested trust level
6. If verification fails, downgrades to `default_trust_level`

## Troubleshooting

### "Signature verification failed"

- Ensure you're using the private key that matches the public key given to admins
- Verify the manifest wasn't modified after signing
- Check that the `.sig` file was included in the bundle

### "Private key must be Ed25519"

- The signing system only supports Ed25519 keys
- Generate a new key using `superset-extensions generate-keys`

### Administrator Reports Invalid Signature

- Verify the public key file wasn't corrupted during transfer
- Confirm the fingerprint matches between your key and theirs
- Re-sign the extension and redistribute

## Technical Details

### Signature Algorithm

Extensions use **Ed25519** signatures:

- Fast signature generation and verification
- Small signature size (64 bytes)
- Strong security guarantees
- Deterministic signatures (the same input always produces the same output)

### Signature Format

The `manifest.sig` file contains:

```
<base64-encoded Ed25519 signature>
```

The signature is computed over the raw bytes of `manifest.json`.
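
For intuition, a minimal sketch of producing such a signature with the `cryptography` Python package — illustrative only; in practice the `superset-extensions` CLI does this for you:

```python
# Illustrative sketch -- in practice, use `superset-extensions bundle --sign`.
import base64
from pathlib import Path

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key

private_key = load_pem_private_key(
    Path("my-signing-key.pem").read_bytes(), password=None
)
assert isinstance(private_key, Ed25519PrivateKey), "key must be Ed25519"

manifest_bytes = Path("dist/manifest.json").read_bytes()
signature = private_key.sign(manifest_bytes)  # 64-byte Ed25519 signature

Path("dist/manifest.sig").write_text(base64.b64encode(signature).decode())
```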

### Key Format

Keys are stored in PEM format:

**Private key:**
```
-----BEGIN PRIVATE KEY-----
MC4CAQAwBQYDK2VwBCIEI...
-----END PRIVATE KEY-----
```

**Public key:**
```
-----BEGIN PUBLIC KEY-----
MCowBQYDK2VwAyEA...
-----END PUBLIC KEY-----
```
@@ -43,8 +43,9 @@ This is a list of statements that describe how we do frontend development in Sup

- We organize our repo so similar files live near each other, and tests are co-located with the files they test.
  - See: [SIP-61](https://github.com/apache/superset/issues/12098)
- We prefer small, easily testable files and components.
- We use ESLint and Prettier to automatically fix lint errors and format the code.
- We use OXC (oxlint) and Prettier to automatically fix lint errors and format the code.
- We do not debate code formatting style in PRs, instead relying on automated tooling to enforce it.
  - If there's not a linting rule, we don't have a rule!
  - See: [Linting How-Tos](../contributing/howtos#typescript--javascript)
- We use [React Storybook](https://storybook.js.org/) and [Applitools](https://applitools.com/) to help preview/test and stabilize our components
  - A public Storybook with components from the `master` branch is available [here](https://apache-superset.github.io/superset-ui/?path=/story/*)
@@ -86,7 +86,6 @@ Everything you need to contribute to the Apache Superset project. This section i

- **[Configuration Guide](https://superset.apache.org/docs/configuration/configuring-superset)** - Setup and configuration

### Important Files
- **[CONTRIBUTING.md](https://github.com/apache/superset/blob/master/CONTRIBUTING.md)** - Contribution guidelines
- **[CLAUDE.md](https://github.com/apache/superset/blob/master/CLAUDE.md)** - LLM development guide
- **[UPDATING.md](https://github.com/apache/superset/blob/master/UPDATING.md)** - Breaking changes log
@@ -49,7 +49,17 @@ module.exports = {

          'extensions/development',
          'extensions/deployment',
          'extensions/mcp',
          'extensions/security',
          {
            type: 'category',
            label: 'Security',
            collapsed: true,
            items: [
              'extensions/security',
              'extensions/sandbox',
              'extensions/signing',
              'extensions/admin-configuration',
            ],
          },
          'extensions/registry',
        ],
      },
@@ -24,57 +24,204 @@ under the License.

# End-to-End Testing

🚧 **Coming Soon** 🚧
Apache Superset uses Playwright for end-to-end testing, migrating from the legacy Cypress tests.

Guide for writing and running end-to-end tests using Playwright and Cypress.

## Topics to be covered:
## Running Tests

### Playwright (Recommended)
- Setting up Playwright environment
- Writing reliable E2E tests
- Page Object Model pattern
- Handling async operations
- Cross-browser testing
- Visual regression testing
- Debugging with Playwright Inspector
- CI/CD integration

### Cypress (Deprecated)
- Legacy Cypress test maintenance
- Migration to Playwright
- Running existing Cypress tests

## Quick Commands

### Playwright
```bash
# Run all Playwright tests
npm run playwright:test
cd superset-frontend

# Run in headed mode (see browser)
npm run playwright:headed
# Run all tests
npm run playwright:test
# or: npx playwright test

# Run specific test file
npx playwright test tests/auth/login.spec.ts

# Debug specific test
npm run playwright:debug tests/auth/login.spec.ts

# Open Playwright UI
# Run with UI mode for debugging
npm run playwright:ui
# or: npx playwright test --ui

# Run in headed mode (see browser)
npm run playwright:headed
# or: npx playwright test --headed

# Debug specific test file
npm run playwright:debug tests/auth/login.spec.ts
# or: npx playwright test --debug tests/auth/login.spec.ts
```

### Cypress (Deprecated)
```bash
# Run Cypress tests
cd superset-frontend/cypress-base
npm run cypress-run-chrome

# Open Cypress UI
npm run cypress-debug
```
Cypress tests are being migrated to Playwright. For legacy tests:

```bash
cd superset-frontend/cypress-base
npm run cypress-run-chrome  # Headless
npm run cypress-debug       # Interactive UI
```

---
## Project Architecture

*This documentation is under active development. Check back soon for updates!*
```
superset-frontend/playwright/
├── components/core/     # Reusable UI components
├── pages/               # Page Object Models
├── tests/               # Test files organized by feature
├── utils/               # Shared constants and utilities
└── playwright.config.ts
```

## Design Principles

We follow **YAGNI** (You Aren't Gonna Need It), **DRY** (Don't Repeat Yourself), and **KISS** (Keep It Simple, Stupid) principles:

- Build only what's needed now
- Reuse existing patterns and components
- Keep solutions simple and maintainable

## Page Object Pattern

Each page object encapsulates:

- **Actions**: What you can do on the page
- **Queries**: Information you can get from the page
- **Selectors**: Centralized in private static SELECTORS constant
- **NO Assertions**: Keep assertions in test files

**Example Page Object:**

```typescript
export class AuthPage {
  // Selectors centralized in the page object
  private static readonly SELECTORS = {
    LOGIN_FORM: '[data-test="login-form"]',
    USERNAME_INPUT: '[data-test="username-input"]',
  } as const;

  // Actions - what you can do
  async loginWithCredentials(username: string, password: string) {}

  // Queries - information you can get
  async getCurrentUrl(): Promise<string> {}

  // NO assertions - those belong in tests
}
```

**Example Test:**

```typescript
import { test, expect } from '@playwright/test';
import { AuthPage } from '../../pages/AuthPage';
import { LOGIN } from '../../utils/urls';

test('should login with correct credentials', async ({ page }) => {
  const authPage = new AuthPage(page);
  await authPage.goto();
  await authPage.loginWithCredentials('admin', 'general');

  // Assertions belong in tests, not page objects
  expect(await authPage.getCurrentUrl()).not.toContain(LOGIN);
});
```

## Core Components

Reusable UI interaction classes for common elements (`components/core/`):

- **Form**: Container with properly scoped child element access
- **Input**: Supports `fill()`, `type()`, and `pressSequentially()` methods
- **Button**: Standard click, hover, focus interactions

**Usage Example:**

```typescript
import { Form } from '../components/core';

const loginForm = new Form(page, '[data-test="login-form"]');
const usernameInput = loginForm.getInput('[data-test="username-input"]');
await usernameInput.fill('admin');
```

## Test Reports

Playwright generates multiple reports for better visibility:

```bash
# View interactive HTML report (opens automatically on failure)
npm run playwright:report
# or: npx playwright show-report

# View test trace for debugging failures
npx playwright show-trace test-results/[test-name]/trace.zip
```

### Report Types

- **List Reporter**: Shows progress and summary table in terminal
- **HTML Report**: Interactive web interface with screenshots, videos, and traces
- **JSON Report**: Machine-readable format in `test-results/results.json`
- **GitHub Actions**: Annotations in CI for failed tests

### Debugging Failed Tests

When tests fail, Playwright automatically captures:

- **Screenshots** at the point of failure
- **Videos** of the entire test run
- **Traces** with timeline and network activity
- **Error context** with detailed debugging information

All debugging artifacts are available in the HTML report for easy analysis.

## Configuration

- **Config**: `playwright.config.ts` - matches Cypress settings
- **Base URL**: `http://localhost:8088` (assumes Superset running)
- **Browsers**: Chrome only for Phase 1 (YAGNI)
- **Retries**: 2 in CI, 0 locally (matches Cypress)

## Contributing Guidelines

### Adding New Tests

1. **Check existing components** before creating new ones
2. **Use page objects** for page interactions
3. **Keep assertions in tests**, not page objects
4. **Follow naming conventions**: `feature.spec.ts`

### Adding New Components

1. **Follow YAGNI**: Only build what's immediately needed
2. **Use Locator-based scoping** for proper element isolation
3. **Support both string selectors and Locator objects** via constructor overloads (see the sketch after this list)
4. **Add to `components/core/index.ts`** for easy importing
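
A hedged sketch of what point 3 can look like — here via a union-typed constructor rather than formal overload signatures, with names that are illustrative, not the actual Superset component API:

```typescript
// Illustrative core-component sketch: accepts a selector string or a Locator.
import type { Locator, Page } from '@playwright/test';

export class Button {
  private readonly locator: Locator;

  constructor(page: Page, selectorOrLocator: string | Locator) {
    // Locator-based scoping: strings are resolved once, Locators reused as-is
    this.locator =
      typeof selectorOrLocator === 'string'
        ? page.locator(selectorOrLocator)
        : selectorOrLocator;
  }

  async click(): Promise<void> {
    await this.locator.click();
  }
}
```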

### Adding New Page Objects

1. **Centralize selectors** in private static SELECTORS constant
2. **Import shared constants** from `utils/urls.ts`
3. **Actions and queries only** - no assertions
4. **Use existing components** for DOM interactions

## Migration from Cypress

When porting Cypress tests:

1. **Port the logic**, not the implementation
2. **Use page objects** instead of inline selectors
3. **Replace `cy.intercept/cy.wait`** with `page.waitForRequest()` (see the sketch after this list)
4. **Use shared constants** from `utils/urls.ts`
5. **Follow the established patterns** shown in `tests/auth/login.spec.ts`
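
A minimal sketch of point 3; the endpoint fragment and dashboard URL are placeholders for the example, not fixtures from this repo:

```typescript
// Illustrative cy.intercept()/cy.wait() replacement using Playwright.
import { test, expect } from '@playwright/test';

test('dashboard requests chart data', async ({ page }) => {
  // Register the waiter before the action that triggers the request
  const chartRequest = page.waitForRequest(request =>
    request.url().includes('/api/v1/chart/data'),
  );

  await page.goto('/superset/dashboard/1/');

  const request = await chartRequest;
  expect(request.url()).toContain('/api/v1/chart/data');
});
```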

## Best Practices

- **Centralize selectors** in page objects
- **Centralize URLs** in `utils/urls.ts`
- **Use meaningful test descriptions**
- **Keep page objects action-focused**
- **Put assertions in tests, not page objects**
- **Follow the existing patterns** for consistency
@@ -441,7 +441,7 @@ FEATURE_FLAGS = {

}
```

A current list of feature flags can be found in [RESOURCES/FEATURE_FLAGS.md](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md).
A current list of feature flags can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.

:::resources
- [Blog: Feature Flags in Apache Superset](https://preset.io/blog/feature-flags-in-apache-superset-and-preset/)
107
docs/docs/configuration/feature-flags.mdx
Normal file
@@ -0,0 +1,107 @@

---
title: Feature Flags
hide_title: true
sidebar_position: 2
version: 1
---

import featureFlags from '@site/static/feature-flags.json';

export const FlagTable = ({flags}) => (
  <table>
    <thead>
      <tr>
        <th>Flag</th>
        <th>Default</th>
        <th>Description</th>
      </tr>
    </thead>
    <tbody>
      {flags.map((flag) => (
        <tr key={flag.name}>
          <td><code>{flag.name}</code></td>
          <td><code>{flag.default ? 'True' : 'False'}</code></td>
          <td>
            {flag.description}
            {flag.docs && (
              <> (<a href={flag.docs}>docs</a>)</>
            )}
          </td>
        </tr>
      ))}
    </tbody>
  </table>
);

# Feature Flags

Superset uses feature flags to control the availability of features. Feature flags allow
gradual rollout of new functionality and provide a way to enable experimental features.

To enable a feature flag, add it to your `superset_config.py`:

```python
FEATURE_FLAGS = {
    "ENABLE_TEMPLATE_PROCESSING": True,
}
```

## Lifecycle

Feature flags progress through lifecycle stages:

| Stage | Description |
|-------|-------------|
| **Development** | Experimental features under active development. May be incomplete or unstable. |
| **Testing** | Feature complete but undergoing testing. Usable but may contain bugs. |
| **Stable** | Production-ready features. Safe for all deployments. |
| **Deprecated** | Features scheduled for removal. Migrate away from these. |

---

## Development

These features are experimental and under active development. Use only in development environments.

<FlagTable flags={featureFlags.flags.development} />

---

## Testing

These features are complete but still being tested. They are usable but may have bugs.

<FlagTable flags={featureFlags.flags.testing} />

---

## Stable

These features are production-ready and safe to enable.

<FlagTable flags={featureFlags.flags.stable} />

---

## Deprecated

These features are scheduled for removal. Plan to migrate away from them.

<FlagTable flags={featureFlags.flags.deprecated} />

---

## Adding New Feature Flags

When adding a new feature flag to `superset/config.py`, include the following annotations:

```python
# Description of what the feature does
# @lifecycle: development | testing | stable | deprecated
# @docs: https://superset.apache.org/docs/... (optional)
# @category: runtime_config | path_to_deprecation (optional, for stable flags)
"MY_NEW_FEATURE": False,
```

This documentation is auto-generated from the annotations in
[config.py](https://github.com/apache/superset/blob/master/superset/config.py).
@@ -60,7 +60,7 @@ There are two approaches to making dashboards publicly accessible:

**Option 2: Dashboard-level access (selective control)**
1. Set `PUBLIC_ROLE_LIKE = "Public"` in `superset_config.py`
2. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md)
2. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/docs/configuration/feature-flags)
3. Edit each dashboard's properties and add the "Public" role
4. Only dashboards with the Public role explicitly assigned are visible to anonymous users
@@ -20,7 +20,7 @@ To help make the problem somewhat tractable—given that Apache Superset has no

To strive for data consistency (regardless of the timezone of the client) the Apache Superset backend tries to ensure that any timestamp sent to the client has an explicit (or semi-explicit as in the case with [Epoch time](https://en.wikipedia.org/wiki/Unix_time) which is always in reference to UTC) timezone encoded within.

The challenge however lies with the slew of [database engines](/docs/configuration/databases#installing-drivers-in-docker-images) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects), relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.
The challenge however lies with the slew of [database engines](/docs/databases#installing-drivers-in-docker) which Apache Superset supports and various inconsistencies between their [Python Database API (DB-API)](https://www.python.org/dev/peps/pep-0249/) implementations combined with the fact that we use [Pandas](https://pandas.pydata.org/) to read SQL into a DataFrame prior to serializing to JSON. Regrettably Pandas ignores the DB-API [type_code](https://www.python.org/dev/peps/pep-0249/#type-objects), relying by default on the underlying Python type returned by the DB-API. Currently only a subset of the supported database engines work correctly with Pandas, i.e., ensuring timestamps without an explicit timezone are serialized to JSON with the server timezone, thus guaranteeing the client will display timestamps in a consistent manner irrespective of the client's timezone.

For example the following is a comparison of MySQL and Presto,
@@ -350,6 +350,12 @@ superset init

# Note: you MUST have previously created an admin user with the username `admin` for this command to work.
superset load-examples

# The load-examples command supports various options:
# --force / -f           Force reload data even if tables exist
# --only-metadata / -m   Only create table metadata without loading data (fast setup)
# --load-test-data / -t  Load additional test dashboards and datasets
# --load-big-data / -b   Generate synthetic data for stress testing (wide tables, many tables)

# Start the Flask dev web server from inside your virtualenv.
# Note that your page may not have CSS at this point.
# See instructions below on how to build the front-end assets.
@@ -599,7 +605,7 @@ export enum FeatureFlag {

those specified under FEATURE_FLAGS in `superset_config.py`. For example, `DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False }` in `superset/config.py` and `FEATURE_FLAGS = { 'BAR': True, 'BAZ': True }` in `superset_config.py` will result
in combined feature flags of `{ 'FOO': True, 'BAR': True, 'BAZ': True }`.
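
The merge described above behaves like a shallow dict merge in which `superset_config.py` wins; a quick sketch using the values from that example:

```python
# Sketch of the merge semantics described above.
DEFAULT_FEATURE_FLAGS = {"FOO": True, "BAR": False}  # superset/config.py
FEATURE_FLAGS = {"BAR": True, "BAZ": True}           # superset_config.py

effective = {**DEFAULT_FEATURE_FLAGS, **FEATURE_FLAGS}
assert effective == {"FOO": True, "BAR": True, "BAZ": True}
```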
The current status of the usability of each flag (stable vs testing, etc) can be found in `RESOURCES/FEATURE_FLAGS.md`.
The current status of the usability of each flag (stable vs testing, etc) can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.

## Git Hooks
@@ -692,6 +698,97 @@ secrets.

---

## Example Data and Test Loaders

### Example Datasets

Superset includes example datasets stored as Parquet files, organized by example name in the `superset/examples/` directory. Each example is self-contained:

```
superset/examples/
├── _shared/                # Shared configuration
│   ├── database.yaml       # Database connection config
│   └── metadata.yaml       # Import metadata
├── birth_names/            # Example: US Birth Names
│   ├── data.parquet        # Dataset (compressed columnar)
│   ├── dataset.yaml        # Dataset metadata
│   ├── dashboard.yaml      # Dashboard configuration (optional)
│   └── charts/             # Chart configurations (optional)
│       ├── Boys.yaml
│       ├── Girls.yaml
│       └── ...
├── energy_usage/           # Example: Energy Sankey
│   ├── data.parquet
│   ├── dataset.yaml
│   └── charts/
└── ... (27 example directories)
```

#### Adding a New Example Dataset

**Simple dataset (data only):**

1. Create a directory: `superset/examples/my_dataset/`
2. Add your data as `data.parquet`:
   ```python
   import pandas as pd
   df = pd.read_csv("your_data.csv")
   df.to_parquet("superset/examples/my_dataset/data.parquet", compression="snappy")
   ```
3. The dataset will be auto-discovered when running `superset load-examples`

**Complete example with dashboard:**

1. Create your dataset directory with `data.parquet`
2. Add `dataset.yaml` with metadata (columns, metrics, etc.)
3. Add `dashboard.yaml` with dashboard layout
4. Add chart configs in `charts/` directory
5. See existing examples like `birth_names/` for reference

#### Exporting an Existing Dashboard

To export a dashboard and its charts as YAML configs:

1. In Superset, go to the dashboard you want to export
2. Click the "..." menu → "Export"
3. Unzip the exported file
4. Copy the YAML files to your example directory
5. Add the `data.parquet` file

#### Why Parquet?

- **Apache-friendly**: Parquet is an Apache project, ideal for ASF codebases
- **Compressed**: Built-in Snappy compression (~27% smaller than CSV)
- **Self-describing**: Schema is embedded in the file
- **Widely supported**: Works with pandas, pyarrow, DuckDB, Spark, etc.

### Test Data Generation

For stress testing and development, Superset includes special test data generators that create synthetic data:

#### Big Data Loader (`--load-big-data`)

Located in `superset/cli/test_loaders.py`, this generates:

- **Wide Table** (`wide_table`): 100 columns of mixed types, 1000 rows
- **Many Small Tables** (`small_table_0` through `small_table_999`): 1000 tables for testing catalog performance
- **Long Name Table**: Table with a 60-character random name for testing UI edge cases

This is primarily used for:
- Performance testing with extreme data shapes
- UI edge case validation
- Database catalog stress testing
- CI/CD pipeline validation

#### Test Dashboards (`--load-test-data`)

Loads additional test-specific content:
- Tabbed dashboard example
- Supported charts dashboard
- Test configuration files (*.test.yaml)

---

## Testing

### Python Testing
@@ -157,7 +157,7 @@ table afterwards to configure the Columns tab, check the appropriate boxes and s

To clarify, the database backend is an OLTP database used by Superset to store its internal
information like your list of users and dashboard definitions. While Superset supports a
[variety of databases as data _sources_](/docs/configuration/databases#installing-database-drivers),
[variety of databases as data _sources_](/docs/databases#installing-database-drivers),
only a few database engines are supported for use as the OLTP backend / metadata store.

Superset is tested using MySQL, PostgreSQL, and SQLite backends. It's recommended you install
@@ -190,7 +190,7 @@ second etc). Example:

## Does Superset work with [insert database engine here]?

The [Connecting to Databases section](/docs/configuration/databases) provides the best
The [Connecting to Databases section](/docs/databases) provides the best
overview for supported databases. Database engines not listed on that page may work too. We rely on
the community to contribute to this knowledge base.
@@ -149,7 +149,7 @@ For production clusters it's recommended to build own image with this step done

Superset requires a Python DB-API database driver and a SQLAlchemy
dialect to be installed for each datastore you want to connect to.

See [Install Database Drivers](/docs/configuration/databases) for more information.
See [Install Database Drivers](/docs/databases#installing-database-drivers) for more information.
It is recommended that you refer to versions listed in
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml)
instead of hard-coding them in your bootstrap script, as seen below.
@@ -47,3 +47,15 @@ superset init

While upgrading Superset should not delete your charts and dashboards, we recommend following best
practices and backing up your metadata database before upgrading. Before upgrading production, we
recommend upgrading in a staging environment first, and upgrading production during off-peak usage.

## Breaking Changes

For a detailed list of breaking changes and migration notes for each version, see
[UPDATING.md](https://github.com/apache/superset/blob/master/UPDATING.md).

This file documents backwards-incompatible changes and provides guidance for migrating between
major versions, including:
- Configuration changes
- API changes
- Database migrations
- Deprecated features
@@ -74,7 +74,7 @@ processes by running Docker Compose `stop` command. By doing so, you can avoid d

From this point on, you can head on to:

- [Create your first Dashboard](/docs/using-superset/creating-your-first-dashboard)
- [Connect to a Database](/docs/configuration/databases)
- [Connect to a Database](/docs/databases)
- [Using Docker Compose](/docs/installation/docker-compose)
- [Configure Superset](/docs/configuration/configuring-superset/)
- [Installing on Kubernetes](/docs/installation/kubernetes/)
@@ -134,7 +134,7 @@ if (!versionsConfig.developer_portal.disabled && !versionsConfig.developer_porta

    {
      type: 'doc',
      docsPluginId: 'developer_portal',
      docId: 'extensions/architectural-principles',
      docId: 'extensions/overview',
      label: 'Extensions',
    },
    {
@@ -222,7 +222,7 @@ const config: Config = {

      from: '/gallery.html',
    },
    {
      to: '/docs/configuration/databases',
      to: '/docs/databases',
      from: '/druid.html',
    },
    {
@@ -274,7 +274,7 @@ const config: Config = {

      from: '/docs/contributing/contribution-page',
    },
    {
      to: '/docs/configuration/databases',
      to: '/docs/databases',
      from: '/docs/databases/yugabyte/',
    },
    {
@@ -410,6 +410,11 @@ const config: Config = {

      docId: 'intro',
      label: 'Getting Started',
    },
    {
      type: 'doc',
      docId: 'databases/index',
      label: 'Databases',
    },
    {
      type: 'doc',
      docId: 'faq',
@@ -6,17 +6,22 @@

  "scripts": {
    "docusaurus": "docusaurus",
    "_init": "cat src/intro_header.txt ../README.md > docs/intro.md",
    "start": "yarn run _init && yarn run generate:extension-components && NODE_ENV=development docusaurus start",
    "start": "yarn run _init && yarn run generate:extension-components && yarn run generate:database-docs && NODE_ENV=development docusaurus start",
    "stop": "pkill -f 'docusaurus start' || pkill -f 'docusaurus serve' || echo 'No docusaurus server running'",
    "build": "yarn run _init && yarn run generate:extension-components && DEBUG=docusaurus:* docusaurus build",
    "build": "yarn run _init && yarn run generate:extension-components && yarn run generate:database-docs && DEBUG=docusaurus:* docusaurus build",
    "swizzle": "docusaurus swizzle",
    "deploy": "docusaurus deploy",
    "clear": "docusaurus clear",
    "serve": "yarn run _init && docusaurus serve",
    "write-translations": "docusaurus write-translations",
    "write-heading-ids": "docusaurus write-heading-ids",
    "typecheck": "yarn run generate:extension-components && tsc",
    "typecheck": "yarn run generate:extension-components && yarn run generate:database-docs && tsc",
    "generate:extension-components": "node scripts/generate-extension-components.mjs",
    "generate:database-docs": "node scripts/generate-database-docs.mjs",
    "gen-db-docs": "node scripts/generate-database-docs.mjs",
    "lint:db-metadata": "python3 ../superset/db_engine_specs/lint_metadata.py",
    "lint:db-metadata:report": "python3 ../superset/db_engine_specs/lint_metadata.py --markdown -o ../superset/db_engine_specs/METADATA_STATUS.md",
    "update:readme-db-logos": "node scripts/generate-database-docs.mjs --update-readme",
    "eslint": "eslint .",
    "version:add": "node scripts/manage-versions.mjs add",
    "version:remove": "node scripts/manage-versions.mjs remove",
@@ -51,8 +56,8 @@
|
||||
"@storybook/preview-api": "^8.6.11",
|
||||
"@storybook/theming": "^8.6.11",
|
||||
"@superset-ui/core": "^0.20.4",
|
||||
"antd": "^6.2.0",
|
||||
"caniuse-lite": "^1.0.30001764",
|
||||
"antd": "^6.2.1",
|
||||
"caniuse-lite": "^1.0.30001765",
|
||||
"docusaurus-plugin-less": "^2.0.2",
|
||||
"js-yaml": "^4.1.1",
|
||||
"js-yaml-loader": "^1.2.2",
|
||||
@@ -85,9 +90,9 @@
|
||||
"eslint-plugin-prettier": "^5.5.5",
|
||||
"eslint-plugin-react": "^7.37.5",
|
||||
"globals": "^17.0.0",
|
||||
"prettier": "^3.8.0",
|
||||
"prettier": "^3.8.1",
|
||||
"typescript": "~5.9.3",
|
||||
"typescript-eslint": "^8.53.0",
|
||||
"typescript-eslint": "^8.53.1",
|
||||
"webpack": "^5.104.1"
|
||||
},
|
||||
"browserslist": {
|
||||
|
||||
867
docs/scripts/generate-database-docs.mjs
Normal file
@@ -0,0 +1,867 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This script generates database documentation data from engine spec metadata.
|
||||
* It outputs a JSON file that can be imported by React components for rendering.
|
||||
*
|
||||
* Usage: node scripts/generate-database-docs.mjs
|
||||
*
|
||||
* The script can run in two modes:
|
||||
* 1. With Flask app (full diagnostics) - requires superset to be installed
|
||||
* 2. Fallback mode (documentation only) - parses engine spec `metadata` attributes via AST
|
||||
*/
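// A rough sketch of typical invocations, assuming the yarn aliases added in
// docs/package.json (plain `node scripts/generate-database-docs.mjs` also works):
//
//   yarn gen-db-docs                      # regenerate src/data/databases.json and the MDX pages
//   yarn gen-db-docs --update-readme      # additionally refresh the README database logos
//   UPDATE_README=true yarn gen-db-docs   # env-var equivalent of --update-readme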
|
||||
|
||||
import { spawnSync } from 'child_process';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const ROOT_DIR = path.resolve(__dirname, '../..');
|
||||
const DOCS_DIR = path.resolve(__dirname, '..');
|
||||
const DATA_OUTPUT_DIR = path.join(DOCS_DIR, 'src/data');
|
||||
const DATA_OUTPUT_FILE = path.join(DATA_OUTPUT_DIR, 'databases.json');
|
||||
const MDX_OUTPUT_DIR = path.join(DOCS_DIR, 'docs/databases');
|
||||
const MDX_SUPPORTED_DIR = path.join(MDX_OUTPUT_DIR, 'supported');
|
||||
|
||||
/**
|
||||
* Try to run the full lib.py script with Flask context
|
||||
*/
|
||||
function tryRunFullScript() {
|
||||
try {
|
||||
console.log('Attempting to run lib.py with Flask context...');
|
||||
const pythonCode = `
|
||||
import sys
|
||||
import json
|
||||
sys.path.insert(0, '.')
|
||||
from superset.app import create_app
|
||||
from superset.db_engine_specs.lib import generate_yaml_docs
|
||||
app = create_app()
|
||||
with app.app_context():
|
||||
docs = generate_yaml_docs()
|
||||
print(json.dumps(docs, default=str))
|
||||
`;
|
||||
const result = spawnSync('python', ['-c', pythonCode], {
|
||||
cwd: ROOT_DIR,
|
||||
encoding: 'utf-8',
|
||||
timeout: 60000,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
env: { ...process.env, SUPERSET_SECRET_KEY: 'docs-build-key' },
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
throw result.error;
|
||||
}
|
||||
if (result.status !== 0) {
|
||||
throw new Error(result.stderr || 'Python script failed');
|
||||
}
|
||||
return JSON.parse(result.stdout);
|
||||
} catch (error) {
|
||||
console.log('Full script execution failed, using fallback mode...');
|
||||
console.log(' Reason:', error.message?.split('\n')[0] || 'Unknown error');
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract metadata from individual engine spec files using AST parsing
|
||||
* This is the preferred approach - reads directly from spec.metadata attributes
|
||||
* Supports metadata inheritance - child classes inherit and merge with parent metadata
|
||||
*/
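// Illustrative example (hypothetical specs): if a parent class defines
//   metadata = { "drivers": [driverA], "categories": ["TRADITIONAL_RDBMS"] }
// and a child class defines
//   metadata = { "drivers": [driverB] }
// the child's resolved metadata contains both drivers (lists from the parent are
// extended), but neither "categories" nor "compatible_databases" is inherited;
// see NON_INHERITABLE_FIELDS in the embedded Python below.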
|
||||
function extractEngineSpecMetadata() {
|
||||
console.log('Extracting metadata from engine spec files...');
|
||||
console.log(` ROOT_DIR: ${ROOT_DIR}`);
|
||||
|
||||
try {
|
||||
const pythonCode = `
|
||||
import sys
|
||||
import json
|
||||
import ast
|
||||
import os
|
||||
|
||||
def eval_node(node):
|
||||
"""Safely evaluate an AST node as a Python literal."""
|
||||
if node is None:
|
||||
return None
|
||||
if isinstance(node, ast.Constant):
|
||||
return node.value
|
||||
elif isinstance(node, ast.List):
|
||||
return [eval_node(e) for e in node.elts]
|
||||
elif isinstance(node, ast.Dict):
|
||||
result = {}
|
||||
for k, v in zip(node.keys, node.values):
|
||||
if k is not None:
|
||||
key = eval_node(k)
|
||||
if key is not None:
|
||||
result[key] = eval_node(v)
|
||||
return result
|
||||
elif isinstance(node, ast.Name):
|
||||
# Handle True, False, None constants
|
||||
if node.id == 'True':
|
||||
return True
|
||||
elif node.id == 'False':
|
||||
return False
|
||||
elif node.id == 'None':
|
||||
return None
|
||||
return node.id
|
||||
elif isinstance(node, ast.Attribute):
|
||||
# Handle DatabaseCategory.SOMETHING - return just the attribute name
|
||||
return node.attr
|
||||
elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.Add):
|
||||
left, right = eval_node(node.left), eval_node(node.right)
|
||||
if isinstance(left, str) and isinstance(right, str):
|
||||
return left + right
|
||||
return None
|
||||
elif isinstance(node, ast.Tuple):
|
||||
return tuple(eval_node(e) for e in node.elts)
|
||||
elif isinstance(node, ast.JoinedStr):
|
||||
# f-strings - just return a placeholder
|
||||
return "<f-string>"
|
||||
return None
|
||||
|
||||
def deep_merge(base, override):
|
||||
"""Deep merge two dictionaries. Override values take precedence."""
|
||||
if base is None:
|
||||
return override
|
||||
if override is None:
|
||||
return base
|
||||
if not isinstance(base, dict) or not isinstance(override, dict):
|
||||
return override
|
||||
|
||||
# Fields that should NOT be inherited from parent classes
|
||||
# - compatible_databases: Each class defines its own compatible DBs
|
||||
# - categories: Each class defines its own categories (not extended from parent)
|
||||
NON_INHERITABLE_FIELDS = {'compatible_databases', 'categories'}
|
||||
|
||||
result = base.copy()
|
||||
# Remove non-inheritable fields from base (they should only come from the class that defines them)
|
||||
for field in NON_INHERITABLE_FIELDS:
|
||||
result.pop(field, None)
|
||||
|
||||
for key, value in override.items():
|
||||
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
|
||||
result[key] = deep_merge(result[key], value)
|
||||
elif key in result and isinstance(result[key], list) and isinstance(value, list):
|
||||
# Extend lists from parent (e.g., drivers)
|
||||
result[key] = result[key] + value
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
|
||||
databases = {}
|
||||
specs_dir = 'superset/db_engine_specs'
|
||||
errors = []
|
||||
debug_info = {
|
||||
"cwd": os.getcwd(),
|
||||
"specs_dir_exists": os.path.isdir(specs_dir),
|
||||
"files_checked": 0,
|
||||
"classes_found": 0,
|
||||
"classes_with_metadata": 0,
|
||||
"inherited_metadata": 0,
|
||||
}
|
||||
|
||||
if not os.path.isdir(specs_dir):
|
||||
print(json.dumps({"error": f"Directory not found: {specs_dir}", "cwd": os.getcwd()}))
|
||||
sys.exit(1)
|
||||
|
||||
# First pass: collect all class info (name, bases, metadata)
|
||||
class_info = {} # class_name -> {bases: [], metadata: {}, engine_name: str, filename: str}
|
||||
|
||||
for filename in sorted(os.listdir(specs_dir)):
|
||||
if not filename.endswith('.py') or filename in ('__init__.py', 'lib.py', 'lint_metadata.py'):
|
||||
continue
|
||||
|
||||
debug_info["files_checked"] += 1
|
||||
filepath = os.path.join(specs_dir, filename)
|
||||
try:
|
||||
with open(filepath) as f:
|
||||
source = f.read()
|
||||
tree = ast.parse(source)
|
||||
|
||||
for node in ast.walk(tree):
|
||||
if not isinstance(node, ast.ClassDef):
|
||||
continue
|
||||
|
||||
# Get base class names
|
||||
base_names = []
|
||||
for b in node.bases:
|
||||
if isinstance(b, ast.Name):
|
||||
base_names.append(b.id)
|
||||
elif isinstance(b, ast.Attribute):
|
||||
base_names.append(b.attr)
|
||||
|
||||
is_engine_spec = any('EngineSpec' in name or 'Mixin' in name for name in base_names)
|
||||
if not is_engine_spec:
|
||||
continue
|
||||
|
||||
# Extract class attributes
|
||||
engine_name = None
|
||||
metadata = None
|
||||
|
||||
for item in node.body:
|
||||
if isinstance(item, ast.Assign):
|
||||
for target in item.targets:
|
||||
if isinstance(target, ast.Name):
|
||||
if target.id == 'engine_name':
|
||||
val = eval_node(item.value)
|
||||
if isinstance(val, str):
|
||||
engine_name = val
|
||||
elif target.id == 'metadata':
|
||||
metadata = eval_node(item.value)
|
||||
|
||||
# Check for engine attribute with non-empty value to distinguish
|
||||
# true base classes from product classes like OceanBaseEngineSpec
|
||||
has_non_empty_engine = False
|
||||
for item in node.body:
|
||||
if isinstance(item, ast.Assign):
|
||||
for target in item.targets:
|
||||
if isinstance(target, ast.Name) and target.id == 'engine':
|
||||
# Check if engine value is non-empty string
|
||||
if isinstance(item.value, ast.Constant):
|
||||
has_non_empty_engine = bool(item.value.value)
|
||||
break
|
||||
|
||||
# True base classes: end with BaseEngineSpec AND don't define engine
|
||||
# or have empty engine (like PostgresBaseEngineSpec with engine = "")
|
||||
is_true_base = (
|
||||
node.name.endswith('BaseEngineSpec') and not has_non_empty_engine
|
||||
) or 'Mixin' in node.name
|
||||
|
||||
# Store class info for inheritance resolution
|
||||
class_info[node.name] = {
|
||||
'bases': base_names,
|
||||
'metadata': metadata,
|
||||
'engine_name': engine_name,
|
||||
'filename': filename,
|
||||
'is_base_or_mixin': is_true_base,
|
||||
}
|
||||
except Exception as e:
|
||||
errors.append(f"{filename}: {str(e)}")
|
||||
|
||||
# Second pass: resolve inheritance and build final metadata
|
||||
def get_inherited_metadata(class_name, visited=None):
|
||||
"""Recursively get metadata from parent classes."""
|
||||
if visited is None:
|
||||
visited = set()
|
||||
if class_name in visited:
|
||||
return {} # Prevent circular inheritance
|
||||
visited.add(class_name)
|
||||
|
||||
info = class_info.get(class_name)
|
||||
if not info:
|
||||
return {}
|
||||
|
||||
# Start with parent metadata
|
||||
inherited = {}
|
||||
for base_name in info['bases']:
|
||||
parent_metadata = get_inherited_metadata(base_name, visited.copy())
|
||||
if parent_metadata:
|
||||
inherited = deep_merge(inherited, parent_metadata)
|
||||
|
||||
# Merge with own metadata (own takes precedence)
|
||||
if info['metadata']:
|
||||
inherited = deep_merge(inherited, info['metadata'])
|
||||
|
||||
return inherited
|
||||
|
||||
for class_name, info in class_info.items():
|
||||
# Skip base classes and mixins
|
||||
if info['is_base_or_mixin']:
|
||||
continue
|
||||
|
||||
debug_info["classes_found"] += 1
|
||||
|
||||
# Get final metadata with inheritance
|
||||
final_metadata = get_inherited_metadata(class_name)
|
||||
|
||||
# Remove compatible_databases if not defined by this class (it's not inheritable)
|
||||
own_metadata = info['metadata'] or {}
|
||||
if 'compatible_databases' not in own_metadata and 'compatible_databases' in final_metadata:
|
||||
del final_metadata['compatible_databases']
|
||||
|
||||
# Track if we inherited anything
|
||||
if final_metadata and final_metadata != own_metadata:
|
||||
debug_info["inherited_metadata"] += 1
|
||||
|
||||
# Use class name as fallback for engine_name
|
||||
display_name = info['engine_name'] or class_name.replace('EngineSpec', '').replace('_', ' ')
|
||||
|
||||
if final_metadata and isinstance(final_metadata, dict) and display_name:
|
||||
debug_info["classes_with_metadata"] += 1
|
||||
databases[display_name] = {
|
||||
'engine': display_name.lower().replace(' ', '_'),
|
||||
'engine_name': display_name,
|
||||
'module': info['filename'][:-3], # Remove .py extension
|
||||
'documentation': final_metadata,
|
||||
'time_grains': {},
|
||||
'score': 0,
|
||||
'max_score': 0,
|
||||
'joins': True,
|
||||
'subqueries': True,
|
||||
'supports_dynamic_schema': False,
|
||||
'supports_catalog': False,
|
||||
'supports_dynamic_catalog': False,
|
||||
'ssh_tunneling': False,
|
||||
'query_cancelation': False,
|
||||
'supports_file_upload': False,
|
||||
'user_impersonation': False,
|
||||
'query_cost_estimation': False,
|
||||
'sql_validation': False,
|
||||
}
|
||||
|
||||
if errors and not databases:
|
||||
print(json.dumps({"error": "Parse errors", "details": errors, "debug": debug_info}), file=sys.stderr)
|
||||
|
||||
# Print debug info to stderr for troubleshooting
|
||||
print(json.dumps(debug_info), file=sys.stderr)
|
||||
|
||||
print(json.dumps(databases, default=str))
|
||||
`;
|
||||
const result = spawnSync('python3', ['-c', pythonCode], {
|
||||
cwd: ROOT_DIR,
|
||||
encoding: 'utf-8',
|
||||
timeout: 30000,
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
throw result.error;
|
||||
}
|
||||
// Log debug info from stderr
|
||||
if (result.stderr) {
|
||||
console.log('Python debug info:', result.stderr.trim());
|
||||
}
|
||||
if (result.status !== 0) {
|
||||
throw new Error(result.stderr || 'Python script failed');
|
||||
}
|
||||
const databases = JSON.parse(result.stdout);
|
||||
if (Object.keys(databases).length === 0) {
|
||||
throw new Error('No metadata found in engine specs');
|
||||
}
|
||||
|
||||
console.log(`Extracted metadata from ${Object.keys(databases).length} engine specs`);
|
||||
return databases;
|
||||
} catch (err) {
|
||||
console.log('Engine spec metadata extraction failed:', err.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build statistics from the database data
|
||||
*/
|
||||
function buildStatistics(databases) {
|
||||
const stats = {
|
||||
totalDatabases: Object.keys(databases).length,
|
||||
withDocumentation: 0,
|
||||
withConnectionString: 0,
|
||||
withDrivers: 0,
|
||||
withAuthMethods: 0,
|
||||
supportsJoins: 0,
|
||||
supportsSubqueries: 0,
|
||||
supportsDynamicSchema: 0,
|
||||
supportsCatalog: 0,
|
||||
averageScore: 0,
|
||||
maxScore: 0,
|
||||
byCategory: {},
|
||||
};
|
||||
|
||||
let totalScore = 0;
|
||||
|
||||
for (const [name, db] of Object.entries(databases)) {
|
||||
const docs = db.documentation || {};
|
||||
|
||||
if (Object.keys(docs).length > 0) stats.withDocumentation++;
|
||||
if (docs.connection_string || docs.drivers?.length > 0)
|
||||
stats.withConnectionString++;
|
||||
if (docs.drivers?.length > 0) stats.withDrivers++;
|
||||
if (docs.authentication_methods?.length > 0) stats.withAuthMethods++;
|
||||
if (db.joins) stats.supportsJoins++;
|
||||
if (db.subqueries) stats.supportsSubqueries++;
|
||||
if (db.supports_dynamic_schema) stats.supportsDynamicSchema++;
|
||||
if (db.supports_catalog) stats.supportsCatalog++;
|
||||
|
||||
totalScore += db.score || 0;
|
||||
if (db.max_score > stats.maxScore) stats.maxScore = db.max_score;
|
||||
|
||||
// Use categories from documentation metadata (computed by Python)
|
||||
// Each database can belong to multiple categories
|
||||
const categories = docs.categories || ['OTHER'];
|
||||
for (const cat of categories) {
|
||||
// Map category constant names to display names
|
||||
const categoryDisplayNames = {
|
||||
'CLOUD_AWS': 'Cloud - AWS',
|
||||
'CLOUD_GCP': 'Cloud - Google',
|
||||
'CLOUD_AZURE': 'Cloud - Azure',
|
||||
'CLOUD_DATA_WAREHOUSES': 'Cloud Data Warehouses',
|
||||
'APACHE_PROJECTS': 'Apache Projects',
|
||||
'TRADITIONAL_RDBMS': 'Traditional RDBMS',
|
||||
'ANALYTICAL_DATABASES': 'Analytical Databases',
|
||||
'SEARCH_NOSQL': 'Search & NoSQL',
|
||||
'QUERY_ENGINES': 'Query Engines',
|
||||
'TIME_SERIES': 'Time Series Databases',
|
||||
'OTHER': 'Other Databases',
|
||||
'OPEN_SOURCE': 'Open Source',
|
||||
'HOSTED_OPEN_SOURCE': 'Hosted Open Source',
|
||||
'PROPRIETARY': 'Proprietary',
|
||||
};
|
||||
const displayName = categoryDisplayNames[cat] || cat;
|
||||
if (!stats.byCategory[displayName]) {
|
||||
stats.byCategory[displayName] = [];
|
||||
}
|
||||
stats.byCategory[displayName].push(name);
|
||||
}
|
||||
}
|
||||
|
||||
stats.averageScore = Math.round(totalScore / stats.totalDatabases);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert database name to a URL-friendly slug
|
||||
*/
|
||||
function toSlug(name) {
|
||||
return name
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9]+/g, '-')
|
||||
.replace(/^-|-$/g, '');
|
||||
}
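// e.g. toSlug('Apache Druid') -> 'apache-druid'; slugs become the MDX filenames
// under docs/databases/supported/ and the corresponding page URLs.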
|
||||
|
||||
/**
|
||||
* Generate MDX content for a single database page
|
||||
*/
|
||||
function generateDatabaseMDX(name, db) {
|
||||
const description = db.documentation?.description || `Documentation for ${name} database connection.`;
|
||||
const shortDesc = description
|
||||
.slice(0, 160)
|
||||
.replace(/\\/g, '\\\\')
|
||||
.replace(/"/g, '\\"');
|
||||
|
||||
return `---
|
||||
title: ${name}
|
||||
sidebar_label: ${name}
|
||||
description: "${shortDesc}"
|
||||
hide_title: true
|
||||
---
|
||||
|
||||
{/*
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
*/}
|
||||
|
||||
import { DatabasePage } from '@site/src/components/databases';
|
||||
import databaseData from '@site/src/data/databases.json';
|
||||
|
||||
<DatabasePage name="${name}" database={databaseData.databases["${name}"]} />
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate the index MDX for the databases overview
|
||||
*/
|
||||
function generateIndexMDX(statistics, usedFlaskContext = true) {
|
||||
const fallbackNotice = usedFlaskContext ? '' : `
|
||||
:::info Developer Note
|
||||
This documentation was built without Flask context, so feature diagnostics (scores, time grain support, etc.)
|
||||
may not reflect actual database capabilities. For full diagnostics, build docs locally with:
|
||||
|
||||
\`\`\`bash
|
||||
cd docs && npm run gen-db-docs
|
||||
\`\`\`
|
||||
|
||||
This requires a working Superset development environment.
|
||||
:::
|
||||
|
||||
`;
|
||||
|
||||
return `---
|
||||
title: Connecting to Databases
|
||||
sidebar_label: Overview
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
{/*
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
*/}
|
||||
|
||||
import { DatabaseIndex } from '@site/src/components/databases';
|
||||
import databaseData from '@site/src/data/databases.json';
|
||||
|
||||
# Connecting to Databases
|
||||
|
||||
Superset does not ship bundled with connectivity to databases. The main step in connecting
|
||||
Superset to a database is to **install the proper database driver(s)** in your environment.
|
||||
|
||||
:::note
|
||||
You'll need to install the required packages for the database you want to use as your metadata database
|
||||
as well as the packages needed to connect to the databases you want to access through Superset.
|
||||
For information about setting up Superset's metadata database, please refer to
|
||||
the installation documentation ([Docker Compose](/docs/installation/docker-compose), [Kubernetes](/docs/installation/kubernetes)).
|
||||
:::
|
||||
|
||||
## Supported Databases
|
||||
|
||||
Superset supports **${statistics.totalDatabases} databases** with varying levels of feature support.
|
||||
Click on any database name to see detailed documentation including connection strings,
|
||||
authentication methods, and configuration options.
|
||||
|
||||
<DatabaseIndex data={databaseData} />
|
||||
|
||||
## Installing Database Drivers
|
||||
|
||||
Superset requires a Python [DB-API database driver](https://peps.python.org/pep-0249/)
|
||||
and a [SQLAlchemy dialect](https://docs.sqlalchemy.org/en/20/dialects/) to be installed for
|
||||
each database engine you want to connect to.
|
||||
|
||||
### Installing Drivers in Docker
|
||||
|
||||
For Docker deployments, create a \`requirements-local.txt\` file in the \`docker\` directory:
|
||||
|
||||
\`\`\`bash
|
||||
# Create the requirements file
|
||||
touch ./docker/requirements-local.txt
|
||||
|
||||
# Add your driver (e.g., for PostgreSQL)
|
||||
echo "psycopg2-binary" >> ./docker/requirements-local.txt
|
||||
\`\`\`
|
||||
|
||||
Then restart your containers (for the default Docker Compose setup, \`docker compose restart\` is typically enough); the drivers will be installed automatically at startup.
|
||||
|
||||
### Installing Drivers with pip
|
||||
|
||||
For non-Docker installations:
|
||||
|
||||
\`\`\`bash
|
||||
pip install <driver-package>
|
||||
\`\`\`
|
||||
|
||||
See individual database pages for the specific driver packages needed.
|
||||
|
||||
## Connecting Through the UI
|
||||
|
||||
1. Go to **Settings → Data: Database Connections**
|
||||
2. Click **+ DATABASE**
|
||||
3. Select your database type or enter a SQLAlchemy URI
|
||||
4. Click **Test Connection** to verify
|
||||
5. Click **Connect** to save
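
For example, a PostgreSQL connection typically uses a SQLAlchemy URI of the form
\`postgresql+psycopg2://username:password@host:5432/database_name\` (all values here are
placeholders; see the individual database pages for engine-specific URI formats).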
|
||||
|
||||
## Contributing
|
||||
|
||||
To add or update database documentation, add a \`metadata\` attribute to your engine spec class in
|
||||
\`superset/db_engine_specs/\`. Documentation is auto-generated from these metadata attributes.
|
||||
|
||||
See [METADATA_STATUS.md](https://github.com/apache/superset/blob/master/superset/db_engine_specs/METADATA_STATUS.md)
|
||||
for the current status of database documentation and the [README](https://github.com/apache/superset/blob/master/superset/db_engine_specs/README.md) for the metadata schema.
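
As a rough illustration only (the class and values below are hypothetical, and the field names
are simply the ones this generator and its components read; the README above remains the
authoritative schema), a minimal \`metadata\` attribute might look like:

\`\`\`python
# Hypothetical engine spec, for illustration only
class ExampleEngineSpec(BaseEngineSpec):
    engine = "example"
    engine_name = "Example DB"

    metadata = {
        "description": "Short description shown in the database index.",
        "categories": ["TRADITIONAL_RDBMS", "OPEN_SOURCE"],
        "drivers": [
            {
                "name": "example-driver",
                "pypi_package": "example-driver",
                "connection_string": "example://user:password@host:port/dbname",
                "is_recommended": True,
            }
        ],
    }
\`\`\`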
|
||||
${fallbackNotice}`;
|
||||
}
|
||||
|
||||
const README_PATH = path.join(ROOT_DIR, 'README.md');
|
||||
const README_START_MARKER = '<!-- SUPPORTED_DATABASES_START -->';
|
||||
const README_END_MARKER = '<!-- SUPPORTED_DATABASES_END -->';
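// README.md opts in by containing both markers; everything between them is replaced, e.g.:
//   <!-- SUPPORTED_DATABASES_START -->
//   ...generated <p> block of database logos...
//   <!-- SUPPORTED_DATABASES_END -->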
|
||||
|
||||
/**
|
||||
* Generate the database logos HTML for README.md
|
||||
* Only includes databases that have logos defined
|
||||
*/
|
||||
function generateReadmeLogos(databases) {
|
||||
// Get databases with logos, sorted alphabetically
|
||||
const dbsWithLogos = Object.entries(databases)
|
||||
.filter(([, db]) => db.documentation?.logo)
|
||||
.sort(([a], [b]) => a.localeCompare(b));
|
||||
|
||||
if (dbsWithLogos.length === 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Generate HTML img tags
|
||||
const logoTags = dbsWithLogos.map(([name, db]) => {
|
||||
const logo = db.documentation.logo;
|
||||
const alt = name.toLowerCase().replace(/\s+/g, '-');
|
||||
// Use docs site URL for logos
|
||||
return ` <img src="https://superset.apache.org/img/databases/${logo}" alt="${alt}" border="0" width="80" height="40" class="database-logo" />`;
|
||||
});
|
||||
|
||||
return `<p align="center">
|
||||
${logoTags.join('\n')}
|
||||
</p>`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the README.md with generated database logos
|
||||
*/
|
||||
function updateReadme(databases) {
|
||||
if (!fs.existsSync(README_PATH)) {
|
||||
console.log('README.md not found, skipping update');
|
||||
return false;
|
||||
}
|
||||
|
||||
const content = fs.readFileSync(README_PATH, 'utf-8');
|
||||
|
||||
// Check if markers exist
|
||||
if (!content.includes(README_START_MARKER) || !content.includes(README_END_MARKER)) {
|
||||
console.log('README.md missing database markers, skipping update');
|
||||
console.log(` Add ${README_START_MARKER} and ${README_END_MARKER} to enable auto-generation`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Generate new logos section
|
||||
const logosHtml = generateReadmeLogos(databases);
|
||||
|
||||
// Replace content between markers
|
||||
const pattern = new RegExp(
|
||||
`${README_START_MARKER}[\\s\\S]*?${README_END_MARKER}`,
|
||||
'g'
|
||||
);
|
||||
const newContent = content.replace(
|
||||
pattern,
|
||||
`${README_START_MARKER}\n${logosHtml}\n${README_END_MARKER}`
|
||||
);
|
||||
|
||||
if (newContent !== content) {
|
||||
fs.writeFileSync(README_PATH, newContent);
|
||||
console.log('Updated README.md database logos');
|
||||
return true;
|
||||
}
|
||||
|
||||
console.log('README.md database logos unchanged');
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load existing database data if available
|
||||
*/
|
||||
function loadExistingData() {
|
||||
if (!fs.existsSync(DATA_OUTPUT_FILE)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(DATA_OUTPUT_FILE, 'utf-8');
|
||||
return JSON.parse(content);
|
||||
} catch (error) {
|
||||
console.log('Could not load existing data:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge new documentation with existing diagnostics
|
||||
* Preserves score, time_grains, and feature flags from existing data
|
||||
*/
|
||||
function mergeWithExistingDiagnostics(newDatabases, existingData) {
|
||||
if (!existingData?.databases) return newDatabases;
|
||||
|
||||
const diagnosticFields = [
|
||||
'score', 'max_score', 'time_grains', 'joins', 'subqueries',
|
||||
'supports_dynamic_schema', 'supports_catalog', 'supports_dynamic_catalog',
|
||||
'ssh_tunneling', 'query_cancelation', 'supports_file_upload',
|
||||
'user_impersonation', 'query_cost_estimation', 'sql_validation'
|
||||
];
|
||||
|
||||
for (const [name, db] of Object.entries(newDatabases)) {
|
||||
const existingDb = existingData.databases[name];
|
||||
if (existingDb && existingDb.score > 0) {
|
||||
// Preserve diagnostics from existing data
|
||||
for (const field of diagnosticFields) {
|
||||
if (existingDb[field] !== undefined) {
|
||||
db[field] = existingDb[field];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const preserved = Object.values(newDatabases).filter(d => d.score > 0).length;
|
||||
if (preserved > 0) {
|
||||
console.log(`Preserved diagnostics for ${preserved} databases from existing data`);
|
||||
}
|
||||
|
||||
return newDatabases;
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function
|
||||
*/
|
||||
async function main() {
|
||||
console.log('Generating database documentation...\n');
|
||||
|
||||
// Ensure output directories exist
|
||||
if (!fs.existsSync(DATA_OUTPUT_DIR)) {
|
||||
fs.mkdirSync(DATA_OUTPUT_DIR, { recursive: true });
|
||||
}
|
||||
if (!fs.existsSync(MDX_OUTPUT_DIR)) {
|
||||
fs.mkdirSync(MDX_OUTPUT_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
// Load existing data for potential merge
|
||||
const existingData = loadExistingData();
|
||||
|
||||
// Try sources in order of preference:
|
||||
// 1. Full script with Flask context (richest data with diagnostics)
|
||||
// 2. Engine spec metadata files (works in CI without Flask)
|
||||
let databases = tryRunFullScript();
|
||||
let usedFlaskContext = !!databases;
|
||||
|
||||
if (!databases) {
|
||||
// Extract from engine spec metadata (preferred for CI)
|
||||
databases = extractEngineSpecMetadata();
|
||||
}
|
||||
|
||||
if (!databases || Object.keys(databases).length === 0) {
|
||||
console.error('Failed to generate database documentation data.');
|
||||
console.error('Could not extract from Flask app or engine spec metadata.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`Processed ${Object.keys(databases).length} databases\n`);
|
||||
|
||||
// Check if new data has scores; if not, preserve existing diagnostics
|
||||
const hasNewScores = Object.values(databases).some((db) => db.score > 0);
|
||||
if (!hasNewScores && existingData) {
|
||||
databases = mergeWithExistingDiagnostics(databases, existingData);
|
||||
}
|
||||
|
||||
// Build statistics
|
||||
const statistics = buildStatistics(databases);
|
||||
|
||||
// Create the final output structure
|
||||
const output = {
|
||||
generated: new Date().toISOString(),
|
||||
statistics,
|
||||
databases,
|
||||
};
|
||||
|
||||
// Write the JSON file (with trailing newline for POSIX compliance)
|
||||
fs.writeFileSync(DATA_OUTPUT_FILE, JSON.stringify(output, null, 2) + '\n');
|
||||
console.log(`Generated: ${path.relative(DOCS_DIR, DATA_OUTPUT_FILE)}`);
|
||||
|
||||
|
||||
// Ensure supported directory exists
|
||||
if (!fs.existsSync(MDX_SUPPORTED_DIR)) {
|
||||
fs.mkdirSync(MDX_SUPPORTED_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
// Clean up old MDX files that are no longer in the database list
|
||||
console.log(`\nCleaning up old MDX files in ${path.relative(DOCS_DIR, MDX_SUPPORTED_DIR)}/`);
|
||||
const existingMdxFiles = fs.readdirSync(MDX_SUPPORTED_DIR).filter(f => f.endsWith('.mdx'));
|
||||
const validSlugs = new Set(Object.keys(databases).map(name => `${toSlug(name)}.mdx`));
|
||||
let removedCount = 0;
|
||||
for (const file of existingMdxFiles) {
|
||||
if (!validSlugs.has(file)) {
|
||||
fs.unlinkSync(path.join(MDX_SUPPORTED_DIR, file));
|
||||
removedCount++;
|
||||
}
|
||||
}
|
||||
if (removedCount > 0) {
|
||||
console.log(` Removed ${removedCount} outdated MDX files`);
|
||||
}
|
||||
|
||||
// Generate individual MDX files for each database in supported/ subdirectory
|
||||
console.log(`\nGenerating MDX files in ${path.relative(DOCS_DIR, MDX_SUPPORTED_DIR)}/`);
|
||||
|
||||
let mdxCount = 0;
|
||||
for (const [name, db] of Object.entries(databases)) {
|
||||
const slug = toSlug(name);
|
||||
const mdxContent = generateDatabaseMDX(name, db);
|
||||
const mdxPath = path.join(MDX_SUPPORTED_DIR, `${slug}.mdx`);
|
||||
fs.writeFileSync(mdxPath, mdxContent);
|
||||
mdxCount++;
|
||||
}
|
||||
console.log(` Generated ${mdxCount} database pages`);
|
||||
|
||||
// Generate index page in parent databases/ directory
|
||||
const indexContent = generateIndexMDX(statistics, usedFlaskContext);
|
||||
const indexPath = path.join(MDX_OUTPUT_DIR, 'index.mdx');
|
||||
fs.writeFileSync(indexPath, indexContent);
|
||||
console.log(` Generated index page`);
|
||||
|
||||
// Generate _category_.json for databases/ directory
|
||||
const categoryJson = {
|
||||
label: 'Databases',
|
||||
position: 1,
|
||||
link: {
|
||||
type: 'doc',
|
||||
id: 'databases/index',
|
||||
},
|
||||
};
|
||||
fs.writeFileSync(
|
||||
path.join(MDX_OUTPUT_DIR, '_category_.json'),
|
||||
JSON.stringify(categoryJson, null, 2) + '\n'
|
||||
);
|
||||
|
||||
// Generate _category_.json for supported/ subdirectory (collapsible)
|
||||
const supportedCategoryJson = {
|
||||
label: 'Supported Databases',
|
||||
position: 2,
|
||||
collapsed: true,
|
||||
collapsible: true,
|
||||
};
|
||||
fs.writeFileSync(
|
||||
path.join(MDX_SUPPORTED_DIR, '_category_.json'),
|
||||
JSON.stringify(supportedCategoryJson, null, 2) + '\n'
|
||||
);
|
||||
console.log(` Generated _category_.json files`);
|
||||
|
||||
// Update README.md database logos (only when explicitly requested)
|
||||
if (process.env.UPDATE_README === 'true' || process.argv.includes('--update-readme')) {
|
||||
console.log('');
|
||||
updateReadme(databases);
|
||||
}
|
||||
|
||||
console.log(`\nStatistics:`);
|
||||
console.log(` Total databases: ${statistics.totalDatabases}`);
|
||||
console.log(` With documentation: ${statistics.withDocumentation}`);
|
||||
console.log(` With connection strings: ${statistics.withConnectionString}`);
|
||||
console.log(` Categories: ${Object.keys(statistics.byCategory).length}`);
|
||||
|
||||
console.log('\nDone!');
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
@@ -57,6 +57,20 @@ const sidebars = {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Databases',
|
||||
link: {
|
||||
type: 'doc',
|
||||
id: 'databases/index',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
type: 'autogenerated',
|
||||
dirName: 'databases',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'category',
|
||||
label: 'Using Superset',
|
||||
|
||||
@@ -98,6 +98,7 @@ interface SectionHeaderProps {
|
||||
title: string;
|
||||
subtitle?: string | ReactNode;
|
||||
dark?: boolean;
|
||||
link?: string;
|
||||
}
|
||||
|
||||
const SectionHeader = ({
|
||||
@@ -105,15 +106,24 @@ const SectionHeader = ({
|
||||
title,
|
||||
subtitle,
|
||||
dark,
|
||||
link,
|
||||
}: SectionHeaderProps) => {
|
||||
const Heading = level;
|
||||
|
||||
const StyledRoot =
|
||||
level === 'h1' ? StyledSectionHeaderH1 : StyledSectionHeaderH2;
|
||||
|
||||
const titleContent = link ? (
|
||||
<a href={link} style={{ color: 'inherit', textDecoration: 'none' }}>
|
||||
{title}
|
||||
</a>
|
||||
) : (
|
||||
title
|
||||
);
|
||||
|
||||
return (
|
||||
<StyledRoot dark={!!dark}>
|
||||
<Heading className="title">{title}</Heading>
|
||||
<Heading className="title">{titleContent}</Heading>
|
||||
<img className="line" src="/img/community/line.png" alt="line" />
|
||||
{subtitle && <div className="subtitle">{subtitle}</div>}
|
||||
</StyledRoot>
|
||||
|
||||
578
docs/src/components/databases/DatabaseIndex.tsx
Normal file
@@ -0,0 +1,578 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import React, { useState, useMemo } from 'react';
|
||||
import { Card, Row, Col, Statistic, Table, Tag, Input, Select, Tooltip } from 'antd';
|
||||
import {
|
||||
DatabaseOutlined,
|
||||
CheckCircleOutlined,
|
||||
ApiOutlined,
|
||||
KeyOutlined,
|
||||
SearchOutlined,
|
||||
LinkOutlined,
|
||||
} from '@ant-design/icons';
|
||||
import type { DatabaseData, DatabaseInfo, TimeGrains } from './types';
|
||||
|
||||
interface DatabaseIndexProps {
|
||||
data: DatabaseData;
|
||||
}
|
||||
|
||||
// Type for table entries (includes both regular DBs and compatible DBs)
|
||||
interface TableEntry {
|
||||
name: string;
|
||||
categories: string[]; // Multiple categories supported
|
||||
score: number;
|
||||
max_score: number;
|
||||
timeGrainCount: number;
|
||||
time_grains?: TimeGrains;
|
||||
hasDrivers: boolean;
|
||||
hasAuthMethods: boolean;
|
||||
hasConnectionString: boolean;
|
||||
joins?: boolean;
|
||||
subqueries?: boolean;
|
||||
supports_dynamic_schema?: boolean;
|
||||
supports_catalog?: boolean;
|
||||
ssh_tunneling?: boolean;
|
||||
supports_file_upload?: boolean;
|
||||
query_cancelation?: boolean;
|
||||
query_cost_estimation?: boolean;
|
||||
user_impersonation?: boolean;
|
||||
sql_validation?: boolean;
|
||||
documentation?: DatabaseInfo['documentation'];
|
||||
// For compatible databases
|
||||
isCompatible?: boolean;
|
||||
compatibleWith?: string;
|
||||
compatibleDescription?: string;
|
||||
}
|
||||
|
||||
// Map category constant names to display names
|
||||
const CATEGORY_DISPLAY_NAMES: Record<string, string> = {
|
||||
'CLOUD_AWS': 'Cloud - AWS',
|
||||
'CLOUD_GCP': 'Cloud - Google',
|
||||
'CLOUD_AZURE': 'Cloud - Azure',
|
||||
'CLOUD_DATA_WAREHOUSES': 'Cloud Data Warehouses',
|
||||
'APACHE_PROJECTS': 'Apache Projects',
|
||||
'TRADITIONAL_RDBMS': 'Traditional RDBMS',
|
||||
'ANALYTICAL_DATABASES': 'Analytical Databases',
|
||||
'SEARCH_NOSQL': 'Search & NoSQL',
|
||||
'QUERY_ENGINES': 'Query Engines',
|
||||
'TIME_SERIES': 'Time Series Databases',
|
||||
'OTHER': 'Other Databases',
|
||||
'OPEN_SOURCE': 'Open Source',
|
||||
'HOSTED_OPEN_SOURCE': 'Hosted Open Source',
|
||||
'PROPRIETARY': 'Proprietary',
|
||||
};
|
||||
|
||||
// Category colors for visual distinction
|
||||
const CATEGORY_COLORS: Record<string, string> = {
|
||||
'Cloud - AWS': 'orange',
|
||||
'Cloud - Google': 'blue',
|
||||
'Cloud - Azure': 'cyan',
|
||||
'Cloud Data Warehouses': 'purple',
|
||||
'Apache Projects': 'red',
|
||||
'Traditional RDBMS': 'green',
|
||||
'Analytical Databases': 'magenta',
|
||||
'Search & NoSQL': 'gold',
|
||||
'Query Engines': 'lime',
|
||||
'Time Series Databases': 'volcano',
|
||||
'Other Databases': 'default',
|
||||
// Licensing categories
|
||||
'Open Source': 'geekblue',
|
||||
'Hosted Open Source': 'cyan',
|
||||
'Proprietary': 'default',
|
||||
};
|
||||
|
||||
// Convert category constant to display name
|
||||
function getCategoryDisplayName(cat: string): string {
|
||||
return CATEGORY_DISPLAY_NAMES[cat] || cat;
|
||||
}
|
||||
|
||||
// Get categories for a database - uses categories from metadata when available
|
||||
// Falls back to name-based inference for compatible databases without categories
|
||||
function getCategories(
|
||||
name: string,
|
||||
documentationCategories?: string[]
|
||||
): string[] {
|
||||
// Prefer categories from documentation metadata (computed by Python)
|
||||
if (documentationCategories && documentationCategories.length > 0) {
|
||||
return documentationCategories.map(getCategoryDisplayName);
|
||||
}
|
||||
|
||||
// Fallback: infer from name (for compatible databases without categories)
|
||||
const nameLower = name.toLowerCase();
|
||||
|
||||
if (nameLower.includes('aws') || nameLower.includes('amazon'))
|
||||
return ['Cloud - AWS'];
|
||||
if (nameLower.includes('google') || nameLower.includes('bigquery'))
|
||||
return ['Cloud - Google'];
|
||||
if (nameLower.includes('azure') || nameLower.includes('microsoft'))
|
||||
return ['Cloud - Azure'];
|
||||
if (nameLower.includes('snowflake') || nameLower.includes('databricks'))
|
||||
return ['Cloud Data Warehouses'];
|
||||
if (
|
||||
nameLower.includes('apache') ||
|
||||
nameLower.includes('druid') ||
|
||||
nameLower.includes('hive') ||
|
||||
nameLower.includes('spark')
|
||||
)
|
||||
return ['Apache Projects'];
|
||||
if (
|
||||
nameLower.includes('postgres') ||
|
||||
nameLower.includes('mysql') ||
|
||||
nameLower.includes('sqlite') ||
|
||||
nameLower.includes('mariadb')
|
||||
)
|
||||
return ['Traditional RDBMS'];
|
||||
if (
|
||||
nameLower.includes('clickhouse') ||
|
||||
nameLower.includes('vertica') ||
|
||||
nameLower.includes('starrocks')
|
||||
)
|
||||
return ['Analytical Databases'];
|
||||
if (
|
||||
nameLower.includes('elastic') ||
|
||||
nameLower.includes('solr') ||
|
||||
nameLower.includes('couchbase')
|
||||
)
|
||||
return ['Search & NoSQL'];
|
||||
if (nameLower.includes('trino') || nameLower.includes('presto'))
|
||||
return ['Query Engines'];
|
||||
|
||||
return ['Other Databases'];
|
||||
}
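// Illustrative (hypothetical) inputs for the fallback above:
//   getCategories('Amazon Athena')                 -> ['Cloud - AWS']
//   getCategories('SomeDB', ['TRADITIONAL_RDBMS']) -> ['Traditional RDBMS']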
|
||||
|
||||
// Count supported time grains
|
||||
function countTimeGrains(db: DatabaseInfo): number {
|
||||
if (!db.time_grains) return 0;
|
||||
return Object.values(db.time_grains).filter(Boolean).length;
|
||||
}
|
||||
|
||||
// Format time grain name for display (e.g., FIVE_MINUTES -> "5 min")
|
||||
function formatTimeGrain(grain: string): string {
|
||||
const mapping: Record<string, string> = {
|
||||
SECOND: 'Second',
|
||||
FIVE_SECONDS: '5 sec',
|
||||
THIRTY_SECONDS: '30 sec',
|
||||
MINUTE: 'Minute',
|
||||
FIVE_MINUTES: '5 min',
|
||||
TEN_MINUTES: '10 min',
|
||||
FIFTEEN_MINUTES: '15 min',
|
||||
THIRTY_MINUTES: '30 min',
|
||||
HALF_HOUR: '30 min',
|
||||
HOUR: 'Hour',
|
||||
SIX_HOURS: '6 hours',
|
||||
DAY: 'Day',
|
||||
WEEK: 'Week',
|
||||
WEEK_STARTING_SUNDAY: 'Week (Sun)',
|
||||
WEEK_STARTING_MONDAY: 'Week (Mon)',
|
||||
WEEK_ENDING_SATURDAY: 'Week (→Sat)',
|
||||
WEEK_ENDING_SUNDAY: 'Week (→Sun)',
|
||||
MONTH: 'Month',
|
||||
QUARTER: 'Quarter',
|
||||
QUARTER_YEAR: 'Quarter',
|
||||
YEAR: 'Year',
|
||||
};
|
||||
return mapping[grain] || grain;
|
||||
}
|
||||
|
||||
// Get list of supported time grains for tooltip
|
||||
function getSupportedTimeGrains(timeGrains?: TimeGrains): string[] {
|
||||
if (!timeGrains) return [];
|
||||
return Object.entries(timeGrains)
|
||||
.filter(([, supported]) => supported)
|
||||
.map(([grain]) => formatTimeGrain(grain));
|
||||
}
|
||||
|
||||
const DatabaseIndex: React.FC<DatabaseIndexProps> = ({ data }) => {
|
||||
const [searchText, setSearchText] = useState('');
|
||||
const [categoryFilter, setCategoryFilter] = useState<string | null>(null);
|
||||
|
||||
const { statistics, databases } = data;
|
||||
|
||||
// Convert databases object to array, including compatible databases
|
||||
const databaseList = useMemo(() => {
|
||||
const entries: TableEntry[] = [];
|
||||
|
||||
Object.entries(databases).forEach(([name, db]) => {
|
||||
// Add the main database
|
||||
// Use categories from documentation metadata (computed by Python) when available
|
||||
entries.push({
|
||||
...db,
|
||||
name,
|
||||
categories: getCategories(name, db.documentation?.categories),
|
||||
timeGrainCount: countTimeGrains(db),
|
||||
hasDrivers: (db.documentation?.drivers?.length ?? 0) > 0,
|
||||
hasAuthMethods: (db.documentation?.authentication_methods?.length ?? 0) > 0,
|
||||
hasConnectionString: Boolean(
|
||||
db.documentation?.connection_string ||
|
||||
(db.documentation?.drivers?.length ?? 0) > 0
|
||||
),
|
||||
isCompatible: false,
|
||||
});
|
||||
|
||||
// Add compatible databases from this database's documentation
|
||||
const compatibleDbs = db.documentation?.compatible_databases ?? [];
|
||||
compatibleDbs.forEach((compat) => {
|
||||
// Check if this compatible DB already exists as a main entry
|
||||
const existsAsMain = Object.keys(databases).some(
|
||||
(dbName) => dbName.toLowerCase() === compat.name.toLowerCase()
|
||||
);
|
||||
|
||||
if (!existsAsMain) {
|
||||
// Compatible databases: use their categories if defined, or infer from name
|
||||
entries.push({
|
||||
name: compat.name,
|
||||
categories: getCategories(compat.name, compat.categories),
|
||||
// Compatible DBs inherit scores from parent
|
||||
score: db.score,
|
||||
max_score: db.max_score,
|
||||
timeGrainCount: countTimeGrains(db),
|
||||
hasDrivers: false,
|
||||
hasAuthMethods: false,
|
||||
hasConnectionString: Boolean(compat.connection_string),
|
||||
joins: db.joins,
|
||||
subqueries: db.subqueries,
|
||||
supports_dynamic_schema: db.supports_dynamic_schema,
|
||||
supports_catalog: db.supports_catalog,
|
||||
ssh_tunneling: db.ssh_tunneling,
|
||||
documentation: {
|
||||
description: compat.description,
|
||||
connection_string: compat.connection_string,
|
||||
pypi_packages: compat.pypi_packages,
|
||||
},
|
||||
isCompatible: true,
|
||||
compatibleWith: name,
|
||||
compatibleDescription: `Uses ${name} driver`,
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return entries;
|
||||
}, [databases]);
|
||||
|
||||
// Filter and sort databases
|
||||
const filteredDatabases = useMemo(() => {
|
||||
return databaseList
|
||||
.filter((db) => {
|
||||
const matchesSearch =
|
||||
!searchText ||
|
||||
db.name.toLowerCase().includes(searchText.toLowerCase()) ||
|
||||
db.documentation?.description
|
||||
?.toLowerCase()
|
||||
.includes(searchText.toLowerCase());
|
||||
const matchesCategory = !categoryFilter || db.categories.includes(categoryFilter);
|
||||
return matchesSearch && matchesCategory;
|
||||
})
|
||||
.sort((a, b) => b.score - a.score);
|
||||
}, [databaseList, searchText, categoryFilter]);
|
||||
|
||||
// Get unique categories and counts for filter
|
||||
const { categories, categoryCounts } = useMemo(() => {
|
||||
const counts: Record<string, number> = {};
|
||||
databaseList.forEach((db) => {
|
||||
// Count each category the database belongs to
|
||||
db.categories.forEach((cat) => {
|
||||
counts[cat] = (counts[cat] || 0) + 1;
|
||||
});
|
||||
});
|
||||
return {
|
||||
categories: Object.keys(counts).sort(),
|
||||
categoryCounts: counts,
|
||||
};
|
||||
}, [databaseList]);
|
||||
|
||||
// Table columns
|
||||
const columns = [
|
||||
{
|
||||
title: 'Database',
|
||||
dataIndex: 'name',
|
||||
key: 'name',
|
||||
sorter: (a: TableEntry, b: TableEntry) => a.name.localeCompare(b.name),
|
||||
render: (name: string, record: TableEntry) => {
|
||||
// Convert name to URL slug
|
||||
const toSlug = (n: string) => n.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '');
|
||||
|
||||
// Link to parent for compatible DBs, otherwise to own page
|
||||
const linkTarget = record.isCompatible && record.compatibleWith
|
||||
? `/docs/databases/supported/${toSlug(record.compatibleWith)}`
|
||||
: `/docs/databases/supported/${toSlug(name)}`;
|
||||
|
||||
return (
|
||||
<div>
|
||||
<a href={linkTarget}>
|
||||
<strong>{name}</strong>
|
||||
</a>
|
||||
{record.isCompatible && record.compatibleWith && (
|
||||
<Tag
|
||||
icon={<LinkOutlined />}
|
||||
color="geekblue"
|
||||
style={{ marginLeft: 8, fontSize: '11px' }}
|
||||
>
|
||||
{record.compatibleWith} compatible
|
||||
</Tag>
|
||||
)}
|
||||
<div style={{ fontSize: '12px', color: '#666' }}>
|
||||
{record.documentation?.description?.slice(0, 80)}
|
||||
{(record.documentation?.description?.length ?? 0) > 80 ? '...' : ''}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
title: 'Categories',
|
||||
dataIndex: 'categories',
|
||||
key: 'categories',
|
||||
width: 220,
|
||||
filters: categories.map((cat) => ({ text: cat, value: cat })),
|
||||
onFilter: (value: React.Key | boolean, record: TableEntry) =>
|
||||
record.categories.includes(value as string),
|
||||
render: (cats: string[]) => (
|
||||
<div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
|
||||
{cats.map((cat) => (
|
||||
<Tag key={cat} color={CATEGORY_COLORS[cat] || 'default'}>{cat}</Tag>
|
||||
))}
|
||||
</div>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'Score',
|
||||
dataIndex: 'score',
|
||||
key: 'score',
|
||||
width: 80,
|
||||
sorter: (a: TableEntry, b: TableEntry) => a.score - b.score,
|
||||
defaultSortOrder: 'descend' as const,
|
||||
render: (score: number, record: TableEntry) => (
|
||||
<span
|
||||
style={{
|
||||
color: score > 150 ? '#52c41a' : score > 100 ? '#1890ff' : '#666',
|
||||
fontWeight: score > 150 ? 'bold' : 'normal',
|
||||
}}
|
||||
>
|
||||
{score}/{record.max_score}
|
||||
</span>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'Time Grains',
|
||||
dataIndex: 'timeGrainCount',
|
||||
key: 'timeGrainCount',
|
||||
width: 100,
|
||||
sorter: (a: TableEntry, b: TableEntry) => a.timeGrainCount - b.timeGrainCount,
|
||||
render: (count: number, record: TableEntry) => {
|
||||
if (count === 0) return <span>-</span>;
|
||||
const grains = getSupportedTimeGrains(record.time_grains);
|
||||
return (
|
||||
<Tooltip
|
||||
title={
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap', gap: '4px', maxWidth: 280 }}>
|
||||
{grains.map((grain) => (
|
||||
<Tag key={grain} style={{ margin: 0 }}>{grain}</Tag>
|
||||
))}
|
||||
</div>
|
||||
}
|
||||
placement="top"
|
||||
>
|
||||
<span style={{ cursor: 'help', borderBottom: '1px dotted #999' }}>
|
||||
{count} grains
|
||||
</span>
|
||||
</Tooltip>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
title: 'Features',
|
||||
key: 'features',
|
||||
width: 280,
|
||||
filters: [
|
||||
{ text: 'JOINs', value: 'joins' },
|
||||
{ text: 'Subqueries', value: 'subqueries' },
|
||||
{ text: 'Dynamic Schema', value: 'dynamic_schema' },
|
||||
{ text: 'Catalog', value: 'catalog' },
|
||||
{ text: 'SSH Tunneling', value: 'ssh' },
|
||||
{ text: 'File Upload', value: 'file_upload' },
|
||||
{ text: 'Query Cancel', value: 'query_cancel' },
|
||||
{ text: 'Cost Estimation', value: 'cost_estimation' },
|
||||
{ text: 'User Impersonation', value: 'impersonation' },
|
||||
{ text: 'SQL Validation', value: 'sql_validation' },
|
||||
],
|
||||
onFilter: (value: React.Key | boolean, record: TableEntry) => {
|
||||
switch (value) {
|
||||
case 'joins':
|
||||
return Boolean(record.joins);
|
||||
case 'subqueries':
|
||||
return Boolean(record.subqueries);
|
||||
case 'dynamic_schema':
|
||||
return Boolean(record.supports_dynamic_schema);
|
||||
case 'catalog':
|
||||
return Boolean(record.supports_catalog);
|
||||
case 'ssh':
|
||||
return Boolean(record.ssh_tunneling);
|
||||
case 'file_upload':
|
||||
return Boolean(record.supports_file_upload);
|
||||
case 'query_cancel':
|
||||
return Boolean(record.query_cancelation);
|
||||
case 'cost_estimation':
|
||||
return Boolean(record.query_cost_estimation);
|
||||
case 'impersonation':
|
||||
return Boolean(record.user_impersonation);
|
||||
case 'sql_validation':
|
||||
return Boolean(record.sql_validation);
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
},
|
||||
render: (_: unknown, record: TableEntry) => (
|
||||
<div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
|
||||
{record.joins && <Tag color="green">JOINs</Tag>}
|
||||
{record.subqueries && <Tag color="green">Subqueries</Tag>}
|
||||
{record.supports_dynamic_schema && <Tag color="blue">Dynamic Schema</Tag>}
|
||||
{record.supports_catalog && <Tag color="purple">Catalog</Tag>}
|
||||
{record.ssh_tunneling && <Tag color="cyan">SSH</Tag>}
|
||||
{record.supports_file_upload && <Tag color="orange">File Upload</Tag>}
|
||||
{record.query_cancelation && <Tag color="volcano">Query Cancel</Tag>}
|
||||
{record.query_cost_estimation && <Tag color="gold">Cost Est.</Tag>}
|
||||
{record.user_impersonation && <Tag color="magenta">Impersonation</Tag>}
|
||||
{record.sql_validation && <Tag color="lime">SQL Validation</Tag>}
|
||||
</div>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'Documentation',
|
||||
key: 'docs',
|
||||
width: 150,
|
||||
render: (_: unknown, record: TableEntry) => (
|
||||
<div style={{ display: 'flex', gap: '4px', flexWrap: 'wrap' }}>
|
||||
{record.hasConnectionString && (
|
||||
<Tag icon={<ApiOutlined />} color="default">
|
||||
Connection
|
||||
</Tag>
|
||||
)}
|
||||
{record.hasDrivers && (
|
||||
<Tag icon={<DatabaseOutlined />} color="default">
|
||||
Drivers
|
||||
</Tag>
|
||||
)}
|
||||
{record.hasAuthMethods && (
|
||||
<Tag icon={<KeyOutlined />} color="default">
|
||||
Auth
|
||||
</Tag>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
return (
|
||||
<div className="database-index">
|
||||
{/* Statistics Cards */}
|
||||
<Row gutter={[16, 16]} style={{ marginBottom: 24 }}>
|
||||
<Col xs={12} sm={6}>
|
||||
<Card>
|
||||
<Statistic
|
||||
title="Total Databases"
|
||||
value={statistics.totalDatabases}
|
||||
prefix={<DatabaseOutlined />}
|
||||
/>
|
||||
</Card>
|
||||
</Col>
|
||||
<Col xs={12} sm={6}>
|
||||
<Card>
|
||||
<Statistic
|
||||
title="With Documentation"
|
||||
value={statistics.withDocumentation}
|
||||
prefix={<CheckCircleOutlined />}
|
||||
suffix={`/ ${statistics.totalDatabases}`}
|
||||
/>
|
||||
</Card>
|
||||
</Col>
|
||||
<Col xs={12} sm={6}>
|
||||
<Card>
|
||||
<Statistic
|
||||
title="Multiple Drivers"
|
||||
value={statistics.withDrivers}
|
||||
prefix={<ApiOutlined />}
|
||||
/>
|
||||
</Card>
|
||||
</Col>
|
||||
<Col xs={12} sm={6}>
|
||||
<Card>
|
||||
<Statistic
|
||||
title="Auth Methods"
|
||||
value={statistics.withAuthMethods}
|
||||
prefix={<KeyOutlined />}
|
||||
/>
|
||||
</Card>
|
||||
</Col>
|
||||
</Row>
|
||||
|
||||
{/* Filters */}
|
||||
<Row gutter={[16, 16]} style={{ marginBottom: 16 }}>
|
||||
<Col xs={24} sm={12}>
|
||||
<Input
|
||||
placeholder="Search databases..."
|
||||
prefix={<SearchOutlined />}
|
||||
value={searchText}
|
||||
onChange={(e) => setSearchText(e.target.value)}
|
||||
allowClear
|
||||
/>
|
||||
</Col>
|
||||
<Col xs={24} sm={12}>
|
||||
<Select
|
||||
placeholder="Filter by category"
|
||||
style={{ width: '100%' }}
|
||||
value={categoryFilter}
|
||||
onChange={setCategoryFilter}
|
||||
allowClear
|
||||
options={categories.map((cat) => ({
|
||||
label: (
|
||||
<span>
|
||||
<Tag
|
||||
color={CATEGORY_COLORS[cat] || 'default'}
|
||||
style={{ marginRight: 8 }}
|
||||
>
|
||||
{categoryCounts[cat] || 0}
|
||||
</Tag>
|
||||
{cat}
|
||||
</span>
|
||||
),
|
||||
value: cat,
|
||||
}))}
|
||||
/>
|
||||
</Col>
|
||||
</Row>
|
||||
|
||||
{/* Database Table */}
|
||||
<Table
|
||||
dataSource={filteredDatabases}
|
||||
columns={columns}
|
||||
rowKey={(record) => record.isCompatible ? `${record.compatibleWith}-${record.name}` : record.name}
|
||||
pagination={{
|
||||
pageSize: 20,
|
||||
showSizeChanger: true,
|
||||
showTotal: (total) => `${total} databases`,
|
||||
}}
|
||||
size="middle"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default DatabaseIndex;
|
||||
634
docs/src/components/databases/DatabasePage.tsx
Normal file
@@ -0,0 +1,634 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import React from 'react';
import {
  Card,
  Collapse,
  Table,
  Tag,
  Typography,
  Alert,
  Space,
  Divider,
  Tabs,
} from 'antd';
import {
  CheckCircleOutlined,
  CloseCircleOutlined,
  WarningOutlined,
  LinkOutlined,
  KeyOutlined,
  SettingOutlined,
  BookOutlined,
  EditOutlined,
  GithubOutlined,
} from '@ant-design/icons';
import type { DatabaseInfo } from './types';

// Simple code block component for connection strings
const CodeBlock: React.FC<{ children: React.ReactNode }> = ({ children }) => (
  <pre
    style={{
      background: 'var(--ifm-code-background)',
      padding: '12px 16px',
      borderRadius: '4px',
      overflow: 'auto',
      fontSize: '13px',
      fontFamily: 'var(--ifm-font-family-monospace)',
    }}
  >
    <code>{children}</code>
  </pre>
);

const { Title, Paragraph, Text } = Typography;
const { Panel } = Collapse;
const { TabPane } = Tabs;

interface DatabasePageProps {
  database: DatabaseInfo;
  name: string;
}

// Feature badge component
const FeatureBadge: React.FC<{ supported: boolean; label: string }> = ({
  supported,
  label,
}) => (
  <Tag
    icon={supported ? <CheckCircleOutlined /> : <CloseCircleOutlined />}
    color={supported ? 'success' : 'default'}
  >
    {label}
  </Tag>
);

// Time grain badge
const TimeGrainBadge: React.FC<{ supported: boolean; grain: string }> = ({
  supported,
  grain,
}) => (
  <Tag color={supported ? 'blue' : 'default'} style={{ margin: '2px' }}>
    {grain}
  </Tag>
);

const DatabasePage: React.FC<DatabasePageProps> = ({ database, name }) => {
  const { documentation: docs } = database;

  // Helper to render a connection string with an optional description
  const renderConnectionString = (connStr: string, description?: string) => (
    <div style={{ marginBottom: 16 }}>
      {description && (
        <Text type="secondary" style={{ display: 'block', marginBottom: 4 }}>
          {description}
        </Text>
      )}
      <CodeBlock>{connStr}</CodeBlock>
    </div>
  );

  // Render driver information
  const renderDrivers = () => {
    if (!docs?.drivers?.length) return null;

    return (
      <Card title="Drivers" style={{ marginBottom: 16 }}>
        <Tabs>
          {docs.drivers.map((driver, idx) => (
            <TabPane
              tab={
                <span>
                  {driver.name}
                  {driver.is_recommended && (
                    <Tag color="green" style={{ marginLeft: 8 }}>
                      Recommended
                    </Tag>
                  )}
                </span>
              }
              key={idx}
            >
              <Space direction="vertical" style={{ width: '100%' }}>
                {driver.pypi_package && (
                  <div>
                    <Text strong>PyPI Package: </Text>
                    <code>{driver.pypi_package}</code>
                  </div>
                )}
                {driver.connection_string &&
                  renderConnectionString(driver.connection_string)}
                {driver.notes && (
                  <Alert message={driver.notes} type="info" showIcon />
                )}
                {driver.docs_url && (
                  <a href={driver.docs_url} target="_blank" rel="noreferrer">
                    <LinkOutlined /> Documentation
                  </a>
                )}
              </Space>
            </TabPane>
          ))}
        </Tabs>
      </Card>
    );
  };

  // Render authentication methods
  const renderAuthMethods = () => {
    if (!docs?.authentication_methods?.length) return null;

    return (
      <Card
        title={
          <>
            <KeyOutlined /> Authentication Methods
          </>
        }
        style={{ marginBottom: 16 }}
      >
        <Collapse accordion>
          {docs.authentication_methods.map((auth, idx) => (
            <Panel header={auth.name} key={idx}>
              {auth.description && <Paragraph>{auth.description}</Paragraph>}
              {auth.requirements && (
                <Alert
                  message="Requirements"
                  description={auth.requirements}
                  type="warning"
                  showIcon
                  style={{ marginBottom: 16 }}
                />
              )}
              {auth.connection_string &&
                renderConnectionString(
                  auth.connection_string,
                  'Connection String'
                )}
              {auth.secure_extra && (
                <div>
                  <Text strong>Secure Extra Configuration:</Text>
                  <CodeBlock>
                    {JSON.stringify(auth.secure_extra, null, 2)}
                  </CodeBlock>
                </div>
              )}
              {auth.engine_parameters && (
                <div>
                  <Text strong>Engine Parameters:</Text>
                  <CodeBlock>
                    {JSON.stringify(auth.engine_parameters, null, 2)}
                  </CodeBlock>
                </div>
              )}
              {auth.notes && (
                <Alert message={auth.notes} type="info" showIcon />
              )}
            </Panel>
          ))}
        </Collapse>
      </Card>
    );
  };

  // Render engine parameters
  const renderEngineParams = () => {
    if (!docs?.engine_parameters?.length) return null;

    return (
      <Card
        title={
          <>
            <SettingOutlined /> Engine Parameters
          </>
        }
        style={{ marginBottom: 16 }}
      >
        <Collapse>
          {docs.engine_parameters.map((param, idx) => (
            <Panel header={param.name} key={idx}>
              {param.description && <Paragraph>{param.description}</Paragraph>}
              {param.json && (
                <CodeBlock>
                  {JSON.stringify(param.json, null, 2)}
                </CodeBlock>
              )}
              {param.docs_url && (
                <a href={param.docs_url} target="_blank" rel="noreferrer">
                  <LinkOutlined /> Learn more
                </a>
              )}
            </Panel>
          ))}
        </Collapse>
      </Card>
    );
  };

  // Render compatible databases (for PostgreSQL, etc.)
  const renderCompatibleDatabases = () => {
    if (!docs?.compatible_databases?.length) return null;

    // Create array of all panel keys to expand by default
    const allPanelKeys = docs.compatible_databases.map((_, idx) => idx);

    return (
      <Card title="Compatible Databases" style={{ marginBottom: 16 }}>
        <Paragraph>
          The following databases are compatible with the {name} driver:
        </Paragraph>
        <Collapse defaultActiveKey={allPanelKeys}>
          {docs.compatible_databases.map((compat, idx) => (
            <Panel
              header={
                <div style={{ display: 'flex', alignItems: 'center', gap: 12 }}>
                  {compat.logo && (
                    <img
                      src={`/img/databases/${compat.logo}`}
                      alt={compat.name}
                      style={{
                        width: 28,
                        height: 28,
                        objectFit: 'contain',
                      }}
                    />
                  )}
                  <span>{compat.name}</span>
                </div>
              }
              key={idx}
            >
              {compat.description && (
                <Paragraph>{compat.description}</Paragraph>
              )}
              {compat.connection_string &&
                renderConnectionString(compat.connection_string)}
              {compat.parameters && (
                <div>
                  <Text strong>Parameters:</Text>
                  <Table
                    dataSource={Object.entries(compat.parameters).map(
                      ([key, value]) => ({
                        key,
                        parameter: key,
                        description: value,
                      })
                    )}
                    columns={[
                      { title: 'Parameter', dataIndex: 'parameter', key: 'p' },
                      {
                        title: 'Description',
                        dataIndex: 'description',
                        key: 'd',
                      },
                    ]}
                    pagination={false}
                    size="small"
                  />
                </div>
              )}
              {compat.notes && (
                <Alert
                  message={compat.notes}
                  type="info"
                  showIcon
                  style={{ marginTop: 16 }}
                />
              )}
            </Panel>
          ))}
        </Collapse>
      </Card>
    );
  };

  // Render feature matrix
  const renderFeatures = () => {
    const features: Array<{ key: keyof DatabaseInfo; label: string }> = [
      { key: 'joins', label: 'JOINs' },
      { key: 'subqueries', label: 'Subqueries' },
      { key: 'supports_dynamic_schema', label: 'Dynamic Schema' },
      { key: 'supports_catalog', label: 'Catalog Support' },
      { key: 'supports_dynamic_catalog', label: 'Dynamic Catalog' },
      { key: 'ssh_tunneling', label: 'SSH Tunneling' },
      { key: 'query_cancelation', label: 'Query Cancellation' },
      { key: 'supports_file_upload', label: 'File Upload' },
      { key: 'user_impersonation', label: 'User Impersonation' },
      { key: 'query_cost_estimation', label: 'Cost Estimation' },
      { key: 'sql_validation', label: 'SQL Validation' },
    ];

    return (
      <Card title="Supported Features" style={{ marginBottom: 16 }}>
        <div style={{ display: 'flex', flexWrap: 'wrap', gap: 8 }}>
          {features.map(({ key, label }) => (
            <FeatureBadge
              key={key}
              supported={Boolean(database[key])}
              label={label}
            />
          ))}
        </div>
        {database.score > 0 && (
          <div style={{ marginTop: 16 }}>
            <Text>
              Feature Score:{' '}
              <Text strong>
                {database.score}/{database.max_score}
              </Text>
            </Text>
          </div>
        )}
      </Card>
    );
  };

  // Render time grains
  const renderTimeGrains = () => {
    if (!database.time_grains) return null;

    const commonGrains = [
      'SECOND',
      'MINUTE',
      'HOUR',
      'DAY',
      'WEEK',
      'MONTH',
      'QUARTER',
      'YEAR',
    ];
    const extendedGrains = Object.keys(database.time_grains).filter(
      (g) => !commonGrains.includes(g)
    );

    return (
      <Card title="Time Grains" style={{ marginBottom: 16 }}>
        <div style={{ marginBottom: 16 }}>
          <Text strong>Common Time Grains:</Text>
          <div style={{ marginTop: 8 }}>
            {commonGrains.map((grain) => (
              <TimeGrainBadge
                key={grain}
                grain={grain}
                supported={Boolean(
                  database.time_grains[grain as keyof typeof database.time_grains]
                )}
              />
            ))}
          </div>
        </div>
        {extendedGrains.length > 0 && (
          <div>
            <Text strong>Extended Time Grains:</Text>
            <div style={{ marginTop: 8 }}>
              {extendedGrains.map((grain) => (
                <TimeGrainBadge
                  key={grain}
                  grain={grain}
                  supported={Boolean(
                    database.time_grains[grain as keyof typeof database.time_grains]
                  )}
                />
              ))}
            </div>
          </div>
        )}
      </Card>
    );
  };

  return (
    <div
      className="database-page"
      id={name.toLowerCase().replace(/\s+/g, '-')}
    >
      <div style={{ marginBottom: 16 }}>
        {docs?.logo && (
          <img
            src={`/img/databases/${docs.logo}`}
            alt={name}
            style={{
              height: 120,
              objectFit: 'contain',
              marginBottom: 12,
            }}
          />
        )}
        <Title level={1} style={{ margin: 0 }}>{name}</Title>
        {docs?.homepage_url && (
          <a
            href={docs.homepage_url}
            target="_blank"
            rel="noreferrer"
            style={{ fontSize: 14 }}
          >
            <LinkOutlined /> {docs.homepage_url}
          </a>
        )}
      </div>

      {docs?.description && <Paragraph>{docs.description}</Paragraph>}

      {/* Warnings */}
      {docs?.warnings?.map((warning, idx) => (
        <Alert
          key={idx}
          message={warning}
          type="warning"
          icon={<WarningOutlined />}
          showIcon
          style={{ marginBottom: 16 }}
        />
      ))}

      {/* Known Limitations */}
      {docs?.limitations?.length > 0 && (
        <Card
          title="Known Limitations"
          style={{ marginBottom: 16 }}
          type="inner"
        >
          <ul style={{ margin: 0, paddingLeft: 20 }}>
            {docs.limitations.map((limitation, idx) => (
              <li key={idx}>{limitation}</li>
            ))}
          </ul>
        </Card>
      )}

      {/* Installation */}
      {(docs?.pypi_packages?.length || docs?.install_instructions) && (
        <Card title="Installation" style={{ marginBottom: 16 }}>
          {docs.pypi_packages?.length > 0 && (
            <div style={{ marginBottom: 16 }}>
              <Text strong>Required packages: </Text>
              {docs.pypi_packages.map((pkg) => (
                <Tag key={pkg} color="blue">
                  {pkg}
                </Tag>
              ))}
            </div>
          )}
          {docs.version_requirements && (
            <Alert
              message={`Version requirement: ${docs.version_requirements}`}
              type="info"
              showIcon
              style={{ marginBottom: 16 }}
            />
          )}
          {docs.install_instructions && (
            <CodeBlock>{docs.install_instructions}</CodeBlock>
          )}
        </Card>
      )}

      {/* Basic Connection */}
      {docs?.connection_string && !docs?.drivers?.length && (
        <Card title="Connection String" style={{ marginBottom: 16 }}>
          {renderConnectionString(docs.connection_string)}
          {docs.parameters && (
            <Table
              dataSource={Object.entries(docs.parameters).map(
                ([key, value]) => ({
                  key,
                  parameter: key,
                  description: value,
                })
              )}
              columns={[
                { title: 'Parameter', dataIndex: 'parameter', key: 'p' },
                { title: 'Description', dataIndex: 'description', key: 'd' },
              ]}
              pagination={false}
              size="small"
            />
          )}
          {docs.default_port && (
            <Text type="secondary">Default port: {docs.default_port}</Text>
          )}
        </Card>
      )}

      {/* Drivers */}
      {renderDrivers()}

      {/* Connection Examples */}
      {docs?.connection_examples?.length > 0 && (
        <Card title="Connection Examples" style={{ marginBottom: 16 }}>
          {docs.connection_examples.map((example, idx) => (
            <div key={idx}>
              {renderConnectionString(
                example.connection_string,
                example.description
              )}
            </div>
          ))}
        </Card>
      )}

      {/* Authentication Methods */}
      {renderAuthMethods()}

      {/* Engine Parameters */}
      {renderEngineParams()}

      {/* Features */}
      {renderFeatures()}

      {/* Time Grains */}
      {renderTimeGrains()}

      {/* Compatible Databases */}
      {renderCompatibleDatabases()}

      {/* Notes */}
      {docs?.notes && (
        <Alert
          message="Notes"
          description={docs.notes}
          type="info"
          showIcon
          style={{ marginBottom: 16 }}
        />
      )}

      {/* External Links */}
      {(docs?.docs_url || docs?.tutorials?.length) && (
        <Card
          title={
            <>
              <BookOutlined /> Resources
            </>
          }
          style={{ marginBottom: 16 }}
        >
          <Space direction="vertical">
            {docs.docs_url && (
              <a href={docs.docs_url} target="_blank" rel="noreferrer">
                <LinkOutlined /> Official Documentation
              </a>
            )}
            {docs.sqlalchemy_docs_url && (
              <a href={docs.sqlalchemy_docs_url} target="_blank" rel="noreferrer">
                <LinkOutlined /> SQLAlchemy Dialect Documentation
              </a>
            )}
            {docs.tutorials?.map((tutorial, idx) => (
              <a key={idx} href={tutorial} target="_blank" rel="noreferrer">
                <LinkOutlined /> Tutorial {idx + 1}
              </a>
            ))}
          </Space>
        </Card>
      )}

      {/* Edit link */}
      {database.module && (
        <Card
          style={{
            marginBottom: 16,
            background: 'var(--ifm-background-surface-color)',
            borderStyle: 'dashed',
          }}
          size="small"
        >
          <Space>
            <GithubOutlined />
            <Text type="secondary">
              Help improve this documentation by editing the engine spec:
            </Text>
            <a
              href={`https://github.com/apache/superset/edit/master/superset/db_engine_specs/${database.module}.py`}
              target="_blank"
              rel="noreferrer"
            >
              <EditOutlined /> Edit {database.module}.py
            </a>
          </Space>
        </Card>
      )}

      <Divider />
    </div>
  );
};

export default DatabasePage;
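For orientation, here is a minimal sketch of how a Docusaurus page might wire this component to the generated data. The data import mirrors the `databases.json` file added in this change, but the page path and `Layout` wrapper are assumptions for illustration, not part of this diff:

// Hypothetical page wiring (illustration only, not part of this change)
import React from 'react';
import Layout from '@theme/Layout';
import databaseData from '../data/databases.json';
import DatabasePage from '../components/databases/DatabasePage';
import type { DatabaseData } from '../components/databases/types';

const data = databaseData as DatabaseData;

export default function SupportedDatabases(): JSX.Element {
  return (
    <Layout title="Supported Databases">
      {Object.entries(data.databases).map(([name, db]) => (
        // DatabasePage renders one full engine entry (drivers, auth, features)
        <DatabasePage key={name} name={name} database={db} />
      ))}
    </Layout>
  );
}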
@@ -16,24 +16,7 @@
 * specific language governing permissions and limitations
 * under the License.
 */
import { getOverrideHtmlSchema } from '../../src/components/SafeMarkdown/SafeMarkdown';

describe('getOverrideHtmlSchema', () => {
  it('should append the override items', () => {
    const original = {
      attributes: {
        '*': ['size'],
      },
      clobberPrefix: 'original-prefix',
      tagNames: ['h1', 'h2', 'h3'],
    };
    const result = getOverrideHtmlSchema(original, {
      attributes: { '*': ['src'], h1: ['style'] },
      clobberPrefix: 'custom-prefix',
      tagNames: ['iframe'],
    });
    expect(result.clobberPrefix).toEqual('custom-prefix');
    expect(result.attributes).toEqual({ '*': ['size', 'src'], h1: ['style'] });
    expect(result.tagNames).toEqual(['h1', 'h2', 'h3', 'iframe']);
  });
});
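The assertions above pin down the merge semantics of `getOverrideHtmlSchema`: scalar fields such as `clobberPrefix` are replaced, while array-valued fields (`tagNames` and per-tag `attributes`) are appended to the originals. A minimal sketch of a function satisfying these assertions, as an illustration of the contract rather than the shipped implementation:

// Illustration only: merge a sanitizer schema with overrides so that arrays
// append and scalars replace, matching the test expectations above.
type HtmlSchema = {
  attributes?: Record<string, string[]>;
  clobberPrefix?: string;
  tagNames?: string[];
};

function mergeHtmlSchema(original: HtmlSchema, override: HtmlSchema): HtmlSchema {
  const attributes = { ...original.attributes };
  Object.entries(override.attributes ?? {}).forEach(([tag, attrs]) => {
    attributes[tag] = [...(attributes[tag] ?? []), ...attrs];
  });
  return {
    ...original,
    ...override, // scalar overrides (e.g. clobberPrefix) win
    attributes,
    tagNames: [...(original.tagNames ?? []), ...(override.tagNames ?? [])],
  };
}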
export { default as DatabaseIndex } from './DatabaseIndex';
export { default as DatabasePage } from './DatabasePage';
export * from './types';
243
docs/src/components/databases/types.ts
Normal file
@@ -0,0 +1,243 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * TypeScript types for database documentation data
 * Generated from superset/db_engine_specs/lib.py
 */

export interface Driver {
  name: string;
  pypi_package?: string;
  connection_string?: string;
  is_recommended?: boolean;
  notes?: string;
  docs_url?: string;
  default_port?: number;
  odbc_driver_paths?: Record<string, string>;
  environment_variables?: Record<string, string>;
}

export interface ConnectionExample {
  description: string;
  connection_string: string;
}

export interface HostExample {
  platform: string;
  host: string;
}

export interface AuthenticationMethod {
  name: string;
  description?: string;
  requirements?: string;
  connection_string?: string;
  secure_extra?: Record<string, unknown>;
  secure_extra_body?: Record<string, unknown>;
  secure_extra_path?: Record<string, unknown>;
  engine_parameters?: Record<string, unknown>;
  config_example?: Record<string, unknown>;
  notes?: string;
}

export interface EngineParameter {
  name: string;
  description?: string;
  json?: Record<string, unknown>;
  secure_extra?: Record<string, unknown>;
  docs_url?: string;
}

export interface SSLConfiguration {
  custom_certificate?: string;
  disable_ssl_verification?: {
    engine_params?: Record<string, unknown>;
  };
}

export interface CompatibleDatabase {
  name: string;
  description?: string;
  logo?: string;
  homepage_url?: string;
  categories?: string[]; // Category classifications (e.g., ["TRADITIONAL_RDBMS", "OPEN_SOURCE"])
  pypi_packages?: string[];
  connection_string?: string;
  parameters?: Record<string, string>;
  connection_examples?: ConnectionExample[];
  notes?: string;
  docs_url?: string;
}

export interface DatabaseDocumentation {
  description?: string;
  logo?: string;
  homepage_url?: string;
  categories?: string[]; // Category classifications (e.g., ["TRADITIONAL_RDBMS", "OPEN_SOURCE"])
  pypi_packages?: string[];
  connection_string?: string;
  default_port?: number;
  parameters?: Record<string, string>;
  notes?: string;
  limitations?: string[]; // Known limitations or caveats
  connection_examples?: ConnectionExample[];
  host_examples?: HostExample[];
  drivers?: Driver[];
  authentication_methods?: AuthenticationMethod[];
  engine_parameters?: EngineParameter[];
  ssl_configuration?: SSLConfiguration;
  version_requirements?: string;
  install_instructions?: string;
  warnings?: string[];
  tutorials?: string[];
  docs_url?: string;
  sqlalchemy_docs_url?: string;
  advanced_features?: Record<string, string>;
  compatible_databases?: CompatibleDatabase[];
}

export interface TimeGrains {
  SECOND?: boolean;
  MINUTE?: boolean;
  HOUR?: boolean;
  DAY?: boolean;
  WEEK?: boolean;
  MONTH?: boolean;
  QUARTER?: boolean;
  YEAR?: boolean;
  FIVE_SECONDS?: boolean;
  THIRTY_SECONDS?: boolean;
  FIVE_MINUTES?: boolean;
  TEN_MINUTES?: boolean;
  FIFTEEN_MINUTES?: boolean;
  THIRTY_MINUTES?: boolean;
  HALF_HOUR?: boolean;
  SIX_HOURS?: boolean;
  WEEK_STARTING_SUNDAY?: boolean;
  WEEK_STARTING_MONDAY?: boolean;
  WEEK_ENDING_SATURDAY?: boolean;
  WEEK_ENDING_SUNDAY?: boolean;
  QUARTER_YEAR?: boolean;
}

export interface DatabaseInfo {
  engine: string;
  engine_name: string;
  engine_aliases?: string[];
  default_driver?: string;
  module?: string;
  documentation: DatabaseDocumentation;

  // Diagnostics from lib.py diagnose() function
  time_grains: TimeGrains;
  score: number;
  max_score: number;

  // SQL capabilities
  joins: boolean;
  subqueries: boolean;
  alias_in_select?: boolean;
  alias_in_orderby?: boolean;
  cte_in_subquery?: boolean;
  sql_comments?: boolean;
  escaped_colons?: boolean;
  time_groupby_inline?: boolean;
  alias_to_source_column?: boolean;
  order_by_not_in_select?: boolean;
  expressions_in_orderby?: boolean;

  // Platform features
  limit_method?: string;
  limit_clause?: boolean;
  max_column_name?: number;
  supports_file_upload?: boolean;
  supports_dynamic_schema?: boolean;
  supports_catalog?: boolean;
  supports_dynamic_catalog?: boolean;

  // Advanced features
  user_impersonation?: boolean;
  ssh_tunneling?: boolean;
  query_cancelation?: boolean;
  expand_data?: boolean;
  query_cost_estimation?: boolean;
  sql_validation?: boolean;
  get_metrics?: boolean;
  where_latest_partition?: boolean;
  get_extra_table_metadata?: boolean;
  dbapi_exception_mapping?: boolean;
  custom_errors?: boolean;
  masked_encrypted_extra?: boolean;
  column_type_mapping?: boolean;
  function_names?: boolean;
}

export interface Statistics {
  totalDatabases: number;
  withDocumentation: number;
  withConnectionString: number;
  withDrivers: number;
  withAuthMethods: number;
  supportsJoins: number;
  supportsSubqueries: number;
  supportsDynamicSchema: number;
  supportsCatalog: number;
  averageScore: number;
  maxScore: number;
  byCategory: Record<string, string[]>;
}

export interface DatabaseData {
  generated: string;
  statistics: Statistics;
  databases: Record<string, DatabaseInfo>;
}

// Helper type for sorting databases
export type SortField = 'name' | 'score' | 'category';
export type SortDirection = 'asc' | 'desc';

// Common time grains
export const COMMON_TIME_GRAINS = [
  'SECOND',
  'MINUTE',
  'HOUR',
  'DAY',
  'WEEK',
  'MONTH',
  'QUARTER',
  'YEAR',
] as const;

export const EXTENDED_TIME_GRAINS = [
  'FIVE_SECONDS',
  'THIRTY_SECONDS',
  'FIVE_MINUTES',
  'TEN_MINUTES',
  'FIFTEEN_MINUTES',
  'THIRTY_MINUTES',
  'HALF_HOUR',
  'SIX_HOURS',
  'WEEK_STARTING_SUNDAY',
  'WEEK_STARTING_MONDAY',
  'WEEK_ENDING_SATURDAY',
  'WEEK_ENDING_SUNDAY',
  'QUARTER_YEAR',
] as const;
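As a quick illustration of how these types compose, a sketch that loads the generated `databases.json` (added below) and filters engines by capability; the specific query is hypothetical:

// Illustration only: consume the generated data with the types above.
import databaseData from '../../data/databases.json';
import type { DatabaseData, DatabaseInfo } from './types';

const data = databaseData as DatabaseData;

// Engines that support SSH tunneling and a daily time grain.
const matches: Array<[string, DatabaseInfo]> = Object.entries(
  data.databases,
).filter(([, db]) => db.ssh_tunneling && db.time_grains?.DAY);

console.log(`${matches.length} of ${data.statistics.totalDatabases} engines match`);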
4799
docs/src/data/databases.json
Normal file
@@ -23,12 +23,32 @@ import { Card, Carousel, Flex } from 'antd';
import styled from '@emotion/styled';
import GitHubButton from 'react-github-btn';
import { mq } from '../utils';
import { Databases } from '../resources/data';
import SectionHeader from '../components/SectionHeader';
import databaseData from '../data/databases.json';
import BlurredSection from '../components/BlurredSection';
import DataSet from '../../../RESOURCES/INTHEWILD.yaml';
import type { DatabaseData } from '../components/databases/types';
import '../styles/main.less';

// Build database list from databases.json (databases with logos)
// Deduplicate by logo filename to avoid showing the same logo twice
const typedDatabaseData = databaseData as DatabaseData;
const seenLogos = new Set<string>();
const Databases = Object.entries(typedDatabaseData.databases)
  .filter(([, db]) => db.documentation?.logo && db.documentation?.homepage_url)
  .map(([name, db]) => ({
    title: name,
    href: db.documentation?.homepage_url,
    imgName: db.documentation?.logo,
    docPath: `/docs/databases/supported/${name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, '')}`,
  }))
  .sort((a, b) => a.title.localeCompare(b.title))
  .filter((db) => {
    if (seenLogos.has(db.imgName!)) return false;
    seenLogos.add(db.imgName!);
    return true;
  });

interface Organization {
  name: string;
  url: string;
@@ -440,22 +460,22 @@ const StyledIntegrations = styled('div')`
  padding: 0 20px;
  .database-grid {
    display: grid;
    grid-template-columns: repeat(5, minmax(0, 1fr));
    gap: 14px;
    max-width: 1160px;
    grid-template-columns: repeat(8, minmax(0, 1fr));
    gap: 10px;
    max-width: 1200px;
    margin: 25px auto 0;
    ${mq[1]} {
      grid-template-columns: repeat(4, minmax(0, 1fr));
      grid-template-columns: repeat(5, minmax(0, 1fr));
    }
    ${mq[0]} {
      grid-template-columns: repeat(1, minmax(0, 1fr));
      grid-template-columns: repeat(2, minmax(0, 1fr));
    }
    & > .item {
      border: 1px solid var(--ifm-border-color);
      border-radius: 10px;
      border-radius: 8px;
      overflow: hidden;
      height: 120px;
      padding: 25px;
      height: 80px;
      padding: 14px;
      display: flex;
      align-items: center;
      justify-content: center;
@@ -759,23 +779,19 @@ export default function Home(): JSX.Element {
        </BlurredSection>
        <BlurredSection>
          <StyledIntegrations>
            <SectionHeader level="h2" title="Supported Databases" />
            <SectionHeader level="h2" title="Supported Databases" link="/docs/databases" />
            <div className="database-grid">
              {Databases.map(({ title, href, imgName }) => (
              {Databases.map(({ title, imgName, docPath }) => (
                <div className="item" key={title}>
                  {href ? (
                    <a href={href} aria-label={`Go to ${title} page`}>
                      <img src={`/img/databases/${imgName}`} title={title} />
                    </a>
                  ) : (
                  <a href={docPath} aria-label={`${title} documentation`}>
                    <img src={`/img/databases/${imgName}`} title={title} />
                  )}
                  </a>
                </div>
              ))}
            </div>
            <span className="database-sub">
              ...and many other{' '}
              <a href="/docs/configuration/databases#installing-database-drivers">
              <a href="/docs/databases#installing-database-drivers">
                compatible databases
              </a>
            </span>
@@ -123,6 +123,11 @@ ul.dropdown__menu svg {
  --ifm-code-padding-horizontal: 5px;
}

/* Database logo images in intro/README */
.database-logo {
  object-fit: contain;
}

[data-theme='dark'] {
  --ifm-color-primary: #25c2a0;
  --ifm-color-primary-dark: #21af90;
6
docs/static/.htaccess
vendored
@@ -35,7 +35,7 @@ RewriteRule ^usertutorial\.html$ /docs/using-superset/creating-your-first-dashbo
RewriteRule ^security\.html$ /docs/security/ [R=301,L]
RewriteRule ^sqllab\.html$ /docs/configuration/sql-templating [R=301,L]
RewriteRule ^gallery\.html$ /docs/intro [R=301,L]
RewriteRule ^druid\.html$ /docs/configuration/databases [R=301,L]
RewriteRule ^druid\.html$ /docs/databases [R=301,L]
RewriteRule ^misc\.html$ /docs/configuration/country-map-tools [R=301,L]
RewriteRule ^visualization\.html$ /docs/configuration/country-map-tools [R=301,L]
RewriteRule ^videos\.html$ /docs/faq [R=301,L]
@@ -47,7 +47,7 @@ RewriteRule ^docs/installation/email-reports$ /docs/configuration/alerts-reports
RewriteRule ^docs/roadmap$ /docs/intro [R=301,L]
RewriteRule ^docs/contributing/contribution-guidelines$ /docs/contributing/ [R=301,L]
RewriteRule ^docs/contributing/contribution-page$ /docs/contributing/ [R=301,L]
RewriteRule ^docs/databases/yugabyte/$ /docs/configuration/databases [R=301,L]
RewriteRule ^docs/databases/yugabyte/$ /docs/databases [R=301,L]
RewriteRule ^docs/frequently-asked-questions$ /docs/faq [R=301,L]
RewriteRule ^docs/installation/running-on-kubernetes/$ /docs/installation/kubernetes [R=301,L]
RewriteRule ^docs/contributing/testing-locally/$ /docs/contributing/howtos [R=301,L]
@@ -62,7 +62,5 @@ RewriteRule ^docs/installation/cache/$ /docs/configuration/cache [R=301,L]
RewriteRule ^docs/installation/async-queries-celery/$ /docs/configuration/async-queries-celery [R=301,L]
RewriteRule ^docs/installation/event-logging/$ /docs/configuration/event-logging [R=301,L]

RewriteRule ^docs/databases.*$ /docs/configuration/databases [R=301,L]

# pre-commit hooks documentation
RewriteRule ^docs/contributing/hooks-and-linting/$ /docs/contributing/development/#git-hooks-1
379
docs/static/feature-flags.json
vendored
Normal file
@@ -0,0 +1,379 @@
{
  "generated": true,
  "source": "superset/config.py",
  "flags": {
    "development": [
      {
        "name": "AG_GRID_TABLE_ENABLED",
        "default": false,
        "lifecycle": "development",
        "description": "Enables Table V2 (AG Grid) viz plugin"
      },
      {
        "name": "ALERT_REPORT_TABS",
        "default": false,
        "lifecycle": "development",
        "description": "Enables experimental tabs UI for Alerts and Reports"
      },
      {
        "name": "CHART_PLUGINS_EXPERIMENTAL",
        "default": false,
        "lifecycle": "development",
        "description": "Enables experimental chart plugins"
      },
      {
        "name": "CSV_UPLOAD_PYARROW_ENGINE",
        "default": false,
        "lifecycle": "development",
        "description": "Experimental PyArrow engine for CSV parsing (may have issues with dates/nulls)"
      },
      {
        "name": "DATASET_FOLDERS",
        "default": false,
        "lifecycle": "development",
        "description": "Allow metrics and columns to be grouped into folders in the chart builder"
      },
      {
        "name": "DATE_RANGE_TIMESHIFTS_ENABLED",
        "default": false,
        "lifecycle": "development",
        "description": "Enable support for date range timeshifts (e.g., \"2015-01-03 : 2015-01-04\") in addition to relative timeshifts (e.g., \"1 day ago\")"
      },
      {
        "name": "ENABLE_ADVANCED_DATA_TYPES",
        "default": false,
        "lifecycle": "development",
        "description": "Enables advanced data type support"
      },
      {
        "name": "ENABLE_EXTENSIONS",
        "default": false,
        "lifecycle": "development",
        "description": "Enable Superset extensions for custom functionality without modifying core"
      },
      {
        "name": "MATRIXIFY",
        "default": false,
        "lifecycle": "development",
        "description": "Enable Matrixify feature for matrix-style chart layouts"
      },
      {
        "name": "OPTIMIZE_SQL",
        "default": false,
        "lifecycle": "development",
        "description": "Try to optimize SQL queries \u2014 for now only predicate pushdown is supported"
      },
      {
        "name": "PRESTO_EXPAND_DATA",
        "default": false,
        "lifecycle": "development",
        "description": "Expand nested types in Presto into extra columns/arrays. Experimental, doesn't work with all nested types."
      },
      {
        "name": "TABLE_V2_TIME_COMPARISON_ENABLED",
        "default": false,
        "lifecycle": "development",
        "description": "Enable Table V2 time comparison feature"
      },
      {
        "name": "TAGGING_SYSTEM",
        "default": false,
        "lifecycle": "development",
        "description": "Enables the tagging system for organizing assets"
      }
    ],
    "testing": [
      {
        "name": "ALERT_REPORTS",
        "default": false,
        "lifecycle": "testing",
        "description": "Enables Alerts and Reports functionality",
        "docs": "https://superset.apache.org/docs/configuration/alerts-reports"
      },
      {
        "name": "ALERT_REPORTS_FILTER",
        "default": false,
        "lifecycle": "testing",
        "description": "Enables filter functionality in Alerts and Reports"
      },
      {
        "name": "ALERT_REPORT_SLACK_V2",
        "default": false,
        "lifecycle": "testing",
        "description": "Enables Slack V2 integration for Alerts and Reports"
      },
      {
        "name": "ALERT_REPORT_WEBHOOK",
        "default": false,
        "lifecycle": "testing",
        "description": "Enables webhook integration for Alerts and Reports"
      },
      {
        "name": "ALLOW_FULL_CSV_EXPORT",
        "default": false,
        "lifecycle": "testing",
        "description": "Allow users to export full CSV of table viz type. Warning: Could cause server memory/compute issues with large datasets."
      },
      {
        "name": "CACHE_IMPERSONATION",
        "default": false,
        "lifecycle": "testing",
        "description": "Enable caching per impersonation key in datasources with user impersonation"
      },
      {
        "name": "DATE_FORMAT_IN_EMAIL_SUBJECT",
        "default": false,
        "lifecycle": "testing",
        "description": "Allow users to optionally specify date formats in email subjects",
        "docs": "https://superset.apache.org/docs/configuration/alerts-reports"
      },
      {
        "name": "DYNAMIC_PLUGINS",
        "default": false,
        "lifecycle": "testing",
        "description": "Enable dynamic plugin loading"
      },
      {
        "name": "ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT",
        "default": false,
        "lifecycle": "testing",
        "description": "Generate screenshots (PDF/JPG) of dashboards using web driver. Depends on ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS."
      },
      {
        "name": "ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS",
        "default": false,
        "lifecycle": "testing",
        "description": "Enables endpoints to cache and retrieve dashboard screenshots via webdriver. Requires Celery and THUMBNAIL_CACHE_CONFIG."
      },
      {
        "name": "ENABLE_SUPERSET_META_DB",
        "default": false,
        "lifecycle": "testing",
        "description": "Allows users to add a superset:// DB that can query across databases. Experimental with potential security/performance risks. See SUPERSET_META_DB_LIMIT.",
        "docs": "https://superset.apache.org/docs/configuration/databases/#querying-across-databases"
      },
      {
        "name": "ESTIMATE_QUERY_COST",
        "default": false,
        "lifecycle": "testing",
        "description": "Enable query cost estimation. Supported in Presto, Postgres, and BigQuery. Requires `cost_estimate_enabled: true` in database `extra` attribute."
      },
      {
        "name": "GLOBAL_ASYNC_QUERIES",
        "default": false,
        "lifecycle": "testing",
        "description": "Enable async queries for dashboards and Explore via WebSocket. Requires Redis 5.0+ and Celery workers.",
        "docs": "https://superset.apache.org/docs/contributing/misc#async-chart-queries"
      },
      {
        "name": "IMPERSONATE_WITH_EMAIL_PREFIX",
        "default": false,
        "lifecycle": "testing",
        "description": "When impersonating a user, use the email prefix instead of username"
      },
      {
        "name": "PLAYWRIGHT_REPORTS_AND_THUMBNAILS",
        "default": false,
        "lifecycle": "testing",
        "description": "Replace Selenium with Playwright for reports and thumbnails. Supports deck.gl visualizations. Requires playwright pip package."
      },
      {
        "name": "RLS_IN_SQLLAB",
        "default": false,
        "lifecycle": "testing",
        "description": "Apply RLS rules to SQL Lab queries. Requires query parsing/manipulation. May break queries or allow RLS bypass. Use with care!"
      },
      {
        "name": "SSH_TUNNELING",
        "default": false,
        "lifecycle": "testing",
        "description": "Allow users to enable SSH tunneling when creating a DB connection. DB engine must support SSH Tunnels.",
        "docs": "https://superset.apache.org/docs/configuration/setup-ssh-tunneling"
      },
      {
        "name": "USE_ANALOGOUS_COLORS",
        "default": false,
        "lifecycle": "testing",
        "description": "Use analogous colors in charts"
      }
    ],
    "stable": [
      {
        "name": "ALERTS_ATTACH_REPORTS",
        "default": true,
        "lifecycle": "stable",
        "description": "When enabled, alerts send email/slack with screenshot AND link. When disabled, alerts send only link; reports still send screenshot.",
        "category": "runtime_config"
      },
      {
        "name": "ALLOW_ADHOC_SUBQUERY",
        "default": false,
        "lifecycle": "stable",
        "description": "Allow ad-hoc subqueries in SQL Lab",
        "category": "runtime_config"
      },
      {
        "name": "CACHE_QUERY_BY_USER",
        "default": false,
        "lifecycle": "stable",
        "description": "Enable caching per user key for Superset cache",
        "category": "runtime_config"
      },
      {
        "name": "CSS_TEMPLATES",
        "default": true,
        "lifecycle": "stable",
        "description": "Enables CSS Templates in Settings menu and dashboard forms",
        "category": "runtime_config"
      },
      {
        "name": "DASHBOARD_RBAC",
        "default": false,
        "lifecycle": "stable",
        "description": "Role-based access control for dashboards",
        "docs": "https://superset.apache.org/docs/using-superset/creating-your-first-dashboard",
        "category": "runtime_config"
      },
      {
        "name": "DASHBOARD_VIRTUALIZATION",
        "default": true,
        "lifecycle": "stable",
        "description": "Enables dashboard virtualization for improved performance",
        "category": "path_to_deprecation"
      },
      {
        "name": "DATAPANEL_CLOSED_BY_DEFAULT",
        "default": false,
        "lifecycle": "stable",
        "description": "Data panel closed by default in chart builder",
        "category": "runtime_config"
      },
      {
        "name": "DRILL_BY",
        "default": true,
        "lifecycle": "stable",
        "description": "Enable drill-by functionality in charts",
        "category": "runtime_config"
      },
      {
        "name": "DRUID_JOINS",
        "default": false,
        "lifecycle": "stable",
        "description": "Enable Druid JOINs (requires Druid version with JOIN support)",
        "category": "runtime_config"
      },
      {
        "name": "EMBEDDABLE_CHARTS",
        "default": true,
        "lifecycle": "stable",
        "description": "Enable sharing charts with embedding",
        "category": "runtime_config"
      },
      {
        "name": "EMBEDDED_SUPERSET",
        "default": false,
        "lifecycle": "stable",
        "description": "Enable embedded Superset functionality",
        "category": "runtime_config"
      },
      {
        "name": "ENABLE_FACTORY_RESET_COMMAND",
        "default": false,
        "lifecycle": "stable",
        "description": "Enable factory reset CLI command",
        "category": "internal"
      },
      {
        "name": "ENABLE_TEMPLATE_PROCESSING",
        "default": false,
        "lifecycle": "stable",
        "description": "Enable Jinja templating in SQL queries",
        "category": "runtime_config"
      },
      {
        "name": "ESCAPE_MARKDOWN_HTML",
        "default": false,
        "lifecycle": "stable",
        "description": "Escape HTML in Markdown components (rather than rendering it)",
        "category": "runtime_config"
      },
      {
        "name": "FILTERBAR_CLOSED_BY_DEFAULT",
        "default": false,
        "lifecycle": "stable",
        "description": "Filter bar closed by default when opening dashboard",
        "category": "runtime_config"
      },
      {
        "name": "FORCE_GARBAGE_COLLECTION_AFTER_EVERY_REQUEST",
        "default": false,
        "lifecycle": "stable",
        "description": "Force garbage collection after every request",
        "category": "runtime_config"
      },
      {
        "name": "LISTVIEWS_DEFAULT_CARD_VIEW",
        "default": false,
        "lifecycle": "stable",
        "description": "Use card view as default in list views",
        "category": "runtime_config"
      },
      {
        "name": "MENU_HIDE_USER_INFO",
        "default": false,
        "lifecycle": "stable",
        "description": "Hide user info in the navigation menu",
        "category": "runtime_config"
      },
      {
        "name": "SLACK_ENABLE_AVATARS",
        "default": false,
        "lifecycle": "stable",
        "description": "Use Slack avatars for users. Requires adding slack-edge.com to TALISMAN_CONFIG.",
        "category": "runtime_config"
      },
      {
        "name": "SQLLAB_BACKEND_PERSISTENCE",
        "default": true,
        "lifecycle": "stable",
        "description": "Enable SQL Lab backend persistence for query state",
        "category": "runtime_config"
      },
      {
        "name": "SQLLAB_FORCE_RUN_ASYNC",
        "default": false,
        "lifecycle": "stable",
        "description": "Force SQL Lab to run async via Celery regardless of database settings",
        "category": "runtime_config"
      },
      {
        "name": "THUMBNAILS",
        "default": false,
        "lifecycle": "stable",
        "description": "Exposes API endpoint to compute thumbnails",
        "docs": "https://superset.apache.org/docs/configuration/cache",
        "category": "runtime_config"
      }
    ],
    "deprecated": [
      {
        "name": "AVOID_COLORS_COLLISION",
        "default": true,
        "lifecycle": "deprecated",
        "description": "Avoid color collisions in charts by using distinct colors"
      },
      {
        "name": "DRILL_TO_DETAIL",
        "default": true,
        "lifecycle": "deprecated",
        "description": "Enable drill-to-detail functionality in charts"
      },
      {
        "name": "ENABLE_JAVASCRIPT_CONTROLS",
        "default": false,
        "lifecycle": "deprecated",
        "description": "Allow JavaScript in chart controls. WARNING: XSS security vulnerability!"
      }
    ]
  }
}
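A short sketch of how a docs component might consume this generated file, grouping flags by lifecycle bucket; the import path and the `Flag` type below are inferred from the JSON shape above and are illustrative, not exported anywhere in this change:

// Illustration only: summarize the generated flags per lifecycle bucket.
import featureFlags from '../static/feature-flags.json';

type Flag = {
  name: string;
  default: boolean;
  lifecycle: 'development' | 'testing' | 'stable' | 'deprecated';
  description: string;
  docs?: string;
  category?: string;
};

const buckets = featureFlags.flags as unknown as Record<string, Flag[]>;

Object.entries(buckets).forEach(([lifecycle, flags]) => {
  const onByDefault = flags.filter((flag) => flag.default).length;
  console.log(`${lifecycle}: ${onByDefault}/${flags.length} on by default`);
});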
BIN
docs/static/img/databases/altinity.png
vendored
Normal file
After Width: | Height: | Size: 19 KiB |
BIN
docs/static/img/databases/amazon-redshift.jpg
vendored
Before Width: | Height: | Size: 17 KiB |
BIN
docs/static/img/databases/apache-druid.jpeg
vendored
Before Width: | Height: | Size: 210 KiB |
BIN
docs/static/img/databases/apache-impala.png
vendored
Before Width: | Height: | Size: 5.1 KiB After Width: | Height: | Size: 25 KiB |
BIN
docs/static/img/databases/apache-solr.png
vendored
Normal file
After Width: | Height: | Size: 5.5 KiB |
40
docs/static/img/databases/apache-solr.svg
vendored
Normal file
@@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
  viewBox="0 0 207.3 107.8" enable-background="new 0 0 207.3 107.8" xml:space="preserve">
<g>
  <path fill="#FFFFFF" d="M43.1,73.3c-2.3-1.2-5-2.1-7.9-2.6c-2.8-0.5-5.7-0.7-8.6-0.7c-2.3,0-4.7-0.2-7-0.6
    c-2.3-0.4-4.3-1.1-6.1-2.1c-1.7-1-3.2-2.4-4.3-4.1c-1.1-1.7-1.6-4-1.6-6.7C7.8,54,8.3,52,9.4,50.3c1.1-1.7,2.5-3.1,4.1-4.1
    c1.7-1,3.7-1.8,5.9-2.3c3.8-0.8,7.7-0.9,11.7-0.4c1.6,0.2,3.1,0.6,4.6,1.2c1.5,0.5,2.9,1.3,4.1,2.2c1.2,0.9,2.3,2.1,3.2,3.4
    l0.5,0.8l3.8-1.6l-0.8-1.1c-1-1.5-2.2-2.8-3.4-3.9c-1.2-1.2-2.7-2.1-4.4-2.9c-1.6-0.8-3.5-1.4-5.5-1.8c-2-0.4-4.4-0.6-7-0.6
    c-2.5,0-5.1,0.3-7.7,0.9c-2.6,0.6-5.1,1.5-7.3,2.8c-2.2,1.3-4.1,3.1-5.5,5.3c-1.4,2.2-2.1,5-2.1,8.1c0,3.3,0.7,6.2,2,8.4
    c1.3,2.3,3.1,4.1,5.2,5.4c2.1,1.3,4.6,2.3,7.3,2.8c2.7,0.5,5.5,0.8,8.4,0.8c2.3,0,4.7,0.2,7.2,0.5c2.4,0.3,4.7,1,6.7,2
    c2,1,3.6,2.3,4.9,4c1.2,1.6,1.8,3.8,1.8,6.6c0,2.4-0.6,4.5-1.8,6.2c-1.2,1.7-2.8,3.1-4.7,4.2c-1.9,1.1-4.1,1.9-6.5,2.4
    c-2.4,0.5-4.8,0.8-7.1,0.8c-3.9,0-7.7-0.7-11.4-2.2c-3.7-1.5-6.9-3.6-9.6-6.5l-0.7-0.8l-3,2.6l0.8,0.8c2.7,2.7,6.1,5.1,10,7
    c4,2,8.7,3,13.9,3c2.5,0,5.2-0.3,7.9-0.9c2.8-0.6,5.4-1.6,7.8-2.9c2.4-1.4,4.4-3.2,6-5.4c1.6-2.3,2.4-5.1,2.4-8.4
    c0-3.4-0.8-6.2-2.2-8.5C47.4,76.3,45.5,74.6,43.1,73.3z"/>
  <path fill="#FFFFFF" d="M95.3,63.7c-2-2.3-4.4-4.2-7.2-5.7c-2.8-1.5-6-2.2-9.4-2.2c-3.1,0-6.1,0.7-8.9,2c-2.7,1.3-5.2,3.1-7.2,5.4
    c-2,2.2-3.7,4.9-4.9,7.9c-1.2,3-1.8,6.2-1.8,9.4c0,3.1,0.6,6.1,1.6,9c1.1,2.9,2.6,5.5,4.6,7.8c2,2.3,4.4,4.2,7.2,5.6
    c2.8,1.4,5.9,2.2,9.3,2.2h0h0c3.2-0.1,6.2-0.8,9-2.1c2.7-1.3,5.2-3.2,7.2-5.4c2-2.2,3.7-4.9,4.8-7.8c1.2-2.9,1.7-6,1.7-9.3
    c0-2.9-0.5-5.9-1.6-8.8C98.8,68.7,97.3,66,95.3,63.7z M78.7,101.1c-2.7,0-5.2-0.6-7.4-1.7c-2.2-1.1-4.2-2.7-5.8-4.6
    c-1.7-1.9-3-4.1-3.9-6.6c-0.9-2.5-1.5-5.1-1.6-7.8c0-2.4,0.4-4.9,1.3-7.4c0.9-2.5,2.2-4.7,3.8-6.7c1.7-2,3.7-3.6,6-4.8
    c2.3-1.2,4.8-1.8,7.6-1.8c2.6,0,5.1,0.6,7.3,1.7c2.3,1.2,4.3,2.7,6,4.6c1.7,1.9,3,4.1,4,6.6c1,2.5,1.5,5.1,1.5,7.7
    c0,2.4-0.4,4.9-1.3,7.4c-0.9,2.5-2.2,4.7-3.8,6.7c-1.7,2-3.7,3.6-6,4.8C84.1,100.5,81.5,101.1,78.7,101.1z"/>
  <path fill="#FFFFFF" d="M122.1,100c-0.7,0.2-1.3,0.3-1.8,0.4c-0.5,0.1-1.1,0.2-1.7,0.3c-0.6,0.1-1.1,0.1-1.6,0.1
    c-1.3,0-2.3-0.4-3-1.4c-0.8-1-1.1-2-1.1-3V38h-4v58.3c0,2.2,0.7,4.1,2.1,5.8c1.4,1.7,3.4,2.5,6,2.5c0.8,0,1.6,0,2.4-0.1
    c0.7-0.1,1.4-0.2,2-0.3c0.6-0.1,1.3-0.2,2.1-0.4l1.4-0.3l-1.8-3.7L122.1,100z"/>
  <path fill="#FFFFFF" d="M139.3,59.8c-2.1,1.5-3.9,3.3-5.5,5.5v-8.8h-4v47.7h4V74.5c0.6-1.9,1.3-3.8,2.3-5.4c1-1.7,2.1-3.1,3.5-4.4
    c1.4-1.2,3-2.2,4.8-3c1.8-0.7,3.8-1.2,6-1.3l1.1-0.1v-3.9h-1.1C146.2,56.5,142.4,57.6,139.3,59.8z"/>
</g>
<g>
  <path fill="#262130" d="M196.2,13l-33,35.7l41.5-19.1C203.5,23.2,200.5,17.5,196.2,13z"/>
  <path fill="#262130" d="M171.7,2.4c-4.6,0-8.9,0.9-12.9,2.6l-4.4,37.3l22.1-39.5C175,2.5,173.4,2.4,171.7,2.4z"/>
  <path fill="#262130" d="M205,31.6l-39.3,22l36.9-4.4c1.7-4.1,2.7-8.5,2.7-13.2C205.3,34.5,205.2,33,205,31.6z"/>
  <path fill="#262130" d="M190.5,63.8c4.4-3,8-7,10.6-11.6L166.6,59L190.5,63.8z"/>
  <path fill="#262130" d="M178.6,3.1l-19.2,41.7l35.7-33C190.5,7.5,184.9,4.4,178.6,3.1z"/>
  <path fill="#262130" d="M174.6,69.4c3.7-0.3,7.3-1.2,10.5-2.6l-19.4-2.3L174.6,69.4z"/>
  <path fill="#262130" d="M141.1,22.2c-1.5,3.3-2.5,7-2.8,10.8l5.2,9.3L141.1,22.2z"/>
  <path fill="#262130" d="M155.9,6.3c-4.7,2.5-8.8,6.2-11.8,10.6l4.9,24.5L155.9,6.3z"/>
</g>
</svg>
After Width: | Height: | Size: 3.6 KiB |
BIN
docs/static/img/databases/apache-spark.png
vendored
Normal file
After Width: | Height: | Size: 26 KiB |
BIN
docs/static/img/databases/ascend.webp
vendored
Normal file
After Width: | Height: | Size: 35 KiB |
BIN
docs/static/img/databases/aws-aurora.jpg
vendored
Normal file
After Width: | Height: | Size: 78 KiB |
BIN
docs/static/img/databases/aws.png
vendored
Normal file
After Width: | Height: | Size: 7.7 KiB |
1
docs/static/img/databases/azure.svg
vendored
Normal file
@@ -0,0 +1 @@
<svg id="f2f5701e-cb3b-4d6f-b407-5866ec5b7784" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 18"><defs><linearGradient id="a891901b-79ae-490a-8568-9c4334417d35" x1="9" y1="5.38" x2="9" gradientUnits="userSpaceOnUse"><stop offset="0.199" stop-color="#005ba1"/><stop offset="1" stop-color="#0078d4"/></linearGradient><linearGradient id="bbdaa009-2281-4da8-9e89-6f41689e91a7" x1="9" y1="12.713" x2="9" y2="5.287" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#198ab3"/><stop offset="0.172" stop-color="#32bedd"/><stop offset="0.5" stop-color="#50e6ff"/><stop offset="0.5" stop-color="#4fe4fd"/><stop offset="0.5" stop-color="#4bddf8"/><stop offset="0.5" stop-color="#44d2ee"/><stop offset="0.5" stop-color="#3ac1e0"/><stop offset="0.5" stop-color="#2dabce"/><stop offset="0.5" stop-color="#1d90b8"/><stop offset="0.5" stop-color="#198ab3"/><stop offset="0.662" stop-color="#32bedd"/><stop offset="0.975" stop-color="#50e6ff"/></linearGradient></defs><path d="M9,0,1.15,4.49v8.97L9,18l7.85-4.49v-9Zm6.4,12.57L9,16.27,2.6,12.609V5.38L9,1.68l6.4,3.71Z" fill="#0078d4"/><polygon points="9 0 9 0 1.15 4.49 2.6 5.38 9 1.68 9 1.68 15.4 5.38 16.85 4.49 9 0" fill="url(#a891901b-79ae-490a-8568-9c4334417d35)"/><path d="M12.74,10.475a.73.73,0,0,0-.323-.286A5.835,5.835,0,0,0,7.939,6.843L14.416,3.1,12.91,2.236,5.534,6.5A.75.75,0,0,0,5.91,7.9.684.684,0,0,0,6,7.877l.125.523a4.319,4.319,0,0,1,4.837,2.238L3.613,14.885l1.5.866L12.466,11.5a.729.729,0,0,0,.242-.236l.075-.018c-.007-.029-.018-.055-.025-.084A.735.735,0,0,0,12.74,10.475Z" fill="#50e6ff"/><path d="M12.091,9.013a1.85,1.85,0,1,0,1.85,1.85A1.85,1.85,0,0,0,12.091,9.013ZM5.909,5.267a1.85,1.85,0,1,0,1.85,1.85A1.85,1.85,0,0,0,5.909,5.267Z" fill="url(#bbdaa009-2281-4da8-9e89-6f41689e91a7)"/></svg>
After Width: | Height: | Size: 1.7 KiB |
BIN
docs/static/img/databases/celerdata.png
vendored
Normal file
After Width: | Height: | Size: 41 KiB |
BIN
docs/static/img/databases/cloudflare.png
vendored
Normal file
After Width: | Height: | Size: 2.8 KiB |
BIN
docs/static/img/databases/cockroachdb.png
vendored
Normal file
After Width: | Height: | Size: 237 KiB |
BIN
docs/static/img/databases/cratedb.png
vendored
Normal file
After Width: | Height: | Size: 1.5 KiB |
BIN
docs/static/img/databases/db2.png
vendored
Before Width: | Height: | Size: 7.3 KiB |
BIN
docs/static/img/databases/duckdb.png
vendored
Normal file
After Width: | Height: | Size: 8.2 KiB |
BIN
docs/static/img/databases/elasticsearch.png
vendored
Normal file
After Width: | Height: | Size: 13 KiB |
BIN
docs/static/img/databases/google-biquery.png
vendored
Before Width: | Height: | Size: 15 KiB |
BIN
docs/static/img/databases/greenplum.jpeg
vendored
Before Width: | Height: | Size: 7.4 KiB |
BIN
docs/static/img/databases/ibmdb2.png
vendored
Before Width: | Height: | Size: 14 KiB |
BIN
docs/static/img/databases/imply.png
vendored
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
docs/static/img/databases/kusto.png
vendored
Normal file
After Width: | Height: | Size: 2.4 KiB |
BIN
docs/static/img/databases/monet.png
vendored
Before Width: | Height: | Size: 21 KiB |
BIN
docs/static/img/databases/motherduck.png
vendored
Normal file
After Width: | Height: | Size: 81 KiB |
BIN
docs/static/img/databases/mssql-server.png
vendored
Before Width: | Height: | Size: 679 KiB |
BIN
docs/static/img/databases/mssql.jpg
vendored
Before Width: | Height: | Size: 38 KiB |
BIN
docs/static/img/databases/mysql.jpg
vendored
Before Width: | Height: | Size: 42 KiB |
BIN
docs/static/img/databases/oracle-logo.png
vendored
Before Width: | Height: | Size: 10 KiB |
BIN
docs/static/img/databases/oracle.png
vendored
Before Width: | Height: | Size: 8.0 KiB |
BIN
docs/static/img/databases/pinot.png
vendored
Before Width: | Height: | Size: 7.0 KiB |
BIN
docs/static/img/databases/postgresql.jpg
vendored
Before Width: | Height: | Size: 19 KiB |
BIN
docs/static/img/databases/risingwave.png
vendored
Normal file
After Width: | Height: | Size: 1.5 KiB |
BIN
docs/static/img/databases/sap-hana.jpg
vendored
Before Width: | Height: | Size: 20 KiB |
BIN
docs/static/img/databases/shillelagh.png
vendored
Normal file
After Width: | Height: | Size: 111 KiB |
BIN
docs/static/img/databases/singlestore.png
vendored
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
docs/static/img/databases/snowflake.png
vendored
Before Width: | Height: | Size: 15 KiB |
BIN
docs/static/img/databases/sqlite.jpg
vendored
Before Width: | Height: | Size: 13 KiB |
BIN
docs/static/img/databases/starburst.png
vendored
Normal file
After Width: | Height: | Size: 20 KiB |
43
docs/static/img/databases/superset.svg
vendored
Normal file
@@ -0,0 +1,43 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<svg width="100%" height="100%" viewBox="0 0 266 69" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
<path d="M73.79,15.23C67.32,15.23 61.36,18.87 55.6,25.23C49.94,18.77 43.88,15.23 37.11,15.23C25.9,15.23 17.72,23.23 17.72,34C17.72,44.77 25.9,52.67 37.11,52.67C44,52.67 49.34,49.44 55.3,43C61.06,49.46 66.92,52.69 73.79,52.69C85,52.67 93.18,44.8 93.18,34C93.18,23.2 85,15.23 73.79,15.23ZM37.19,41.37C32.44,41.37 29.61,38.24 29.61,34.1C29.61,29.96 32.44,26.74 37.19,26.74C41.19,26.74 44.46,29.96 48,34.3C44.66,38.34 41.13,41.37 37.19,41.37ZM73.45,41.37C69.51,41.37 66.18,38.24 62.64,34.1C66.28,29.76 69.41,26.74 73.45,26.74C78.2,26.74 81,30 81,34.1C81,38.2 78.2,41.37 73.45,41.37Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M63.74,50L71.28,41C68.28,40.1 65.51,37.4 62.64,34.05L55.3,43C57.703,45.788 60.556,48.154 63.74,50Z" style="fill:rgb(32,167,201);fill-rule:nonzero;"/>
<g id="Main">
<g id="Superset">
<g id="Full-Lockup-With-Text">
<g id="Group-7">
<g id="Group-17">
<g id="Superset-Copy">
<g>
<path d="M116.72,40.39C116.751,39.474 116.36,38.592 115.66,38C114.539,37.193 113.272,36.609 111.93,36.28C109.421,35.66 107.048,34.582 104.93,33.1C103.37,31.922 102.481,30.053 102.55,28.1C102.528,26.015 103.555,24.052 105.28,22.88C107.327,21.458 109.79,20.754 112.28,20.88C114.812,20.767 117.301,21.577 119.28,23.16C120.994,24.509 121.961,26.601 121.88,28.78L121.88,28.88L116.82,28.88C116.861,27.778 116.419,26.71 115.61,25.96C114.667,25.171 113.457,24.773 112.23,24.85C111.077,24.779 109.934,25.104 108.99,25.77C108.263,26.344 107.842,27.224 107.85,28.15C107.867,28.99 108.298,29.769 109,30.23C110.313,31.008 111.726,31.603 113.2,32C115.582,32.553 117.81,33.633 119.72,35.16C121.197,36.462 122.013,38.362 121.94,40.33C122.008,42.418 121.013,44.404 119.3,45.6C117.238,46.985 114.78,47.662 112.3,47.53C109.663,47.589 107.072,46.823 104.89,45.34C102.838,43.996 101.66,41.648 101.81,39.2L101.81,39.09L107,39.09C106.889,40.389 107.42,41.664 108.42,42.5C109.597,43.291 111.004,43.671 112.42,43.58C113.571,43.658 114.716,43.348 115.67,42.7C116.371,42.144 116.762,41.283 116.72,40.39Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M137,44.4C136.453,45.359 135.672,46.164 134.73,46.74C132.116,48.188 128.835,47.72 126.73,45.6C125.583,44.267 125.01,42.24 125.01,39.52L125.01,27.85L130.21,27.85L130.21,39.58C130.131,40.629 130.379,41.678 130.92,42.58C131.434,43.208 132.22,43.551 133.03,43.5C133.767,43.516 134.498,43.38 135.18,43.1C135.764,42.836 136.268,42.422 136.64,41.9L136.64,27.85L141.86,27.85L141.86,47.18L137.41,47.18L137,44.4Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M162.87,38.05C162.99,40.508 162.286,42.937 160.87,44.95C159.569,46.68 157.492,47.658 155.33,47.56C154.4,47.575 153.478,47.384 152.63,47C151.843,46.61 151.158,46.042 150.63,45.34L150.63,54.62L145.43,54.62L145.43,27.85L150.13,27.85L150.44,30.13C150.968,29.331 151.673,28.664 152.5,28.18C153.363,27.707 154.336,27.469 155.32,27.49C157.535,27.403 159.644,28.467 160.89,30.3C162.313,32.49 163.013,35.072 162.89,37.68L162.87,38.05ZM157.65,37.65C157.71,36.118 157.397,34.595 156.74,33.21C156.228,32.144 155.132,31.476 153.95,31.51C153.253,31.49 152.562,31.656 151.95,31.99C151.393,32.322 150.937,32.799 150.63,33.37L150.63,41.86C150.942,42.394 151.4,42.828 151.95,43.11C152.573,43.411 153.259,43.558 153.95,43.54C155.082,43.61 156.161,43.032 156.73,42.05C157.376,40.819 157.684,39.439 157.62,38.05L157.65,37.65Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M174.21,47.56C171.699,47.674 169.258,46.696 167.52,44.88C165.828,43.026 164.93,40.579 165.02,38.07L165.02,37.36C164.918,34.784 165.761,32.258 167.39,30.26C170.696,26.757 176.29,26.572 179.82,29.85C181.338,31.617 182.119,33.903 182,36.23L182,39.07L170.43,39.07L170.43,39.18C170.48,40.34 170.933,41.447 171.71,42.31C172.51,43.146 173.634,43.595 174.79,43.54C175.762,43.562 176.732,43.444 177.67,43.19C178.539,42.91 179.377,42.542 180.17,42.09L181.58,45.32C180.656,46.037 179.609,46.579 178.49,46.92C177.108,47.366 175.662,47.582 174.21,47.56ZM173.74,31.56C172.841,31.53 171.983,31.946 171.45,32.67C170.859,33.531 170.513,34.537 170.45,35.58L170.5,35.67L176.9,35.67L176.9,35.21C176.949,34.261 176.674,33.322 176.12,32.55C175.546,31.835 174.655,31.446 173.74,31.51L173.74,31.56Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M195.3,32.33L193.38,32.33C192.711,32.303 192.047,32.47 191.47,32.81C190.964,33.141 190.567,33.614 190.33,34.17L190.33,47.18L185.13,47.18L185.13,27.85L190,27.85L190.23,30.71C190.616,29.787 191.224,28.972 192,28.34C192.71,27.776 193.594,27.476 194.5,27.49C194.741,27.488 194.982,27.508 195.22,27.55L195.89,27.7L195.3,32.33Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M208.32,41.86C208.308,41.257 207.996,40.698 207.49,40.37C206.544,39.809 205.498,39.435 204.41,39.27C202.553,38.979 200.785,38.271 199.24,37.2C198.087,36.32 197.433,34.93 197.49,33.48C197.487,31.814 198.265,30.24 199.59,29.23C201.198,28.003 203.19,27.386 205.21,27.49C207.312,27.38 209.391,27.991 211.1,29.22C212.489,30.234 213.279,31.882 213.2,33.6L213.2,33.71L208.2,33.71C208.226,33.002 207.958,32.314 207.46,31.81C206.859,31.287 206.074,31.024 205.28,31.08C204.561,31.04 203.85,31.26 203.28,31.7C202.816,32.075 202.55,32.644 202.56,33.24C202.551,33.826 202.837,34.379 203.32,34.71C204.271,35.243 205.318,35.582 206.4,35.71C208.308,35.991 210.126,36.71 211.71,37.81C212.862,38.729 213.506,40.148 213.44,41.62C213.458,43.325 212.62,44.93 211.21,45.89C209.473,47.062 207.403,47.641 205.31,47.54C203.1,47.652 200.925,46.939 199.21,45.54C197.817,44.508 196.996,42.873 197,41.14L197,41.04L201.77,41.04C201.72,41.907 202.093,42.746 202.77,43.29C203.515,43.784 204.397,44.029 205.29,43.99C206.067,44.039 206.838,43.835 207.49,43.41C208.012,43.069 208.326,42.484 208.32,41.86Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
<path d="M224.86,47.56C222.352,47.674 219.914,46.696 218.18,44.88C216.488,43.026 215.59,40.579 215.68,38.07L215.68,37.36C215.579,34.786 216.419,32.261 218.04,30.26C221.346,26.757 226.94,26.572 230.47,29.85C231.992,31.615 232.77,33.903 232.64,36.23L232.64,39.07L221.09,39.07L221.09,39.18C221.137,40.339 221.587,41.446 222.36,42.31C223.162,43.149 224.291,43.598 225.45,43.54C226.419,43.562 227.385,43.444 228.32,43.19C229.193,42.912 230.034,42.544 230.83,42.09L232.24,45.32C231.315,46.035 230.268,46.577 229.15,46.92C227.765,47.366 226.315,47.582 224.86,47.56ZM224.4,31.56C223.5,31.526 222.641,31.943 222.11,32.67C221.519,33.532 221.174,34.537 221.11,35.58L221.17,35.67L227.57,35.67L227.57,35.21C227.619,34.261 227.344,33.322 226.79,32.55C226.214,31.832 225.318,31.442 224.4,31.51L224.4,31.56Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
|
||||
<path d="M242.35,23.11L242.35,27.85L245.61,27.85L245.61,31.51L242.35,31.51L242.35,41.36C242.296,41.937 242.465,42.513 242.82,42.97C243.15,43.299 243.604,43.474 244.07,43.45C244.304,43.451 244.538,43.435 244.77,43.4C245.003,43.363 245.233,43.313 245.46,43.25L245.91,47.02C245.408,47.195 244.893,47.332 244.37,47.43C243.834,47.516 243.293,47.56 242.75,47.56C241.219,47.662 239.712,47.126 238.59,46.08C237.508,44.765 236.984,43.077 237.13,41.38L237.13,31.51L234.31,31.51L234.31,27.85L237.13,27.85L237.13,23.11L242.35,23.11Z" style="fill:rgb(72,72,72);fill-rule:nonzero;"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<path d="M55.6,25.22C53.213,22.392 50.378,19.973 47.21,18.06L39.66,27.16C42.53,28.16 45.07,30.74 47.77,34.03L48.07,34.24L55.6,25.22Z" style="fill:rgb(32,167,201);fill-rule:nonzero;"/>
|
||||
</svg>
|
||||
After Width: | Height: | Size: 8.7 KiB |
BIN docs/static/img/databases/trino2.jpg vendored
Before Width: | Height: | Size: 35 KiB |
@@ -8,6 +8,8 @@
    "strict": false,
    "jsx": "react-jsx",
    "moduleResolution": "node",
    "resolveJsonModule": true,
    "esModuleInterop": true,
    "types": ["@docusaurus/module-type-aliases"],
    "paths": {
      "@superset-ui/core": ["../superset-frontend/packages/superset-ui-core/src"],
@@ -441,4 +441,4 @@ FEATURE_FLAGS = {
}
```

-A current list of feature flags can be found in [RESOURCES/FEATURE_FLAGS.md](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md).
+A current list of feature flags can be found in the [Feature Flags](/docs/configuration/feature-flags) documentation.
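For readers skimming the hunk above: `FEATURE_FLAGS` is a plain Python dictionary defined in `superset_config.py`. Below is a minimal sketch of what such a block can look like; the flag names exist in recent Superset releases but are chosen purely for illustration, so defer to the linked Feature Flags page for the authoritative list.

```python
# superset_config.py -- a minimal sketch, not a recommended baseline.
# Flag availability varies by release; check the Feature Flags docs.
FEATURE_FLAGS = {
    "ALERT_REPORTS": True,    # turn on the Alerts & Reports feature
    "DASHBOARD_RBAC": True,   # per-dashboard role-based access control
}
```

Superset reads this dictionary at startup, which is why the docs touched by the next hunk ask for a restart after changing it.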
@@ -51,7 +51,7 @@ Restart Superset for this configuration change to take effect.

#### Making a Dashboard Public

-1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) to `superset_config.py`
+1. Add the `'DASHBOARD_RBAC': True` [Feature Flag](/docs/configuration/feature-flags) to `superset_config.py`
2. Add the `Public` role to your dashboard as described [here](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/#manage-access-to-dashboards)
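A sketch of the `superset_config.py` change that step 1 describes, paired with the optional public-role mapping some deployments use alongside it; the `PUBLIC_ROLE_LIKE` line is an assumption for illustration, not something this hunk prescribes.

```python
# superset_config.py -- sketch of the change described in step 1.
FEATURE_FLAGS = {
    "DASHBOARD_RBAC": True,  # lets roles (e.g. Public) be set per dashboard
}

# Assumption for illustration: deployments that expose public dashboards
# often grant anonymous visitors Gamma-like permissions this way.
PUBLIC_ROLE_LIKE = "Gamma"
```

After a restart, the `Public` role from step 2 can then be attached to individual dashboards.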

#### Embedding a Public Dashboard