Mirror of https://github.com/apache/superset.git, synced 2026-05-13 11:55:16 +00:00

Compare commits: oauth-exce...explorable (47 commits)

7e917a18e7, 76fd626d57, df2cad1aed, 278e982ab0, e6beebc88c, 8a9cca546b, 9997cdeb62,
ff20d991ab, 97a0eb5ffa, 3891cfeeb3, 97e52d9485, debcde2057, f6be0b4dea, 836dddafc6,
fb39bcbde3, 0348fe93bd, 9215d3f064, 4aa4985562, d07e209a9d, 29e335aa3e, e3dec47a5e,
0cc1f46516, 7aa9c63b66, 5ba6db46a7, d525b05d71, d14bcba501, 15d286aacf, 9e16d111fb,
297bd1e732, bfa930a3ac, befcf96027, 6f6567d5c9, 3fb58b996a, ffae2063e2, e1899f1014,
dfc6aad5f0, 837ea2a07f, b83596893a, a7e446d2ff, ccbdc2359e, 7e40403287, 4d83840f81,
4c77a527c5, 52b1530666, 70fd9ff617, ae415b93d5, 15bfab6b1e
@@ -24,9 +24,7 @@ notifications:
    discussions: notifications@superset.apache.org
github:
  pull_requests:
    del_branch_on_merge: true
    allow_update_branch: true
    del_branch_on_merge: true
  description: "Apache Superset is a Data Visualization and Data Exploration Platform"
  homepage: https://superset.apache.org/
  labels:

@@ -85,7 +83,6 @@ github:
      - cypress-matrix (5, chrome)
      - dependency-review
      - frontend-build
      - playwright-tests (chromium)
      - pre-commit (current)
      - pre-commit (previous)
      - test-mysql
@@ -1,15 +0,0 @@ (file removed)
{
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "jq -r '.tool_input.command // \"\"' | grep -qE '^git commit' && cd \"$CLAUDE_PROJECT_DIR\" && echo '🔍 Running pre-commit before commit...' && pre-commit run || true"
          }
        ]
      }
    ]
  }
}
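The removed hook reads the pending tool invocation as JSON on stdin and only triggers pre-commit when the command is a `git commit`. A minimal sketch of that decision logic, runnable outside Claude Code with a hypothetical payload:

```bash
# Feed a hypothetical PreToolUse payload through the same jq/grep pipeline
# the hook used; the sample JSON below is illustrative, not a real event.
payload='{"tool_input": {"command": "git commit -m \"fix typo\""}}'
echo "$payload" \
  | jq -r '.tool_input.command // ""' \
  | grep -qE '^git commit' \
  && echo "match: would run 'pre-commit run' before the commit"
```

The trailing `|| true` in the real hook command keeps a failing pre-commit run from blocking the tool call outright.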
@@ -3,3 +3,14 @@
For complete documentation on using GitHub Codespaces with Apache Superset, please see:

**[Setting up a Development Environment - GitHub Codespaces](https://superset.apache.org/docs/contributing/development#github-codespaces-cloud-development)**

## Pre-installed Development Environment

When you create a new Codespace from this repository, it automatically:

1. **Creates a Python virtual environment** using `uv venv`
2. **Installs all development dependencies** via `uv pip install -r requirements/development.txt`
3. **Sets up pre-commit hooks** with `pre-commit install`
4. **Activates the virtual environment** automatically in all terminals

The virtual environment is located at `/workspaces/{repository-name}/.venv` and is automatically activated through environment variables set in the devcontainer configuration.
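The four numbered steps above map to a handful of shell commands. A minimal sketch for reproducing the same environment by hand, assuming `uv` is already on PATH and you are at the repository root:

```bash
# Recreate the Codespace bootstrap manually; commands mirror the README above.
uv venv                                         # 1. create .venv
source .venv/bin/activate                       # 4. activate (automatic in Codespaces)
uv pip install -r requirements/development.txt  # 2. dev dependencies
pre-commit install                              # 3. git hooks
```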
@@ -1,19 +0,0 @@ (file removed)
{
  // Extend the base configuration
  "extends": "../devcontainer-base.json",

  "name": "Apache Superset Development (Default)",

  // Forward ports for development
  "forwardPorts": [9001],
  "portsAttributes": {
    "9001": {
      "label": "Superset (via Webpack Dev Server)",
      "onAutoForward": "notify",
      "visibility": "public"
    }
  },

  // Auto-start Superset on Codespace resume
  "postStartCommand": ".devcontainer/start-superset.sh"
}
@@ -1,39 +0,0 @@ (file removed)
{
  "name": "Apache Superset Development",
  // Keep this in sync with the base image in Dockerfile (ARG PY_VER)
  // Using the same base as Dockerfile, but non-slim for dev tools
  "image": "python:3.11.13-bookworm",

  "features": {
    "ghcr.io/devcontainers/features/docker-in-docker:2": {
      "moby": true,
      "dockerDashComposeVersion": "v2"
    },
    "ghcr.io/devcontainers/features/node:1": {
      "version": "20"
    },
    "ghcr.io/devcontainers/features/git:1": {},
    "ghcr.io/devcontainers/features/common-utils:2": {
      "configureZshAsDefaultShell": true
    },
    "ghcr.io/devcontainers/features/sshd:1": {
      "version": "latest"
    }
  },

  // Run commands after container is created
  "postCreateCommand": "chmod +x .devcontainer/setup-dev.sh && .devcontainer/setup-dev.sh",

  // VS Code customizations
  "customizations": {
    "vscode": {
      "extensions": [
        "ms-python.python",
        "ms-python.vscode-pylance",
        "charliermarsh.ruff",
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode"
      ]
    }
  }
}
@@ -13,7 +13,7 @@
  "features": {
    "ghcr.io/devcontainers/features/docker-in-docker:2": {
      "moby": false,
      "moby": true,
      "dockerDashComposeVersion": "v2"
    },
    "ghcr.io/devcontainers/features/node:1": {
@@ -3,30 +3,76 @@

echo "🔧 Setting up Superset development environment..."

# The universal image has most tools, just need Superset-specific libs
echo "📦 Installing Superset-specific dependencies..."
sudo apt-get update
sudo apt-get install -y \
  libsasl2-dev \
  libldap2-dev \
  libpq-dev \
  tmux \
  gh
# System dependencies and uv are now pre-installed in the Docker image
# This speeds up Codespace creation significantly!

# Install uv for fast Python package management
echo "📦 Installing uv..."
curl -LsSf https://astral.sh/uv/install.sh | sh
# Create virtual environment using uv
echo "🐍 Creating Python virtual environment..."
if ! uv venv; then
  echo "❌ Failed to create virtual environment"
  exit 1
fi

# Add cargo/bin to PATH for uv
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> ~/.bashrc
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> ~/.zshrc
# Install Python dependencies
echo "📦 Installing Python dependencies..."
if ! uv pip install -r requirements/development.txt; then
  echo "❌ Failed to install Python dependencies"
  echo "💡 You may need to run this manually after the Codespace starts"
  exit 1
fi

# Install pre-commit hooks
echo "🪝 Installing pre-commit hooks..."
if source .venv/bin/activate && pre-commit install; then
  echo "✅ Pre-commit hooks installed"
else
  echo "⚠️ Pre-commit hooks installation failed (non-critical)"
fi

# Install Claude Code CLI via npm
echo "🤖 Installing Claude Code..."
npm install -g @anthropic-ai/claude-code
if npm install -g @anthropic-ai/claude-code; then
  echo "✅ Claude Code installed"
else
  echo "⚠️ Claude Code installation failed (non-critical)"
fi

# Make the start script executable
chmod +x .devcontainer/start-superset.sh

# Add bashrc additions for automatic venv activation
echo "🔧 Setting up automatic environment activation..."
if [ -f ~/.bashrc ]; then
  # Check if we've already added our additions
  if ! grep -q "Superset Codespaces environment setup" ~/.bashrc; then
    echo "" >> ~/.bashrc
    cat .devcontainer/bashrc-additions >> ~/.bashrc
    echo "✅ Added automatic venv activation to ~/.bashrc"
  else
    echo "✅ Bashrc additions already present"
  fi
else
  # Create bashrc if it doesn't exist
  cat .devcontainer/bashrc-additions > ~/.bashrc
  echo "✅ Created ~/.bashrc with automatic venv activation"
fi

# Also add to zshrc since that's the default shell
if [ -f ~/.zshrc ] || [ -n "$ZSH_VERSION" ]; then
  if ! grep -q "Superset Codespaces environment setup" ~/.zshrc; then
    echo "" >> ~/.zshrc
    cat .devcontainer/bashrc-additions >> ~/.zshrc
    echo "✅ Added automatic venv activation to ~/.zshrc"
  fi
fi

echo "✅ Development environment setup complete!"
echo "🚀 Run '.devcontainer/start-superset.sh' to start Superset"
echo ""
echo "📝 The virtual environment will be automatically activated in new terminals"
echo ""
echo "🔄 To activate in this terminal, run:"
echo "   source ~/.bashrc"
echo ""
echo "🚀 To start Superset:"
echo "   start-superset"
echo ""
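One detail worth noting in the setup script above is the append-once pattern: it greps for a marker string before appending the bashrc additions, so re-running setup stays idempotent. A generalized sketch of the same idea (the marker text here is illustrative; the snippet path is taken from the script):

```bash
# Append a snippet to a shell rc file only once, keyed on a marker line.
marker="# >>> superset codespaces setup >>>"
snippet=".devcontainer/bashrc-additions"
if ! grep -qF "$marker" ~/.bashrc 2>/dev/null; then
  { echo "$marker"; cat "$snippet"; } >> ~/.bashrc
fi
```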
@@ -1,14 +1,14 @@
#!/bin/bash
# Startup script for Superset in Codespaces

# Log to a file for debugging
LOG_FILE="/tmp/superset-startup.log"
echo "[$(date)] Starting Superset startup script" >> "$LOG_FILE"
echo "[$(date)] User: $(whoami), PWD: $(pwd)" >> "$LOG_FILE"

echo "🚀 Starting Superset in Codespaces..."
echo "🌐 Frontend will be available at port 9001"

# Check if MCP is enabled
if [ "$ENABLE_MCP" = "true" ]; then
  echo "🤖 MCP Service will be available at port 5008"
fi

# Find the workspace directory (Codespaces clones as 'superset', not 'superset-2')
WORKSPACE_DIR=$(find /workspaces -maxdepth 1 -name "superset*" -type d | head -1)
if [ -n "$WORKSPACE_DIR" ]; then

@@ -18,32 +18,71 @@ else
  echo "📁 Using current directory: $(pwd)"
fi

# Check if docker is running
if ! docker info > /dev/null 2>&1; then
  echo "⏳ Waiting for Docker to start..."
  sleep 5
# Wait for Docker to be available
echo "⏳ Waiting for Docker to start..."
echo "[$(date)] Waiting for Docker..." >> "$LOG_FILE"
max_attempts=30
attempt=0
while ! docker info > /dev/null 2>&1; do
  if [ $attempt -eq $max_attempts ]; then
    echo "❌ Docker failed to start after $max_attempts attempts"
    echo "[$(date)] Docker failed to start after $max_attempts attempts" >> "$LOG_FILE"
    echo "🔄 Please restart the Codespace or run this script manually later"
    exit 1
  fi
  echo "   Attempt $((attempt + 1))/$max_attempts..."
  echo "[$(date)] Docker check attempt $((attempt + 1))/$max_attempts" >> "$LOG_FILE"
  sleep 2
  attempt=$((attempt + 1))
done
echo "✅ Docker is ready!"
echo "[$(date)] Docker is ready" >> "$LOG_FILE"

# Check if Superset containers are already running
if docker ps | grep -q "superset"; then
  echo "✅ Superset containers are already running!"
  echo ""
  echo "🌐 To access Superset:"
  echo "   1. Click the 'Ports' tab at the bottom of VS Code"
  echo "   2. Find port 9001 and click the globe icon to open"
  echo "   3. Wait 10-20 minutes for initial startup"
  echo ""
  echo "📝 Login credentials: admin/admin"
  exit 0
fi

# Clean up any existing containers
echo "🧹 Cleaning up existing containers..."
docker-compose -f docker-compose-light.yml --profile mcp down
docker-compose -f docker-compose-light.yml down

# Start services
echo "🏗️ Building and starting services..."
echo "🏗️ Starting Superset in background (daemon mode)..."
echo ""
echo "📝 Once started, login with:"
echo "   Username: admin"
echo "   Password: admin"
echo ""
echo "📋 Running in foreground with live logs (Ctrl+C to stop)..."

# Run docker-compose and capture exit code
if [ "$ENABLE_MCP" = "true" ]; then
  echo "🤖 Starting with MCP Service enabled..."
  docker-compose -f docker-compose-light.yml --profile mcp up
else
  docker-compose -f docker-compose-light.yml up
fi
# Start in detached mode
docker-compose -f docker-compose-light.yml up -d

echo ""
echo "✅ Docker Compose started successfully!"
echo ""
echo "📋 Important information:"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "⏱️ Initial startup takes 10-20 minutes"
echo "🌐 Check the 'Ports' tab for your Superset URL (port 9001)"
echo "👤 Login: admin / admin"
echo ""
echo "📊 Useful commands:"
echo "   docker-compose -f docker-compose-light.yml logs -f   # Follow logs"
echo "   docker-compose -f docker-compose-light.yml ps        # Check status"
echo "   docker-compose -f docker-compose-light.yml down      # Stop services"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "💤 Keeping terminal open for 60 seconds to test persistence..."
sleep 60
echo "✅ Test complete - check if this terminal is still visible!"

# Show final status
docker-compose -f docker-compose-light.yml ps
EXIT_CODE=$?

# If it failed, provide helpful instructions
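The bounded Docker wait loop in the new version of the script is a general retry pattern. A compact sketch of the same idea as a reusable function (the function name is illustrative):

```bash
# Retry a probe command every 2 seconds, up to $1 attempts; succeed as soon
# as the probe does, fail once the budget is exhausted.
wait_for() {
  local max_attempts=$1; shift
  local attempt=0
  until "$@" > /dev/null 2>&1; do
    attempt=$((attempt + 1))
    [ "$attempt" -ge "$max_attempts" ] && return 1
    sleep 2
  done
}

wait_for 30 docker info && echo "✅ Docker is ready!"
```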
@@ -1,29 +0,0 @@ (file removed)
{
  // Extend the base configuration
  "extends": "../devcontainer-base.json",

  "name": "Apache Superset Development with MCP",

  // Forward ports for development
  "forwardPorts": [9001, 5008],
  "portsAttributes": {
    "9001": {
      "label": "Superset (via Webpack Dev Server)",
      "onAutoForward": "notify",
      "visibility": "public"
    },
    "5008": {
      "label": "MCP Service (Model Context Protocol)",
      "onAutoForward": "notify",
      "visibility": "private"
    }
  },

  // Auto-start Superset with MCP on Codespace resume
  "postStartCommand": "ENABLE_MCP=true .devcontainer/start-superset.sh",

  // Environment variables
  "containerEnv": {
    "ENABLE_MCP": "true"
  }
}
@@ -1,41 +0,0 @@ (file removed)
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Auto-configure Docker Compose for multi-instance support
# Requires direnv: https://direnv.net/
#
# Install: brew install direnv (or apt install direnv)
# Setup: Add 'eval "$(direnv hook bash)"' to ~/.bashrc (or ~/.zshrc)
# Allow: Run 'direnv allow' in this directory once

# Generate unique project name from directory
export COMPOSE_PROJECT_NAME=$(basename "$PWD" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g')

# Find available ports sequentially to avoid collisions
_is_free() { ! lsof -i ":$1" &>/dev/null 2>&1; }

_p=80;   while ! _is_free $_p; do ((_p++)); done; export NGINX_PORT=$_p
_p=8088; while ! _is_free $_p; do ((_p++)); done; export SUPERSET_PORT=$_p
_p=9000; while ! _is_free $_p; do ((_p++)); done; export NODE_PORT=$_p
_p=8080; while ! _is_free $_p || [ $_p -eq $NGINX_PORT ]; do ((_p++)); done; export WEBSOCKET_PORT=$_p
_p=8081; while ! _is_free $_p || [ $_p -eq $WEBSOCKET_PORT ]; do ((_p++)); done; export CYPRESS_PORT=$_p
_p=5432; while ! _is_free $_p; do ((_p++)); done; export DATABASE_PORT=$_p
_p=6379; while ! _is_free $_p; do ((_p++)); done; export REDIS_PORT=$_p

unset _p _is_free

echo "🐳 Superset configured: http://localhost:$SUPERSET_PORT (dev: localhost:$NODE_PORT)"
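The port-scanning idiom in this removed direnv file (start at a preferred port and walk upward until `lsof` reports nothing listening) can be packaged as a function. A sketch, with an illustrative function name and the default ports taken from the file above; `lsof` must be installed:

```bash
# Return the first TCP port >= $1 that nothing is currently listening on.
next_free_port() {
  local p=$1
  while lsof -i ":$p" > /dev/null 2>&1; do
    p=$((p + 1))
  done
  echo "$p"
}

SUPERSET_PORT=$(next_free_port 8088)
DATABASE_PORT=$(next_free_port 5432)
echo "superset=$SUPERSET_PORT postgres=$DATABASE_PORT"
```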
.github/CODEOWNERS (12 changed lines)

@@ -20,12 +20,7 @@

# Notify PMC members of changes to GitHub Actions

/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber

# Notify PMC members of changes to CI-executed scripts (supply-chain risk:
# scripts/ files run directly in CI workflows and can execute arbitrary code)

/scripts/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe @hainenber
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @sadpandajoe

# Notify PMC members of changes to required GitHub Actions

@@ -36,13 +31,8 @@

**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas

# Notify translation maintainers of changes to translations

/superset/translations/ @sfirke

# Notify PMC members of changes to extension-related files

/docs/developer_portal/extensions/ @michael-s-molina @villebro @rusackas
/superset-core/ @michael-s-molina @villebro @geido @eschutho @rusackas @kgabryje
/superset-extensions-cli/ @michael-s-molina @villebro @geido @eschutho @rusackas @kgabryje
/superset/core/ @michael-s-molina @villebro @geido @eschutho @rusackas @kgabryje
.github/ISSUE_TEMPLATE/bug-report.yml (2 changed lines)

@@ -41,8 +41,8 @@ body:
label: Superset version
options:
  - master / latest-dev
  - "6.0.0"
  - "5.0.0"
  - "4.1.3"
validations:
  required: true
- type: dropdown
.github/SECURITY.md (37 changed lines)

@@ -18,32 +18,10 @@ e-mail address [security@superset.apache.org](mailto:security@superset.apache.org)
More details can be found on the ASF website at
[ASF vulnerability reporting process](https://apache.org/security/#reporting-a-vulnerability)

**Submission Standards & AI Policy**

To ensure engineering focus remains on verified risks and to manage high reporting volumes, all reports must meet the following criteria:
- Plain Text Format: In accordance with Apache guidelines, please provide all details in plain text within the email body. Avoid sending PDFs, Word documents, or password-protected archives.
- Mandatory AI Disclosure: If you utilized Large Language Models (LLMs) or AI tools to identify a flaw or assist in writing a report, you must disclose this in your submission so our triage team can contextualize the findings.
- Human-Verified PoC: All submissions must include a manual, step-by-step Proof of Concept (PoC) performed on a supported release. Raw AI outputs, hypothetical chat transcripts, or unverified scanner logs will be closed as Invalid.

We kindly ask you to include the following information in your report to assist our developers in triaging and remediating issues efficiently:
- Version/Commit: The specific version of Apache Superset or the Git commit hash you are using.
- Configuration: A sanitized copy of your `superset_config.py` file or any config overrides.
- Environment: Your deployment method (e.g., Docker Compose, Helm, or source) and relevant OS/Browser details.
- Impacted Component: Identification of the affected area (e.g., Python backend, React frontend, or a specific database connector).
- Expected vs. Actual Behavior: A clear description of the intended system behavior versus the observed vulnerability.
- Detailed Reproduction Steps: Clear, manual steps to reproduce the vulnerability.

**Out of Scope Vulnerabilities**

To prioritize engineering efforts on genuine architectural risks, the following scenarios are explicitly out of scope and will not be issued a CVE:
- Attacks requiring Admin privileges (e.g., CSS injection, template manipulation, dashboard ownership overrides, or modifying global system settings). Per the CVE vulnerability definition in CNA Operational Rules 4.1, a qualifying vulnerability must allow violation of a security policy. The Admin role is a fully trusted operational boundary defined by Apache Superset's security policy; actions within this boundary do not violate that policy and are therefore considered intended capabilities "by design," not vulnerabilities.
- Brute Force and Rate Limiting: Reports targeting a lack of resource exhaustion protections, generic rate limiting, or volumetric Denial of Service (DoS) attempts.
- Theoretical Attack Vectors: Issues without a demonstrable, reproducible exploit path.
- Non-Exploitable Findings: Missing security headers, generic banner disclosures, or descriptive error messages that do not lead to a direct, documented exploit.

**Outcome of Reports**

Reports that are deemed out of scope for a CVE but represent valid security best practices or hardening opportunities may be converted into public GitHub issues. This allows the community to contribute to the general hardening of the platform even when a specific vulnerability threshold is not met.
We kindly ask you to include the following information in your report:
- Apache Superset version that you are using
- A sanitized copy of your `superset_config.py` file or any config overrides
- Detailed steps to reproduce the vulnerability

Note that Apache Superset is not responsible for any third-party dependencies that may
have security issues. Any vulnerabilities found in third-party dependencies should be

@@ -51,13 +29,6 @@ reported to the maintainers of those projects. Results from security scans of Apache
Superset dependencies found on its official Docker image can be remediated at release time
by extending the image itself.

**Vulnerability Aggregation & CVE Attribution**

In accordance with MITRE CNA Operational Rules (4.1.10, 4.1.11, and 4.2.13), Apache Superset issues CVEs based on the underlying architectural root cause rather than the number of affected endpoints or exploit payloads.
- Aggregation: If multiple exploit vectors stem from the same programmatic failure or shared vulnerable code, they must be aggregated into a single, comprehensive report.
- Independent Fixes: Separate CVEs will only be assigned if the vulnerabilities reside in decoupled architectural modules and can be fixed independently of one another.
Reports that fail to aggregate related findings will be merged during triage to ensure an accurate and defensible CVE record.

**Your responsible disclosure and collaboration are invaluable.**

## Extra Information
@@ -10,7 +10,7 @@ jobs:
steps:
  - name: Check if the PR is a draft
    id: check-draft
    uses: actions/github-script@v8
    uses: actions/github-script@v6
    with:
      script: |
        const isDraft = context.payload.pull_request.draft;
.github/actions/setup-docker/action.yml (6 changed lines)

@@ -26,16 +26,16 @@ runs:

- name: Set up QEMU
  if: ${{ inputs.build == 'true' }}
  uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
  uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx
  if: ${{ inputs.build == 'true' }}
  uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
  uses: docker/setup-buildx-action@v3

- name: Try to login to DockerHub
  if: ${{ inputs.login-to-dockerhub == 'true' }}
  continue-on-error: true
  uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
  uses: docker/login-action@v3
  with:
    username: ${{ inputs.dockerhub-user }}
    password: ${{ inputs.dockerhub-token }}
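One side of each pair above pins the action to a full commit SHA rather than a movable tag, so a later retag cannot silently change what CI executes. A hedged helper for looking up the SHA behind a given release tag (repository and tag below are taken from the diff):

```bash
# Resolve an action tag to a commit SHA for pinning; if the tag is annotated,
# the dereferenced "^{}" line in the output is the commit to pin.
git ls-remote https://github.com/docker/setup-qemu-action "refs/tags/v3.6.0*"
```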
.github/dependabot.yml (134 changed lines)

@@ -4,51 +4,18 @@ updates:

- package-ecosystem: "github-actions"
  directory: "/"
  ignore:
    # Ignore temporarily as release schedule is too mentally taxing for dep-handling maintainers.
    # Additionally, very few PRs are reviewed by this action.
    - dependency-name: anthropics/claude-code-action
  schedule:
    interval: "daily"

- package-ecosystem: "npm"
  ignore:
    # TODO: remove below entries until React >= 18.0.0
    # not until React >= 18.0.0
    - dependency-name: "storybook"
      update-types: ["version-update:semver-major", "version-update:semver-minor"]
    - dependency-name: "@storybook*"
      update-types: ["version-update:semver-major", "version-update:semver-minor"]
    - dependency-name: "eslint-plugin-storybook"
    - dependency-name: "react-error-boundary"
    - dependency-name: "@rjsf/*"
    # remark-gfm v4+ requires react-markdown v9+, which needs React 18
    - dependency-name: "remark-gfm"
    - dependency-name: "react-markdown"
    # TODO: remove below entries until React >= 19.0.0
    - dependency-name: "react-icons"
    # JSDOM v30 doesn't play well with Jest v30
    # Source: https://jestjs.io/blog#known-issues
    # GH thread: https://github.com/jsdom/jsdom/issues/3492
    - dependency-name: "jest-environment-jsdom"
    # `@swc/plugin-transform-imports` doesn't work with current Webpack-SWC hybrid setup
    # See https://github.com/apache/superset/pull/37384#issuecomment-3793991389
    # TODO: remove the plugin once Lodash usage has been migrated to a more readily tree-shakeable alternative
    - dependency-name: "@swc/plugin-transform-imports"
    # `just-handlebars-helpers` library in plugin-chart-handlebars requires `currencyformatter.js` to be < 2
    - dependency-name: "currencyformatter.js"
      update-types: ["version-update:semver-major"]
    # TODO: remove below clause once https://github.com/pmmmwh/react-refresh-webpack-plugin/pull/940 lands in a future release
    # and confirm the issue https://github.com/apache/superset/issues/39600 is fixed
    - dependency-name: "react-checkbox-tree"
      update-types: ["version-update:semver-major"]
  groups:
    storybook:
      applies-to: version-updates
      patterns:
        - "@storybook*"
        - "storybook"
      update-types:
        - "patch"
  directory: "/superset-frontend/"
  schedule:
    interval: "daily"

@@ -59,13 +26,15 @@ updates:
  versioning-strategy: increase

- package-ecosystem: "pip"
  directory: "/"
# NOTE: `uv` support is in beta, more details here:
# https://github.com/dependabot/dependabot-core/pull/10040#issuecomment-2696978430
- package-ecosystem: "uv"
  directory: "requirements/"
  open-pull-requests-limit: 10
  schedule:
    interval: "weekly"
  labels:
    - pip
    - uv
    - dependabot

- package-ecosystem: "npm"

@@ -77,22 +46,6 @@ updates:

- package-ecosystem: "npm"
  directory: "/docs/"
  ignore:
    # TODO: remove below entries until React >= 18.0.0 in superset-frontend
    - dependency-name: "storybook"
      update-types: ["version-update:semver-major", "version-update:semver-minor"]
    - dependency-name: "@storybook*"
      update-types: ["version-update:semver-major", "version-update:semver-minor"]
    - dependency-name: "eslint-plugin-storybook"
    - dependency-name: "react-error-boundary"
  groups:
    storybook:
      applies-to: version-updates
      patterns:
        - "@storybook*"
        - "storybook"
      update-types:
        - "patch"
  schedule:
    interval: "daily"
  open-pull-requests-limit: 10

@@ -129,6 +82,16 @@ updates:
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-histogram/"
  schedule:
    interval: "daily"
  labels:
    - npm
    - dependabot
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-partition/"
  schedule:

@@ -151,9 +114,6 @@ updates:

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/plugin-chart-pivot-table/"
  ignore:
    # TODO: remove below entries until React >= 19.0.0
    - dependency-name: "react-icons"
  schedule:
    interval: "daily"
  labels:

@@ -204,9 +164,6 @@ updates:

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/plugin-chart-table/"
  ignore:
    # TODO: remove below entries until React >= 19.0.0
    - dependency-name: "react-icons"
  schedule:
    interval: "daily"
  labels:

@@ -235,6 +192,16 @@ updates:
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-sankey/"
  schedule:
    interval: "daily"
  labels:
    - npm
    - dependabot
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-preset-chart-nvd3/"
  schedule:

@@ -255,6 +222,16 @@ updates:
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-event-flow/"
  schedule:
    interval: "daily"
  labels:
    - npm
    - dependabot
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-paired-t-test/"
  schedule:

@@ -265,6 +242,16 @@ updates:
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/"
  schedule:
    interval: "daily"
  labels:
    - npm
    - dependabot
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/plugin-chart-echarts/"
  schedule:

@@ -276,7 +263,7 @@ updates:
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/plugin-chart-ag-grid-table/"
  directory: "/superset-frontend/plugins/preset-chart-xy/"
  schedule:
    interval: "daily"
  labels:

@@ -286,7 +273,7 @@ updates:
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/plugin-chart-cartodiagram/"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-heatmap/"
  schedule:
    interval: "daily"
  labels:

@@ -305,12 +292,18 @@ updates:
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/legacy-plugin-chart-sunburst/"
  schedule:
    interval: "daily"
  labels:
    - npm
    - dependabot
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/plugins/plugin-chart-handlebars/"
  ignore:
    # `just-handlebars-helpers` library in plugin-chart-handlebars requires `currencyformatter.js` to be < 2
    - dependency-name: "currencyformatter.js"
      update-types: ["version-update:semver-major"]
  schedule:
    interval: "daily"
  labels:

@@ -345,7 +338,16 @@ updates:
    # not until React >= 18.0.0
    - dependency-name: "react-markdown"
    - dependency-name: "remark-gfm"
    - dependency-name: "react-error-boundary"
  schedule:
    interval: "daily"
  labels:
    - npm
    - dependabot
  open-pull-requests-limit: 5
  versioning-strategy: increase

- package-ecosystem: "npm"
  directory: "/superset-frontend/packages/superset-ui-demo/"
  schedule:
    interval: "daily"
  labels:
.github/labeler.yml (15 changed lines)

@@ -17,11 +17,6 @@
      - any-glob-to-any-file:
          - 'superset/migrations/**'

"risk:ci-script":
  - changed-files:
      - any-glob-to-any-file:
          - 'scripts/**'

############################################
# Dependencies
############################################

@@ -77,11 +72,6 @@
      - any-glob-to-any-file:
          - 'superset/translations/zh/**'

"i18n:czech":
  - changed-files:
      - any-glob-to-any-file:
          - 'superset/translations/cs/**'

"i18n:traditional-chinese":
  - changed-files:
      - any-glob-to-any-file:

@@ -127,11 +117,6 @@
      - any-glob-to-any-file:
          - 'superset/translations/sk/**'

"i18n:latvian":
  - changed-files:
      - any-glob-to-any-file:
          - 'superset/translations/lv/**'

"i18n:ukrainian":
  - changed-files:
      - any-glob-to-any-file:
.github/workflows/bashlib.sh (73 changed lines)

@@ -117,33 +117,6 @@ testdata() {
  say "::endgroup::"
}

playwright_testdata() {
  cd "$GITHUB_WORKSPACE"
  say "::group::Load all examples for Playwright tests"
  # must specify PYTHONPATH to make `tests.superset_test_config` importable
  export PYTHONPATH="$GITHUB_WORKSPACE"
  pip install -e .
  superset db upgrade
  superset load_test_users
  superset load_examples
  superset init
  # Enable DML on the examples database so Playwright tests can create/drop
  # temporary tables via SQL Lab without depending on external data sources.
  superset shell <<'PYEOF'
import sys
from superset.extensions import db
from superset.models.core import Database
examples_db = db.session.query(Database).filter_by(database_name='examples').first()
if not examples_db:
    sys.exit('ERROR: examples database not found. load_examples may have failed.')

examples_db.allow_dml = True
db.session.commit()
print('Enabled allow_dml on examples database')
PYEOF
  say "::endgroup::"
}

celery-worker() {
  cd "$GITHUB_WORKSPACE"
  say "::group::Start Celery worker"

@@ -222,7 +195,6 @@ playwright-install() {

playwright-run() {
  local APP_ROOT=$1
  local TEST_PATH=$2

  # Start Flask from the project root (same as Cypress)
  cd "$GITHUB_WORKSPACE"

@@ -266,26 +238,8 @@ playwright-run() {

  say "::group::Run Playwright tests"
  echo "Running Playwright with baseURL: ${PLAYWRIGHT_BASE_URL}"
  if [ -n "$TEST_PATH" ]; then
    # Check if there are any test files in the specified path
    if ! find "playwright/tests/${TEST_PATH}" -name "*.spec.ts" -type f 2>/dev/null | grep -q .; then
      echo "No test files found in ${TEST_PATH} - skipping test run"
      say "::endgroup::"
      kill $flaskProcessId
      return 0
    fi
    echo "Running tests: ${TEST_PATH}"
    # Set INCLUDE_EXPERIMENTAL=true to allow experimental tests to run
    export INCLUDE_EXPERIMENTAL=true
    npx playwright test "${TEST_PATH}" --output=playwright-results
    local status=$?
    # Unset to prevent leaking into subsequent commands
    unset INCLUDE_EXPERIMENTAL
  else
    echo "Running all required tests (experimental/ excluded via playwright.config.ts)"
    npx playwright test --output=playwright-results
    local status=$?
  fi
  npx playwright test auth/login --reporter=github --output=playwright-results
  local status=$?
  say "::endgroup::"

  # After job is done, print out Flask log for debugging

@@ -318,3 +272,26 @@ monitor_memory() {
    sleep 2
  done
}

cypress-run-applitools() {
  cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"

  local flasklog="${HOME}/flask.log"
  local port=8081
  local cypress="./node_modules/.bin/cypress run"
  local browser=${CYPRESS_BROWSER:-chrome}

  export CYPRESS_BASE_URL="http://localhost:${port}"

  nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
  local flaskProcessId=$!

  $cypress --spec "cypress/applitools/**/*" --browser "$browser" --headless

  say "::group::Flask log for default run"
  cat "$flasklog"
  say "::endgroup::"

  # make sure the program exits
  kill $flaskProcessId
}
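The removed `playwright-run` branch used a `find | grep -q .` probe to decide whether any spec files exist before launching the runner, which avoids a confusing "no tests found" failure. The same idiom works for any skip-if-nothing-matches check; a sketch with an illustrative path:

```bash
# Succeed if at least one *.spec.ts exists under the directory;
# `grep -q .` exits 0 on the first line of find output and stops reading.
if find "playwright/tests/smoke" -name "*.spec.ts" -type f 2>/dev/null | grep -q .; then
  echo "specs found - running tests"
else
  echo "no specs found - skipping"
fi
```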
.github/workflows/bump-python-package.yml (20 changed lines)

@@ -32,7 +32,7 @@ jobs:
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
  uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
  uses: actions/checkout@v5
  with:
    persist-credentials: true
    ref: master

@@ -41,7 +41,7 @@ jobs:
  uses: ./.github/actions/setup-supersetbot/

- name: Set up Python ${{ inputs.python-version }}
  uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
  uses: actions/setup-python@v6
  with:
    python-version: "3.10"

@@ -51,31 +51,27 @@ jobs:
- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    INPUT_PACKAGE: ${{ github.event.inputs.package }}
    INPUT_GROUP: ${{ github.event.inputs.group }}
    INPUT_EXTRA_FLAGS: ${{ github.event.inputs.extra-flags }}
    INPUT_LIMIT: ${{ github.event.inputs.limit }}
  run: |
    git config --global user.email "action@github.com"
    git config --global user.name "GitHub Action"

    PACKAGE_OPT=""
    if [ -n "${INPUT_PACKAGE}" ]; then
      PACKAGE_OPT="-p ${INPUT_PACKAGE}"
    if [ -n "${{ github.event.inputs.package }}" ]; then
      PACKAGE_OPT="-p ${{ github.event.inputs.package }}"
    fi

    GROUP_OPT=""
    if [ -n "${INPUT_GROUP}" ]; then
      GROUP_OPT="-g ${INPUT_GROUP}"
    if [ -n "${{ github.event.inputs.group }}" ]; then
      GROUP_OPT="-g ${{ github.event.inputs.group }}"
    fi

    EXTRA_FLAGS="${INPUT_EXTRA_FLAGS}"
    EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"

    supersetbot bump-python \
      --verbose \
      --use-current-repo \
      --include-subpackages \
      --limit ${INPUT_LIMIT} \
      --limit ${{ github.event.inputs.limit }} \
      $PACKAGE_OPT \
      $GROUP_OPT \
      $EXTRA_FLAGS
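The env-based variant in this diff routes `github.event.inputs.*` through the step's `env:` block before the shell sees them, instead of interpolating untrusted input directly into the script body. The option-assembly portion runs standalone; a sketch with illustrative values:

```bash
# Build CLI flags from optional environment variables, as the workflow does.
INPUT_PACKAGE="flask"   # illustrative; in CI these arrive via the step's `env:`
INPUT_GROUP=""

PACKAGE_OPT=""
if [ -n "${INPUT_PACKAGE}" ]; then
  PACKAGE_OPT="-p ${INPUT_PACKAGE}"
fi

GROUP_OPT=""
if [ -n "${INPUT_GROUP}" ]; then
  GROUP_OPT="-g ${INPUT_GROUP}"
fi

echo supersetbot bump-python --verbose $PACKAGE_OPT $GROUP_OPT
```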
.github/workflows/cancel_duplicates.yml (2 changed lines)

@@ -31,7 +31,7 @@ jobs:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
  if: steps.check_queued.outputs.count >= 20
  uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
  uses: actions/checkout@v5

- name: Cancel duplicate workflow runs
  if: steps.check_queued.outputs.count >= 20
.github/workflows/check-python-deps.yml (2 changed lines)

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
  - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5
    with:
      persist-credentials: false
      submodules: recursive
@@ -25,9 +25,9 @@ jobs:
pull-requests: write
steps:
  - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5
  - name: Check and notify
    uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
    uses: actions/github-script@v8
    with:
      github-token: ${{ github.token }}
      script: |

@@ -69,7 +69,7 @@ jobs:
        `❗ @${pull.user.login} Your base branch \`${currentBranch}\` has ` +
        'also updated `superset/migrations`.\n' +
        '\n' +
        '**Please consider rebasing your branch and [resolving potential db migration conflicts](https://superset.apache.org/docs/contributing/development#merging-db-migrations).**',
        '**Please consider rebasing your branch and [resolving potential db migration conflicts](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#merging-db-migrations).**',
      });
    }
  }
.github/workflows/claude.yml (16 changed lines)

@@ -17,12 +17,13 @@ jobs:
steps:
  - name: Check if user is allowed
    id: check
    env:
      COMMENTER: ${{ github.event.comment.user.login }}
    run: |
      # List of allowed users
      ALLOWED_USERS="mistercrunch,rusackas"

      # Get the commenter's username
      COMMENTER="${{ github.event.comment.user.login }}"

      echo "Checking permissions for user: $COMMENTER"

      # Check if user is in allowed list

@@ -43,13 +44,10 @@ jobs:
pull-requests: write
steps:
  - name: Comment access denied
    uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
    env:
      COMMENTER_LOGIN: ${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}
    uses: actions/github-script@v8
    with:
      script: |
        const commenter = process.env.COMMENTER_LOGIN;
        const message = `👋 Hi @${commenter}!
        const message = `👋 Hi @${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}!

        Thanks for trying to use Claude Code, but currently only certain team members have access to this feature.

@@ -73,12 +71,12 @@ jobs:
id-token: write
steps:
  - name: Checkout repository
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5
    with:
      fetch-depth: 1

  - name: Run Claude PR Action
    uses: anthropics/claude-code-action@5fb899572b81d2bb648d4d187173a2f423a9677c # beta
    uses: anthropics/claude-code-action@beta
    with:
      anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
      timeout_minutes: "60"
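The diff elides the actual membership test after the `# Check if user is in allowed list` comment. One common way to test a name against a comma-separated allow-list is shown below; this sketch is an assumption for illustration, not the workflow's verbatim code:

```bash
# Hypothetical membership test for a comma-separated allow-list; COMMENTER
# is supplied via the step's `env:` block in the new version of the workflow.
ALLOWED_USERS="mistercrunch,rusackas"
COMMENTER="rusackas"   # illustrative value

# Wrap both sides in commas so "rusackas" can't match "notrusackas".
if printf ',%s,' "$ALLOWED_USERS" | grep -qF ",$COMMENTER,"; then
  echo "is-allowed=true" >> "${GITHUB_OUTPUT:-/dev/stdout}"
fi
```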
.github/workflows/codeql-analysis.yml (6 changed lines)

@@ -31,7 +31,7 @@ jobs:

steps:
  - name: Checkout repository
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5

  - name: Check for file changes
    id: check

@@ -41,7 +41,7 @@ jobs:

  # Initializes the CodeQL tools for scanning.
  - name: Initialize CodeQL
    uses: github/codeql-action/init@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4
    uses: github/codeql-action/init@v4
    with:
      languages: ${{ matrix.language }}
      # If you wish to specify custom queries, you can do so here or in a config file.

@@ -53,6 +53,6 @@ jobs:

  - name: Perform CodeQL Analysis
    if: steps.check.outputs.python || steps.check.outputs.frontend
    uses: github/codeql-action/analyze@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4
    uses: github/codeql-action/analyze@v4
    with:
      category: "/language:${{matrix.language}}"
.github/workflows/dependency-review.yml (12 changed lines)

@@ -27,9 +27,9 @@ jobs:
runs-on: ubuntu-24.04
steps:
  - name: "Checkout Repository"
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5
  - name: "Dependency Review"
    uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
    uses: actions/dependency-review-action@v4
    continue-on-error: true
    with:
      fail-on-severity: critical

@@ -39,9 +39,13 @@ jobs:
      # pkg:npm/store2@2.14.2
      # adding an exception for an ambiguous license on store2, which has been resolved in
      # the latest version. It's MIT: https://github.com/nbubna/store/blob/master/LICENSE-MIT
      # pkg:npm/applitools/*
      # adding exception for all applitools modules (eyes-cypress and its dependencies),
      # which have an explicit OSS license approved by ASF
      # license: https://applitools.com/legal/open-source-terms-of-use/
      # pkg:npm/node-forge@1.3.1
      # selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
      allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1
      allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1

python-dependency-liccheck:
  # NOTE: Configuration for liccheck lives in our pyproject.yml.

@@ -49,7 +53,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
  - name: "Checkout Repository"
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5

  - name: Setup Python
    uses: ./.github/actions/setup-backend/
.github/workflows/docker.yml (4 changed lines)

@@ -42,7 +42,7 @@ jobs:
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
  uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
  uses: actions/checkout@v5
  with:
    persist-credentials: false

@@ -117,7 +117,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
  - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
    uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
    uses: actions/checkout@v5
    with:
      persist-credentials: false
  - name: Check for file changes
.github/workflows/embedded-sdk-release.yml (8 changed lines)

@@ -16,12 +16,10 @@ jobs:
id: check
shell: bash
run: |
  if [ -n "${NPM_TOKEN}" ]; then
  if [ -n "${{ (secrets.NPM_TOKEN != '') || '' }}" ]; then
    echo "has-secrets=1" >> "$GITHUB_OUTPUT"
  fi

env:
  NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '') || '' }}
build:
  needs: config
  if: needs.config.outputs.has-secrets

@@ -30,8 +28,8 @@ jobs:
run:
  working-directory: superset-embedded-sdk
steps:
  - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
  - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
  - uses: actions/checkout@v5
  - uses: actions/setup-node@v5
    with:
      node-version-file: './superset-embedded-sdk/.nvmrc'
      registry-url: 'https://registry.npmjs.org'
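Both sides of the first hunk implement the same gate: skip the publish job when `NPM_TOKEN` is absent, as it is on forks where secrets are unavailable. The env-based form is self-contained enough to exercise locally; a sketch:

```bash
# Emulate the has-secrets gate; in CI, NPM_TOKEN is mapped in via `env:` and
# GITHUB_OUTPUT points at the step-output file.
NPM_TOKEN="${NPM_TOKEN:-}"                  # empty when the secret is unavailable
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/dev/stdout}"

if [ -n "${NPM_TOKEN}" ]; then
  echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
```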
.github/workflows/embedded-sdk-test.yml (4 changed lines)

@@ -18,8 +18,8 @@ jobs:
run:
  working-directory: superset-embedded-sdk
steps:
  - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
  - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
  - uses: actions/checkout@v5
  - uses: actions/setup-node@v5
    with:
      node-version-file: './superset-embedded-sdk/.nvmrc'
      registry-url: 'https://registry.npmjs.org'
.github/workflows/ephemeral-env-pr-close.yml (10 changed lines)

@@ -20,12 +20,10 @@ jobs:
id: check
shell: bash
run: |
  if [ -n "${AWS_ACCESS_KEY_ID}" ]; then
  if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
    echo "has-secrets=1" >> "$GITHUB_OUTPUT"
  fi

env:
  AWS_ACCESS_KEY_ID: ${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}
ephemeral-env-cleanup:
  needs: config
  if: needs.config.outputs.has-secrets

@@ -35,7 +33,7 @@ jobs:
pull-requests: write
steps:
  - name: Configure AWS credentials
    uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
    uses: aws-actions/configure-aws-credentials@v5
    with:
      aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
      aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -58,7 +56,7 @@ jobs:
  - name: Login to Amazon ECR
    if: steps.describe-services.outputs.active == 'true'
    id: login-ecr
    uses: aws-actions/amazon-ecr-login@fa648b43de3d4d023bcb3f89ed6940096949c419 # v2
    uses: aws-actions/amazon-ecr-login@v2

  - name: Delete ECR image tag
    if: steps.describe-services.outputs.active == 'true'

@@ -71,7 +69,7 @@ jobs:

  - name: Comment (success)
    if: steps.describe-services.outputs.active == 'true'
    uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
    uses: actions/github-script@v8
    with:
      github-token: ${{github.token}}
      script: |
40
.github/workflows/ephemeral-env.yml
vendored
40
.github/workflows/ephemeral-env.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
id: eval-label
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
LABEL_NAME="${INPUT_LABEL_NAME}"
|
||||
LABEL_NAME="${{ github.event.inputs.label_name }}"
|
||||
else
|
||||
LABEL_NAME="${{ github.event.label.name }}"
|
||||
fi
|
||||
@@ -60,12 +60,10 @@ jobs:
|
||||
echo "result=noop" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
env:
|
||||
INPUT_LABEL_NAME: ${{ github.event.inputs.label_name }}
|
||||
- name: Get event SHA
|
||||
id: get-sha
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
@@ -96,7 +94,7 @@ jobs:
|
||||
core.setOutput("sha", prSha);
|
||||
|
||||
- name: Looking for feature flags in PR description
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
uses: actions/github-script@v8
|
||||
id: eval-feature-flags
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
with:
|
||||
@@ -118,7 +116,7 @@ jobs:
|
||||
return results;
|
||||
|
||||
- name: Reply with confirmation comment
|
||||
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
|
||||
uses: actions/github-script@v8
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -162,7 +160,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
|
||||
persist-credentials: false
|
||||
@@ -191,7 +189,7 @@ jobs:
|
||||
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -199,7 +197,7 @@ jobs:
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@fa648b43de3d4d023bcb3f89ed6940096949c419 # v2
        uses: aws-actions/amazon-ecr-login@v2
      - name: Load, tag and push image to ECR
        id: push-image
@@ -222,12 +220,12 @@ jobs:
      pull-requests: write
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - uses: actions/checkout@v5
        with:
          persist-credentials: false
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
        uses: aws-actions/configure-aws-credentials@v5
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -235,7 +233,7 @@ jobs:
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@fa648b43de3d4d023bcb3f89ed6940096949c419 # v2
        uses: aws-actions/amazon-ecr-login@v2
      - name: Check target image exists in ECR
        id: check-image
@@ -250,7 +248,7 @@ jobs:
      - name: Fail on missing container image
        if: steps.check-image.outcome == 'failure'
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        uses: actions/github-script@v8
        with:
          github-token: ${{ github.token }}
          script: |
@@ -265,7 +263,7 @@ jobs:
      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
        uses: aws-actions/amazon-ecs-render-task-definition@6853cfae8c3a7d978fbf68b5a55453395541dfbb # v1
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: .github/workflows/ecs-task-definition.json
          container-name: superset-ci
@@ -278,9 +276,7 @@ jobs:
      - name: Describe ECS service
        id: describe-services
        run: |
          echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${INPUT_ISSUE_NUMBER}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
        env:
          INPUT_ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
          echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
      - name: Create ECS service
        id: create-service
        if: steps.describe-services.outputs.active != 'true'
@@ -300,7 +296,7 @@ jobs:
            --tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
      - name: Deploy Amazon ECS task definition
        id: deploy-task
        uses: aws-actions/amazon-ecs-deploy-task-definition@a310a830f5c14e583e35d84e4e1ec7dd177c3c9c # v2
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
@@ -311,9 +307,7 @@ jobs:
      - name: List tasks
        id: list-tasks
        run: |
          echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${INPUT_ISSUE_NUMBER}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
        env:
          INPUT_ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
          echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
      - name: Get network interface
        id: get-eni
        run: |
@@ -324,7 +318,7 @@ jobs:
          echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
      - name: Comment (success)
        if: ${{ success() }}
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        uses: actions/github-script@v8
        with:
          github-token: ${{github.token}}
          script: |
@@ -337,7 +331,7 @@ jobs:
          });
      - name: Comment (failure)
        if: ${{ failure() }}
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        uses: actions/github-script@v8
        with:
          github-token: ${{github.token}}
          script: |
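One side of these hunks routes PR/issue numbers through `env` instead of interpolating `${{ ... }}` directly into the shell text. A minimal sketch of the pattern, with an illustrative step name and variable (not taken verbatim from the workflow):

```yaml
# Hedged sketch: pass event data through `env` so the shell expands it at
# runtime; the workflow templater never splices untrusted text into the script.
- name: Describe service (sketch)
  env:
    ISSUE_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
  run: |
    # $ISSUE_NUMBER is a plain shell variable here, so a crafted input
    # cannot rewrite the command the way an inline ${{ }} expansion could.
    aws ecs describe-services --cluster superset-ci \
      --services "pr-${ISSUE_NUMBER}-service"
```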
8 .github/workflows/generate-FOSSA-report.yml vendored
@@ -16,12 +16,10 @@ jobs:
        id: check
        shell: bash
        run: |
          if [ -n "${FOSSA_API_KEY}" ]; then
          if [ -n "${{ (secrets.FOSSA_API_KEY != '' ) || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi
        env:
          FOSSA_API_KEY: ${{ (secrets.FOSSA_API_KEY != '' ) || '' }}
  license_check:
    needs: config
    if: needs.config.outputs.has-secrets
@@ -29,12 +27,12 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
      - name: Setup Java
        uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
        uses: actions/setup-java@v5
        with:
          distribution: "temurin"
          java-version: "11"
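Both variants of the `check` step above gate the secret-dependent job on whether a secret is configured, since forked PRs run without secrets. A minimal standalone sketch of the gate, with illustrative job and output names:

```yaml
# Hedged sketch: expose a has-secrets output and make downstream jobs
# conditional on it, instead of letting them fail mid-run on a fork.
config:
  runs-on: ubuntu-24.04
  outputs:
    has-secrets: ${{ steps.check.outputs.has-secrets }}
  steps:
    - id: check
      shell: bash
      env:
        FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
      run: |
        # Empty when the secret is unset (e.g. on a fork), non-empty otherwise.
        if [ -n "${FOSSA_API_KEY}" ]; then
          echo "has-secrets=1" >> "$GITHUB_OUTPUT"
        fi
```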
@@ -14,10 +14,10 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout Repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
      - name: Set up Node.js
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version: '20'
2 .github/workflows/issue_creation.yml vendored
@@ -17,7 +17,7 @@ jobs:
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
4 .github/workflows/latest-release-tag.yml vendored
@@ -12,7 +12,7 @@ jobs:
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -29,7 +29,7 @@ jobs:
      - name: Run latest-tag
        uses: ./.github/actions/latest-tag
        if: steps.latest-tag.outputs.SKIP_TAG != 'true'
        if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
        with:
          description: Superset latest release
          tag-name: latest
4 .github/workflows/license-check.yml vendored
@@ -15,12 +15,12 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
      - name: Setup Java
        uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
        uses: actions/setup-java@v5
        with:
          distribution: 'temurin'
          java-version: '11'
5 .github/workflows/no-hold-label.yml vendored
@@ -4,9 +4,6 @@ on:
  pull_request:
    types: [labeled, unlabeled, opened, reopened, synchronize]

permissions:
  pull-requests: read

# cancel previous workflow jobs for PRs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
@@ -17,7 +14,7 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Check for 'hold' label
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        uses: actions/github-script@v8
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
2 .github/workflows/pr-lint.yml vendored
@@ -16,7 +16,7 @@ jobs:
      pull-requests: write
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
23 .github/workflows/pre-commit.yml vendored
@@ -8,9 +8,6 @@ on:
  pull_request:
    types: [synchronize, opened, reopened, ready_for_review]

permissions:
  contents: read

# cancel previous workflow jobs for PRs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
@@ -24,7 +21,7 @@ jobs:
        python-version: ["current", "previous", "next"]
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -42,7 +39,7 @@ jobs:
          echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
          brew install norwoodj/tap/helm-docs
      - name: Setup Node.js
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version: '20'
@@ -57,32 +54,24 @@ jobs:
          yarn install --immutable

      - name: Cache pre-commit environments
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          key: pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-

      - name: Get changed files
        id: changed_files
        uses: ./.github/actions/file-changes-action
        with:
          output: ' '

      - name: pre-commit
        run: |
          set +e # Don't exit immediately on failure
          export SKIP=type-checking-frontend
          pre-commit run --files ${{ steps.changed_files.outputs.files }}
          export SKIP=eslint-frontend,type-checking-frontend
          pre-commit run --all-files
          PRE_COMMIT_EXIT_CODE=$?
          git diff --quiet --exit-code
          GIT_DIFF_EXIT_CODE=$?
          if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
          if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
            echo "❌ Pre-commit check failed (exit code: ${PRE_COMMIT_EXIT_CODE})."
            echo "🔍 Modified files:"
            git diff --name-only
            echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
          else
            echo "❌ Git working directory is dirty."
            echo "📌 This likely means that pre-commit made changes that were not committed."
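The `pre-commit` step above disables `set -e`, captures two exit codes, and fails if either is non-zero, catching both hook failures and hooks that silently rewrote files. A minimal standalone sketch of that pattern (messages are illustrative):

```yaml
# Hedged sketch: run hooks, then fail if they errored OR left the tree dirty.
- name: pre-commit (sketch)
  run: |
    set +e                           # collect both statuses instead of exiting early
    pre-commit run --all-files
    PRE_COMMIT_EXIT_CODE=$?
    git diff --quiet --exit-code     # non-zero when hooks modified tracked files
    GIT_DIFF_EXIT_CODE=$?
    if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
      git diff --name-only           # show what the hooks changed
      exit 1
    fi
```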
70 .github/workflows/prefer-typescript.yml vendored Normal file
@@ -0,0 +1,70 @@
name: Prefer TypeScript

on:
  push:
    branches:
      - "master"
      - "[0-9].[0-9]*"
    paths:
      - "superset-frontend/src/**"
  pull_request:
    types: [synchronize, opened, reopened, ready_for_review]
    paths:
      - "superset-frontend/src/**"

# cancel previous workflow jobs for PRs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

jobs:
  prefer_typescript:
    if: github.ref == 'ref/heads/master' && github.event_name == 'pull_request'
    name: Prefer TypeScript
    runs-on: ubuntu-24.04
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
      - name: Get changed files
        id: changed
        uses: ./.github/actions/file-changes-action
        with:
          githubToken: ${{ github.token }}

      - name: Determine if a .js or .jsx file was added
        id: check
        run: |
          js_files_added() {
            jq -r '
              map(
                select(
                  endswith(".js") or endswith(".jsx")
                )
              ) | join("\n")
            ' ${HOME}/files_added.json
          }
          echo "js_files_added=$(js_files_added)" >> $GITHUB_OUTPUT

      - if: steps.check.outputs.js_files_added
        name: Add Comment to PR
        uses: ./.github/actions/comment-on-pr
        continue-on-error: true
        env:
          GITHUB_TOKEN: ${{ github.token }}
        with:
          msg: |
            ### WARNING: Prefer TypeScript

            Looks like your PR contains new `.js` or `.jsx` files:

            ```
            ${{steps.check.outputs.js_files_added}}
            ```

            As decided in [SIP-36](https://github.com/apache/superset/issues/9101), all new frontend code should be written in TypeScript. Please convert above files to TypeScript then re-request review.
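The `check` step filters the added-files manifest with `jq`; the same filter can be exercised locally against a sample list. A minimal sketch (the sample file and its contents are illustrative):

```yaml
# Hedged sketch: the same jq filter, runnable locally against a sample manifest.
- name: Try the filter locally (sketch)
  run: |
    echo '["src/a.ts", "src/b.jsx", "src/c.js"]' > /tmp/files_added.json
    # Prints src/b.jsx and src/c.js, one per line.
    jq -r 'map(select(endswith(".js") or endswith(".jsx"))) | join("\n")' /tmp/files_added.json
```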
12 .github/workflows/release.yml vendored
@@ -16,19 +16,17 @@ jobs:
        id: check
        shell: bash
        run: |
          if [ -n "${NPM_TOKEN}" ]; then
          if [ -n "${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi
        env:
          NPM_TOKEN: ${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}
  build:
    needs: config
    if: needs.config.outputs.has-secrets
    name: Bump version and publish package(s)
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - uses: actions/checkout@v5
        with:
          # pulls all commits (needed for lerna / semantic release to correctly version)
          fetch-depth: 0
@@ -44,13 +42,13 @@ jobs:
      - name: Install Node.js
        if: env.HAS_TAGS
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version-file: './superset-frontend/.nvmrc'

      - name: Cache npm
        if: env.HAS_TAGS
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
        uses: actions/cache@v4
        with:
          path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
          key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -64,7 +62,7 @@ jobs:
        run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
      - name: Cache npm
        if: env.HAS_TAGS
        uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5
        uses: actions/cache@v4
        id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
        with:
          path: ${{ steps.npm-cache-dir-path.outputs.dir }}
18 .github/workflows/showtime-cleanup.yml vendored
@@ -7,6 +7,12 @@ on:

  # Manual trigger for testing
  workflow_dispatch:
    inputs:
      max_age_hours:
        description: 'Maximum age in hours before cleanup'
        required: false
        default: '48'
        type: string

# Common environment variables
env:
@@ -32,5 +38,13 @@ jobs:

      - name: Cleanup expired environments
        run: |
          echo "Cleaning up environments respecting TTL labels"
          python -m showtime cleanup --respect-ttl
          MAX_AGE="${{ github.event.inputs.max_age_hours || '48' }}"

          # Validate max_age is numeric
          if [[ ! "$MAX_AGE" =~ ^[0-9]+$ ]]; then
            echo "❌ Invalid max_age_hours format: $MAX_AGE (must be numeric)"
            exit 1
          fi

          echo "Cleaning up environments older than ${MAX_AGE}h"
          python -m showtime cleanup --older-than "${MAX_AGE}h"
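One side of the hunk validates the manual `max_age_hours` input with a bash regex before splicing it into a CLI command. A minimal standalone sketch of that guard (the default and error text are illustrative):

```yaml
# Hedged sketch: never pass a workflow_dispatch input to a CLI unvalidated.
- name: Validate numeric input (sketch)
  env:
    MAX_AGE_INPUT: ${{ github.event.inputs.max_age_hours }}
  run: |
    MAX_AGE="${MAX_AGE_INPUT:-48}"            # fall back to a default
    if [[ ! "$MAX_AGE" =~ ^[0-9]+$ ]]; then   # digits only, so "48; rm -rf /" is rejected
      echo "Invalid max_age_hours: $MAX_AGE" >&2
      exit 1
    fi
    echo "Would clean up environments older than ${MAX_AGE}h"
```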
18 .github/workflows/showtime-trigger.yml vendored
@@ -37,7 +37,7 @@ jobs:
    steps:
      - name: Security Check - Authorize Maintainers Only
        id: auth
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        uses: actions/github-script@v8
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
@@ -102,12 +102,10 @@ jobs:
      - name: Install Superset Showtime
        if: steps.auth.outputs.authorized == 'true'
        run: |
          echo "::notice::Maintainer ${{ github.actor }} triggered deploy for PR ${PULL_REQUEST_NUMBER}"
          echo "::notice::Maintainer ${{ github.actor }} triggered deploy for PR ${{ github.event.pull_request.number || github.event.inputs.pr_number }}"
          pip install --upgrade superset-showtime
          showtime version
        env:
          PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
      - name: Check what actions are needed
        if: steps.auth.outputs.authorized == 'true'
        id: check
@@ -115,14 +113,12 @@ jobs:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          INPUT_PR_NUMBER: ${{ github.event.inputs.pr_number }}
          INPUT_SHA: ${{ github.event.inputs.sha }}
        run: |
          # Bulletproof PR number extraction
          if [[ -n "${{ github.event.pull_request.number }}" ]]; then
            PR_NUM="${{ github.event.pull_request.number }}"
          elif [[ -n "${INPUT_PR_NUMBER}" ]]; then
            PR_NUM="${INPUT_PR_NUMBER}"
          elif [[ -n "${{ github.event.inputs.pr_number }}" ]]; then
            PR_NUM="${{ github.event.inputs.pr_number }}"
          else
            echo "❌ No PR number found in event or inputs"
            exit 1
@@ -131,8 +127,8 @@ jobs:
          echo "Using PR number: $PR_NUM"

          # Run sync check-only with optional SHA override
          if [[ -n "${INPUT_SHA}" ]]; then
            OUTPUT=$(python -m showtime sync $PR_NUM --check-only --sha "${INPUT_SHA}")
          if [[ -n "${{ github.event.inputs.sha }}" ]]; then
            OUTPUT=$(python -m showtime sync $PR_NUM --check-only --sha "${{ github.event.inputs.sha }}")
          else
            OUTPUT=$(python -m showtime sync $PR_NUM --check-only)
          fi
@@ -151,7 +147,7 @@ jobs:
      - name: Checkout PR code (only if build needed)
        if: steps.auth.outputs.authorized == 'true' && steps.check.outputs.build_needed == 'true'
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          ref: ${{ steps.check.outputs.target_sha }}
          persist-credentials: false
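The `check` step resolves a PR number from whichever trigger fired, the pull_request event or a manual dispatch input, and fails loudly if neither is present. A minimal standalone sketch of that fallback chain (variable names are illustrative):

```yaml
# Hedged sketch: resolve the PR number from the event payload first,
# then from a workflow_dispatch input, and fail if neither is set.
- name: Resolve PR number (sketch)
  env:
    EVENT_PR_NUMBER: ${{ github.event.pull_request.number }}
    INPUT_PR_NUMBER: ${{ github.event.inputs.pr_number }}
  run: |
    if [[ -n "${EVENT_PR_NUMBER}" ]]; then
      PR_NUM="${EVENT_PR_NUMBER}"       # pull_request trigger
    elif [[ -n "${INPUT_PR_NUMBER}" ]]; then
      PR_NUM="${INPUT_PR_NUMBER}"       # workflow_dispatch trigger
    else
      echo "No PR number found in event or inputs" >&2
      exit 1
    fi
    echo "Using PR number: $PR_NUM"
```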
4 .github/workflows/superset-app-cli.yml vendored
@@ -23,7 +23,7 @@ jobs:
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
    services:
      postgres:
        image: postgres:17-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -37,7 +37,7 @@ jobs:
        - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
91 .github/workflows/superset-applitool-cypress.yml vendored Normal file
@@ -0,0 +1,91 @@
name: Applitools Cypress

on:
  schedule:
    - cron: "0 1 * * *"

jobs:
  config:
    runs-on: ubuntu-24.04
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
      - name: "Check for secrets"
        id: check
        shell: bash
        run: |
          if [ -n "${{ (secrets.APPLITOOLS_API_KEY != '' && secrets.APPLITOOLS_API_KEY != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi

  cypress-applitools:
    needs: config
    if: needs.config.outputs.has-secrets
    runs-on: ubuntu-24.04
    strategy:
      fail-fast: false
      matrix:
        browser: ["chrome"]
    env:
      SUPERSET_ENV: development
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
      PYTHONPATH: ${{ github.workspace }}
      REDIS_PORT: 16379
      GITHUB_TOKEN: ${{ github.token }}
      APPLITOOLS_APP_NAME: Superset
      APPLITOOLS_API_KEY: ${{ secrets.APPLITOOLS_API_KEY }}
      APPLITOOLS_BATCH_ID: ${{ github.sha }}
      APPLITOOLS_BATCH_NAME: Superset Cypress
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          - 15432:5432
      redis:
        image: redis:7-alpine
        ports:
          - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
          ref: master
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
      - name: Import test data
        uses: ./.github/actions/cached-dependencies
        with:
          run: testdata
      - name: Setup Node.js
        uses: actions/setup-node@v5
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install npm dependencies
        uses: ./.github/actions/cached-dependencies
        with:
          run: npm-install
      - name: Build javascript packages
        uses: ./.github/actions/cached-dependencies
        with:
          run: build-instrumented-assets
      - name: Setup Postgres
        if: steps.check.outcome == 'failure'
        uses: ./.github/actions/cached-dependencies
        with:
          run: setup-postgres
      - name: Install cypress
        uses: ./.github/actions/cached-dependencies
        with:
          run: cypress-install
      - name: Run Cypress
        uses: ./.github/actions/cached-dependencies
        env:
          CYPRESS_BROWSER: ${{ matrix.browser }}
        with:
          run: cypress-run-applitools
52 .github/workflows/superset-applitools-storybook.yml vendored Normal file
@@ -0,0 +1,52 @@
name: Applitools Storybook

on:
  schedule:
    - cron: "0 0 * * *"

env:
  APPLITOOLS_APP_NAME: Superset
  APPLITOOLS_API_KEY: ${{ secrets.APPLITOOLS_API_KEY }}
  APPLITOOLS_BATCH_ID: ${{ github.sha }}
  APPLITOOLS_BATCH_NAME: Superset Storybook

jobs:
  config:
    runs-on: ubuntu-24.04
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
      - name: "Check for secrets"
        id: check
        shell: bash
        run: |
          if [ -n "${{ (secrets.APPLITOOLS_API_KEY != '' && secrets.APPLITOOLS_API_KEY != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi

  cron:
    needs: config
    if: needs.config.outputs.has-secrets
    runs-on: ubuntu-24.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
          ref: master
      - name: Set up Node.js
        uses: actions/setup-node@v5
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install eyes-storybook dependencies
        uses: ./.github/actions/cached-dependencies
        with:
          run: eyes-storybook-dependencies
      - name: Install NPM dependencies
        uses: ./.github/actions/cached-dependencies
        with:
          run: npm-install
      - name: Run Applitools Eyes-Storybook
        working-directory: ./superset-frontend
        run: npx eyes-storybook -u https://superset-storybook.netlify.app/
61 .github/workflows/superset-docs-deploy.yml vendored
@@ -1,13 +1,6 @@
name: Docs Deployment

on:
  # Deploy after integration tests complete on master
  workflow_run:
    workflows: ["Python-Integration"]
    types: [completed]
    branches: [master]

  # Also allow manual trigger and direct pushes to docs
  push:
    paths:
      - "docs/**"
@@ -17,16 +10,6 @@ on:

  workflow_dispatch: {}

# Serialize deploys: the action pushes to apache/superset-site without
# rebasing, so concurrent runs race on the final push and the loser fails
# with `! [rejected] asf-site -> asf-site (fetch first)`. Cancel any
# in-progress run as soon as a newer one starts — the destination repo
# isn't touched until the final push step, so canceling mid-build is safe,
# and the freshest content always wins.
concurrency:
  group: docs-deploy-asf-site
  cancel-in-progress: true

jobs:
  config:
    runs-on: ubuntu-24.04
@@ -37,31 +20,28 @@ jobs:
        id: check
        shell: bash
        run: |
          if [ -n "${SUPERSET_SITE_BUILD}" ]; then
          if [ -n "${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi
        env:
          SUPERSET_SITE_BUILD: ${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}
  build-deploy:
    needs: config
    if: needs.config.outputs.has-secrets
    name: Build & Deploy
    runs-on: ubuntu-24.04
    steps:
      - name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v5
        with:
          ref: ${{ github.event.workflow_run.head_sha || github.sha }}
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version-file: './docs/.nvmrc'
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
      - uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
      - uses: actions/setup-java@v5
        with:
          distribution: 'zulu'
          java-version: '21'
@@ -78,35 +58,6 @@ jobs:
        working-directory: docs
        run: |
          yarn install --check-cache
      - name: Download database diagnostics (if triggered by integration tests)
        if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
        uses: dawidd6/action-download-artifact@b6e2e70617bc3265edd6dab6c906732b2f1ae151 # v21
        continue-on-error: true
        with:
          workflow: superset-python-integrationtest.yml
          run_id: ${{ github.event.workflow_run.id }}
          name: database-diagnostics
          path: docs/src/data/
      - name: Try to download latest diagnostics (for push/dispatch triggers)
        if: github.event_name != 'workflow_run'
        uses: dawidd6/action-download-artifact@b6e2e70617bc3265edd6dab6c906732b2f1ae151 # v21
        continue-on-error: true
        with:
          workflow: superset-python-integrationtest.yml
          name: database-diagnostics
          path: docs/src/data/
          branch: master
          search_artifacts: true
          if_no_artifact_found: warn
      - name: Use diagnostics artifact if available
        working-directory: docs
        run: |
          if [ -f "src/data/databases-diagnostics.json" ]; then
            echo "Using fresh diagnostics from integration tests"
            mv src/data/databases-diagnostics.json src/data/databases.json
          else
            echo "Using committed databases.json (no artifact found)"
          fi
      - name: yarn build
        working-directory: docs
        run: |
@@ -120,5 +71,5 @@ jobs:
          destination-github-username: "apache"
          destination-repository-name: "superset-site"
          target-branch: "asf-site"
          commit-message: "deploying docs: ${{ github.event.head_commit.message || 'triggered by integration tests' }} (apache/superset@${{ github.event.workflow_run.head_sha || github.sha }})"
          commit-message: "deploying docs: ${{ github.event.head_commit.message }} (apache/superset@${{ github.sha }})"
          user-email: dev@superset.apache.org
71 .github/workflows/superset-docs-verify.yml vendored
@@ -4,30 +4,24 @@ on:
  pull_request:
    paths:
      - "docs/**"
      - "superset/db_engine_specs/**"
      - ".github/workflows/superset-docs-verify.yml"
    types: [synchronize, opened, reopened, ready_for_review]
  workflow_run:
    workflows: ["Python-Integration"]
    types: [completed]

# cancel previous workflow jobs for PRs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.workflow_run.head_sha || github.run_id }}
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true

jobs:
  linkinator:
    # See docs here: https://github.com/marketplace/actions/linkinator
    # Only run on pull_request, not workflow_run
    if: github.event_name == 'pull_request'
    name: Link Checking
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - uses: actions/checkout@v5
      # Do not bump this linkinator-action version without opening
      # an ASF Infra ticket to allow the new version first!
      - uses: JustinBeckwith/linkinator-action@af984b9f30f63e796ae2ea5be5e07cb587f1bbd9 # v2.3
      - uses: JustinBeckwith/linkinator-action@3d5ba091319fa7b0ac14703761eebb7d100e6f6d # v1.11.0
        continue-on-error: true # This will make the job advisory (non-blocking, no red X)
        with:
          paths: "**/*.md, **/*.mdx"
@@ -56,23 +50,20 @@ jobs:
            https://timbr.ai/,
            https://opensource.org/license/apache-2-0,
            https://www.plaidcloud.com/

  build-on-pr:
    # Build docs when PR changes docs/** (uses committed databases.json)
    if: github.event_name == 'pull_request'
    name: Build (PR trigger)
  build-deploy:
    name: Build & Deploy
    runs-on: ubuntu-24.04
    defaults:
      run:
        working-directory: docs
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version-file: './docs/.nvmrc'
      - name: yarn install
@@ -84,51 +75,3 @@ jobs:
      - name: yarn build
        run: |
          yarn build

  build-after-tests:
    # Build docs after integration tests complete (uses fresh diagnostics)
    # Only runs if integration tests succeeded
    if: >
      github.event_name == 'workflow_run' &&
      github.event.workflow_run.conclusion == 'success'
    name: Build (after integration tests)
    runs-on: ubuntu-24.04
    defaults:
      run:
        working-directory: docs
    steps:
      - name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          ref: ${{ github.event.workflow_run.head_sha }}
          persist-credentials: false
          submodules: recursive
      - name: Set up Node.js
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './docs/.nvmrc'
      - name: yarn install
        run: |
          yarn install --check-cache
      - name: Download database diagnostics from integration tests
        uses: dawidd6/action-download-artifact@b6e2e70617bc3265edd6dab6c906732b2f1ae151 # v21
        with:
          workflow: superset-python-integrationtest.yml
          run_id: ${{ github.event.workflow_run.id }}
          name: database-diagnostics
          path: docs/src/data/
          if_no_artifact_found: 'warning'
      - name: Use fresh diagnostics
        run: |
          if [ -f "src/data/databases-diagnostics.json" ]; then
            echo "Using fresh diagnostics from integration tests"
            mv src/data/databases-diagnostics.json src/data/databases.json
          else
            echo "Warning: No diagnostics artifact found, using committed data"
          fi
      - name: yarn typecheck
        run: |
          yarn typecheck
      - name: yarn build
        run: |
          yarn build
127 .github/workflows/superset-e2e.yml vendored
@@ -54,7 +54,7 @@ jobs:
      USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || 'false' }}
    services:
      postgres:
        image: postgres:17-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -69,21 +69,21 @@ jobs:
      # Conditional checkout based on context
      - name: Checkout for push or pull_request event
        if: github.event_name == 'push' || github.event_name == 'pull_request'
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      - name: Checkout using ref (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.ref }}
          submodules: recursive
      - name: Checkout using PR ID (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -109,7 +109,7 @@ jobs:
          run: testdata
      - name: Setup Node.js
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install npm dependencies
@@ -146,123 +146,8 @@ jobs:
          SAFE_APP_ROOT=${APP_ROOT//\//_}
          echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
      - name: Upload Artifacts
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        uses: actions/upload-artifact@v4
        if: failure()
        with:
          path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
          name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}

  playwright-tests:
    runs-on: ubuntu-22.04
    permissions:
      contents: read
      pull-requests: read
    strategy:
      fail-fast: false
      matrix:
        browser: ["chromium"]
        app_root: ["", "/app/prefix"]
    env:
      SUPERSET_ENV: development
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
      PYTHONPATH: ${{ github.workspace }}
      REDIS_PORT: 16379
      GITHUB_TOKEN: ${{ github.token }}
    services:
      postgres:
        image: postgres:17-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          - 15432:5432
      redis:
        image: redis:7-alpine
        ports:
          - 16379:6379
    steps:
      # -------------------------------------------------------
      # Conditional checkout based on context (same as Cypress workflow)
      - name: Checkout for push or pull_request event
        if: github.event_name == 'push' || github.event_name == 'pull_request'
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          submodules: recursive
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      - name: Checkout using ref (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.ref }}
          submodules: recursive
      - name: Checkout using PR ID (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
          submodules: recursive
      # -------------------------------------------------------
      - name: Check for file changes
        id: check
        uses: ./.github/actions/change-detector/
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
        if: steps.check.outputs.python || steps.check.outputs.frontend
      - name: Setup postgres
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        with:
          run: setup-postgres
      - name: Import test data
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        with:
          run: playwright_testdata
      - name: Setup Node.js
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install npm dependencies
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        with:
          run: npm-install
      - name: Build javascript packages
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        with:
          run: build-instrumented-assets
      - name: Install Playwright
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        with:
          run: playwright-install
      - name: Run Playwright (Required Tests)
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        env:
          NODE_OPTIONS: "--max-old-space-size=4096"
        with:
          run: playwright-run "${{ matrix.app_root }}"
      - name: Set safe app root
        if: failure()
        id: set-safe-app-root
        run: |
          APP_ROOT="${{ matrix.app_root }}"
          SAFE_APP_ROOT=${APP_ROOT//\//_}
          echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
      - name: Upload Playwright Artifacts
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        if: failure()
        with:
          path: |
            ${{ github.workspace }}/superset-frontend/playwright-results/
            ${{ github.workspace }}/superset-frontend/test-results/
          name: playwright-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
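The `Set safe app root` step turns a matrix value like `/app/prefix` into an artifact-name-safe string with bash parameter expansion. A minimal sketch of the substitution (the sample value is illustrative):

```yaml
# Hedged sketch: ${VAR//pattern/repl} replaces every "/" so the value can
# appear in an artifact name, which must not contain path separators.
- name: Set safe app root (sketch)
  run: |
    APP_ROOT="/app/prefix"
    SAFE_APP_ROOT=${APP_ROOT//\//_}   # "/app/prefix" -> "_app_prefix"
    echo "safe_app_root=$SAFE_APP_ROOT" >> "$GITHUB_OUTPUT"
```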
@@ -24,7 +24,7 @@ jobs:
      working-directory: superset-extensions-cli
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -49,7 +49,7 @@ jobs:
      - name: Upload coverage reports to Codecov
        if: steps.check.outputs.superset-extensions-cli
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          file: ./coverage.xml
          flags: superset-extensions-cli
@@ -58,7 +58,7 @@ jobs:
      - name: Upload HTML coverage report
        if: steps.check.outputs.superset-extensions-cli
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        uses: actions/upload-artifact@v4
        with:
          name: superset-extensions-cli-coverage-html
          path: htmlcov/
75 .github/workflows/superset-frontend.yml vendored
@@ -23,7 +23,7 @@ jobs:
      should-run: ${{ steps.check.outputs.frontend }}
    steps:
      - name: Checkout Code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          fetch-depth: 0
@@ -54,14 +54,14 @@ jobs:
      - name: Save Docker Image as Artifact
        if: steps.check.outputs.frontend
        run: |
          docker save $TAG | zstd -3 --threads=0 > docker-image.tar.zst
          docker save $TAG | gzip > docker-image.tar.gz

      - name: Upload Docker Image Artifact
        if: steps.check.outputs.frontend
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        uses: actions/upload-artifact@v4
        with:
          name: docker-image
          path: docker-image.tar.zst
          path: docker-image.tar.gz

  sharded-jest-tests:
    needs: frontend-build
@@ -73,13 +73,12 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
        uses: actions/download-artifact@v5
        with:
          name: docker-image

      - name: Load Docker Image
        run: |
          zstd -d < docker-image.tar.zst | docker load
        run: docker load < docker-image.tar.gz

      - name: npm run test with coverage
        run: |
@@ -88,10 +87,10 @@ jobs:
          -v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \
          --rm $TAG \
          bash -c \
          "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json"
          "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"

      - name: Upload Coverage Artifact
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        uses: actions/upload-artifact@v4
        with:
          name: coverage-artifacts-${{ matrix.shard }}
          path: superset-frontend/coverage
@@ -100,40 +99,25 @@ jobs:
    needs: [sharded-jest-tests]
    if: needs.frontend-build.outputs.should-run == 'true'
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
    steps:
      - name: Checkout Code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          persist-credentials: false
          fetch-depth: 0
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}

      - name: Download Coverage Artifacts
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
        uses: actions/download-artifact@v5
        with:
          pattern: coverage-artifacts-*
          path: coverage/

      - name: Reorganize test result reports
        run: |
          find coverage/
          for i in {1..8}; do
            mv coverage/coverage-artifacts-${i}/coverage-final.json coverage/coverage-shard-${i}.json
          done
        shell: bash
      - name: Show Files
        run: find coverage/

      - name: Merge Code Coverage
        run: npx nyc merge coverage/ merged-output/coverage-summary.json

      - name: Upload Code Coverage
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          flags: javascript
          use_oidc: true
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          disable_search: true
          files: merged-output/coverage-summary.json
          slug: apache/superset
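The report job above funnels eight shard artifacts into one Codecov upload by renaming each shard's `coverage-final.json` and merging with `nyc`. A minimal sketch of the merge (directory layout is illustrative):

```yaml
# Hedged sketch: merge per-shard Istanbul coverage files into one report.
- name: Merge shard coverage (sketch)
  run: |
    for i in {1..8}; do
      # Give each shard's report a unique name so nyc can merge the directory.
      mv coverage/coverage-artifacts-${i}/coverage-final.json coverage/coverage-shard-${i}.json
    done
    npx nyc merge coverage/ merged-output/coverage-summary.json
```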
@@ -143,23 +127,23 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
        uses: actions/download-artifact@v5
        with:
          name: docker-image

      - name: Load Docker Image
        run: |
          zstd -d < docker-image.tar.zst | docker load
          docker load < docker-image.tar.gz

      - name: lint
      - name: eslint
        run: |
          docker run --rm $TAG bash -c \
          "npm i && npm run lint"
          "npm i && npm run eslint -- . --quiet"

      - name: tsc
        run: |
          docker run --rm $TAG bash -c \
          "npm i && npm run plugins:build && npm run type"
          "npm run plugins:build && npm run type"

  validate-frontend:
    needs: frontend-build
@@ -167,34 +151,19 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
        uses: actions/download-artifact@v5
        with:
          name: docker-image

      - name: Load Docker Image
        run: |
          zstd -d < docker-image.tar.zst | docker load
        run: docker load < docker-image.tar.gz

      - name: Build Plugins Packages
        run: |
          docker run --rm $TAG bash -c \
          "npm run plugins:build"

  test-storybook:
    needs: frontend-build
    if: needs.frontend-build.outputs.should-run == 'true'
    runs-on: ubuntu-24.04
    steps:
      - name: Download Docker Image Artifact
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
        with:
          name: docker-image

      - name: Load Docker Image
        run: |
          zstd -d < docker-image.tar.zst | docker load

      - name: Build Storybook and Run Tests
      - name: Build Plugins Storybook
        run: |
          docker run --rm $TAG bash -c \
          "npm run build-storybook && npx playwright install-deps && npx playwright install chromium && npm run test-storybook:ci"
          "npm run plugins:build-storybook"
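One side of these hunks ships the built image between jobs as a zstd-compressed tarball instead of gzip; `zstd -3 --threads=0` compresses on all available cores, which is usually much faster than single-threaded gzip at a similar ratio. A minimal round-trip sketch (image tag is illustrative):

```yaml
# Hedged sketch: hand a Docker image from one job to the next via an artifact.
- name: Save image (producer job, sketch)
  run: docker save my-image:ci | zstd -3 --threads=0 > docker-image.tar.zst

- name: Load image (consumer job, after download-artifact, sketch)
  run: zstd -d < docker-image.tar.zst | docker load
```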
4 .github/workflows/superset-helm-lint.yml vendored
@@ -16,14 +16,14 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
          fetch-depth: 0

      - name: Set up Helm
        uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
        uses: azure/setup-helm@v4
        with:
          version: v3.16.4
6 .github/workflows/superset-helm-release.yml vendored
@@ -29,7 +29,7 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.ref || github.ref_name }}
          persist-credentials: true
@@ -42,7 +42,7 @@ jobs:
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"

      - name: Install Helm
        uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
        uses: azure/setup-helm@v4
        with:
          version: v3.5.4
@@ -101,7 +101,7 @@ jobs:
          CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"

      - name: Open Pull Request
        uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
        uses: actions/github-script@v8
        with:
          script: |
            const branchName = '${{ env.branch_name }}';
27 .github/workflows/superset-playwright.yml vendored
@@ -1,4 +1,4 @@
name: Playwright Experimental Tests
name: Playwright E2E Tests

on:
  push:
@@ -23,10 +23,9 @@ concurrency:
  cancel-in-progress: true

jobs:
  # NOTE: Required Playwright tests are in superset-e2e.yml (E2E / playwright-tests)
  # This workflow contains only experimental tests that run in shadow mode
  playwright-tests-experimental:
  playwright-tests:
    runs-on: ubuntu-22.04
    # Allow workflow to succeed even if tests fail during shadow mode
    continue-on-error: true
    permissions:
      contents: read
@@ -45,7 +44,7 @@ jobs:
      GITHUB_TOKEN: ${{ github.token }}
    services:
      postgres:
        image: postgres:17-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -60,21 +59,21 @@ jobs:
      # Conditional checkout based on context (same as Cypress workflow)
      - name: Checkout for push or pull_request event
        if: github.event_name == 'push' || github.event_name == 'pull_request'
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      - name: Checkout using ref (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          ref: ${{ github.event.inputs.ref }}
          submodules: recursive
      - name: Checkout using PR ID (workflow_dispatch)
        if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -97,10 +96,10 @@ jobs:
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        with:
          run: playwright_testdata
          run: testdata
      - name: Setup Node.js
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
        uses: actions/setup-node@v5
        with:
          node-version-file: './superset-frontend/.nvmrc'
      - name: Install npm dependencies
@@ -118,13 +117,13 @@ jobs:
        uses: ./.github/actions/cached-dependencies
        with:
          run: playwright-install
      - name: Run Playwright (Experimental Tests)
      - name: Run Playwright
        if: steps.check.outputs.python || steps.check.outputs.frontend
        uses: ./.github/actions/cached-dependencies
        env:
          NODE_OPTIONS: "--max-old-space-size=4096"
        with:
          run: playwright-run "${{ matrix.app_root }}" experimental/
          run: playwright-run ${{ matrix.app_root }}
      - name: Set safe app root
        if: failure()
        id: set-safe-app-root
@@ -133,10 +132,10 @@ jobs:
          SAFE_APP_ROOT=${APP_ROOT//\//_}
          echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
      - name: Upload Playwright Artifacts
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        uses: actions/upload-artifact@v4
        if: failure()
        with:
          path: |
            ${{ github.workspace }}/superset-frontend/playwright-results/
            ${{ github.workspace }}/superset-frontend/test-results/
          name: playwright-experimental-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
          name: playwright-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
@@ -16,8 +16,6 @@ concurrency:
jobs:
  test-mysql:
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -43,7 +41,7 @@ jobs:
        - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -70,46 +68,13 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          flags: python,mysql
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          use_oidc: true
          slug: apache/superset
      - name: Generate database diagnostics for docs
        if: steps.check.outputs.python
        env:
          SUPERSET_CONFIG: tests.integration_tests.superset_test_config
          SUPERSET__SQLALCHEMY_DATABASE_URI: |
            mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
        run: |
          python -c "
          import json
          from superset.app import create_app
          from superset.db_engine_specs.lib import generate_yaml_docs
          app = create_app()
          with app.app_context():
              docs = generate_yaml_docs()
              # Wrap in the expected format
              output = {
                  'generated': '$(date -Iseconds)',
                  'databases': docs
              }
              with open('databases-diagnostics.json', 'w') as f:
                  json.dump(output, f, indent=2, default=str)
              print(f'Generated diagnostics for {len(docs)} databases')
          "
      - name: Upload database diagnostics artifact
        if: steps.check.outputs.python
        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
        with:
          name: database-diagnostics
          path: databases-diagnostics.json
          retention-days: 7
  test-postgres:
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
    strategy:
      matrix:
        python-version: ["current", "previous", "next"]
@@ -120,7 +85,7 @@ jobs:
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
    services:
      postgres:
        image: postgres:17-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -134,7 +99,7 @@ jobs:
        - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -164,17 +129,14 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          flags: python,postgres
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          use_oidc: true
          slug: apache/superset

  test-sqlite:
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -190,7 +152,7 @@ jobs:
        - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -219,9 +181,8 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          flags: python,sqlite
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          use_oidc: true
          slug: apache/superset
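A subtlety in the diagnostics step above: the inline Python source is wrapped in shell double quotes, so `$(date -Iseconds)` is substituted by the shell before Python ever runs. A minimal sketch of the same trick:

```yaml
# Hedged sketch: shell command substitution inside a double-quoted
# python -c string is expanded by the shell, not by Python.
- name: Timestamp inside inline python (sketch)
  run: |
    python -c "
    print('generated at: $(date -Iseconds)')  # filled in by the shell
    "
```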
@@ -17,8 +17,6 @@ concurrency:
jobs:
  test-postgres-presto:
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -27,7 +25,7 @@ jobs:
      SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
    services:
      postgres:
        image: postgres:17-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -50,7 +48,7 @@ jobs:
        - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -79,17 +77,14 @@ jobs:
        run: |
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
      - name: Upload code coverage
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          flags: python,presto
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          use_oidc: true
          slug: apache/superset

  test-postgres-hive:
    runs-on: ubuntu-24.04
    permissions:
      id-token: write
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -99,7 +94,7 @@ jobs:
      UPLOAD_FOLDER: /tmp/.superset/uploads/
    services:
      postgres:
        image: postgres:17-alpine
        image: postgres:16-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
@@ -113,7 +108,7 @@ jobs:
        - 16379:6379
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        uses: actions/checkout@v5
        with:
          persist-credentials: false
          submodules: recursive
@@ -150,9 +145,8 @@ jobs:
          pip install -e .[hive]
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
      - name: Upload code coverage
        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
        uses: codecov/codecov-action@v5
        with:
          flags: python,hive
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          use_oidc: true
          slug: apache/superset
.github/workflows/superset-python-unittest.yml | 10 (vendored)
@@ -17,8 +17,6 @@ concurrency:
jobs:
unit-tests:
runs-on: ubuntu-24.04
permissions:
id-token: write
strategy:
matrix:
python-version: ["previous", "current", "next"]
@@ -26,7 +24,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
persist-credentials: false
submodules: recursive
@@ -54,11 +52,9 @@ jobs:
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
pytest --durations-min=0.5 --cov=superset/semantic_layers/ ./tests/unit_tests/semantic_layers/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
uses: codecov/codecov-action@v5
with:
flags: python,unit
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
use_oidc: true
slug: apache/superset
.github/workflows/superset-translations.yml | 10 (vendored)
@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
persist-credentials: false
submodules: recursive
@@ -31,7 +31,7 @@ jobs:

- name: Setup Node.js
if: steps.check.outputs.frontend
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
uses: actions/setup-node@v5
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install dependencies
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
persist-credentials: false
submodules: recursive
@@ -62,10 +62,6 @@ jobs:
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/

- name: Install msgcat
run: sudo apt update && sudo apt install gettext

- name: Test babel extraction
if: steps.check.outputs.python
run: ./scripts/translations/babel_update.sh
.github/workflows/superset-websocket.yml | 2 (vendored)
@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
persist-credentials: false
- name: Install dependencies
.github/workflows/supersetbot.yml | 4 (vendored)
@@ -26,7 +26,7 @@ jobs:
steps:
- name: Quickly add thumbs up!
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@v8
with:
script: |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
@@ -38,7 +38,7 @@ jobs:
});

- name: "Checkout ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
persist-credentials: false
.github/workflows/tag-release.yml | 24 (vendored)
@@ -31,12 +31,10 @@ jobs:
id: check
shell: bash
run: |
if [ -n "${DOCKERHUB_USER}" ]; then
if [ -n "${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi

env:
DOCKERHUB_USER: ${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}
docker-release:
needs: config
if: needs.config.outputs.has-secrets
@@ -49,7 +47,7 @@ jobs:
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
fetch-depth: 0

@@ -62,7 +60,7 @@ jobs:
build: "true"

- name: Use Node.js 20
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
uses: actions/setup-node@v5
with:
node-version: 20

@@ -74,20 +72,17 @@ jobs:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INPUT_RELEASE: ${{ github.event.inputs.release }}
INPUT_FORCE_LATEST: ${{ github.event.inputs.force-latest }}
INPUT_GIT_REF: ${{ github.event.inputs.git-ref }}
run: |
RELEASE="${{ github.event.release.tag_name }}"
FORCE_LATEST=""
EVENT="${{github.event_name}}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
RELEASE="${INPUT_RELEASE}"
if [ "${INPUT_FORCE_LATEST}" = "true" ]; then
RELEASE="${{ github.event.inputs.release }}"
if [ "${{ github.event.inputs.force-latest }}" = "true" ]; then
FORCE_LATEST="--force-latest"
fi
git checkout "${INPUT_GIT_REF}"
git checkout "${{ github.event.inputs.git-ref }}"
EVENT="release"
fi

@@ -112,12 +107,12 @@ jobs:
steps:

- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5
with:
fetch-depth: 0

- name: Use Node.js 20
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
uses: actions/setup-node@v5
with:
node-version: 20

@@ -127,7 +122,6 @@ jobs:
- name: Label the PRs with the right release-related labels
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INPUT_RELEASE: ${{ github.event.inputs.release }}
run: |
export GITHUB_ACTOR=""
git fetch --all --tags
@@ -135,6 +129,6 @@ jobs:
RELEASE="${{ github.event.release.tag_name }}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
RELEASE="${INPUT_RELEASE}"
RELEASE="${{ github.event.inputs.release }}"
fi
supersetbot release-label $RELEASE
.github/workflows/tech-debt.yml | 11 (vendored)
@@ -6,9 +6,6 @@ on:
- master
- "[0-9].[0-9]*"

permissions:
contents: read

jobs:
config:
runs-on: ubuntu-24.04
@@ -19,12 +16,10 @@ jobs:
id: check
shell: bash
run: |
if [ -n "${GSHEET_KEY}" ]; then
if [ -n "${{ (secrets.GSHEET_KEY != '' ) || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi

env:
GSHEET_KEY: ${{ (secrets.GSHEET_KEY != '' ) || '' }}
process-and-upload:
needs: config
if: needs.config.outputs.has-secrets
@@ -32,10 +27,10 @@ jobs:
name: Generate Reports
steps:
- name: Checkout Repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@v5

- name: Set up Node.js
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
uses: actions/setup-node@v5
with:
node-version-file: './superset-frontend/.nvmrc'
.gitignore | 15 (vendored)
@@ -33,7 +33,6 @@ cover
.env
.envrc
.idea
.roo
.mypy_cache
.python-version
.tox
@@ -61,19 +60,21 @@ tmp
rat-results.txt
superset/app/
superset-websocket/config.json
.direnv
*.log

# Node.js, webpack artifacts, storybook
*.entry.js
*.js.map
node_modules
npm-debug.log*
superset/static/*
superset/static/assets/*
!superset/static/assets/.gitkeep
superset/static/uploads/*
!superset/static/uploads/.gitkeep
superset/static/version_info.json
superset-frontend/**/esm/*
superset-frontend/**/lib/*
superset-frontend/**/storybook-static/*
superset-frontend/migration-storybook.log
yarn-error.log
*.map
*.min.js
@@ -120,8 +121,6 @@ docker/requirements-local.txt

cache/
docker/*local*
docker/superset-websocket/config.json
docker-compose.override.yml

.temp_cache

@@ -134,8 +133,4 @@ CLAUDE.local.md
PROJECT.md
.aider*
.claude_rc*
.claude/settings.local.json
.env.local
oxc-custom-build/
*.code-workspace
*.duckdb
@@ -27,7 +27,6 @@ repos:
args: [--check-untyped-defs]
exclude: ^superset-extensions-cli/
additional_dependencies: [
types-cachetools,
types-simplejson,
types-python-dateutil,
types-requests,
@@ -50,40 +49,26 @@ repos:
hooks:
- id: check-docstring-first
- id: check-added-large-files
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$|^superset/examples/.*/data\.parquet$
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
- id: check-yaml
exclude: ^helm/superset/templates/
- id: debug-statements
- id: end-of-file-fixer
exclude: .*/lerna\.json$|^docs/static/img/logos/
exclude: .*/lerna\.json$
- id: trailing-whitespace
exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"]
- repo: local
hooks:
- id: prettier-frontend
name: prettier (frontend)
entry: bash -c 'cd superset-frontend && for file in "$@"; do npx prettier --write "${file#superset-frontend/}"; done'
language: system
pass_filenames: true
files: ^superset-frontend/.*\.(js|jsx|ts|tsx|css|scss|sass|json)$
- repo: local
hooks:
- id: oxlint-frontend
name: oxlint (frontend)
entry: ./scripts/oxlint.sh
language: system
pass_filenames: true
files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
- id: custom-rules-frontend
name: custom rules (frontend)
entry: ./scripts/check-custom-rules.sh
- id: eslint-frontend
name: eslint (frontend)
entry: ./scripts/eslint.sh
language: system
pass_filenames: true
files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
- id: eslint-docs
name: eslint (docs)
entry: bash -c 'cd docs && FILES=$(printf "%s\n" "$@" | sed "s|^docs/||" | tr "\n" " ") && yarn eslint --fix --quiet $FILES'
entry: bash -c 'cd docs && FILES=$(echo "$@" | sed "s|docs/||g") && yarn eslint --fix --quiet $FILES'
language: system
pass_filenames: true
files: ^docs/.*\.(js|jsx|ts|tsx)$
@@ -107,19 +92,12 @@ repos:
files: helm
verbose: false
args: ["--log-level", "error"]
# Using local hooks ensures ruff version matches requirements/development.txt
- repo: local
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.7
hooks:
- id: ruff-format
name: ruff-format
entry: ruff format
language: system
types: [python]
- id: ruff
name: ruff
entry: ruff check --fix --show-fixes
language: system
types: [python]
args: [--fix]
- repo: local
hooks:
- id: pylint
@@ -132,29 +110,11 @@ repos:
- -c
- |
TARGET_BRANCH=${GITHUB_BASE_REF:-master}
# Only fetch if we're not in CI (CI already has all refs)
if [ -z "$CI" ]; then
git fetch --no-recurse-submodules origin "$TARGET_BRANCH" 2>/dev/null || true
fi
BASE=$(git merge-base origin/"$TARGET_BRANCH" HEAD 2>/dev/null) || BASE="HEAD"
files=$(git diff --name-only --diff-filter=ACM "$BASE"..HEAD 2>/dev/null | grep '^superset/.*\.py$' || true)
git fetch origin "$TARGET_BRANCH"
BASE=$(git merge-base origin/"$TARGET_BRANCH" HEAD)
files=$(git diff --name-only --diff-filter=ACM "$BASE"..HEAD | grep '^superset/.*\.py$' || true)
if [ -n "$files" ]; then
pylint --rcfile=.pylintrc --load-plugins=superset.extensions.pylint --reports=no $files
else
echo "No Python files to lint."
fi
- id: db-engine-spec-metadata
name: database engine spec metadata validation
entry: python superset/db_engine_specs/lint_metadata.py --strict
language: system
files: ^superset/db_engine_specs/.*\.py$
exclude: ^superset/db_engine_specs/(base|lib|lint_metadata|__init__)\.py$
pass_filenames: false
- repo: local
hooks:
- id: feature-flags-sync
name: feature flags documentation sync
entry: bash -c 'python scripts/extract_feature_flags.py > docs/static/feature-flags.json.tmp && if ! diff -q docs/static/feature-flags.json docs/static/feature-flags.json.tmp > /dev/null 2>&1; then mv docs/static/feature-flags.json.tmp docs/static/feature-flags.json && echo "Updated docs/static/feature-flags.json" && exit 1; else rm docs/static/feature-flags.json.tmp; fi'
language: system
files: ^superset/config\.py$
pass_filenames: false
@@ -53,7 +53,7 @@ extension-pkg-whitelist=pyarrow

[MESSAGES CONTROL]
disable=all
enable=json-import,disallowed-sql-import,consider-using-transaction
enable=disallowed-json-import,disallowed-sql-import,consider-using-transaction

[REPORTS]
@@ -11,7 +11,6 @@
.nvmrc
.prettierrc
.rat-excludes
.swcrc
.*log
.*pyc
.*lock
@@ -67,19 +66,22 @@ temporary_superset_ui/*
# skip license checks for auto-generated test snapshots
.*snap

# docs third-party logos (database logos, org logos, etc.)
databases/*
logos/*
# docs overrides for third party logos we don't have the rights to
google-big-query.svg
google-sheets.svg
ibm-db2.svg
postgresql.svg
snowflake.svg
ydb.svg
loading.svg

# docs-related
erd.puml
erd.svg
intro_header.txt
TODO.md

# for LLMs
llm-context.md
llms.txt
AGENTS.md
LLMS.md
CLAUDE.md
AGENTS.md | 45
@@ -2,27 +2,6 @@

Apache Superset is a data visualization platform with Flask/Python backend and React/TypeScript frontend.

## ⚠️ CRITICAL: Always Run Pre-commit Before Pushing

**ALWAYS run `pre-commit run --all-files` before pushing commits.** CI will fail if pre-commit checks don't pass. This is non-negotiable.

```bash
# Stage your changes first
git add .

# Run pre-commit on all files
pre-commit run --all-files

# If there are auto-fixes, stage them and commit
git add .
git commit --amend # or new commit
```

Common pre-commit failures:
- **Formatting** - black, prettier, eslint will auto-fix
- **Type errors** - mypy failures need manual fixes
- **Linting** - ruff, pylint issues need manual fixes

## ⚠️ CRITICAL: Ongoing Refactors (What NOT to Do)

**These migrations are actively happening - avoid deprecated patterns:**
@@ -101,30 +80,6 @@ superset/
- **UPDATING.md**: Add breaking changes here
- **Docstrings**: Required for new functions/classes

## Developer Portal: Storybook-to-MDX Documentation

The Developer Portal auto-generates MDX documentation from Storybook stories. **Stories are the single source of truth.**

### Core Philosophy
- **Fix issues in the STORY, not the generator** - When something doesn't render correctly, update the story file first
- **Generator should be lightweight** - It extracts and passes through data; avoid special cases
- **Stories define everything** - Props, controls, galleries, examples all come from story metadata

### Story Requirements for Docs Generation
- Use `export default { title: '...' }` (inline), not `const meta = ...; export default meta;`
- Name interactive stories `Interactive${ComponentName}` (e.g., `InteractiveButton`)
- Define `args` for default prop values
- Define `argTypes` at the story level (not meta level) with control types and descriptions
- Use `parameters.docs.gallery` for size×style variant grids
- Use `parameters.docs.sampleChildren` for components that need children
- Use `parameters.docs.liveExample` for custom live code blocks
- Use `parameters.docs.staticProps` for complex object props that can't be parsed inline

### Generator Location
- Script: `docs/scripts/generate-superset-components.mjs`
- Wrapper: `docs/src/components/StorybookWrapper.jsx`
- Output: `docs/developer_portal/components/`

## Architecture Patterns

### Security & Features
@@ -49,4 +49,3 @@ under the License.
- [4.1.3](./CHANGELOG/4.1.3.md)
- [4.1.4](./CHANGELOG/4.1.4.md)
- [5.0.0](./CHANGELOG/5.0.0.md)
- [6.0.0](./CHANGELOG/6.0.0.md)
CHANGELOG/6.0.0.md | 1062 (diff suppressed because it is too large)
@@ -25,14 +25,14 @@ little bit helps, and credit will always be given.

All developer and contribution documentation has moved to the Apache Superset Developer Portal:

**[📚 View the Developer Portal →](https://superset.apache.org/developer_portal/)**
**[📚 View the Developer Portal →](https://superset.apache.org/docs/developer-portal/)**

The Developer Portal includes comprehensive guides for:
- [Contributing Overview](https://superset.apache.org/developer_portal/contributing/overview)
- [Development Setup](https://superset.apache.org/developer_portal/contributing/development-setup)
- [Submitting Pull Requests](https://superset.apache.org/developer_portal/contributing/submitting-pr)
- [Contribution Guidelines](https://superset.apache.org/developer_portal/contributing/guidelines)
- [Code Review Process](https://superset.apache.org/developer_portal/contributing/code-review)
- [Development How-tos](https://superset.apache.org/developer_portal/contributing/howtos)
- [Contributing Overview](https://superset.apache.org/docs/developer-portal/contributing/overview)
- [Development Setup](https://superset.apache.org/docs/developer-portal/contributing/development-setup)
- [Submitting Pull Requests](https://superset.apache.org/docs/developer-portal/contributing/submitting-pr)
- [Contribution Guidelines](https://superset.apache.org/docs/developer-portal/contributing/guidelines)
- [Code Review Process](https://superset.apache.org/docs/developer-portal/contributing/code-review)
- [Development How-tos](https://superset.apache.org/docs/developer-portal/contributing/howtos)

Source for the Developer Portal documentation is [located here](https://github.com/apache/superset/tree/master/docs/developer_portal).
Dockerfile | 28
@@ -18,7 +18,7 @@
######################################################################
# Node stage to deal with static asset construction
######################################################################
ARG PY_VER=3.11.14-slim-trixie
ARG PY_VER=3.11.13-slim-trixie

# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
@@ -26,10 +26,13 @@ ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
# Include translations in the final build
ARG BUILD_TRANSLATIONS="false"

# Build arg to pre-populate examples DuckDB file
ARG LOAD_EXAMPLES_DUCKDB="false"

######################################################################
# superset-node-ci used as a base for building frontend assets and CI
######################################################################
FROM --platform=${BUILDPLATFORM} node:22-trixie-slim AS superset-node-ci
FROM --platform=${BUILDPLATFORM} node:20-trixie-slim AS superset-node-ci
ARG BUILD_TRANSLATIONS
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
@@ -143,6 +146,9 @@ RUN if [ "${BUILD_TRANSLATIONS}" = "true" ]; then \
######################################################################
FROM python-base AS python-common

# Re-declare build arg to receive it in this stage
ARG LOAD_EXAMPLES_DUCKDB

ENV SUPERSET_HOME="/app/superset_home" \
HOME="/app/superset_home" \
SUPERSET_ENV="production" \
@@ -154,7 +160,7 @@ ENV SUPERSET_HOME="/app/superset_home" \
COPY --chmod=755 docker/entrypoints /app/docker/entrypoints

WORKDIR /app
# Set up necessary directories
# Set up necessary directories and user
RUN mkdir -p \
${PYTHONPATH} \
superset/static \
@@ -165,8 +171,6 @@ RUN mkdir -p \
&& touch superset/static/version_info.json

# Install Playwright and optionally setup headless browsers
ENV PLAYWRIGHT_BROWSERS_PATH=/usr/local/share/playwright-browsers

ARG INCLUDE_CHROMIUM="false"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
@@ -196,9 +200,17 @@ RUN /app/docker/apt-install.sh \
libecpg-dev \
libldap2-dev

# Create data directory for DuckDB examples database
# The database file will be created at runtime when examples are loaded from Parquet files
RUN mkdir -p /app/data && chown -R superset:superset /app/data
# Pre-load examples DuckDB file if requested
RUN if [ "$LOAD_EXAMPLES_DUCKDB" = "true" ]; then \
mkdir -p /app/data && \
echo "Downloading pre-built examples.duckdb..." && \
curl -L -o /app/data/examples.duckdb \
"https://raw.githubusercontent.com/apache-superset/examples-data/master/examples.duckdb" && \
chown -R superset:superset /app/data; \
else \
mkdir -p /app/data && \
chown -R superset:superset /app/data; \
fi

# Copy compiled things from previous stages
COPY --from=superset-node /app/superset/static/assets superset/static/assets
INSTALL.md | 20
@@ -16,20 +16,8 @@ KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Installing Apache Superset
# INSTALL / BUILD instructions for Apache Superset

For comprehensive installation instructions, please see the Apache Superset documentation:

**[📚 Installation Guide →](https://superset.apache.org/docs/installation/installation-methods)**

The documentation covers:
- [Docker Compose](https://superset.apache.org/docs/installation/docker-compose) (recommended for development)
- [Kubernetes / Helm](https://superset.apache.org/docs/installation/kubernetes)
- [PyPI](https://superset.apache.org/docs/installation/pypi)
- [Docker Builds](https://superset.apache.org/docs/installation/docker-builds)
- [Architecture Overview](https://superset.apache.org/docs/installation/architecture)

## Building from Source

For building from a source release tarball, see the Dockerfile at:
`RELEASING/Dockerfile.from_local_tarball`
At this time, the docker file at RELEASING/Dockerfile.from_local_tarball
constitutes the recipe on how to get to a working release from a source
release tarball.
Makefile | 27
@@ -18,7 +18,7 @@
# Python version installed; we need 3.10-3.11
PYTHON=`command -v python3.11 || command -v python3.10`

.PHONY: install superset venv pre-commit up down logs ps nuke ports open
.PHONY: install superset venv pre-commit

install: superset pre-commit

@@ -112,28 +112,3 @@ report-celery-beat:

admin-user:
superset fab create-admin

# Docker Compose with auto-assigned ports (for running multiple instances)
up:
./scripts/docker-compose-up.sh

up-detached:
./scripts/docker-compose-up.sh -d

down:
./scripts/docker-compose-up.sh down

logs:
./scripts/docker-compose-up.sh logs -f

ps:
./scripts/docker-compose-up.sh ps

nuke:
./scripts/docker-compose-up.sh nuke

ports:
./scripts/docker-compose-up.sh ports

open:
./scripts/docker-compose-up.sh open
README.md | 128
@@ -23,12 +23,8 @@ under the License.
[](https://github.com/apache/superset/releases/latest)
[](https://github.com/apache/superset/actions)
[](https://badge.fury.io/py/apache_superset)
[](https://codecov.io/github/apache/superset)
[](https://pypi.python.org/pypi/apache_superset)
[](https://github.com/apache/superset/stargazers)
[](https://github.com/apache/superset/graphs/contributors)
[](https://github.com/apache/superset/commits/master)
[](https://github.com/apache/superset/issues)
[](https://github.com/apache/superset/pulls)
[](http://bit.ly/join-superset-slack)
[](https://superset.apache.org)

@@ -48,18 +44,14 @@ under the License.

A modern, enterprise-ready business intelligence web application.

### Documentation

- **[User Guide](https://superset.apache.org/user-docs/)** — For analysts and business users. Explore data, build charts, create dashboards, and connect databases.
- **[Administrator Guide](https://superset.apache.org/admin-docs/)** — Install, configure, and operate Superset. Covers security, scaling, and database drivers.
- **[Developer Guide](https://superset.apache.org/developer-docs/)** — Contribute to Superset or build on its REST API and extension framework.

[**Why Superset?**](#why-superset) |
[**Supported Databases**](#supported-databases) |
[**Installation and Configuration**](#installation-and-configuration) |
[**Release Notes**](https://github.com/apache/superset/blob/master/RELEASING/README.md#release-notes-for-recent-releases) |
[**Get Involved**](#get-involved) |
[**Contributor Guide**](#contributor-guide) |
[**Resources**](#resources) |
[**Organizations Using Superset**](https://superset.apache.org/inTheWild)
[**Organizations Using Superset**](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md)

## Why Superset?

@@ -93,7 +85,7 @@ Superset provides:

**Craft Beautiful, Dynamic Dashboards**

<kbd><img title="View Dashboards" src="https://superset.apache.org/img/screenshots/dashboard.jpg"/></kbd><br/>
<kbd><img title="View Dashboards" src="https://superset.apache.org/img/screenshots/slack_dash.jpg"/></kbd><br/>

**No-Code Chart Builder**

@@ -105,77 +97,51 @@ Superset provides:

## Supported Databases

Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/databases)) that has a Python DB-API driver and a SQLAlchemy dialect.
Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/configuration/databases)) that has a Python DB-API driver and a SQLAlchemy dialect.

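To make that driver-plus-dialect requirement concrete, here is a minimal sketch (not part of the README diff itself) of what a compatible engine looks like from Python; the `trino` driver package and the host/port/catalog details are illustrative assumptions:

```python
# A datastore is Superset-compatible when a SQLAlchemy URI like this works.
# Assumes `pip install sqlalchemy trino`; connection details are placeholders.
from sqlalchemy import create_engine, text

# dialect://user@host:port/catalog -- the dialect name comes from the driver
engine = create_engine("trino://user@localhost:8080/tpch")
with engine.connect() as conn:
    # If the DB-API driver and dialect are installed, this round-trips a query.
    print(conn.execute(text("SELECT 1")).scalar())
```
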
Here are some of the major database solutions that are supported:

<!-- SUPPORTED_DATABASES_START -->
<p align="center">
<a href="https://superset.apache.org/docs/databases/supported/amazon-athena" title="Amazon Athena"><img src="docs/static/img/databases/amazon-athena.jpg" alt="Amazon Athena" width="76" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/amazon-dynamodb" title="Amazon DynamoDB"><img src="docs/static/img/databases/aws.png" alt="Amazon DynamoDB" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/amazon-redshift" title="Amazon Redshift"><img src="docs/static/img/databases/redshift.png" alt="Amazon Redshift" width="100" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-doris" title="Apache Doris"><img src="docs/static/img/databases/doris.png" alt="Apache Doris" width="103" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-drill" title="Apache Drill"><img src="docs/static/img/databases/apache-drill.png" alt="Apache Drill" width="81" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-druid" title="Apache Druid"><img src="docs/static/img/databases/druid.png" alt="Apache Druid" width="117" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-hive" title="Apache Hive"><img src="docs/static/img/databases/apache-hive.svg" alt="Apache Hive" width="44" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-impala" title="Apache Impala"><img src="docs/static/img/databases/apache-impala.png" alt="Apache Impala" width="21" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-kylin" title="Apache Kylin"><img src="docs/static/img/databases/apache-kylin.png" alt="Apache Kylin" width="44" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-pinot" title="Apache Pinot"><img src="docs/static/img/databases/apache-pinot.svg" alt="Apache Pinot" width="76" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-solr" title="Apache Solr"><img src="docs/static/img/databases/apache-solr.png" alt="Apache Solr" width="79" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/apache-spark-sql" title="Apache Spark SQL"><img src="docs/static/img/databases/apache-spark.png" alt="Apache Spark SQL" width="75" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/ascend" title="Ascend"><img src="docs/static/img/databases/ascend.webp" alt="Ascend" width="117" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/aurora-mysql-data-api" title="Aurora MySQL (Data API)"><img src="docs/static/img/databases/mysql.png" alt="Aurora MySQL (Data API)" width="77" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/aurora-postgresql-data-api" title="Aurora PostgreSQL (Data API)"><img src="docs/static/img/databases/postgresql.svg" alt="Aurora PostgreSQL (Data API)" width="76" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/azure-data-explorer" title="Azure Data Explorer"><img src="docs/static/img/databases/kusto.png" alt="Azure Data Explorer" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/azure-synapse" title="Azure Synapse"><img src="docs/static/img/databases/azure.svg" alt="Azure Synapse" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/clickhouse" title="ClickHouse"><img src="docs/static/img/databases/clickhouse.png" alt="ClickHouse" width="150" height="37" /></a>
<a href="https://superset.apache.org/docs/databases/supported/cloudflare-d1" title="Cloudflare D1"><img src="docs/static/img/databases/cloudflare.png" alt="Cloudflare D1" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/cockroachdb" title="CockroachDB"><img src="docs/static/img/databases/cockroachdb.png" alt="CockroachDB" width="150" height="24" /></a>
<a href="https://superset.apache.org/docs/databases/supported/couchbase" title="Couchbase"><img src="docs/static/img/databases/couchbase.svg" alt="Couchbase" width="150" height="35" /></a>
<a href="https://superset.apache.org/docs/databases/supported/cratedb" title="CrateDB"><img src="docs/static/img/databases/cratedb.svg" alt="CrateDB" width="180" height="24" /></a>
<a href="https://superset.apache.org/docs/databases/supported/databend" title="Databend"><img src="docs/static/img/databases/databend.png" alt="Databend" width="100" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/databricks" title="Databricks"><img src="docs/static/img/databases/databricks.png" alt="Databricks" width="152" height="24" /></a>
<a href="https://superset.apache.org/docs/databases/supported/denodo" title="Denodo"><img src="docs/static/img/databases/denodo.png" alt="Denodo" width="138" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/dremio" title="Dremio"><img src="docs/static/img/databases/dremio.png" alt="Dremio" width="126" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/duckdb" title="DuckDB"><img src="docs/static/img/databases/duckdb.png" alt="DuckDB" width="52" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/elasticsearch" title="Elasticsearch"><img src="docs/static/img/databases/elasticsearch.png" alt="Elasticsearch" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/exasol" title="Exasol"><img src="docs/static/img/databases/exasol.png" alt="Exasol" width="72" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/firebird" title="Firebird"><img src="docs/static/img/databases/firebird.png" alt="Firebird" width="100" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/firebolt" title="Firebolt"><img src="docs/static/img/databases/firebolt.png" alt="Firebolt" width="100" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/google-bigquery" title="Google BigQuery"><img src="docs/static/img/databases/google-big-query.svg" alt="Google BigQuery" width="76" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/google-sheets" title="Google Sheets"><img src="docs/static/img/databases/google-sheets.svg" alt="Google Sheets" width="76" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/greenplum" title="Greenplum"><img src="docs/static/img/databases/greenplum.png" alt="Greenplum" width="124" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/hologres" title="Hologres"><img src="docs/static/img/databases/hologres.png" alt="Hologres" width="44" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/ibm-db2" title="IBM Db2"><img src="docs/static/img/databases/ibm-db2.svg" alt="IBM Db2" width="91" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/ibm-netezza-performance-server" title="IBM Netezza Performance Server"><img src="docs/static/img/databases/netezza.png" alt="IBM Netezza Performance Server" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/mariadb" title="MariaDB"><img src="docs/static/img/databases/mariadb.png" alt="MariaDB" width="150" height="37" /></a>
<a href="https://superset.apache.org/docs/databases/supported/microsoft-sql-server" title="Microsoft SQL Server"><img src="docs/static/img/databases/msql.png" alt="Microsoft SQL Server" width="50" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/monetdb" title="MonetDB"><img src="docs/static/img/databases/monet-db.png" alt="MonetDB" width="100" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/mongodb" title="MongoDB"><img src="docs/static/img/databases/mongodb.png" alt="MongoDB" width="150" height="38" /></a>
<a href="https://superset.apache.org/docs/databases/supported/motherduck" title="MotherDuck"><img src="docs/static/img/databases/motherduck.png" alt="MotherDuck" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/oceanbase" title="OceanBase"><img src="docs/static/img/databases/oceanbase.svg" alt="OceanBase" width="175" height="24" /></a>
<a href="https://superset.apache.org/docs/databases/supported/oracle" title="Oracle"><img src="docs/static/img/databases/oraclelogo.png" alt="Oracle" width="111" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/presto" title="Presto"><img src="docs/static/img/databases/presto-og.png" alt="Presto" width="127" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/risingwave" title="RisingWave"><img src="docs/static/img/databases/risingwave.svg" alt="RisingWave" width="147" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/sap-hana" title="SAP HANA"><img src="docs/static/img/databases/sap-hana.png" alt="SAP HANA" width="137" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/sap-sybase" title="SAP Sybase"><img src="docs/static/img/databases/sybase.png" alt="SAP Sybase" width="100" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/shillelagh" title="Shillelagh"><img src="docs/static/img/databases/shillelagh.png" alt="Shillelagh" width="40" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/singlestore" title="SingleStore"><img src="docs/static/img/databases/singlestore.png" alt="SingleStore" width="150" height="31" /></a>
<a href="https://superset.apache.org/docs/databases/supported/snowflake" title="Snowflake"><img src="docs/static/img/databases/snowflake.svg" alt="Snowflake" width="76" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/sqlite" title="SQLite"><img src="docs/static/img/databases/sqlite.png" alt="SQLite" width="84" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/starrocks" title="StarRocks"><img src="docs/static/img/databases/starrocks.png" alt="StarRocks" width="149" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/superset-meta-database" title="Superset meta database"><img src="docs/static/img/databases/superset.svg" alt="Superset meta database" width="150" height="39" /></a>
<a href="https://superset.apache.org/docs/databases/supported/tdengine" title="TDengine"><img src="docs/static/img/databases/tdengine.png" alt="TDengine" width="140" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/teradata" title="Teradata"><img src="docs/static/img/databases/teradata.png" alt="Teradata" width="124" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/timescaledb" title="TimescaleDB"><img src="docs/static/img/databases/timescale.png" alt="TimescaleDB" width="150" height="36" /></a>
<a href="https://superset.apache.org/docs/databases/supported/trino" title="Trino"><img src="docs/static/img/databases/trino.png" alt="Trino" width="89" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/vertica" title="Vertica"><img src="docs/static/img/databases/vertica.png" alt="Vertica" width="128" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/ydb" title="YDB"><img src="docs/static/img/databases/ydb.svg" alt="YDB" width="110" height="40" /></a>
<a href="https://superset.apache.org/docs/databases/supported/yugabytedb" title="YugabyteDB"><img src="docs/static/img/databases/yugabyte.png" alt="YugabyteDB" width="150" height="26" /></a>
<img src="https://superset.apache.org/img/databases/redshift.png" alt="redshift" border="0" width="200"/>
<img src="https://superset.apache.org/img/databases/google-biquery.png" alt="google-bigquery" border="0" width="200"/>
<img src="https://superset.apache.org/img/databases/snowflake.png" alt="snowflake" border="0" width="200"/>
<img src="https://superset.apache.org/img/databases/trino.png" alt="trino" border="0" width="150" />
<img src="https://superset.apache.org/img/databases/presto.png" alt="presto" border="0" width="200"/>
<img src="https://superset.apache.org/img/databases/databricks.png" alt="databricks" border="0" width="160" />
<img src="https://superset.apache.org/img/databases/druid.png" alt="druid" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/firebolt.png" alt="firebolt" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/timescale.png" alt="timescale" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/postgresql.png" alt="postgresql" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/mysql.png" alt="mysql" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/mssql-server.png" alt="mssql-server" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ibm-db2.svg" alt="db2" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sqlite.png" alt="sqlite" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/sybase.png" alt="sybase" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/mariadb.png" alt="mariadb" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/vertica.png" alt="vertica" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/oracle.png" alt="oracle" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/firebird.png" alt="firebird" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/greenplum.png" alt="greenplum" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/clickhouse.png" alt="clickhouse" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/exasol.png" alt="exasol" border="0" width="160" />
<img src="https://superset.apache.org/img/databases/monet-db.png" alt="monet-db" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/apache-kylin.png" alt="apache-kylin" border="0" width="80"/>
<img src="https://superset.apache.org/img/databases/hologres.png" alt="hologres" border="0" width="80"/>
<img src="https://superset.apache.org/img/databases/netezza.png" alt="netezza" border="0" width="80"/>
<img src="https://superset.apache.org/img/databases/pinot.png" alt="pinot" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/teradata.png" alt="teradata" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/yugabyte.png" alt="yugabyte" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/databend.png" alt="databend" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/starrocks.png" alt="starrocks" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/doris.png" alt="doris" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/tdengine.png" alt="TDengine" border="0" width="200" />
</p>
<!-- SUPPORTED_DATABASES_END -->

**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/databases).
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).

Want to add support for your datastore or data engine? Read more [here](https://superset.apache.org/docs/frequently-asked-questions#does-superset-work-with-insert-database-engine-here) about the technical requirements.

@@ -195,14 +161,14 @@ Try out Superset's [quickstart](https://superset.apache.org/docs/quickstart/) gu
## Contributor Guide

Interested in contributing? Check out our
[Developer Guide](https://superset.apache.org/developer-docs/)
[CONTRIBUTING.md](https://github.com/apache/superset/blob/master/CONTRIBUTING.md)
to find resources around contributing along with a detailed guide on
how to set up a development environment.

## Resources

- [Superset "In the Wild"](https://superset.apache.org/inTheWild) - see who's using Superset, and [add your organization](https://github.com/apache/superset/edit/master/RESOURCES/INTHEWILD.yaml) to the list!
- [Feature Flags](https://superset.apache.org/docs/configuration/feature-flags) - the status of Superset's Feature Flags.
- [Superset "In the Wild"](https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md) - open a PR to add your org to the list!
- [Feature Flags](https://github.com/apache/superset/blob/master/RESOURCES/FEATURE_FLAGS.md) - the status of Superset's Feature Flags.
- [Standard Roles](https://github.com/apache/superset/blob/master/RESOURCES/STANDARD_ROLES.md) - How RBAC permissions map to roles.
- [Superset Wiki](https://github.com/apache/superset/wiki) - Tons of additional community resources: best practices, community content and other information.
- [Superset SIPs](https://github.com/orgs/apache/projects/170) - The status of Superset's SIPs (Superset Improvement Proposals) for both consensus and implementation status.
@@ -458,7 +458,7 @@ cd ../
sed -i '' "s/version_string = .*/version_string = \"$SUPERSET_VERSION\"/" setup.py

# build the python distribution
python -m build
python setup.py sdist
```

Publish to PyPI
@@ -92,7 +92,7 @@ Some of the new features in this release are disabled by default. Each has a fea

| Feature | Feature Flag | Dependencies | Documentation |
| --- | --- | --- | --- |
| Global Async Queries | `GLOBAL_ASYNC_QUERIES: True` | Redis 5.0+, celery workers configured and running | [Extra documentation](https://superset.apache.org/docs/contributing/misc#async-chart-queries) |
| Global Async Queries | `GLOBAL_ASYNC_QUERIES: True` | Redis 5.0+, celery workers configured and running | [Extra documentation](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries) |
| Dashboard Native Filters | `DASHBOARD_NATIVE_FILTERS: True` | | |
| Alerts & Reporting | `ALERT_REPORTS: True` | [Celery workers configured & celery beat process](https://superset.apache.org/docs/installation/async-queries-celery) | |
| Homescreen Thumbnails | `THUMBNAILS: TRUE, THUMBNAIL_CACHE_CONFIG: CacheConfig = { "CACHE_TYPE": "null", "CACHE_NO_NULL_WARNING": True}` | selenium, pillow 7, celery | |
@@ -56,33 +56,8 @@ def verify_sha512(filename: str) -> str:
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file # noqa: E501


KEYS_URL = "https://downloads.apache.org/superset/KEYS"


def ensure_keys_imported() -> None:
"""Import the Apache Superset KEYS file into the local GPG keyring.

Without this, `gpg --verify` returns "No public key" and the signature
cannot actually be verified — only the key ID in the signature metadata
is visible.
"""
try:
keys = requests.get(KEYS_URL, timeout=30)
except requests.RequestException as exc:
print(f"Warning: could not fetch KEYS file for import: {exc}")
return
if keys.status_code != 200:
print(f"Warning: could not fetch KEYS file (HTTP {keys.status_code})")
return
subprocess.run(  # noqa: S603
["gpg", "--import"],  # noqa: S607
input=keys.content,
capture_output=True,
)


def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
"""Run the GPG verify command and extract RSA/EDDSA key and email address."""
"""Run the GPG verify command and extract RSA key and email address."""
asc_filename = filename + ".asc"
result = subprocess.run(  # noqa: S603
["gpg", "--verify", asc_filename, filename],  # noqa: S607
@@ -90,50 +65,25 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
)
output = result.stderr.decode()

# If no public key was available, import KEYS and retry so that
# `Good signature from "Name <email>"` appears in the output.
if "No public key" in output:
ensure_keys_imported()
result = subprocess.run(  # noqa: S603
["gpg", "--verify", asc_filename, filename],  # noqa: S607
capture_output=True,  # noqa: S607
)
output = result.stderr.decode()

rsa_key = re.search(r"RSA key ([0-9A-F]+)", output)
eddsa_key = re.search(r"EDDSA key ([0-9A-F]+)", output)

# Try multiple patterns — `Good signature from` is the most reliable
# source of the email; `issuer` is a fallback for older gpg output.
email_patterns = (
r'Good signature from ".*?<([^>]+)>"',
r'aka ".*?<([^>]+)>"',
r'issuer "([^"]+)"',
)
email_result: Optional[str] = None
for pattern in email_patterns:
match = re.search(pattern, output)
if match:
email_result = match.group(1)
break
email = re.search(r'issuer "([^"]+)"', output)

rsa_key_result = rsa_key.group(1) if rsa_key else None
eddsa_key_result = eddsa_key.group(1) if eddsa_key else None
email_result = email.group(1) if email else None

key_result = rsa_key_result or eddsa_key_result

# Debugging:
if key_result:
print("RSA or EDDSA Key found")
else:
print("Warning: No RSA or EDDSA key found in GPG verification output.")
if email_result:
print(f"Email found: {email_result}")
print("email found")
else:
print("Warning: No email address found in GPG verification output.")
if "No public key" in output:
print(
"Hint: public key is not in your keyring. Import it with:\n"
f"  curl -s {KEYS_URL} | gpg --import"
)

return key_result, email_result
RESOURCES/FEATURE_FLAGS.md | 103 (new file)
@@ -0,0 +1,103 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

# Superset Feature Flags

This is a list of the current Superset optional features. See config.py for default values. These features can be turned on or off by setting them to True or False in superset_config.py, respectively.

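For concreteness, a minimal sketch of such an override using Superset's standard `FEATURE_FLAGS` config dict; the specific flags are just examples drawn from the lists below:

```python
# superset_config.py -- picked up by Superset at startup (via PYTHONPATH
# or the SUPERSET_CONFIG_PATH environment variable).
FEATURE_FLAGS = {
    "ALERT_REPORTS": True,    # turn on an "In Testing" feature
    "TAGGING_SYSTEM": False,  # keep an "In Development" feature off
}
```
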
## In Development

These features are considered **unfinished** and should only be used in development environments.

[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"

- ALERT_REPORT_TABS
- DATE_RANGE_TIMESHIFTS_ENABLED
- ENABLE_ADVANCED_DATA_TYPES
- PRESTO_EXPAND_DATA
- SHARE_QUERIES_VIA_KV_STORE
- TAGGING_SYSTEM
- CHART_PLUGINS_EXPERIMENTAL

## In Testing

These features are **finished** but currently being tested. They are usable, but may still contain some bugs.

[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"

- ALERT_REPORTS: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
- ALLOW_FULL_CSV_EXPORT
- CACHE_IMPERSONATION
- CONFIRM_DASHBOARD_DIFF
- DYNAMIC_PLUGINS
- DATE_FORMAT_IN_EMAIL_SUBJECT: [(docs)](https://superset.apache.org/docs/configuration/alerts-reports#commons)
- ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases)
- ESTIMATE_QUERY_COST
- GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries)
- IMPERSONATE_WITH_EMAIL_PREFIX
- PLAYWRIGHT_REPORTS_AND_THUMBNAILS
- RLS_IN_SQLLAB
- SSH_TUNNELING [(docs)](https://superset.apache.org/docs/configuration/setup-ssh-tunneling)
- USE_ANALAGOUS_COLORS

## Stable

These feature flags are **safe for production**. They have been tested and will be supported for at least the current major version cycle.

[//]: # "PLEASE KEEP THESE LISTS SORTED ALPHABETICALLY"

### Flags on the path to feature launch and flag deprecation/removal

- DASHBOARD_VIRTUALIZATION

### Flags retained for runtime configuration

Currently some of our feature flags act as dynamic configurations that can be changed
on the fly. This is at odds with the typical ephemeral feature-flag use case, where a flag
is used to mature a feature and is eventually deprecated once the feature is solid.
Eventually we'll likely refactor these under a more formal "dynamic configurations" framework,
managed independently. This new framework will also allow for non-boolean configurations;
see the sketch after this paragraph for what the dynamic pattern looks like today.

- ALERTS_ATTACH_REPORTS
|
||||
- ALLOW_ADHOC_SUBQUERY
|
||||
- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards)
|
||||
- DATAPANEL_CLOSED_BY_DEFAULT
|
||||
- DRILL_BY
|
||||
- DRUID_JOINS
|
||||
- EMBEDDABLE_CHARTS
|
||||
- EMBEDDED_SUPERSET
|
||||
- ENABLE_TEMPLATE_PROCESSING
|
||||
- ESCAPE_MARKDOWN_HTML
|
||||
- LISTVIEWS_DEFAULT_CARD_VIEW
|
||||
- SCHEDULED_QUERIES [(docs)](https://superset.apache.org/docs/configuration/alerts-reports)
|
||||
- SLACK_ENABLE_AVATARS (see `superset/config.py` for more information)
|
||||
- SQLLAB_BACKEND_PERSISTENCE
|
||||
- SQL_VALIDATORS_BY_ENGINE [(docs)](https://superset.apache.org/docs/configuration/sql-templating)
|
||||
- THUMBNAILS [(docs)](https://superset.apache.org/docs/configuration/cache)
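
As referenced above, dynamic evaluation is possible today through the `GET_FEATURE_FLAGS_FUNC` hook in `superset/config.py`, which receives the merged flag dict on each evaluation and may override it. A minimal sketch; the per-user condition here is a made-up illustration:

    # superset_config.py
    from flask import g

    def get_feature_flags(feature_flags: dict) -> dict:
        # Hypothetical runtime rule: expose DRILL_BY only to authenticated users
        user = getattr(g, "user", None)
        if user is not None and not user.is_anonymous:
            feature_flags["DRILL_BY"] = True
        return feature_flags

    GET_FEATURE_FLAGS_FUNC = get_feature_flags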

## Deprecated Flags

These feature flags currently default to True and **will be removed in a future major release**. In the current release you can turn them off by setting them to False in your config, but it is advised to either remove these flags or explicitly set them to **True** in your local configuration so that you do not experience any unexpected changes in a future release.

[//]: # "PLEASE KEEP THE LIST SORTED ALPHABETICALLY"

- AVOID_COLORS_COLLISION
- DRILL_TO_DETAIL
- ENABLE_JAVASCRIPT_CONTROLS
- KV_STORE
226	RESOURCES/INTHEWILD.md	Normal file
@@ -0,0 +1,226 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

## Superset Users in the Wild

Here's a list of organizations, broken down into broad industry categories, that have taken the time to send a PR to let the world know they are using Apache Superset. If you are a user and want to be recognized, all you have to do is file a simple PR [like this one](https://github.com/apache/superset/pull/10122) — [just click here](https://github.com/apache/superset/edit/master/RESOURCES/INTHEWILD.md) to do so. If you think the categorization is inaccurate, please file a PR with your correction as well. Join our growing community!

### Sharing Economy

- [Airbnb](https://github.com/airbnb)
- [Faasos](https://faasos.com/) [@shashanksingh]
- [Free2Move](https://www.free2move.com/) [@PaoloTerzi]
- [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel]
- [Lime](https://www.li.me/) [@cxmcc]
- [Lyft](https://www.lyft.com/)
- [Ontruck](https://www.ontruck.com/)

### Financial Services

- [Aktia Bank plc](https://www.aktia.com)
- [American Express](https://www.americanexpress.com) [@TheLastSultan]
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival]
- [Cape Crypto](https://capecrypto.com)
- [Capital Service S.A.](https://capitalservice.pl) [@pkonarzewski]
- [Clark.de](https://clark.de/)
- [Europace](https://europace.de)
- [KarrotPay](https://www.daangnpay.com/)
- [Remita](https://remita.net) [@mujibishola]
- [Taveo](https://www.taveo.com) [@codek]
- [Unit](https://www.unit.co/about-us) [@amitmiran137]
- [Wise](https://wise.com) [@koszti]
- [Xendit](https://xendit.co/) [@LieAlbertTriAdrian]
- [Cover Genius](https://covergenius.com/)

### Gaming

- [Popoko VM Games Studio](https://popoko.live)

### E-Commerce

- [AiHello](https://www.aihello.com) [@ganeshkrishnan1]
- [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro]
- [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh]
- [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev]
- [Fanatics](https://www.fanatics.com/) [@coderfender]
- [Fordeal](https://www.fordeal.com) [@Renkai]
- [Fynd](https://www.fynd.com/) [@darpanjain07]
- [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic]
- [GoTo/Gojek](https://www.gojek.io/) [@gwthm-in]
- [HuiShouBao](https://www.huishoubao.com/) [@Yukinoshita-Yukino]
- [Now](https://www.now.vn/) [@davidkohcw]
- [Qunar](https://www.qunar.com/) [@flametest]
- [Rakuten Viki](https://www.viki.com)
- [Shopee](https://shopee.sg) [@xiaohanyu]
- [Shopkick](https://www.shopkick.com) [@LAlbertalli]
- [ShopUp](https://www.shopup.org/) [@gwthm-in]
- [Tails.com](https://tails.com/gb/) [@alanmcruickshank]
- [THE ICONIC](https://theiconic.com.au/) [@ksaagariconic]
- [Utair](https://www.utair.ru) [@utair-digital]
- [VkusVill](https://vkusvill.ru/) [@ETselikov]
- [Zalando](https://www.zalando.com) [@dmigo]
- [Zalora](https://www.zalora.com) [@ksaagariconic]
- [Zepto](https://www.zeptonow.com/) [@gwthm-in]

### Enterprise Technology

- [A3Data](https://a3data.com.br) [@neylsoncrepalde]
- [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura]
- [Apollo GraphQL](https://www.apollographql.com/) [@evans]
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Canonical](https://canonical.com)
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]
- [Dial Once](https://www.dial-once.com/)
- [Dremio](https://dremio.com) [@narendrans]
- [EFinance](https://www.efinance.com.eg) [@habeeb556]
- [Elestio](https://elest.io/) [@kaiwalyakoparkar]
- [ELMO Cloud HR & Payroll](https://elmosoftware.com.au/)
- [Endress+Hauser](https://www.endress.com/) [@rumbin]
- [FBK - ICT center](https://ict.fbk.eu)
- [Formbricks](https://formbricks.com)
- [Gavagai](https://gavagai.io) [@gavagai-corp]
- [GfK Data Lab](https://www.gfk.com/home) [@mherr]
- [HPE](https://www.hpe.com/in/en/home.html) [@anmol-hpe]
- [Hydrolix](https://www.hydrolix.io/)
- [Intercom](https://www.intercom.com/) [@kate-gallo]
- [jampp](https://jampp.com/)
- [Konfío](https://konfio.mx) [@uis-rodriguez]
- [Mainstrat](https://mainstrat.com/)
- [mishmash io](https://mishmash.io/) [@mishmash-io]
- [Myra Labs](https://www.myralabs.com/) [@viksit]
- [Nielsen](https://www.nielsen.com/) [@amitNielsen]
- [Ona](https://ona.io) [@pld]
- [Orange](https://www.orange.com) [@icsu]
- [Oslandia](https://oslandia.com)
- [Oxylabs](https://oxylabs.io/) [@rytis-ulys]
- [Peak AI](https://www.peak.ai/) [@azhar22k]
- [PeopleDoc](https://www.people-doc.com) [@rodo]
- [PlaidCloud](https://www.plaidcloud.com)
- [Preset, Inc.](https://preset.io)
- [PubNub](https://pubnub.com) [@jzucker2]
- [ReadyTech](https://www.readytech.io)
- [Reward Gateway](https://www.rewardgateway.com)
- [RIADVICE](https://riadvice.tn) [@riadvice]
- [ScopeAI](https://www.getscopeai.com) [@iloveluce]
- [shipmnts](https://shipmnts.com)
- [Showmax](https://showmax.com) [@bobek]
- [SingleStore](https://www.singlestore.com/)
- [TechAudit](https://www.techaudit.info) [@ETselikov]
- [Tenable](https://www.tenable.com) [@dflionis]
- [Tentacle](https://www.linkedin.com/company/tentacle-cmi/) [@jdclarke5]
- [timbr.ai](https://timbr.ai/) [@semantiDan]
- [Tobii](https://www.tobii.com/) [@dwa]
- [Tooploox](https://www.tooploox.com/) [@jakubczaplicki]
- [Unvired](https://unvired.com) [@srinisubramanian]
- [Virtuoso QA](https://www.virtuosoqa.com)
- [Whale](https://whale.im)
- [Windsor.ai](https://www.windsor.ai/) [@octaviancorlade]
- [WinWin Network马上赢](https://brandct.cn/) [@wenbinye]
- [Zeta](https://www.zeta.tech/) [@shaikidris]

### Media & Entertainment

- [6play](https://www.6play.fr) [@CoryChaplin]
- [bilibili](https://www.bilibili.com) [@Moinheart]
- [BurdaForward](https://www.burda-forward.de/en/)
- [Douban](https://www.douban.com/) [@luchuan]
- [Kuaishou](https://www.kuaishou.com/) [@zhaoyu89730105]
- [Netflix](https://www.netflix.com/)
- [Prensa Iberica](https://www.prensaiberica.es/) [@zamar-roura]
- [TME QQMUSIC/WESING](https://www.tencentmusic.com/) [@shenyuanli,@marklaw]
- [Xite](https://xite.com/) [@shashankkoppar]
- [Zaihang](https://www.zaih.com/)

### Education

- [Aveti Learning](https://avetilearning.com/) [@TheShubhendra]
- [Brilliant.org](https://brilliant.org/)
- [Open edX](https://openedx.org/)
- [Platzi.com](https://platzi.com/)
- [Sunbird](https://www.sunbird.org/) [@eksteporg]
- [The GRAPH Network](https://thegraphnetwork.org/) [@fccoelho]
- [Udemy](https://www.udemy.com/) [@sungjuly]
- [VIPKID](https://www.vipkid.com.cn/) [@illpanda]
- [WikiMedia Foundation](https://wikimediafoundation.org) [@vg]

### Energy

- [Airboxlab](https://foobot.io) [@antoine-galataud]
- [DouroECI](https://www.douroeci.com/) [@nunohelibeires]
- [Safaricom](https://www.safaricom.co.ke/) [@mmutiso]
- [Scoot](https://scoot.co/) [@haaspt]
- [Wattbewerb](https://wattbewerb.de/) [@wattbewerb]

### Healthcare

- [Amino](https://amino.com) [@shkr]
- [Bluesquare](https://www.bluesquarehub.com/) [@madewulf]
- [Care](https://www.getcare.io/) [@alandao2021]
- [Living Goods](https://www.livinggoods.org) [@chelule]
- [Maieutical Labs](https://maieuticallabs.it) [@xrmx]
- [Medic](https://medic.org) [@1yuv]
- [REDCap Cloud](https://www.redcapcloud.com/)
- [TrustMedis](https://trustmedis.com/) [@famasya]
- [WeSure](https://www.wesure.cn/)
- [2070Health](https://2070health.com/)

### HR / Staffing

- [Swile](https://www.swile.co/) [@PaoloTerzi]
- [Symmetrics](https://www.symmetrics.fyi)
- [bluquist](https://bluquist.com/)

### Government

- [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke]
- [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ]
- [NRLM - Sarathi, India](https://pib.gov.in/PressReleasePage.aspx?PRID=1999586)

### Travel

- [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo]
- [HomeToGo](https://hometogo.com/) [@pedromartinsteenstrup]
- [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke]

### Others

- [10Web](https://10web.io/)
- [AI inside](https://inside.ai/en/)
- [Automattic](https://automattic.com/) [@Khrol, @Usiel]
- [Dropbox](https://www.dropbox.com/) [@bkyryliuk]
- [Flowbird](https://flowbird.com) [@EmmanuelCbd]
- [GEOTAB](https://www.geotab.com) [@JZ6]
- [Grassroot](https://www.grassrootinstitute.org/)
- [Increff](https://www.increff.com/) [@ishansinghania]
- [komoot](https://www.komoot.com/) [@christophlingg]
- [Let's Roam](https://www.letsroam.com/)
- [Machrent SA](https://www.machrent.com/)
- [Onebeat](https://1beat.com/) [@GuyAttia]
- [X](https://x.com/)
- [VLMedia](https://www.vlmedia.com.tr/) [@ibotheperfect]
- [Yahoo!](https://yahoo.com/)
@@ -1,703 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Apache Superset Users in the Wild
#
# To add your organization:
#   1. Find the appropriate category (or add a new one)
#   2. Add an entry with your organization details
#   3. Optionally add a logo file to docs/static/img/logos/
#
# Required fields:
#   - name: Your organization name
#   - url: Link to your organization's website
#
# Optional fields:
#   - logo: Filename of logo in docs/static/img/logos/ (e.g., "mycompany.svg")
#   - contributors: List of GitHub usernames who contributed (e.g., ["@username"])

categories:
  Sharing Economy:
    - name: Airbnb
      url: https://github.com/airbnb

    - name: Faasos
      url: https://faasos.com/
      contributors: ["@shashanksingh"]

    - name: Free2Move
      url: https://www.free2move.com/
      contributors: ["@PaoloTerzi"]

    - name: Hostnfly
      url: https://www.hostnfly.com/
      contributors: ["@alexisrosuel"]

    - name: Lime
      url: https://www.li.me/
      contributors: ["@cxmcc"]

    - name: Lyft
      url: https://www.lyft.com/

    - name: Ontruck
      url: https://www.ontruck.com/

  Financial Services:
    - name: Aadhar Housing Finance Limited
      url: https://www.aadharhousing.com
      contributors: ["@thakerhardiks"]

    - name: Aktia Bank plc
      url: https://www.aktia.com

    - name: American Express
      url: https://www.americanexpress.com
      contributors: ["@TheLastSultan"]

    - name: bumper
      url: https://www.bumper.co/
      contributors: ["@vasu-ram", "@JamiePercival"]

    - name: Cape Crypto
      url: https://capecrypto.com

    - name: Capital Service S.A.
      url: https://capitalservice.pl
      contributors: ["@pkonarzewski"]

    - name: Clark.de
      url: https://clark.de/

    - name: EnquiryLabs
      url: https://www.enquirylabs.co.uk

    - name: Europace
      url: https://europace.de

    - name: KarrotPay
      url: https://www.daangnpay.com/

    - name: Remita
      url: https://remita.net
      contributors: ["@mujibishola"]

    - name: Taveo
      url: https://www.taveo.com
      contributors: ["@codek"]

    - name: Unit
      url: https://www.unit.co/about-us
      contributors: ["@amitmiran137"]

    - name: Wise
      url: https://wise.com
      contributors: ["@koszti"]

    - name: Xendit
      url: https://xendit.co/
      contributors: ["@LieAlbertTriAdrian"]

    - name: Cover Genius
      url: https://covergenius.com/

  Gaming:
    - name: Popoko VM Games Studio
      url: https://popoko.live

  E-Commerce:
    - name: AiHello
      url: https://www.aihello.com
      contributors: ["@ganeshkrishnan1"]

    - name: Bazaar Technologies
      url: https://www.bazaartech.com
      contributors: ["@umair-abro"]

    - name: Blinkit
      url: https://www.blinkit.com/
      contributors: ["@amsharm2"]

    - name: Dragonpass
      url: https://www.dragonpass.com.cn/
      contributors: ["@zhxjdwh"]

    - name: Dropit Shopping
      url: https://www.dropit.shop/
      contributors: ["@dropit-dev"]

    - name: Fordeal
      url: https://www.fordeal.com
      contributors: ["@Renkai"]

    - name: Fynd
      url: https://www.fynd.com/
      contributors: ["@darpanjain07"]

    - name: GFG - Global Fashion Group
      url: https://global-fashion-group.com
      contributors: ["@ksaagariconic"]

    - name: GoTo/Gojek
      url: https://www.gojek.io/
      contributors: ["@gwthm-in"]

    - name: HuiShouBao
      url: https://www.huishoubao.com/
      contributors: ["@Yukinoshita-Yukino"]

    - name: Now
      url: https://www.now.vn/
      contributors: ["@davidkohcw"]

    - name: Qunar
      url: https://www.qunar.com/
      contributors: ["@flametest"]

    - name: Rakuten Viki
      url: https://www.viki.com

    - name: Shopee
      url: https://shopee.sg
      contributors: ["@xiaohanyu"]

    - name: Shopkick
      url: https://www.shopkick.com
      contributors: ["@LAlbertalli"]

    - name: ShopUp
      url: https://www.shopup.org/
      contributors: ["@gwthm-in"]

    - name: Tails.com
      url: https://tails.com/gb/
      contributors: ["@alanmcruickshank"]

    - name: THE ICONIC
      url: https://theiconic.com.au/
      contributors: ["@ksaagariconic"]

    - name: Utair
      url: https://www.utair.ru
      contributors: ["@utair-digital"]

    - name: VkusVill
      url: https://vkusvill.ru/
      contributors: ["@ETselikov"]

    - name: Zalando
      url: https://www.zalando.com
      contributors: ["@dmigo"]

    - name: Zalora
      url: https://www.zalora.com
      contributors: ["@ksaagariconic"]

    - name: Zepto
      url: https://www.zeptonow.com/
      contributors: ["@gwthm-in"]

  Enterprise Technology:
    - name: A3Data
      url: https://a3data.com.br
      contributors: ["@neylsoncrepalde"]

    - name: Analytics Aura
      url: https://analyticsaura.com/
      contributors: ["@Analytics-Aura"]

    - name: Apollo GraphQL
      url: https://www.apollographql.com/
      contributors: ["@evans"]

    - name: Astronomer
      url: https://www.astronomer.io
      contributors: ["@ryw"]

    - name: Avesta Technologies
      url: https://avestatechnologies.com/
      contributors: ["@TheRum"]

    - name: Caizin
      url: https://caizin.com/
      contributors: ["@tejaskatariya"]

    - name: Canonical
      url: https://canonical.com

    - name: Careem
      url: https://www.careem.com/
      contributors: ["@samraHanif0340"]

    - name: Cloudsmith
      url: https://cloudsmith.io
      contributors: ["@alancarson"]

    - name: Cyberhaven
      url: https://www.cyberhaven.com/
      contributors: ["@toliver-ch"]

    - name: Deepomatic
      url: https://deepomatic.com/
      contributors: ["@Zanoellia"]

    - name: Dial Once
      url: https://www.dial-once.com/

    - name: Dremio
      url: https://dremio.com
      contributors: ["@narendrans"]

    - name: EFinance
      url: https://www.efinance.com.eg
      contributors: ["@habeeb556"]

    - name: Elestio
      url: https://elest.io/
      contributors: ["@kaiwalyakoparkar"]

    - name: ELMO Cloud HR & Payroll
      url: https://elmosoftware.com.au/

    - name: Endress+Hauser
      url: https://www.endress.com/
      contributors: ["@rumbin"]

    - name: FBK - ICT center
      url: https://ict.fbk.eu

    - name: Formbricks
      url: https://formbricks.com

    - name: Gavagai
      url: https://gavagai.io
      contributors: ["@gavagai-corp"]

    - name: GfK Data Lab
      url: https://www.gfk.com/home
      contributors: ["@mherr"]

    - name: Hifadih Business & Technology
      url: https://hifadih.net/en
      logo: hifadih.png
      contributors: ["@saintLaurent00"]

    # Logo approved by @anmol-hpe on behalf of HPE
    - name: HPE
      url: https://www.hpe.com/in/en/home.html
      logo: hpe.png
      contributors: ["@anmol-hpe"]

    - name: Hydrolix
      url: https://www.hydrolix.io/

    - name: Intercom
      url: https://www.intercom.com/
      contributors: ["@kate-gallo"]

    - name: jampp
      url: https://jampp.com/

    - name: Konfío
      url: https://konfio.mx
      contributors: ["@uis-rodriguez"]

    - name: Mainstrat
      url: https://mainstrat.com/

    - name: mishmash io
      url: https://mishmash.io/
      contributors: ["@mishmash-io"]

    - name: Myra Labs
      url: https://www.myralabs.com/
      contributors: ["@viksit"]

    - name: Nielsen
      url: https://www.nielsen.com/
      contributors: ["@amitNielsen"]

    - name: Ona
      url: https://ona.io
      contributors: ["@pld"]

    - name: Orange
      url: https://www.orange.com
      contributors: ["@icsu"]

    - name: Oslandia
      url: https://oslandia.com

    - name: Oxylabs
      url: https://oxylabs.io/
      contributors: ["@rytis-ulys"]

    - name: Peak AI
      url: https://www.peak.ai/
      contributors: ["@azhar22k"]

    - name: PeopleDoc
      url: https://www.people-doc.com
      contributors: ["@rodo"]

    - name: PlaidCloud
      url: https://plaidcloud.com
      logo: plaidcloud.svg
      contributors: ["@rad-pat"]

    - name: Preset, Inc.
      url: https://preset.io
      logo: preset.svg
      contributors: ["@mistercrunch", "@betodealmeida", "@dpgaspar", "@rusackas", "@sadpandajoe", "@Vitor-Avila", "@kgabryje", "@geido", "@eschutho", "@Antonio-RiveroMartnez", "@yousoph"]

    - name: PubNub
      url: https://pubnub.com
      contributors: ["@jzucker2"]

    - name: ReadyTech
      url: https://www.readytech.io

    - name: Reward Gateway
      url: https://www.rewardgateway.com

    - name: RIADVICE
      url: https://riadvice.tn
      contributors: ["@riadvice"]

    - name: ScopeAI
      url: https://www.getscopeai.com
      contributors: ["@iloveluce"]

    - name: shipmnts
      url: https://shipmnts.com

    - name: Showmax
      url: https://showmax.com
      contributors: ["@bobek"]

    - name: SingleStore
      url: https://www.singlestore.com/

    - name: TechAudit
      url: https://www.techaudit.info
      contributors: ["@ETselikov"]

    - name: Tenable
      url: https://www.tenable.com
      contributors: ["@dflionis"]

    - name: Tentacle
      url: https://www.linkedin.com/company/tentacle-cmi/
      contributors: ["@jdclarke5"]

    - name: timbr.ai
      url: https://timbr.ai/
      contributors: ["@semantiDan"]

    - name: Tobii
      url: https://www.tobii.com/
      contributors: ["@dwa"]

    - name: Tooploox
      url: https://www.tooploox.com/
      contributors: ["@jakubczaplicki"]

    - name: Unvired
      url: https://unvired.com
      contributors: ["@srinisubramanian"]

    - name: UserGuiding
      url: https://userguiding.com/
      logo: userguiding.svg
      contributors: ["@tzercin"]

    - name: Virtuoso QA
      url: https://www.virtuosoqa.com

    - name: Whale
      url: https://whale.im

    - name: Windsor.ai
      url: https://www.windsor.ai/
      contributors: ["@octaviancorlade"]

    - name: WinWin Network马上赢
      url: https://brandct.cn/
      contributors: ["@wenbinye"]

    - name: XNET
      url: https://xnetmobile.com/
      logo: xnet.png
      contributors: ["@deuspt"]

    - name: Zeta
      url: https://www.zeta.tech/
      contributors: ["@shaikidris"]

  Media & Entertainment:
    - name: 6play
      url: https://www.6play.fr
      contributors: ["@CoryChaplin"]

    - name: bilibili
      url: https://www.bilibili.com
      contributors: ["@Moinheart"]

    - name: BurdaForward
      url: https://www.burda-forward.de/en/

    - name: Douban
      url: https://www.douban.com/
      contributors: ["@luchuan"]

    - name: Kuaishou
      url: https://www.kuaishou.com/
      contributors: ["@zhaoyu89730105"]

    - name: Netflix
      url: https://www.netflix.com/

    - name: Prensa Iberica
      url: https://www.prensaiberica.es/
      contributors: ["@zamar-roura"]

    - name: TME QQMUSIC/WESING
      url: https://www.tencentmusic.com/
      contributors: ["@shenyuanli", "@marklaw"]

    - name: Xite
      url: https://xite.com/
      contributors: ["@shashankkoppar"]

    - name: Zaihang
      url: https://www.zaih.com/

  Education:
    - name: Aveti Learning
      url: https://avetilearning.com/
      contributors: ["@TheShubhendra"]

    - name: Brilliant.org
      url: https://brilliant.org/

    - name: Cirrus Assessment
      url: https://cirrusassessment.com/
      logo: cirrus.svg
      contributors: ["@jeroenhabets", "@ddmm-white", "@paulrocost"]

    - name: Open edX
      url: https://openedx.org/

    - name: Platzi.com
      url: https://platzi.com/

    - name: Sunbird
      url: https://www.sunbird.org/
      contributors: ["@eksteporg"]

    - name: The GRAPH Network
      url: https://thegraphnetwork.org/
      contributors: ["@fccoelho"]

    - name: Udemy
      url: https://www.udemy.com/
      contributors: ["@sungjuly"]

    - name: VIPKID
      url: https://www.vipkid.com.cn/
      contributors: ["@illpanda"]

    - name: WikiMedia Foundation
      url: https://wikimediafoundation.org
      contributors: ["@vg"]

  Energy:
    - name: Airboxlab
      url: https://foobot.io
      contributors: ["@antoine-galataud"]

    - name: DouroECI
      url: https://www.douroeci.com/
      contributors: ["@nunohelibeires"]

    - name: Safaricom
      url: https://www.safaricom.co.ke/
      contributors: ["@mmutiso"]

    - name: Scoot
      url: https://scoot.co/
      contributors: ["@haaspt"]

    - name: Wattbewerb
      url: https://wattbewerb.de/
      contributors: ["@wattbewerb"]

    - name: Rogow
      url: https://rogow.com.br/
      contributors: ["@nilmonto"]

  Healthcare:
    - name: Amino
      url: https://amino.com
      contributors: ["@shkr"]

    - name: Bluesquare
      url: https://www.bluesquarehub.com/
      contributors: ["@madewulf"]

    - name: Care
      url: https://www.getcare.io/
      contributors: ["@alandao2021"]

    - name: Living Goods
      url: https://www.livinggoods.org
      contributors: ["@chelule"]

    - name: Maieutical Labs
      url: https://maieuticallabs.it
      contributors: ["@xrmx"]

    - name: Medic
      url: https://medic.org
      contributors: ["@1yuv"]

    - name: REDCap Cloud
      url: https://www.redcapcloud.com/

    - name: TrustMedis
      url: https://trustmedis.com/
      contributors: ["@famasya"]

    - name: WeSure
      url: https://www.wesure.cn/

    - name: 2070Health
      url: https://2070health.com/

  HR / Staffing:
    - name: Swile
      url: https://www.swile.co/
      contributors: ["@PaoloTerzi"]

    - name: Symmetrics
      url: https://www.symmetrics.fyi

    - name: bluquist
      url: https://bluquist.com/

  Government:
    - name: City of Ann Arbor, MI
      url: https://www.a2gov.org/
      contributors: ["@sfirke"]

    - name: RIS3 Strategy of CZ, MIT CR
      url: https://www.ris3.cz/
      contributors: ["@RIS3CZ"]

    - name: NRLM - Sarathi, India
      url: https://pib.gov.in/PressReleasePage.aspx?PRID=1999586

  Mobile Software:
    - name: VLMedia
      url: https://www.vlmedia.com.tr
      logo: vlmedia.svg
      contributors: ["@iercan"]

  Travel:
    - name: Agoda
      url: https://www.agoda.com/
      contributors: ["@lostseaway", "@maiake", "@obombayo"]

    - name: HomeToGo
      url: https://hometogo.com/
      contributors: ["@pedromartinsteenstrup"]

    - name: Skyscanner
      url: https://www.skyscanner.net/
      contributors: ["@cleslie", "@stanhoucke"]

  Logistics:
    - name: Stockarea
      url: https://stockarea.io

    - name: VTG
      url: https://www.vtg.de

  Sports:
    - name: Club 25 de Agosto (Femenino / Women's Team)
      url: https://www.instagram.com/25deagosto.basketfemenino/
      contributors: ["@lion90"]
      logo: club25deagosto.svg

    - name: Fanatics
      url: https://www.fanatics.com/
      contributors: ["@coderfender"]

    - name: komoot
      url: https://www.komoot.com/
      contributors: ["@christophlingg"]

  Others:
    - name: 10Web
      url: https://10web.io/

    - name: AI inside
      url: https://inside.ai/en/

    - name: Automattic
      url: https://automattic.com/
      contributors: ["@Khrol", "@Usiel"]

    - name: Dropbox
      url: https://www.dropbox.com/
      contributors: ["@bkyryliuk"]

    - name: Flowbird
      url: https://flowbird.com
      contributors: ["@EmmanuelCbd"]

    - name: GEOTAB
      url: https://www.geotab.com
      contributors: ["@JZ6"]

    - name: Grassroot
      url: https://www.grassrootinstitute.org/

    - name: HOLLYLAND猛玛
      url: https://www.hollyland.com
      logo: hollyland猛玛.svg
      contributors: ["@hlyda0601"]

    - name: Increff
      url: https://www.increff.com/
      contributors: ["@ishansinghania"]

    - name: Let's Roam
      url: https://www.letsroam.com/

    - name: Machrent SA
      url: https://www.machrent.com/

    - name: Onebeat
      url: https://1beat.com/
      contributors: ["@GuyAttia"]

    - name: X
      url: https://x.com/

    - name: Yahoo!
      url: https://yahoo.com/
@@ -17,193 +17,192 @@ specific language governing permissions and limitations
under the License.
-->

| |Admin|Alpha|Gamma|Public|SQL_LAB|
|--------------------------------------------------|---|---|---|---|---|
| Permission/role description |Admins have all possible rights, including granting or revoking rights from other users and altering other people's slices and dashboards.|Alpha users have access to all data sources, but they cannot grant or revoke access from other users. They are also limited to altering the objects that they own. Alpha users can add and alter data sources.|Gamma users have limited access. They can only consume data coming from data sources they have been given access to through another complementary role. They only have access to view the slices and dashboards made from data sources that they have access to. Currently Gamma users are not able to alter or add data sources. We assume that they are mostly content consumers, though they can create slices and dashboards.|Public is the most restrictive built-in role, designed for anonymous/unauthenticated users viewing public dashboards. It provides minimal read-only access for dashboard viewing with interactive filters. Use `PUBLIC_ROLE_LIKE = "Public"` to apply these permissions to anonymous users.|The sql_lab role grants access to SQL Lab. Note that while Admin users have access to all databases by default, both Alpha and Gamma users need to be given access on a per database basis.|
| can read on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can write on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can read on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can write on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on Annotation |:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|O|
| can write on Annotation |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can read on AnnotationLayerRestApi |:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|O|
| can read on Dataset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can write on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can read on Log |:heavy_check_mark:|O|O|O|O|
| can write on Log |:heavy_check_mark:|O|O|O|O|
| can read on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on Database |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can write on Database |:heavy_check_mark:|O|O|O|O|
| can read on Query |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can this form get on ResetPasswordView |:heavy_check_mark:|O|O|O|O|
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|O|
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|O|
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| can add on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| can list on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| can userinfo on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| resetmypassword on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| resetpasswords on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| userinfoedit on UserDBModelView |:heavy_check_mark:|O|O|O|O|
| can show on RoleModelView |:heavy_check_mark:|O|O|O|O|
| can edit on RoleModelView |:heavy_check_mark:|O|O|O|O|
| can delete on RoleModelView |:heavy_check_mark:|O|O|O|O|
| can add on RoleModelView |:heavy_check_mark:|O|O|O|O|
| can list on RoleModelView |:heavy_check_mark:|O|O|O|O|
| copyrole on RoleModelView |:heavy_check_mark:|O|O|O|O|
| can get on OpenApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on SwaggerView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can csv upload on Database |:heavy_check_mark:|O|O|O|O|
| can excel upload on Database |:heavy_check_mark:|O|O|O|O|
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can query on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can time range on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can external metadata on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can save on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can publish on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can csv on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can slice on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sync druid source on Superset |:heavy_check_mark:|O|O|O|O|
| can explore on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can approve on Superset |:heavy_check_mark:|O|O|O|O|
| can explore json on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can fetch datasource metadata on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can csrf token on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can sqllab on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can select star on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can warm up cache on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can sqllab table viz on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can available domains on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can request access on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can post on TableSchemaView |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| can expanded on TableSchemaView |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| can delete on TableSchemaView |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| can get on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can post on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can delete query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can migrate query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can activate on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can delete on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can put on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can read on SecurityRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| menu access on Security |:heavy_check_mark:|O|O|O|O|
| menu access on List Users |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on List Roles |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Action Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Manage |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| menu access on Annotation Layers |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on CSS Templates |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| menu access on Import Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on SQL Lab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| menu access on SQL Editor |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| menu access on Saved Queries |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| menu access on Query Search |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| all query access on all_query_access |:heavy_check_mark:|O|O|O|O|
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can download on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
| can add on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
| can delete on DynamicPlugin |:heavy_check_mark:|O|O|O|O|
| can external metadata by name on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can get value on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can store on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can tagged objects on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can suggestions on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can get on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can post on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can delete on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can edit on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can list on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can add on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can delete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| muldelete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can edit on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can list on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can add on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can delete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| muldelete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can edit on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can list on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can add on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can delete on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can list on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can list on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can show on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Row Level Security |:heavy_check_mark:|O|O|O|O|
| menu access on Access requests |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Home |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Plugins |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Dashboard Email Schedules |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Chart Emails |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Alerts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Alerts & Report |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| menu access on Scan New Datasources |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can share dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can delete embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can set embedded on Dashboard |:heavy_check_mark:|O|O|O|O|
| can export on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can get embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can export on Database |:heavy_check_mark:|O|O|O|O|
| can export on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can write on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can write on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can export on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can import on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can export on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|:heavy_check_mark:|
| can dashboard permalink on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can grant guest token on SecurityRestApi |:heavy_check_mark:|O|O|O|O|
| can read on AdvancedDataType |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on EmbeddedDashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can duplicate on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can read on Explore |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can samples on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can read on AvailableDomains |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
| can get or create dataset on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can get column values on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|O|
| can export csv on SQLLab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| can get results on SQLLab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| can execute sql query on SQLLab |:heavy_check_mark:|O|O|O|:heavy_check_mark:|
| can recent activity on Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|O|
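
The Public column in the table above is enabled through the `PUBLIC_ROLE_LIKE` setting mentioned in its role-description row. A minimal sketch of the corresponding `superset_config.py` line, mirroring the table's own wording:

    # superset_config.py
    # Apply the built-in Public role's permissions to anonymous users,
    # per the role description in the table above.
    PUBLIC_ROLE_LIKE = "Public"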

| |Admin|Alpha|Gamma|SQL_LAB|
|--------------------------------------------------|---|---|---|---|
| Permission/role description |Admins have all possible rights, including granting or revoking rights from other users and altering other people’s slices and dashboards.|Alpha users have access to all data sources, but they cannot grant or revoke access from other users. They are also limited to altering the objects that they own. Alpha users can add and alter data sources.|Gamma users have limited access. They can only consume data coming from data sources they have been given access to through another complementary role. They only have access to view the slices and dashboards made from data sources that they have access to. Currently Gamma users are not able to alter or add data sources. We assume that they are mostly content consumers, though they can create slices and dashboards.|The sql_lab role grants access to SQL Lab. Note that while Admin users have access to all databases by default, both Alpha and Gamma users need to be given access on a per database basis.|
| can read on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can write on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can read on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on CssTemplate |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on ReportSchedule |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on Annotation |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on Annotation |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on Dataset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
| can read on Log |:heavy_check_mark:|O|O|O|
| can write on Log |:heavy_check_mark:|O|O|O|
| can read on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can write on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can read on Database |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can write on Database |:heavy_check_mark:|O|O|O|
| can read on Query |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can this form get on ResetPasswordView |:heavy_check_mark:|O|O|O|
| can this form post on ResetPasswordView |:heavy_check_mark:|O|O|O|
| can this form get on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form post on ResetMyPasswordView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can this form get on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can this form post on UserInfoEditView |:heavy_check_mark:|O|O|O|
| can show on UserDBModelView |:heavy_check_mark:|O|O|O|
| can edit on UserDBModelView |:heavy_check_mark:|O|O|O|
| can delete on UserDBModelView |:heavy_check_mark:|O|O|O|
| can add on UserDBModelView |:heavy_check_mark:|O|O|O|
| can list on UserDBModelView |:heavy_check_mark:|O|O|O|
| can userinfo on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| resetmypassword on UserDBModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| resetpasswords on UserDBModelView |:heavy_check_mark:|O|O|O|
| userinfoedit on UserDBModelView |:heavy_check_mark:|O|O|O|
| can show on RoleModelView |:heavy_check_mark:|O|O|O|
| can edit on RoleModelView |:heavy_check_mark:|O|O|O|
| can delete on RoleModelView |:heavy_check_mark:|O|O|O|
| can add on RoleModelView |:heavy_check_mark:|O|O|O|
| can list on RoleModelView |:heavy_check_mark:|O|O|O|
| copyrole on RoleModelView |:heavy_check_mark:|O|O|O|
| can get on OpenApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can show on SwaggerView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can get on MenuApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can list on AsyncEventsRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can invalidate on CacheRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can csv upload on Database |:heavy_check_mark:|O|O|O|
| can excel upload on Database |:heavy_check_mark:|O|O|O|
| can query form data on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can query on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can time range on Api |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can external metadata on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can save on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|
| can get on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can my queries on SqlLab |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can log on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can import dashboards on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can schemas on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sqllab history on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can publish on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can csv on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can slice on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sync druid source on Superset |:heavy_check_mark:|O|O|O|
| can explore on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can approve on Superset |:heavy_check_mark:|O|O|O|
| can explore json on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can fetch datasource metadata on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can csrf token on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sqllab on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can select star on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can warm up cache on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can sqllab table viz on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can available domains on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can request access on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
| can post on TableSchemaView |:heavy_check_mark:|O|O|:heavy_check_mark:|
| can expanded on TableSchemaView |:heavy_check_mark:|O|O|:heavy_check_mark:|
| can delete on TableSchemaView |:heavy_check_mark:|O|O|:heavy_check_mark:|
| can get on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can post on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can delete query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can migrate query on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can activate on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can delete on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can put on TabStateView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| can read on SecurityRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
| menu access on Security |:heavy_check_mark:|O|O|O|
|
||||
| menu access on List Users |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on List Roles |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Action Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Manage |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Annotation Layers |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on CSS Templates |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| menu access on Import Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Data |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Databases |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Datasets |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Charts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Dashboards |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on SQL Lab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| menu access on SQL Editor |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| menu access on Saved Queries |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| menu access on Query Search |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| all datasource access on all_datasource_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| all database access on all_database_access |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| all query access on all_query_access |:heavy_check_mark:|O|O|O|
|
||||
| can write on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can edit on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can list on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on DynamicPlugin |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can download on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can add on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can delete on DynamicPlugin |:heavy_check_mark:|O|O|O|
|
||||
| can external metadata by name on Datasource |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get value on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can store on KV |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can tagged objects on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can suggestions on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can post on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on TagView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can edit on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can add on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| muldelete on DashboardEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can edit on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can add on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| muldelete on SliceEmailScheduleView |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can edit on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can add on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete on AlertModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on AlertLogModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can list on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can show on AlertObservationModelView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Row Level Security |:heavy_check_mark:|O|O|O|
|
||||
| menu access on Access requests |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Home |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Plugins |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Dashboard Email Schedules |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Chart Emails |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Alerts |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Alerts & Report |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| menu access on Scan New Datasources |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can share dashboard on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can share chart on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can this form get on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can this form post on ColumnarToDatabaseView |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on Chart |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on DashboardFilterStateRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on DashboardPermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can delete embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can set embedded on Dashboard |:heavy_check_mark:|O|O|O|
|
||||
| can export on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get embedded on Dashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on Database |:heavy_check_mark:|O|O|O|
|
||||
| can export on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can write on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on ExploreFormDataRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can write on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on ExplorePermalinkRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can import on ImportExportRestApi |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can export on SavedQuery |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|
||||
| can dashboard permalink on Superset |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can grant guest token on SecurityRestApi |:heavy_check_mark:|O|O|O|
|
||||
| can read on AdvancedDataType |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can read on EmbeddedDashboard |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can duplicate on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on Explore |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can samples on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can read on AvailableDomains |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
| can get or create dataset on Dataset |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can get column values on Datasource |:heavy_check_mark:|:heavy_check_mark:|O|O|
|
||||
| can export csv on SQLLab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can get results on SQLLab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can execute sql query on SQLLab |:heavy_check_mark:|O|O|:heavy_check_mark:|
|
||||
| can recent activity on Log |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|
||||
|
||||
301 UPDATING.md
@@ -23,295 +23,8 @@ This file documents any backwards-incompatible changes in Superset and
assists people when migrating to a new version.

## Next

### Granular Export Controls

A new feature flag `GRANULAR_EXPORT_CONTROLS` introduces three fine-grained permissions that replace the legacy `can_csv` permission:

| Permission | Controls |
|---|---|
| `can_export_data` | CSV, Excel, JSON exports |
| `can_export_image` | Screenshot/PDF exports |
| `can_copy_clipboard` | Copy-to-clipboard operations |

When the feature flag is enabled, these permissions are enforced on both the frontend (disabled buttons with tooltips) and backend (403 responses from API endpoints). When disabled, legacy `can_csv` behavior is preserved.

**Migration behavior:** All three new permissions are granted to every role that currently has `can_csv`, preserving existing access. Admins can then selectively revoke individual export permissions from specific roles as needed.
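Opting into the new behavior is a one-line feature-flag change. A minimal sketch (only the flag name comes from this entry; the `FEATURE_FLAGS` dict is the standard `superset_config.py` convention):

```python
# superset_config.py
FEATURE_FLAGS = {
    "GRANULAR_EXPORT_CONTROLS": True,  # enable the three fine-grained export permissions
}
```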
### Deck.gl MapBox viewport and opacity controls are functional

The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitude**, and **Zoom** controls were previously non-functional — changing them had no effect on the rendered map. These controls are now wired up correctly.

**Behavior change for existing charts:** Previously, the viewport controls had hard-coded default values (`-122.405293`, `37.772123`, zoom `11` — San Francisco) that were stored in each chart's `form_data` but never applied. The map always used `fitBounds` to center on the data. With this fix, those stored values are now respected, which means existing MapBox charts may open centered on the old default coordinates instead of fitting to data bounds.

**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.
### Combined datasource list endpoint

Added a new combined datasource list endpoint at `GET /api/v1/datasource/` to serve datasets and semantic views in one response.

- The endpoint is available to users with at least one of `can_read` on `Dataset` or `SemanticView`.
- Semantic views are included only when the `SEMANTIC_LAYERS` feature flag is enabled.
- The endpoint enforces strict `order_column` validation and returns `400` for invalid sort columns.
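As an illustration, a request against the new endpoint might look like the following. This is a sketch: the Rison-encoded `q` parameter follows the convention of Superset's other list endpoints, and the exact supported keys are an assumption.

```bash
# List datasets and (if SEMANTIC_LAYERS is enabled) semantic views in one call,
# sorted by most recently changed; an invalid order_column returns a 400.
curl -s "https://superset.example.com/api/v1/datasource/?q=(order_column:changed_on,order_direction:desc)" \
  -H "Authorization: Bearer $ACCESS_TOKEN"
```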
### ClickHouse minimum driver version bump

The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
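To upgrade the driver accordingly:

```bash
pip install --upgrade "clickhouse-connect>=0.13.0"
```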
### MCP Tool Observability

MCP (Model Context Protocol) tools now include enhanced observability instrumentation for monitoring and debugging:

**Two-layer instrumentation:**
1. **Middleware layer** (`LoggingMiddleware`): Automatically logs all MCP tool calls with `duration_ms` and `success` status in the audit log (Action Log UI, logs table)
2. **Sub-operation tracking**: All 19 MCP tools include granular `event_logger.log_context()` blocks for tracking individual operations like validation, database writes, and query execution

**Action naming convention:**
- Tool-level logs: `mcp_tool_call` (via middleware)
- Sub-operation logs: `mcp.{tool_name}.{operation}` (e.g., `mcp.generate_chart.validation`, `mcp.execute_sql.query_execution`)

**Querying MCP logs:**
```sql
-- Top slowest MCP operations
SELECT action, COUNT(*) as calls, AVG(duration_ms) as avg_ms
FROM logs
WHERE action LIKE 'mcp.%'
GROUP BY action
ORDER BY avg_ms DESC
LIMIT 20;

-- MCP tool success rate
SELECT
  json_extract(curated_payload, '$.tool') as tool,
  COUNT(*) as total_calls,
  SUM(CASE WHEN json_extract(curated_payload, '$.success') = 'true' THEN 1 ELSE 0 END) as successful,
  ROUND(100.0 * SUM(CASE WHEN json_extract(curated_payload, '$.success') = 'true' THEN 1 ELSE 0 END) / COUNT(*), 2) as success_rate
FROM logs
WHERE action = 'mcp_tool_call'
GROUP BY tool
ORDER BY total_calls DESC;
```

**Security note:** Sensitive parameters (passwords, API keys, tokens) are automatically redacted in logs as `[REDACTED]`.
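As a rough sketch of what such a sub-operation block looks like (the `event_logger` import path matches Superset's extensions module; the tool name, action names, and helper functions here are illustrative, not taken from the actual MCP tool code):

```python
from superset.extensions import event_logger

def generate_chart(params: dict) -> None:
    # Each granular block shows up in the logs table under its own action name,
    # following the mcp.{tool_name}.{operation} convention described above.
    with event_logger.log_context(action="mcp.generate_chart.validation"):
        validate(params)  # hypothetical helper

    with event_logger.log_context(action="mcp.generate_chart.database_write"):
        persist_chart(params)  # hypothetical helper
```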
### Distributed Coordination Backend

A new `DISTRIBUTED_COORDINATION_CONFIG` configuration provides a unified Redis-based backend for real-time coordination features in Superset. This backend enables:

- **Pub/sub messaging** for real-time event notifications between workers
- **Atomic distributed locking** using Redis SET NX EX (more performant than database-backed locks)
- **Event-based coordination** for background task management

The distributed coordination is used by the Global Task Framework (GTF) for abort notifications and task completion signaling, and will eventually replace `GLOBAL_ASYNC_QUERIES_CACHE_BACKEND` as the standard signaling backend. Configuring this is recommended for Redis-enabled production deployments.

Example configuration in `superset_config.py`:
```python
DISTRIBUTED_COORDINATION_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_KEY_PREFIX": "signal_",
    "CACHE_REDIS_URL": "redis://localhost:6379/1",
    "CACHE_DEFAULT_TIMEOUT": 300,
}
```

See `superset/config.py` for complete configuration options.
### WebSocket config for GAQ with Docker

[35896](https://github.com/apache/superset/pull/35896) and [37624](https://github.com/apache/superset/pull/37624) updated the documentation on how to run and configure Superset with Docker. Specifically for the WebSocket configuration, a new `docker/superset-websocket/config.example.json` was added to the repo so that users can copy it to create a `docker/superset-websocket/config.json` file. The existing `docker/superset-websocket/config.json` was removed and git-ignored, so if you're using GAQ / WebSocket make sure to:

- Stash/backup your existing `config.json` file, to re-apply it after (it will be git-ignored going forward)
- Update the `volumes` configuration for the `superset-websocket` service in your `docker-compose.override.yml` file, to include the `docker/superset-websocket/config.json` file. For example:

```yaml
services:
  superset-websocket:
    volumes:
      - ./superset-websocket:/home/superset-websocket
      - /home/superset-websocket/node_modules
      - /home/superset-websocket/dist
      - ./docker/superset-websocket/config.json:/home/superset-websocket/config.json:ro
```
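A quick way to recreate the config file from the new committed template (file paths as given above):

```bash
# Create your local WebSocket config from the example, then re-apply your settings
cp docker/superset-websocket/config.example.json docker/superset-websocket/config.json
```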
### Example Data Loading Improvements

#### New Directory Structure

Examples are now organized by name with data and configs co-located:
```
superset/examples/
├── _shared/               # Shared database & metadata configs
├── birth_names/           # Each example is self-contained
│   ├── data.parquet       # Dataset (Parquet format)
│   ├── dataset.yaml       # Dataset metadata
│   ├── dashboard.yaml     # Dashboard config (optional)
│   └── charts/            # Chart configs (optional)
└── ...
```
#### Simplified Parquet-based Loading

- Auto-discovery: create `superset/examples/my_dataset/data.parquet` to add a new example (see the sketch below)
- Parquet is an Apache columnar format: compressed (~27% smaller) with a self-describing schema
- YAML configs define datasets, charts, and dashboards declaratively
- Removed Python-based data generation from individual example files
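A minimal sketch of the auto-discovery convention described above. The directory path comes from this section; the example data itself is invented for illustration:

```python
import pandas as pd

# Hypothetical dataset; any DataFrame written to the expected path works.
df = pd.DataFrame({"name": ["Alice", "Bob"], "age": [34, 29]})

# Writing data.parquet under superset/examples/<example_name>/ is enough for
# the loader to discover the new example (requires a Parquet engine such as pyarrow).
df.to_parquet("superset/examples/my_dataset/data.parquet", index=False)
```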
#### Test Data Reorganization

- Moved `big_data.py` to `superset/cli/test_loaders.py`, which better reflects its purpose as a test utility
- Fixed inverted logic for the `--load-test-data` flag (it now correctly includes `.test.yaml` files when the flag is set)
- Clarified CLI flags (usage example below):
  - `--force` / `-f`: Force reload even if tables exist
  - `--only-metadata` / `-m`: Create table metadata without loading data
  - `--load-test-data` / `-t`: Include test dashboards and `.test.yaml` configs
  - `--load-big-data` / `-b`: Generate synthetic stress-test data
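A hedged example of combining these flags, assuming they attach to the `load_examples` CLI command (the command name is not stated explicitly above):

```bash
# Reload all examples from scratch, including test dashboards and .test.yaml configs
superset load_examples --force --load-test-data

# Register dataset metadata only, without loading any rows
superset load_examples --only-metadata
```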
#### Bug Fixes

- Fixed numpy array serialization for PostgreSQL (converts complex types to JSON strings)
- Fixed KeyError for `allow_csv_upload` field in database configs (now optional with default)
- Fixed test data loading logic that was incorrectly filtering files
### MCP Service

The MCP (Model Context Protocol) service enables AI assistants and automation tools to interact programmatically with Superset.

#### New Features
- MCP service infrastructure with FastMCP framework
- Tools for dashboards, charts, datasets, SQL Lab, and instance metadata
- Optional dependency: install with `pip install apache-superset[fastmcp]`
- Runs as a separate process from the Superset web server
- JWT-based authentication for production deployments
#### New Configuration Options

**Development** (single-user, local testing):
```python
# superset_config.py
MCP_DEV_USERNAME = "admin"  # User for MCP authentication
MCP_SERVICE_HOST = "localhost"
MCP_SERVICE_PORT = 5008
```

**Production** (JWT-based, multi-user):
```python
# superset_config.py
MCP_AUTH_ENABLED = True
MCP_JWT_ISSUER = "https://your-auth-provider.com"
MCP_JWT_AUDIENCE = "superset-mcp"
MCP_JWT_ALGORITHM = "RS256"  # or "HS256" for shared secrets

# Option 1: Use JWKS endpoint (recommended for RS256)
MCP_JWKS_URI = "https://auth.example.com/.well-known/jwks.json"

# Option 2: Use static public key (RS256)
MCP_JWT_PUBLIC_KEY = "-----BEGIN PUBLIC KEY-----..."

# Option 3: Use shared secret (HS256)
MCP_JWT_ALGORITHM = "HS256"
MCP_JWT_SECRET = "your-shared-secret-key"

# Optional overrides
MCP_SERVICE_HOST = "0.0.0.0"
MCP_SERVICE_PORT = 5008
MCP_SESSION_CONFIG = {
    "SESSION_COOKIE_SECURE": True,
    "SESSION_COOKIE_HTTPONLY": True,
    "SESSION_COOKIE_SAMESITE": "Strict",
}
```
#### Running the MCP Service

```bash
# Development
superset mcp run --port 5008 --debug

# Production
superset mcp run --port 5008

# With factory config
superset mcp run --port 5008 --use-factory-config
```
#### Deployment Considerations

The MCP service runs as a **separate process** from the Superset web server.

**Important**:
- Requires the same Python environment and configuration as Superset
- Shares database connections with the main Superset app
- Can be scaled independently from the web server
- Requires the `fastmcp` package (optional dependency)

**Installation**:
```bash
# Install with MCP support
pip install apache-superset[fastmcp]

# Or add to requirements.txt
apache-superset[fastmcp]>=X.Y.Z
```

**Process Management**:
Use systemd, supervisord, or Kubernetes to manage the MCP service process.
See `superset/mcp_service/PRODUCTION.md` for deployment guides.

**Security**:
- Development: Uses `MCP_DEV_USERNAME` for single-user access
- Production: **MUST** configure JWT authentication
- See `superset/mcp_service/SECURITY.md` for details
#### Documentation

- Architecture: `superset/mcp_service/ARCHITECTURE.md`
- Security: `superset/mcp_service/SECURITY.md`
- Production: `superset/mcp_service/PRODUCTION.md`
- Developer Guide: `superset/mcp_service/CLAUDE.md`
- Quick Start: `superset/mcp_service/README.md`

---
- [35621](https://github.com/apache/superset/pull/35621): The default hash algorithm has changed from MD5 to SHA-256 for improved security and FedRAMP compliance. This affects cache keys for thumbnails, dashboard digests, chart digests, and filter option names. Existing cached data will be invalidated upon upgrade. To opt out of this change and maintain backward compatibility, set `HASH_ALGORITHM = "md5"` in your `superset_config.py`.
- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.
### Breaking Changes

- [37370](https://github.com/apache/superset/pull/37370): The `APP_NAME` configuration variable no longer controls the browser window/tab title or other frontend branding. Application names should now be configured using the theme system with the `brandAppName` token. The `APP_NAME` config is still used for backend contexts (MCP service, logs, etc.) and serves as a fallback if `brandAppName` is not set.
  - **Migration:**
    ```python
    # Before (Superset 5.x)
    APP_NAME = "My Custom App"

    # After (Superset 6.x) - Option 1: Use theme system (recommended)
    THEME_DEFAULT = {
        "token": {
            "brandAppName": "My Custom App",  # Window titles
            "brandLogoAlt": "My Custom App",  # Logo alt text
            "brandLogoUrl": "/static/assets/images/custom_logo.png"
        }
    }

    # After (Superset 6.x) - Option 2: Temporary fallback
    # Keep APP_NAME for now (will be used as fallback for brandAppName)
    APP_NAME = "My Custom App"
    # But you should migrate to THEME_DEFAULT.token.brandAppName
    ```
  - **Note:** For dark mode, set the same tokens in the `THEME_DARK` configuration.

- [36317](https://github.com/apache/superset/pull/36317): The `CUSTOM_FONT_URLS` configuration option has been removed. Use the new per-theme `fontUrls` token in `THEME_DEFAULT` or database-managed themes instead.
  - **Before:**
    ```python
    CUSTOM_FONT_URLS = [
        "https://fonts.example.com/myfont.css",
    ]
    ```
  - **After:**
    ```python
    THEME_DEFAULT = {
        "token": {
            "fontUrls": [
                "https://fonts.example.com/myfont.css",
            ],
            # ... other tokens
        }
    }
    ```
## 6.0.0

- [33055](https://github.com/apache/superset/pull/33055): Upgrades Flask-AppBuilder to 5.0.0. The AUTH_OID authentication type has been deprecated and is no longer available as an option in Flask-AppBuilder. OpenID (OID) is considered a deprecated authentication protocol - if you are using AUTH_OID, you will need to migrate to an alternative authentication method such as OAuth, LDAP, or database authentication before upgrading.
- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.
- [34871](https://github.com/apache/superset/pull/34871): Fixed Jest test hanging issue from the Ant Design v5 upgrade. MessageChannel is now mocked in the test environment to prevent rc-overflow from causing Jest to hang. Test environment only - no production impact.
- [34782](https://github.com/apache/superset/pull/34782): Dataset exports now include the dataset ID in their file name (similar to charts and dashboards). If managing assets as code, make sure to rename existing dataset YAMLs to include the ID (and avoid duplicated files).
- [34536](https://github.com/apache/superset/pull/34536): The `ENVIRONMENT_TAG_CONFIG` color values have changed to support only Ant Design semantic colors. Update your `superset_config.py`:

@@ -328,14 +41,14 @@ Note: Pillow is now a required dependency (previously optional) to support image

- [33116](https://github.com/apache/superset/pull/33116) In ECharts Series charts (e.g. Line, Area, Bar, etc.), the `x_axis_sort_series` and `x_axis_sort_series_ascending` form data items have been renamed to `x_axis_sort` and `x_axis_sort_asc`.
  A migration has been added that can potentially affect a significant number of existing charts.
- [32317](https://github.com/apache/superset/pull/32317) The horizontal filter bar feature is now out of testing/beta development and its feature flag `HORIZONTAL_FILTER_BAR` has been removed.
- [31590](https://github.com/apache/superset/pull/31590) Marks the beginning of intricate work around supporting dynamic Theming, and breaks support for [THEME_OVERRIDES](https://github.com/apache/superset/blob/732de4ac7fae88e29b7f123b6cbb2d7cd411b0e4/superset/config.py#L671) in favor of a new theming system based on AntD V5. Likely this will be in disrepair until settling over the 5.x lifecycle.
- [32432](https://github.com/apache/superset/pull/32432) Moves the List Roles FAB view to the frontend and requires `FAB_ADD_SECURITY_API` to be enabled in the configuration and `superset init` to be executed.
- [34319](https://github.com/apache/superset/pull/34319) Drill to Detail and Drill By are now supported in Embedded mode, and also with the `DASHBOARD_RBAC` FF. If you don't want to expose these features in Embedded / `DASHBOARD_RBAC`, make sure the roles used for Embedded / `DASHBOARD_RBAC` don't have the required permissions to perform D2D actions.
## 5.0.0

- [31976](https://github.com/apache/superset/pull/31976) Removed the `DISABLE_LEGACY_DATASOURCE_EDITOR` feature flag. The previous value of the feature flag was `True` and now the feature is permanently removed.
- [32000](https://github.com/apache/superset/pull/32000) Removes the CSV_UPLOAD_MAX_SIZE config; use your web server to control file upload size.
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: `/api/v1/database/<id>/<file type>_upload` and `/api/v1/database/<file type>_metadata`, in favour of new ones (details in the PR). Also simplifies permissions.
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0)

@@ -343,7 +56,7 @@ Note: Pillow is now a required dependency (previously optional) to support image

- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration items to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.
- [31582](https://github.com/apache/superset/pull/31582) Removed the legacy Area, Bar, Event Flow, Heatmap, Histogram, Line, Sankey, and Sankey Loop charts. They were all automatically migrated to their ECharts counterparts, with the exception of the Event Flow and Sankey Loop charts, which were removed as they were not actively maintained and not widely used. If you were using the Event Flow or Sankey Loop charts, you will need to find an alternative solution.
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat/docker-build-time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only Python linter. This only affects development workflows (positively, not the release itself). It covers the most important rules and is much faster, but some linting rules that were enforced before may not be enforced in exactly the same way.
@@ -356,7 +69,7 @@ Note: Pillow is now a required dependency (previously optional) to support image

- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
- [30284](https://github.com/apache/superset/pull/30284) Deprecated `GLOBAL_ASYNC_QUERIES_REDIS_CONFIG` in favor of the new `GLOBAL_ASYNC_QUERIES_CACHE_BACKEND` configuration. To leverage Redis Sentinel, set `CACHE_TYPE` to `RedisSentinelCache`, or use `RedisCache` for standalone Redis.
- [31961](https://github.com/apache/superset/pull/31961) Upgraded React from version 16.13.1 to 17.0.2. If you are using custom frontend extensions or plugins, you may need to update them to be compatible with React 17.
- [31260](https://github.com/apache/superset/pull/31260) Docker images now use `uv pip install` instead of `pip install` to manage the Python environment. Most docker-based deployments will be affected, whether you derive from one of the published images or have custom bootstrap scripts that install Python libraries (drivers).
### Potential Downtime

@@ -433,7 +146,7 @@ Note: Pillow is now a required dependency (previously optional) to support image

- [26462](https://github.com/apache/superset/issues/26462): Removes the Profile feature given that it's not actively maintained and not widely used.
- [26377](https://github.com/apache/superset/pull/26377): Removes the deprecated Redirect API that supported short URLs used before the permalink feature.
- [26329](https://github.com/apache/superset/issues/26329): Removes the deprecated `DASHBOARD_NATIVE_FILTERS` feature flag. The previous value of the feature flag was `True` and now the feature is permanently enabled.
- [25510](https://github.com/apache/superset/pull/25510): Reinforces that any newly defined Python data format (other than epoch) must adhere to the ISO 8601 standard (enforced by way of validation at the API and database level) after a previous relaxation to include slashes in addition to dashes. From now on when specifying new columns, dataset owners will need to use a SQL expression instead to convert their string columns of the form %Y/%m/%d etc. to a `DATE`, `DATETIME`, etc. type.
- [26372](https://github.com/apache/superset/issues/26372): Removes the deprecated `GENERIC_CHART_AXES` feature flag. The previous value of the feature flag was `True` and now the feature is permanently enabled.

### Potential Downtime
359 chart-request-flow.md (Normal file)
@@ -0,0 +1,359 @@
# Chart Data Request Flow in Apache Superset

This document traces the complete path of a chart data request through the Superset backend, from API endpoint to database query and back.

## Overview

When a client requests chart data (e.g., loading a histogram chart), the request flows through multiple layers:

1. API Endpoint
2. Schema Validation/Parsing
3. Command Pattern (Business Logic)
4. Query Context Processing
5. Database Execution
6. Post-Processing
7. Response Formatting

## Detailed Flow
### 1. Entry Point: API Endpoint

**File**: `superset/charts/data/api.py:187`

**Endpoint**: `POST /api/v1/chart/data`

The request hits the `ChartDataRestApi.data()` method, which:
- Parses the JSON body from the request
- Creates a `QueryContext` object from the form data via `ChartDataQueryContextSchema`
- Creates a `ChartDataCommand` to execute the query
- Validates and executes the command

```python
def data(self) -> Response:
    json_body = request.json
    query_context = self._create_query_context_from_form(json_body)
    command = ChartDataCommand(query_context)
    command.validate()
    return self._get_data_response(command, ...)
```
### 2. Schema Layer: Request Parsing

**File**: `superset/charts/schemas.py:1384`

`ChartDataQueryContextSchema.load()` deserializes the request into:

**QueryContext object** (the main container):
- datasource: Database table/query info
- queries: List of query objects
- result_format: JSON/CSV/XLSX
- result_type: FULL/SAMPLES/QUERY/etc.
- force: Whether to bypass cache

**List of QueryObject instances** (one per query in the request):
- columns: Columns to select (e.g., ["age"])
- metrics: Aggregations to compute
- filters: WHERE clause filters
- post_processing: Client-side transformations (e.g., histogram with bins=25)
### 3. Command Pattern: Business Logic

**File**: `superset/commands/chart/data/get_data_command.py:39`

`ChartDataCommand.run()` orchestrates the execution:

```python
def run(self, **kwargs: Any) -> dict[str, Any]:
    payload = self._query_context.get_payload(
        cache_query_context=cache_query_context,
        force_cached=force_cached,
    )

    for query in payload["queries"]:
        if query.get("error"):
            raise ChartDataQueryFailedError(query["error"])

    return {
        "query_context": self._query_context,
        "queries": payload["queries"],
    }
```
### 4. Query Context Processor: Core Execution

**File**: `superset/common/query_context_processor.py:1052`

`QueryContextProcessor.get_payload()`:
- Iterates through each `QueryObject` in `query_context.queries`
- For each query, calls `get_query_results()` which routes based on result_type:
  - `FULL` → `_get_full()` → `get_df_payload()`
  - `SAMPLES` → `_get_samples()`
  - `QUERY` → `_get_query()`

**File**: `superset/common/query_context_processor.py:128`

`QueryContextProcessor.get_df_payload()`:

1. **Generate cache key** from the query object
2. **Check cache** using `QueryCacheManager`
3. **If cache miss**:
   - Validate columns exist in the datasource
   - Call `get_query_result(query_obj)` to execute SQL
   - Get annotation data if needed
   - Cache the result with the appropriate timeout
4. **Return payload** with DataFrame and metadata

```python
def get_df_payload(self, query_obj, force_cached=False):
    cache_key = self.query_cache_key(query_obj)
    timeout = self.get_cache_timeout()
    cache = QueryCacheManager.get(key=cache_key, ...)

    if not cache.is_loaded:
        query_result = self.get_query_result(query_obj)
        annotation_data = self.get_annotation_data(query_obj)
        cache.set_query_result(...)

    return {
        "cache_key": cache_key,
        "df": cache.df,
        "query": cache.query,
        "is_cached": cache.is_cached,
        ...
    }
```
### 5. Database Query Execution

**File**: `superset/common/query_context_processor.py:267`

`QueryContextProcessor.get_query_result()`:

```python
def get_query_result(self, query_object: QueryObject) -> QueryResult:
    # Execute SQL query on the datasource
    result = query_context.datasource.query(query_object.to_dict())
    df = result.df

    # Normalize timestamps to pandas datetime format
    if not df.empty:
        df = self.normalize_df(df, query_object)

    # Handle time offset comparisons if specified
    if query_object.time_offsets:
        time_offsets = self.processing_time_offsets(df, query_object)
        df = time_offsets["df"]

    # Apply post-processing operations
    df = query_object.exec_post_processing(df)

    result.df = df
    return result
```

The `datasource.query()` call goes to your database connector (e.g., `SqlaTable.query()`) which:
- Converts the QueryObject dict to SQL using SQLAlchemy
- Executes the query via the database engine
- Returns a `QueryResult` with a pandas DataFrame
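To make the "QueryObject dict to SQL" step concrete, here is a simplified standalone sketch of the kind of statement construction involved, using plain SQLAlchemy Core. This is not Superset's actual `SqlaTable.query()` implementation, and the table and column names are invented:

```python
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()

# Hypothetical physical table backing the dataset
users = Table(
    "users",
    metadata,
    Column("age", Integer),
    Column("state", String),
)

# A QueryObject with columns=["age"], a count metric, and row_limit=10000
# roughly translates to:
stmt = (
    select(users.c.age, func.count().label("count"))
    .group_by(users.c.age)
    .limit(10000)
)
print(stmt)  # SELECT users.age, count(*) AS count FROM users GROUP BY users.age LIMIT :param_1
```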
### 6. Post-Processing

**File**: `superset/common/query_object.py:484`

`QueryObject.exec_post_processing()`:
- Applies operations from the `post_processing` list in sequence
- Each operation is a pandas transformation (e.g., pivot, aggregate, histogram)
- Uses functions from `superset.utils.pandas_postprocessing`

Example for histogram:
```python
def exec_post_processing(self, df: DataFrame) -> DataFrame:
    for post_process in self.post_processing:
        operation = post_process.get("operation")  # "histogram"
        options = post_process.get("options", {})  # {column: "age", bins: 25}
        df = getattr(pandas_postprocessing, operation)(df, **options)
    return df
```
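For intuition, this is a minimal standalone sketch of what a histogram post-processing step does to a DataFrame, written in plain pandas/numpy rather than Superset's actual `pandas_postprocessing.histogram` implementation (the column name and bin count echo the example above):

```python
import numpy as np
import pandas as pd

def histogram_sketch(df: pd.DataFrame, column: str, bins: int) -> pd.DataFrame:
    """Bucket a numeric column into equal-width bins and count rows per bin."""
    counts, edges = np.histogram(df[column].dropna(), bins=bins)
    labels = [f"{edges[i]:.0f} - {edges[i + 1]:.0f}" for i in range(bins)]
    return pd.DataFrame({"bin": labels, "count": counts})

df = pd.DataFrame({"age": [21, 34, 45, 52, 38, 29, 61, 47]})
print(histogram_sketch(df, column="age", bins=25))
```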
### 7. Response Formatting

**File**: `superset/charts/data/api.py:346`

`ChartDataRestApi._send_chart_response()`:
- Takes the result dict from the command
- Formats based on `result_format`:
  - **JSON**: Converts DataFrame to a list of dicts
  - **CSV**: Converts to a CSV string
  - **XLSX**: Converts to Excel binary
- Returns a Flask Response with appropriate headers

```python
def _send_chart_response(self, result, form_data=None, datasource=None):
    result_format = result["query_context"].result_format

    if result_format == ChartDataResultFormat.JSON:
        queries = result["queries"]
        response_data = json.dumps(
            {"result": queries},
            default=json.json_int_dttm_ser,
            ignore_nan=True,
        )
        resp = make_response(response_data, 200)
        resp.headers["Content-Type"] = "application/json; charset=utf-8"
        return resp
```
## Key Objects and Data Structures

### QueryContext

**File**: `superset/common/query_context.py:41`

The main container for a chart data request.

```python
{
    datasource: BaseDatasource,            # Dataset (e.g., id=19, type="table")
    queries: list[QueryObject],            # List of queries to execute
    result_type: ChartDataResultType,      # "full", "samples", "query", etc.
    result_format: ChartDataResultFormat,  # "json", "csv", "xlsx"
    force: bool,                           # Bypass cache flag
    form_data: dict,                       # Original form_data from client
    custom_cache_timeout: int | None       # Override cache timeout
}
```

### QueryObject

**File**: `superset/common/query_object.py:79`

Represents a single database query.

```python
{
    columns: list[Column],          # Columns to select ["age"]
    metrics: list[Metric] | None,   # Aggregations to compute
    filters: list[FilterClause],    # WHERE clause filters
    extras: dict[str, Any],         # Additional query options
    post_processing: list[dict],    # Client-side transformations
    row_limit: int | None,          # LIMIT clause
    row_offset: int,                # OFFSET clause
    order_desc: bool,               # Sort direction
    time_range: str | None,         # Time filter range
    granularity: str | None,        # Temporal grouping column
    annotation_layers: list[dict],  # Annotations to overlay
    from_dttm: datetime | None,     # Computed time range start
    to_dttm: datetime | None        # Computed time range end
}
```

### QueryResult

**File**: `superset/models/helpers.py`

Returned from `datasource.query()`.

```python
{
    df: pd.DataFrame,    # The data from the database
    query: str,          # Executed SQL query
    from_dttm: datetime, # Time range start
    to_dttm: datetime,   # Time range end
    error: str | None,   # Error message if failed
    status: QueryStatus  # success, failed, etc.
}
```
## Example Request Flow

For a histogram chart request like:

```bash
curl 'https://example.com/api/v1/chart/data' \
  -H 'content-type: application/json' \
  --data-raw '{
    "datasource":{"id":19,"type":"table"},
    "queries":[{
      "columns":["age"],
      "filters":[{
        "col":"time_start",
        "op":"TEMPORAL_RANGE",
        "val":"No filter"
      }],
      "row_limit":10000,
      "post_processing":[{
        "operation":"histogram",
        "options":{"column":"age","bins":25}
      }]
    }],
    "result_format":"json",
    "result_type":"full"
  }'
```
### Flow Summary

```
Client Request (curl)
  ↓
ChartDataRestApi.data()
  ↓ (parses JSON)
ChartDataQueryContextSchema.load()
  ↓ (creates objects)
QueryContext + [QueryObject]
  ↓
ChartDataCommand.run()
  ↓
QueryContextProcessor.get_payload()
  ↓ (for each QueryObject)
get_query_results() → _get_full()
  ↓
get_df_payload()
  ├→ Check Cache (QueryCacheManager)
  └→ get_query_result()
       ├→ datasource.query() → Build SQL → Execute → pandas DataFrame
       ├→ normalize_df() → Timestamp normalization
       └→ exec_post_processing() → Apply histogram operation
  ↓
Return payload {df, query, metadata}
  ↓
_send_chart_response()
  ↓ (format as JSON)
Flask Response → Client
```
## Architecture Patterns

The codebase follows a clean separation of concerns:

1. **API Layer** (`superset/charts/data/api.py`): Handles HTTP requests/responses
2. **Schema Layer** (`superset/charts/schemas.py`): Validates and deserializes input
3. **Command Layer** (`superset/commands/`): Orchestrates business logic
4. **Query Context/Processor** (`superset/common/`): Manages execution and caching
5. **Query Object**: Represents individual database queries
6. **Datasource Layer** (`superset/connectors/`): Database abstraction and SQL generation

### Key Benefits

- **Caching**: Results cached at multiple levels (query result, query context)
- **Security**: Access control enforced via `raise_for_access()`
- **Flexibility**: Supports multiple result types and formats
- **Post-processing**: Client-side transformations without re-querying the database
- **Time Comparison**: Built-in support for time offset queries
- **Annotations**: Overlay additional data layers on charts
## Caching Strategy

**File**: `superset/common/utils/query_cache_manager.py`

Cache keys are generated from:
- Query object (columns, metrics, filters, etc.)
- Datasource UID
- RLS (Row Level Security) rules
- User context (if per-user caching is enabled)
- Time range (using relative time strings, not absolute timestamps)

This ensures that:
- The same query returns cached results
- Different users see appropriate cached data
- Time-relative queries (e.g., "Last 7 days") cache correctly
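A simplified sketch of how a cache key of this kind can be derived: hash a canonical serialization of the inputs listed above. This is illustrative only, not Superset's exact `query_cache_key` implementation:

```python
import hashlib
import json

def cache_key_sketch(
    query_obj: dict,
    datasource_uid: str,
    rls_rules: list[str],
    user_id: int | None = None,
) -> str:
    """Derive a deterministic key from everything that affects the result."""
    payload = {
        "query": query_obj,          # columns, metrics, filters, time_range, ...
        "datasource": datasource_uid,
        "rls": sorted(rls_rules),    # order-independent
        "user": user_id,             # only set if per-user caching is enabled
    }
    # sort_keys makes logically-equal payloads hash identically
    serialized = json.dumps(payload, sort_keys=True, default=str)
    return hashlib.sha256(serialized.encode()).hexdigest()

# "Last 7 days" stays relative, so the key is stable across the cache window
key = cache_key_sketch(
    {"columns": ["age"], "time_range": "Last 7 days"},
    datasource_uid="19__table",
    rls_rules=[],
)
```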
@@ -45,7 +45,7 @@ services:
|
||||
required: true
|
||||
- path: docker/.env-local # optional override
|
||||
required: false
|
||||
image: postgres:17
|
||||
image: postgres:16
|
||||
container_name: superset_db
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
|
||||
@@ -77,6 +77,7 @@ x-common-build: &common-build
|
||||
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
|
||||
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
|
||||
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
|
||||
LOAD_EXAMPLES_DUCKDB: ${LOAD_EXAMPLES_DUCKDB:-true}
|
||||
|
||||
services:
|
||||
db-light:
|
||||
@@ -85,7 +86,7 @@ services:
|
||||
required: true
|
||||
- path: docker/.env-local # optional override
|
||||
required: false
|
||||
image: postgres:17
|
||||
image: postgres:16
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- db_home_light:/var/lib/postgresql/data
|
||||
@@ -115,10 +116,7 @@ services:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
POSTGRES_DB: superset_light
|
||||
EXAMPLES_HOST: db-light
|
||||
EXAMPLES_DB: superset_light
|
||||
EXAMPLES_USER: superset
|
||||
EXAMPLES_PASSWORD: superset
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
|
||||
GITHUB_HEAD_REF: ${GITHUB_HEAD_REF:-}
|
||||
GITHUB_SHA: ${GITHUB_SHA:-}
|
||||
@@ -141,10 +139,7 @@ services:
|
||||
DATABASE_HOST: db-light
|
||||
DATABASE_DB: superset_light
|
||||
POSTGRES_DB: superset_light
|
||||
EXAMPLES_HOST: db-light
|
||||
EXAMPLES_DB: superset_light
|
||||
EXAMPLES_USER: superset
|
||||
EXAMPLES_PASSWORD: superset
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
SUPERSET_CONFIG_PATH: /app/docker/pythonpath_dev/superset_config_docker_light.py
|
||||
healthcheck:
|
||||
disable: true
|
||||
@@ -165,11 +160,10 @@ services:
|
||||
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
|
||||
NPM_RUN_PRUNE: false
|
||||
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
|
||||
DISABLE_TS_CHECKER: "${DISABLE_TS_CHECKER:-true}"
|
||||
# configuring the dev-server to use the host.docker.internal to connect to the backend
|
||||
superset: "http://superset-light:8088"
|
||||
# Webpack dev server must bind to 0.0.0.0 to be accessible from outside the container
|
||||
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-0.0.0.0}"
|
||||
# Webpack dev server configuration
|
||||
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-127.0.0.1}"
|
||||
WEBPACK_DEVSERVER_PORT: "${WEBPACK_DEVSERVER_PORT:-9000}"
|
||||
ports:
|
||||
- "${NODE_PORT:-9001}:9000" # Parameterized port, accessible on all interfaces
|
||||
@@ -202,6 +196,7 @@ services:
|
||||
DATABASE_DB: test
|
||||
POSTGRES_DB: test
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@db-light:5432/test
|
||||
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
|
||||
SUPERSET_CONFIG: superset_test_config_light
|
||||
PYTHONPATH: /app/pythonpath:/app/docker/pythonpath_dev:/app
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ services:
|
||||
required: true
|
||||
- path: docker/.env-local # optional override
|
||||
required: false
|
||||
image: postgres:17
|
||||
image: postgres:16
|
||||
container_name: superset_db
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
|
||||
@@ -44,6 +44,7 @@ x-common-build: &common-build
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
LOAD_EXAMPLES_DUCKDB: ${LOAD_EXAMPLES_DUCKDB:-true}

services:
nginx:
@@ -53,9 +54,10 @@ services:
- path: docker/.env-local # optional override
required: false
image: nginx:latest
container_name: superset_nginx
restart: unless-stopped
ports:
- "${NGINX_PORT:-80}:80"
- "80:80"
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
@@ -64,9 +66,10 @@ services:

redis:
image: redis:7
container_name: superset_cache
restart: unless-stopped
ports:
- "127.0.0.1:${REDIS_PORT:-6379}:6379"
- "127.0.0.1:6379:6379"
volumes:
- redis:/data

@@ -76,10 +79,11 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
image: postgres:17
image: postgres:16
container_name: superset_db
restart: unless-stopped
ports:
- "127.0.0.1:${DATABASE_PORT:-5432}:5432"
- "127.0.0.1:5432:5432"
volumes:
- db_home:/var/lib/postgresql/data
- ./docker/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
@@ -92,12 +96,13 @@ services:
required: false
build:
<<: *common-build
container_name: superset_app
command: ["/app/docker/docker-bootstrap.sh", "app"]
restart: unless-stopped
ports:
- ${SUPERSET_PORT:-8088}:8088
- 8088:8088
# When in cypress-mode ->
- ${CYPRESS_PORT:-8081}:8081
- 8081:8081
extra_hosts:
- "host.docker.internal:host-gateway"
user: *superset-user
@@ -105,11 +110,14 @@ services:
superset-init:
condition: service_completed_successfully
volumes: *superset-volumes
environment:
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"

superset-websocket:
container_name: superset_websocket
build: ./superset-websocket
ports:
- ${WEBSOCKET_PORT:-8080}:8080
- 8080:8080
extra_hosts:
- "host.docker.internal:host-gateway"
depends_on:
@@ -129,9 +137,9 @@ services:
- /home/superset-websocket/node_modules
- /home/superset-websocket/dist

# Mount config file. Create your own docker/superset-websocket/config.json
# for custom settings, then point to it here. Do not use this example in production.
- ./docker/superset-websocket/config.example.json:/home/superset-websocket/config.json:ro
# Mounting a config file that contains a dummy secret required to boot up.
# do not use this docker compose in production
- ./docker/superset-websocket/config.json:/home/superset-websocket/config.json
environment:
- PORT=8080
- REDIS_HOST=redis
@@ -141,6 +149,7 @@ services:
superset-init:
build:
<<: *common-build
container_name: superset_init
command: ["/app/docker/docker-init.sh"]
env_file:
- path: docker/.env # default
@@ -154,6 +163,8 @@ services:
condition: service_started
user: *superset-user
volumes: *superset-volumes
environment:
SUPERSET__SQLALCHEMY_EXAMPLES_URI: "duckdb:////app/data/examples.duckdb"
healthcheck:
disable: true

@@ -175,10 +186,9 @@ services:
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
# configuring the dev-server to use the host.docker.internal to connect to the backend
superset: "http://superset:8088"
# Webpack dev server must bind to 0.0.0.0 to be accessible from outside the container
WEBPACK_DEVSERVER_HOST: "0.0.0.0"
ports:
- "127.0.0.1:${NODE_PORT:-9000}:9000" # exposing the dynamic webpack dev server
- "127.0.0.1:9000:9000" # exposing the dynamic webpack dev server
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]
env_file:
- path: docker/.env # default
@@ -190,6 +200,7 @@ services:
superset-worker:
build:
<<: *common-build
container_name: superset_worker
command: ["/app/docker/docker-bootstrap.sh", "worker"]
env_file:
- path: docker/.env # default
@@ -215,6 +226,7 @@ services:
superset-worker-beat:
build:
<<: *common-build
container_name: superset_worker_beat
command: ["/app/docker/docker-bootstrap.sh", "beat"]
env_file:
- path: docker/.env # default
@@ -232,6 +244,7 @@ services:
superset-tests-worker:
build:
<<: *common-build
container_name: superset_tests_worker
command: ["/app/docker/docker-bootstrap.sh", "worker"]
env_file:
- path: docker/.env # default
@@ -21,15 +21,6 @@ PYTHONUNBUFFERED=1
COMPOSE_PROJECT_NAME=superset
DEV_MODE=true

# Port configuration (override in .env-local for multiple instances)
# NGINX_PORT=80
# SUPERSET_PORT=8088
# NODE_PORT=9000
# WEBSOCKET_PORT=8080
# CYPRESS_PORT=8081
# DATABASE_PORT=5432
# REDIS_PORT=6379

# database configurations (do not modify)
DATABASE_DB=superset
DATABASE_HOST=db
@@ -1,39 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# -----------------------------------------------------------------------
# Example .env-local file for running multiple Superset instances
# Copy this file to .env-local and customize for your setup
# -----------------------------------------------------------------------

# Unique project name prevents container/volume conflicts between clones
# Each clone should have a different name (e.g., superset-pr123, superset-feature-x)
COMPOSE_PROJECT_NAME=superset-dev2

# Port offsets for running multiple instances simultaneously
# Instance 1 (default): 80, 8088, 9000, 8080, 8081, 5432, 6379
# Instance 2 example: 81, 8089, 9001, 8082, 8083, 5433, 6380
NGINX_PORT=81
SUPERSET_PORT=8089
NODE_PORT=9001
WEBSOCKET_PORT=8082
CYPRESS_PORT=8083
DATABASE_PORT=5433
REDIS_PORT=6380

# For verbose logging during development:
# SUPERSET_LOG_LEVEL=debug
@@ -34,24 +34,8 @@ intended for use with local development.

### Local overrides

#### Environment Variables

To override environment variables locally, create a `./docker/.env-local` file (git-ignored). This file will be loaded after `.env` and can override any settings.

#### Python Configuration

In order to override configuration settings locally, simply make a copy of [`./docker/pythonpath_dev/superset_config_local.example`](./pythonpath_dev/superset_config_local.example)
into `./docker/pythonpath_dev/superset_config_docker.py` (git-ignored) and fill in your overrides.
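
For illustration, a minimal override file might look like the sketch below. The specific settings and values are examples only, not defaults shipped with Superset:

```python
# docker/pythonpath_dev/superset_config_docker.py
# Example local overrides (illustrative values, not project defaults).
SECRET_KEY = "change-me-to-a-long-random-string"

# Bump the SQL Lab query timeout (seconds).
SQLLAB_TIMEOUT = 120

# Toggle feature flags locally.
FEATURE_FLAGS = {
    "ALERT_REPORTS": True,
}
```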
#### WebSocket Configuration

To customize the WebSocket server configuration, create `./docker/superset-websocket/config.json` (git-ignored) based on [`./docker/superset-websocket/config.example.json`](./superset-websocket/config.example.json).

Then update the `superset-websocket`.`volumes` config to mount it.

#### Docker Compose Overrides

For advanced Docker Compose customization, create a `docker-compose-override.yml` file (git-ignored) to override or extend services without modifying the main compose file.
into `./docker/pythonpath_dev/superset_config_docker.py` (git ignored) and fill in your overrides.

### Local packages

@@ -77,34 +61,6 @@ To run the container, simply run: `docker compose up`
After waiting several minutes for Superset initialization to finish, you can open a browser and view [`http://localhost:8088`](http://localhost:8088)
to start your journey.

### Running Multiple Instances

If you need to run multiple Superset instances simultaneously (e.g., different branches or clones), use the make targets which automatically find available ports:

```bash
make up
```

This automatically:
- Generates a unique project name from your directory
- Finds available ports (incrementing from defaults if in use)
- Displays the assigned URLs before starting

Available commands (run from repo root):

| Command | Description |
|---------|-------------|
| `make up` | Start services (foreground) |
| `make up-detached` | Start services (background) |
| `make down` | Stop all services |
| `make ps` | Show running containers |
| `make logs` | Follow container logs |
| `make nuke` | Stop, remove volumes & local images |

From a subdirectory, use: `make -C $(git rev-parse --show-toplevel) up`

**Important**: Always use these commands instead of plain `docker compose down`, which won't know the correct project name.

## Developing

While running, the container server will reload on modification of the Superset Python and JavaScript source code.
@@ -80,16 +80,12 @@ case "${1}" in
;;
app)
echo "Starting web app (using development server)..."
flask run -p $PORT --reload --debugger --host=0.0.0.0 --exclude-patterns "*/node_modules/*:*/.venv/*:*/build/*:*/__pycache__/*:*/superset-frontend/*"
flask run -p $PORT --reload --debugger --without-threads --host=0.0.0.0
;;
app-gunicorn)
echo "Starting web app..."
/usr/bin/run-server.sh
;;
mcp)
echo "Starting MCP service..."
superset mcp run --host 0.0.0.0 --port ${MCP_PORT:-5008} --debug
;;
*)
echo "Unknown Operation!!!"
;;

@@ -28,11 +28,11 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
cd /app/superset-frontend

if [ "$NPM_RUN_PRUNE" = "true" ]; then
echo "Running \"npm run prune\""
echo "Running `npm run prune`"
npm run prune
fi

echo "Running \"npm install\""
echo "Running `npm install`"
npm install

echo "Start webpack dev server"
@@ -105,13 +105,7 @@ class CeleryConfig:

CELERY_CONFIG = CeleryConfig

FEATURE_FLAGS = {
    "ALERT_REPORTS": True,
    "DATASET_FOLDERS": True,
    "ENABLE_EXTENSIONS": True,
    "SEMANTIC_LAYERS": True,
}
EXTENSIONS_PATH = "/app/docker/extensions"
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/"  # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/  # noqa: E501
# The base URL for the email report hyperlinks.
@@ -1,115 +0,0 @@
# Developer Portal Documentation Instructions

## Core Principle: Stories Are the Single Source of Truth

When working on the Storybook-to-MDX documentation system:

**ALWAYS fix the story first. NEVER add workarounds to the generator.**

## Why This Matters

The generator (`scripts/generate-superset-components.mjs`) should be lightweight - it extracts data from stories and passes it through. When you add special cases to the generator:
- It becomes harder to maintain
- Stories diverge from their docs representation
- Future stories need to know about generator quirks

When you fix stories to match the expected patterns:
- Stories work identically in Storybook and Docs
- The generator stays simple and predictable
- Patterns are consistent and learnable

## Story Patterns for Docs Generation

### Required Structure
```tsx
// Use inline export default (NOT const meta = ...; export default meta)
export default {
  title: 'Components/MyComponent',
  component: MyComponent,
};

// Name interactive stories with Interactive prefix
export const InteractiveMyComponent: Story = {
  args: {
    // Default prop values
  },
  argTypes: {
    // Control definitions - MUST be at story level, not meta level
    propName: {
      control: { type: 'select' },
      options: ['a', 'b', 'c'],
      description: 'What this prop does',
    },
  },
};
```

### For Components with Variants (size × style grids)
```tsx
const sizes = ['small', 'medium', 'large'];
const variants = ['primary', 'secondary', 'danger'];

InteractiveButton.parameters = {
  docs: {
    gallery: {
      component: 'Button',
      sizes,
      styles: variants,
      sizeProp: 'size',
      styleProp: 'variant',
    },
  },
};
```

### For Components Requiring Children
```tsx
InteractiveIconTooltip.parameters = {
  docs: {
    // Component descriptors with dot notation for nested components
    sampleChildren: [{ component: 'Icons.InfoCircleOutlined', props: { iconSize: 'l' } }],
  },
};
```

### For Custom Live Code Examples
```tsx
InteractiveMyComponent.parameters = {
  docs: {
    liveExample: `function Demo() {
  return <MyComponent prop="value">Content</MyComponent>;
}`,
  },
};
```

### For Complex Props (objects, arrays)
```tsx
InteractiveMenu.parameters = {
  docs: {
    staticProps: {
      items: [
        { key: '1', label: 'Item 1' },
        { key: '2', label: 'Item 2' },
      ],
    },
  },
};
```

## Common Issues and How to Fix Them (in the Story)

| Issue | Wrong Approach | Right Approach |
|-------|---------------|----------------|
| Component not generated | Add pattern to generator | Change story to use inline `export default` |
| Control shows as text instead of select | Add special case in generator | Add `argTypes` with `control: { type: 'select' }` |
| Missing children/content | Modify StorybookWrapper | Add `parameters.docs.sampleChildren` |
| Gallery not showing | Add to generator output | Add `parameters.docs.gallery` config |
| Wrong live example | Hardcode in generator | Add `parameters.docs.liveExample` |

## Files

- **Generator**: `docs/scripts/generate-superset-components.mjs`
- **Wrapper**: `docs/src/components/StorybookWrapper.jsx`
- **Output**: `docs/developer_docs/components/`
- **Stories**: `superset-frontend/packages/superset-ui-core/src/components/*/`
docs/.gitignore (21 changes, vendored)
@@ -23,24 +23,3 @@ docs/.zshrc

# Gets copied from the root of the project at build time (yarn start / yarn build)
docs/intro.md

# Generated badge images (downloaded at build time by remark-localize-badges plugin)
static/badges/

# Generated database documentation MDX files (regenerated at build time)
# Source of truth is in superset/db_engine_specs/*.py metadata attributes
docs/databases/

# Generated API documentation (regenerated at build time from openapi.json)
# Source of truth is static/resources/openapi.json
developer_docs/api/

# Generated component documentation MDX files (regenerated at build time)
# Source of truth is Storybook stories in superset-frontend/packages/superset-ui-core/src/components/
developer_docs/components/

# Note: src/data/databases.json is COMMITTED (not ignored) to preserve feature diagnostics
# that require Flask context to generate. Update it locally with: npm run gen-db-docs

# Generated component metadata JSON (regenerated by generate-superset-components.mjs)
static/data/components.json
@@ -1 +1 @@
v22.22.0
v20.18.3

@@ -416,7 +416,7 @@ If versions don't appear in dropdown:

- [Docusaurus Documentation](https://docusaurus.io/docs)
- [MDX Documentation](https://mdxjs.com/)
- [Superset Developer Docs](https://superset.apache.org/developer-docs/)
- [Superset Contributing Guide](../CONTRIBUTING.md)
- [Main Superset Documentation](https://superset.apache.org/docs/intro)

## 📖 Real Examples and Patterns

@@ -18,17 +18,16 @@ under the License.
-->

This is the public documentation site for Superset, built using
[Docusaurus 3](https://docusaurus.io/). See the
[Developer Docs](https://superset.apache.org/developer-docs/contributing/development-setup#documentation)
for documentation on contributing to documentation.
[Docusaurus 3](https://docusaurus.io/). See
[CONTRIBUTING.md](../CONTRIBUTING.md#documentation) for documentation on
contributing to documentation.

## Version Management

The Superset documentation site uses Docusaurus versioning with four independent sections:
The Superset documentation site uses Docusaurus versioning with three independent versioned sections:

- **User Documentation** (`/user-docs/`) - End-user guides and tutorials
- **Admin Documentation** (`/admin-docs/`) - Installation, configuration, and security
- **Developer Docs** (`/developer-docs/`) - Developer guides, contributing, and extensions
- **Main Documentation** (`/docs/`) - Core Superset documentation
- **Developer Portal** (`/developer_portal/`) - Developer guides and tutorials
- **Component Playground** (`/components/`) - Interactive component examples (currently disabled)

Each section maintains its own version history and can be versioned independently.
@@ -1,162 +0,0 @@
{/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/}

---
title: AWS IAM Authentication
sidebar_label: AWS IAM Authentication
sidebar_position: 15
---

# AWS IAM Authentication for AWS Databases

Superset supports IAM-based authentication for **Amazon Aurora** (PostgreSQL and MySQL) and **Amazon Redshift**. IAM auth eliminates the need for database passwords — Superset generates a short-lived auth token using temporary AWS credentials instead.

Cross-account IAM role assumption via STS `AssumeRole` is supported, allowing a Superset deployment in one AWS account to connect to databases in a different account.

## Prerequisites

- Enable the `AWS_DATABASE_IAM_AUTH` feature flag in `superset_config.py`. IAM authentication is gated behind this flag; if it is disabled, connections using `aws_iam` fail with *"AWS IAM database authentication is not enabled."*

  ```python
  FEATURE_FLAGS = {
      "AWS_DATABASE_IAM_AUTH": True,
  }
  ```

- `boto3` must be installed in your Superset environment:

  ```bash
  pip install boto3
  ```

- The Superset server's IAM role (or static credentials) must have permission to call `sts:AssumeRole` (for cross-account) or the same-account permissions for the target service:
  - **Aurora (RDS)**: `rds-db:connect`
  - **Redshift provisioned**: `redshift:GetClusterCredentials`
  - **Redshift Serverless**: `redshift-serverless:GetCredentials` and `redshift-serverless:GetWorkgroup`
- SSL must be enabled on the Aurora / Redshift endpoint (required for IAM token auth).
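
Under the hood, IAM auth for Aurora/RDS amounts to minting a short-lived token instead of a password. As a rough sketch (not Superset's exact code), this is what token generation looks like with boto3; the endpoint, port, and username below are placeholders:

```python
import boto3

# Generate a short-lived IAM auth token for an Aurora/RDS endpoint.
# The client's credentials must carry the rds-db:connect permission.
rds = boto3.client("rds", region_name="us-east-1")
token = rds.generate_db_auth_token(
    DBHostname="mycluster.cluster-xxxx.us-east-1.rds.amazonaws.com",  # placeholder
    Port=5432,
    DBUsername="superset_iam_user",
)
# The token is then used in place of the database password; SSL must be enabled.
```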
## Configuration

IAM authentication is configured via the **encrypted_extra** field of the database connection. Access this field in the **Advanced** → **Security** section of the database connection form, under **Secure Extra**.

### Aurora PostgreSQL or Aurora MySQL

```json
{
  "aws_iam": {
    "enabled": true,
    "role_arn": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess",
    "external_id": "superset-prod-12345",
    "region": "us-east-1",
    "db_username": "superset_iam_user",
    "session_duration": 3600
  }
}
```

| Field | Required | Description |
|-------|----------|-------------|
| `enabled` | Yes | Set to `true` to activate IAM auth |
| `role_arn` | No | ARN of the cross-account IAM role to assume via STS. Omit for same-account auth |
| `external_id` | No | External ID for the STS `AssumeRole` call, if required by the target role's trust policy |
| `region` | Yes | AWS region of the database cluster |
| `db_username` | Yes | The database username associated with the IAM identity |
| `session_duration` | No | STS session duration in seconds (default: `3600`) |

### Redshift (Serverless)

```json
{
  "aws_iam": {
    "enabled": true,
    "role_arn": "arn:aws:iam::222222222222:role/SupersetRedshiftAccess",
    "region": "us-east-1",
    "workgroup_name": "my-workgroup",
    "db_name": "dev"
  }
}
```

### Redshift (Provisioned Cluster)

```json
{
  "aws_iam": {
    "enabled": true,
    "role_arn": "arn:aws:iam::222222222222:role/SupersetRedshiftAccess",
    "region": "us-east-1",
    "cluster_identifier": "my-cluster",
    "db_username": "superset_iam_user",
    "db_name": "dev"
  }
}
```

## Cross-Account IAM Setup

To connect to a database in Account B from a Superset deployment in Account A:

**1. In Account B — create a database-access role:**

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["rds-db:connect"],
      "Resource": "arn:aws:rds-db:us-east-1:222222222222:dbuser/db-XXXXXXXXXXXX/superset_iam_user"
    }
  ]
}
```

**Trust policy** (allows Account A's Superset role to assume it):

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "AWS": "arn:aws:iam::111111111111:role/SupersetInstanceRole"
      },
      "Action": "sts:AssumeRole",
      "Condition": {
        "StringEquals": {
          "sts:ExternalId": "superset-prod-12345"
        }
      }
    }
  ]
}
```

**2. In Account A — grant Superset's role permission to assume the Account B role:**

```json
{
  "Effect": "Allow",
  "Action": "sts:AssumeRole",
  "Resource": "arn:aws:iam::222222222222:role/SupersetDatabaseAccess"
}
```

**3. Configure the database connection in Superset** using the `role_arn` and `external_id` from the trust policy (as shown in the configuration example above).
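
To make the cross-account flow concrete, here is a hedged boto3 sketch of the STS `AssumeRole` call implied by the configuration above (the ARNs and external ID are the placeholder values from the examples; the session name is arbitrary):

```python
import boto3

# Step 1: assume the database-access role in Account B.
sts = boto3.client("sts")
assumed = sts.assume_role(
    RoleArn="arn:aws:iam::222222222222:role/SupersetDatabaseAccess",
    RoleSessionName="superset-db-auth",  # arbitrary session label
    ExternalId="superset-prod-12345",
    DurationSeconds=3600,
)
creds = assumed["Credentials"]

# Step 2: build a client from the temporary credentials; this client can
# now mint database auth tokens in Account B (see the earlier sketch).
rds = boto3.client(
    "rds",
    region_name="us-east-1",
    aws_access_key_id=creds["AccessKeyId"],
    aws_secret_access_key=creds["SecretAccessKey"],
    aws_session_token=creds["SessionToken"],
)
```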
## Credential Caching

STS credentials are cached in memory keyed by `(role_arn, region, external_id)` with a 10-minute TTL. This reduces the number of STS API calls when multiple queries are executed with the same connection. Tokens are refreshed automatically before expiry.
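
The behavior described above can be pictured with a small sketch; this is an illustration of TTL-keyed caching, not Superset's actual implementation:

```python
import time

# Illustrative TTL cache keyed the way the docs describe.
_CREDS_CACHE: dict[tuple[str, str, str], tuple[float, dict]] = {}
_TTL_SECONDS = 600  # 10-minute TTL

def get_cached_credentials(role_arn: str, region: str, external_id: str, fetch):
    """Return cached STS credentials, refreshing via fetch() once the TTL lapses."""
    key = (role_arn, region, external_id)
    now = time.monotonic()
    hit = _CREDS_CACHE.get(key)
    if hit is not None and now - hit[0] < _TTL_SECONDS:
        return hit[1]
    creds = fetch()  # e.g., an STS AssumeRole call
    _CREDS_CACHE[key] = (now, creds)
    return creds
```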
@@ -1,276 +0,0 @@
---
title: Caching
hide_title: true
sidebar_position: 3
version: 1
---

# Caching

:::note
When a cache backend is configured, Superset expects it to remain available. Operations will
fail if the configured backend becomes unavailable rather than silently degrading. This
fail-fast behavior ensures operators are immediately aware of infrastructure issues.
:::

Superset uses [Flask-Caching](https://flask-caching.readthedocs.io/) for caching purposes.
Flask-Caching supports various caching backends, including Redis (recommended), Memcached,
SimpleCache (in-memory), or the local filesystem.
[Custom cache backends](https://flask-caching.readthedocs.io/en/latest/#custom-cache-backends)
are also supported.

Caching can be configured by providing dictionaries in
`superset_config.py` that comply with [the Flask-Caching config specifications](https://flask-caching.readthedocs.io/en/latest/#configuring-flask-caching).

The following cache configurations can be customized in this way:

- Dashboard filter state (required): `FILTER_STATE_CACHE_CONFIG`
- Explore chart form data (required): `EXPLORE_FORM_DATA_CACHE_CONFIG`
- Metadata cache (optional): `CACHE_CONFIG`
- Charting data queried from datasets (optional): `DATA_CACHE_CONFIG`

For example, to configure the filter state cache using Redis:

```python
FILTER_STATE_CACHE_CONFIG = {
    'CACHE_TYPE': 'RedisCache',
    'CACHE_DEFAULT_TIMEOUT': 86400,
    'CACHE_KEY_PREFIX': 'superset_filter_cache',
    'CACHE_REDIS_URL': 'redis://localhost:6379/0'
}
```

## Dependencies

In order to use dedicated cache stores, additional Python libraries must be installed:

- For Redis, we recommend the [redis](https://pypi.python.org/pypi/redis) Python package
- For Memcached, we recommend the [pylibmc](https://pypi.org/project/pylibmc/) client library, as
  `python-memcached` does not handle storing binary data correctly.

These libraries can be installed using pip.

## Fallback Metastore Cache

Note that some form of Filter State and Explore caching is required. If either of these caches
is undefined, Superset falls back to using a built-in cache that stores data in the metadata
database. While it is recommended to use a dedicated cache, the built-in cache can also be used
to cache other data.

For example, to use the built-in cache to store chart data, use the following config:

```python
DATA_CACHE_CONFIG = {
    "CACHE_TYPE": "SupersetMetastoreCache",
    "CACHE_KEY_PREFIX": "superset_results",  # make sure this string is unique to avoid collisions
    "CACHE_DEFAULT_TIMEOUT": 86400,  # 60 seconds * 60 minutes * 24 hours
}
```

## Chart Cache Timeout

The cache timeout for charts may be overridden by the settings for an individual chart, dataset, or
database. Each of these configurations is checked in order before falling back to the default
value defined in `DATA_CACHE_CONFIG`.

Note that setting the cache timeout to `-1` disables caching for charting data, either
per chart, dataset, or database, or by default if set in `DATA_CACHE_CONFIG`.
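
For example, one way to disable chart-data caching by default is to set the timeout in `DATA_CACHE_CONFIG` (a sketch; per-chart and per-dataset overrides are configured in the UI):

```python
DATA_CACHE_CONFIG = {
    "CACHE_TYPE": "SupersetMetastoreCache",
    "CACHE_DEFAULT_TIMEOUT": -1,  # -1 disables caching for chart data by default
}
```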
## SQL Lab Query Results

Caching for SQL Lab query results is used when async queries are enabled and is configured using
`RESULTS_BACKEND`.

Note that this setting does not take a Flask-Caching dictionary; it
requires a cachelib object instead.
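
For example, a Redis-backed results backend can be declared with a cachelib object; a minimal sketch assuming a local Redis instance:

```python
from cachelib.redis import RedisCache

RESULTS_BACKEND = RedisCache(
    host="localhost",
    port=6379,
    key_prefix="superset_results",
)
```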
See [Async Queries via Celery](/admin-docs/configuration/async-queries-celery) for details.

## Caching Thumbnails

This is an optional feature that can be turned on by activating its [feature flag](/admin-docs/configuration/configuring-superset#feature-flags) in the config:

```
FEATURE_FLAGS = {
    "THUMBNAILS": True,
    "THUMBNAILS_SQLA_LISTENERS": True,
}
```

By default, thumbnails are rendered per user and fall back to the Selenium user for anonymous users.
To always render thumbnails as a fixed user (`admin` in this example), use the following configuration:

```python
from superset.tasks.types import FixedExecutor

THUMBNAIL_EXECUTORS = [FixedExecutor("admin")]
```

For this feature you will need a cache system and Celery workers. All thumbnails are stored in the cache
and are processed asynchronously by the workers.

An example config where images are stored on S3 could be:

```python
from flask import Flask
from s3cache.s3cache import S3Cache

...

class CeleryConfig(object):
    broker_url = "redis://localhost:6379/0"
    imports = (
        "superset.sql_lab",
        "superset.tasks.thumbnails",
    )
    result_backend = "redis://localhost:6379/0"
    worker_prefetch_multiplier = 10
    task_acks_late = True


CELERY_CONFIG = CeleryConfig

def init_thumbnail_cache(app: Flask) -> S3Cache:
    return S3Cache("bucket_name", 'thumbs_cache/')


THUMBNAIL_CACHE_CONFIG = init_thumbnail_cache
```

Using the above example, cache keys for dashboards will be `superset_thumb__dashboard__{ID}`. You can
override the base URL for Selenium using:

```
WEBDRIVER_BASEURL = "https://superset.company.com"
```

To control which user account is used for rendering thumbnails and warming up caches, configure
`THUMBNAIL_EXECUTORS` and `CACHE_WARMUP_EXECUTORS`. Each accepts a list of executor types (which
resolve to an owner, creator, modifier, or the currently-logged-in user) and/or a `FixedExecutor`
pinned to a specific username. By default, thumbnails render as the current user
(`ExecutorType.CURRENT_USER`) and cache warmup runs as the chart/dashboard owner
(`ExecutorType.OWNER`).

To force both to run as a dedicated service account (`admin` in this example):

```python
from superset.tasks.types import ExecutorType, FixedExecutor

THUMBNAIL_EXECUTORS = [FixedExecutor("admin")]
CACHE_WARMUP_EXECUTORS = [FixedExecutor("admin")]
```

Use a dedicated read-only service account here rather than a personal admin account, so that
thumbnail rendering and cache warmup tasks don't fail if a specific user's credentials change.

Additional Selenium WebDriver configuration can be set using `WEBDRIVER_CONFIGURATION`. You can
implement a custom function to authenticate Selenium. The default function uses the `flask-login`
session cookie. Here's an example of a custom function signature:

```python
def auth_driver(driver: WebDriver, user: "User") -> WebDriver:
    pass
```

Then, in your configuration:

```
WEBDRIVER_AUTH_FUNC = auth_driver
```

## ETag Support for Thumbnails

Thumbnail and screenshot endpoints return `ETag` response headers based on the cached content digest. Clients can use conditional requests to avoid downloading unchanged images:

```
GET /api/v1/chart/42/thumbnail/
If-None-Match: "abc123..."

→ 304 Not Modified (if unchanged)
→ 200 OK (with new image if changed)
```

This is particularly useful for embedded dashboards and external integrations that periodically poll for updated screenshots — unchanged thumbnails return immediately with no payload.
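
A polling client can take advantage of this with conditional requests; below is a minimal sketch using the `requests` library (the URL is illustrative, and authentication handling is omitted):

```python
import requests

# Sketch: poll a thumbnail endpoint, downloading the image only when its ETag changes.
url = "https://superset.example.com/api/v1/chart/42/thumbnail/"
etag = None  # ETag remembered from the previous poll

headers = {"If-None-Match": etag} if etag else {}
resp = requests.get(url, headers=headers)
if resp.status_code == 200:
    etag = resp.headers.get("ETag")  # store for the next conditional request
    image_bytes = resp.content       # new or changed thumbnail
elif resp.status_code == 304:
    pass  # unchanged; keep the previously cached image
```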
## Distributed Coordination Backend

Superset supports an optional distributed coordination backend (`DISTRIBUTED_COORDINATION_CONFIG`) for
high-performance distributed operations. This configuration enables:

- **Distributed locking**: Moves lock operations from the metadata database to Redis, improving
  performance and reducing metastore load
- **Real-time event notifications**: Enables instant pub/sub messaging for task abort signals and
  completion notifications instead of polling-based approaches

:::note
This requires Redis or Valkey specifically—it uses Redis-specific features (pub/sub, `SET NX EX`)
that are not available in general Flask-Caching backends.
:::

### Configuration

The distributed coordination backend uses Flask-Caching-style configuration for consistency with other cache
backends. Configure `DISTRIBUTED_COORDINATION_CONFIG` in `superset_config.py`:

```python
DISTRIBUTED_COORDINATION_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_REDIS_HOST": "localhost",
    "CACHE_REDIS_PORT": 6379,
    "CACHE_REDIS_DB": 0,
    "CACHE_REDIS_PASSWORD": "",  # Optional
}
```

For Redis Sentinel deployments:

```python
DISTRIBUTED_COORDINATION_CONFIG = {
    "CACHE_TYPE": "RedisSentinelCache",
    "CACHE_REDIS_SENTINELS": [("sentinel1", 26379), ("sentinel2", 26379)],
    "CACHE_REDIS_SENTINEL_MASTER": "mymaster",
    "CACHE_REDIS_SENTINEL_PASSWORD": None,  # Sentinel password (if different)
    "CACHE_REDIS_PASSWORD": "",  # Redis password
    "CACHE_REDIS_DB": 0,
}
```

For SSL/TLS connections:

```python
DISTRIBUTED_COORDINATION_CONFIG = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_REDIS_HOST": "redis.example.com",
    "CACHE_REDIS_PORT": 6380,
    "CACHE_REDIS_SSL": True,
    "CACHE_REDIS_SSL_CERTFILE": "/path/to/client.crt",
    "CACHE_REDIS_SSL_KEYFILE": "/path/to/client.key",
    "CACHE_REDIS_SSL_CA_CERTS": "/path/to/ca.crt",
}
```

### Distributed Lock TTL

You can configure the default lock TTL (time-to-live) in seconds. Locks automatically expire after
this duration to prevent deadlocks from crashed processes:

```python
DISTRIBUTED_LOCK_DEFAULT_TTL = 30  # Default: 30 seconds
```

Individual lock acquisitions can override this value when needed.

### Database-Only Mode

When `DISTRIBUTED_COORDINATION_CONFIG` is not configured, Superset uses database-backed operations:

- **Locking**: Uses the KeyValue table with periodic cleanup of expired entries
- **Event notifications**: Uses database polling instead of pub/sub

While database-backed operations work reliably, the Redis backend is recommended for production
deployments where low latency and reduced database load are important.

:::resources
- [Blog: The Data Engineer's Guide to Lightning-Fast Superset Dashboards](https://preset.io/blog/the-data-engineers-guide-to-lightning-fast-apache-superset-dashboards/)
- [Blog: Accelerating Dashboards with Materialized Views](https://preset.io/blog/accelerating-apache-superset-dashboards-with-materialized-views/)
:::
Some files were not shown because too many files have changed in this diff.