Compare commits


1 commit

Author: Bogdan Kyryliuk
SHA1: c308339573
Message: Admin / Alpha permission cleanup and fixes.
Date: 2016-11-21 13:31:43 -08:00
6497 changed files with 51694 additions and 1330317 deletions

.asf.yaml

@@ -1,107 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
---
notifications:
commits: commits@superset.apache.org
issues: notifications@superset.apache.org
pullrequests: notifications@superset.apache.org
discussions: notifications@superset.apache.org
github:
del_branch_on_merge: true
description: "Apache Superset is a Data Visualization and Data Exploration Platform"
homepage: https://superset.apache.org/
labels:
- superset
- apache
- apache-superset
- data-visualization
- data-viz
- analytics
- business-intelligence
- data-science
- data-engineering
- asf
- bi
- business-analytics
- data-analytics
- data-analysis
- data-science
- python
- react
- sql-editor
- flask
features:
# Enable issues management
issues: true
# Enable projects for project management boards
projects: true
# Enable wiki for documentation
wiki: true
# Enable discussions
discussions: true
enabled_merge_buttons:
squash: true
merge: false
rebase: false
ghp_branch: gh-pages
ghp_path: /
protected_branches:
master:
required_status_checks:
# strict means "Require branches to be up to date before merging".
strict: false
# contexts are the names of checks that must pass
# unfortunately AFAICT for `matrix:` jobs, we have to itemize every
# combination here.
contexts:
- lint-check
- cypress-matrix (0, chrome)
- cypress-matrix (1, chrome)
- cypress-matrix (2, chrome)
- cypress-matrix (3, chrome)
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- dependency-review
- frontend-build
- pre-commit (current)
- pre-commit (previous)
- test-mysql
- test-postgres (current)
- test-postgres-hive
- test-postgres-presto
- test-sqlite
- unit-tests (current)
required_pull_request_reviews:
dismiss_stale_reviews: false
require_code_owner_reviews: true
required_approving_review_count: 1
required_signatures: false
gh-pages:
required_pull_request_reviews:
dismiss_stale_reviews: false
require_code_owner_reviews: true
required_approving_review_count: 1
required_signatures: false

.codeclimate.yml

@@ -0,0 +1,36 @@
engines:
csslint:
enabled: false
duplication:
enabled: false
eslint:
enabled: true
config:
config: superset/assets/.eslintrc
pep8:
enabled: true
fixme:
enabled: false
radon:
enabled: true
checks:
Complexity:
enabled: false
ratings:
paths:
- "**.py"
- "superset/assets/**.js"
- "superset/assets/**.jsx"
exclude_paths:
- ".*"
- "**.pyc"
- "**.gz"
- "env/"
- "tests/"
- "superset/ascii_art.py"
- "superset/assets/images/"
- "superset/assets/vendor/"
- "superset/assets/node_modules/"
- "superset/assets/javascripts/dist/"
- "superset/migrations"
- "docs/"

.codecov.yml

@@ -1,35 +0,0 @@
codecov:
notify:
after_n_builds: 4
ignore:
- "superset/migrations/versions/*.py"
- "superset-frontend/packages/superset-ui-demo/**/*"
- "**/*.stories.tsx"
- "**/*.stories.jsx"
coverage:
status:
project:
default:
informational: true
# Commits pushed to master should not make the overall
# project coverage decrease:
target: auto
threshold: 0%
core-packages-ts:
target: 100%
paths:
- 'superset-frontend/packages'
- '!superset-frontend/packages/**/*.jsx'
- '!superset-frontend/packages/**/*.tsx'
core-packages-tsx:
target: 50%
paths:
- 'superset-frontend/packages/**/*.jsx'
- 'superset-frontend/packages/**/*.tsx'
patch:
default:
informational: true
threshold: 0%
flag_management:
default_rules:
carryforward: true

.coveragerc

@@ -1,36 +0,0 @@
# .coveragerc to control coverage.py
[run]
branch = True
source = superset
# omit = bad_file.py
[paths]
source =
superset/
*/site-packages/
[report]
# Regexes for lines to exclude from consideration
exclude_lines =
# Have to re-enable the standard pragma
pragma: no cover
# Don't complain about missing debug-only code:
def __repr__
if self\.debug
# Don't complain if tests don't hit defensive assertion code:
raise AssertionError
raise NotImplementedError
# Don't complain if non-runnable code isn't run:
if 0:
if __name__ == .__main__.:
# Ignore importlib backport
from importlib
if TYPE_CHECKING:
#fail_under = 100
show_missing = True

.coveralls.yml

@@ -0,0 +1 @@
repo_token: eESbYiv4An6KEvjpmguDs4L7YkubXbqn1


@@ -1,125 +0,0 @@
---
description: Apache Superset development standards and guidelines for Cursor IDE
globs: ["**/*.py", "**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx", "**/*.sql", "**/*.md"]
alwaysApply: true
---
# Apache Superset Development Standards for Cursor IDE
Apache Superset is a data visualization platform with Flask/Python backend and React/TypeScript frontend.
## ⚠️ CRITICAL: Ongoing Refactors (What NOT to Do)
**These migrations are actively happening - avoid deprecated patterns:**
### Frontend Modernization
- **NO `any` types** - Use proper TypeScript types
- **NO JavaScript files** - Convert to TypeScript (.ts/.tsx)
- **NO Enzyme** - Use React Testing Library/Jest (Enzyme fully removed)
- **Use @superset-ui/core** - Don't import Ant Design directly
### Testing Strategy Migration
- **Prefer unit tests** over integration tests
- **Prefer integration tests** over Cypress end-to-end tests
- **Cypress is last resort** - Actively moving away from Cypress
- **Use Jest + React Testing Library** for component testing
### Backend Type Safety
- **Add type hints** - All new Python code needs proper typing
- **MyPy compliance** - Run `pre-commit run mypy` to validate
- **SQLAlchemy typing** - Use proper model annotations
## Code Standards
### TypeScript Frontend
- **NO `any` types** - Use proper TypeScript
- **Functional components** with hooks
- **@superset-ui/core** for UI components (not direct antd)
- **Jest** for testing (NO Enzyme)
- **Redux** for global state, hooks for local
### Python Backend
- **Type hints required** for all new code
- **MyPy compliant** - run `pre-commit run mypy`
- **SQLAlchemy models** with proper typing
- **pytest** for testing
### Apache License Headers
- **New files require ASF license headers** - When creating new code files, include the standard Apache Software Foundation license header
- **LLM instruction files are excluded** - Files like LLMS.md, CLAUDE.md, etc. are in `.rat-excludes` to avoid header token overhead
## Key Directory Structure
```
superset/
├── superset/ # Python backend (Flask, SQLAlchemy)
│ ├── views/api/ # REST API endpoints
│ ├── models/ # Database models
│ └── connectors/ # Database connections
├── superset-frontend/src/ # React TypeScript frontend
│ ├── components/ # Reusable components
│ ├── explore/ # Chart builder
│ ├── dashboard/ # Dashboard interface
│ └── SqlLab/ # SQL editor
├── superset-frontend/packages/
│ └── superset-ui-core/ # UI component library (USE THIS)
├── tests/ # Python/integration tests
├── docs/ # Documentation (UPDATE FOR CHANGES)
└── UPDATING.md # Breaking changes log
```
## Architecture Patterns
### Dataset-Centric Approach
Charts built from enriched datasets containing:
- Dimension columns with labels/descriptions
- Predefined metrics as SQL expressions
- Self-service analytics within defined contexts
### Security & Features
- **RBAC**: Role-based access via Flask-AppBuilder
- **Feature flags**: Control feature rollouts
- **Row-level security**: SQL-based data access control
## Test Utilities
### Python Test Helpers
- **`SupersetTestCase`** - Base class in `tests/integration_tests/base_tests.py`
- **`@with_config`** - Config mocking decorator
- **`@with_feature_flags`** - Feature flag testing
- **`login_as()`, `login_as_admin()`** - Authentication helpers
- **`create_dashboard()`, `create_slice()`** - Data setup utilities
### TypeScript Test Helpers
- **`superset-frontend/spec/helpers/testing-library.tsx`** - Custom render() with providers
- **`createWrapper()`** - Redux/Router/Theme wrapper
- **`selectOption()`** - Select component helper
- **React Testing Library** - NO Enzyme (removed)
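A minimal sketch of how these test suites are typically invoked from the command line (the exact test paths and the npm script name are assumptions, not specified in this document):
```bash
# Backend: run pytest against the Python test trees mentioned above
pytest tests/unit_tests/

# Frontend: run Jest + React Testing Library through npm (script name assumed)
cd superset-frontend && npm run test -- src/components/Select
```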
## Pre-commit Validation
**Use pre-commit hooks for quality validation:**
```bash
# Install hooks
pre-commit install
# Quick validation (faster than --all-files)
pre-commit run # Staged files only
pre-commit run mypy # Python type checking
pre-commit run prettier # Code formatting
pre-commit run eslint # Frontend linting
```
## Development Guidelines
- **Documentation**: Update docs/ for any user-facing changes
- **Breaking Changes**: Add to UPDATING.md
- **Docstrings**: Required for new functions/classes
- **Follow existing patterns**: Mimic code style, use existing libraries and utilities
- **Type Safety**: This codebase is actively modernizing toward full TypeScript and type safety
- **Always run `pre-commit run`** to validate changes before committing
---
**Note**: This codebase is actively modernizing toward full TypeScript and type safety. Always run `pre-commit run` to validate changes. Follow the ongoing refactors section to avoid deprecated patterns.

.devcontainer/Dockerfile

@@ -1,20 +0,0 @@
# Keep this in sync with the base image in the main Dockerfile (ARG PY_VER)
FROM python:3.11.13-bookworm AS base
# Install system dependencies that Superset needs
# This layer will be cached across Codespace sessions
RUN apt-get update && apt-get install -y \
libsasl2-dev \
libldap2-dev \
libpq-dev \
tmux \
gh \
&& rm -rf /var/lib/apt/lists/*
# Install uv for fast Python package management
# This will also be cached in the image
RUN curl -LsSf https://astral.sh/uv/install.sh | sh && \
echo 'export PATH="/root/.cargo/bin:$PATH"' >> /etc/bash.bashrc
# Set the cargo/bin directory in PATH for all users
ENV PATH="/root/.cargo/bin:${PATH}"


@@ -1,16 +0,0 @@
# Superset Development with GitHub Codespaces
For complete documentation on using GitHub Codespaces with Apache Superset, please see:
**[Setting up a Development Environment - GitHub Codespaces](https://superset.apache.org/docs/contributing/development#github-codespaces-cloud-development)**
## Pre-installed Development Environment
When you create a new Codespace from this repository, it automatically:
1. **Creates a Python virtual environment** using `uv venv`
2. **Installs all development dependencies** via `uv pip install -r requirements/development.txt`
3. **Sets up pre-commit hooks** with `pre-commit install`
4. **Activates the virtual environment** automatically in all terminals
The virtual environment is located at `/workspaces/{repository-name}/.venv` and is automatically activated through environment variables set in the devcontainer configuration.
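For reference, a manual equivalent of this automated setup looks roughly like the following (a sketch; the clone path is assumed, and `uv` is expected to target the local `.venv`):
```bash
cd /workspaces/superset                         # default Codespaces clone location (assumed)
uv venv                                         # create the virtual environment
uv pip install -r requirements/development.txt  # install development dependencies into .venv
source .venv/bin/activate                       # activate the environment
pre-commit install                              # install the pre-commit git hooks
```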

.devcontainer/bashrc-additions

@@ -1,62 +0,0 @@
# Superset Codespaces environment setup
# This file is appended to ~/.bashrc during Codespace setup
# Find the workspace directory (handles both 'superset' and 'superset-2' names)
WORKSPACE_DIR=$(find /workspaces -maxdepth 1 -name "superset*" -type d | head -1)
if [ -n "$WORKSPACE_DIR" ]; then
# Check if virtual environment exists
if [ -d "$WORKSPACE_DIR/.venv" ]; then
# Activate the virtual environment
source "$WORKSPACE_DIR/.venv/bin/activate"
echo "✅ Python virtual environment activated"
# Verify pre-commit is installed and set up
if command -v pre-commit &> /dev/null; then
echo "✅ pre-commit is available ($(pre-commit --version))"
# Install git hooks if not already installed
if [ -d "$WORKSPACE_DIR/.git" ] && [ ! -f "$WORKSPACE_DIR/.git/hooks/pre-commit" ]; then
echo "🪝 Installing pre-commit hooks..."
cd "$WORKSPACE_DIR" && pre-commit install
fi
else
echo "⚠️ pre-commit not found. Run: pip install pre-commit"
fi
else
echo "⚠️ Python virtual environment not found at $WORKSPACE_DIR/.venv"
echo " Run: cd $WORKSPACE_DIR && .devcontainer/setup-dev.sh"
fi
# Always cd to the workspace directory for convenience
cd "$WORKSPACE_DIR"
fi
# Add helpful aliases for Superset development
alias start-superset="$WORKSPACE_DIR/.devcontainer/start-superset.sh"
alias setup-dev="$WORKSPACE_DIR/.devcontainer/setup-dev.sh"
# Show helpful message on login
echo ""
echo "🚀 Superset Codespaces Environment"
echo "=================================="
# Check if Superset is running
if docker ps 2>/dev/null | grep -q "superset"; then
echo "✅ Superset is running!"
echo " - Check the 'Ports' tab for your live Superset URL"
echo " - Initial startup takes 10-20 minutes"
echo " - Login: admin/admin"
else
echo "⚠️ Superset is not running. Use: start-superset"
# Check if there's a startup log
if [ -f "/tmp/superset-startup.log" ]; then
echo " 📋 Startup log found: cat /tmp/superset-startup.log"
fi
fi
echo ""
echo "Quick commands:"
echo " start-superset - Start Superset with Docker Compose"
echo " setup-dev - Set up Python environment (if not already done)"
echo " pre-commit run - Run pre-commit checks on staged files"
echo ""


@@ -1,20 +0,0 @@
#!/bin/bash
# Script to build and push the devcontainer image to GitHub Container Registry
# This allows caching the image between Codespace sessions
# You'll need to run this with appropriate GitHub permissions
# gh auth login --scopes write:packages
REGISTRY="ghcr.io"
OWNER="apache"
REPO="superset"
TAG="devcontainer-base"
echo "Building devcontainer image..."
docker build -t $REGISTRY/$OWNER/$REPO:$TAG .devcontainer/
echo "Pushing to GitHub Container Registry..."
docker push $REGISTRY/$OWNER/$REPO:$TAG
echo "Done! Update .devcontainer/devcontainer.json to use:"
echo " \"image\": \"$REGISTRY/$OWNER/$REPO:$TAG\""

.devcontainer/devcontainer.json

@@ -1,66 +0,0 @@
{
"name": "Apache Superset Development",
// Option 1: Use pre-built image directly
// "image": "ghcr.io/apache/superset:devcontainer-base",
// Option 2: Build from Dockerfile with cache (current approach)
"build": {
"dockerfile": "Dockerfile",
"context": ".",
// Cache from the Apache registry image
"cacheFrom": ["ghcr.io/apache/superset:devcontainer-base"]
},
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:2": {
"moby": true,
"dockerDashComposeVersion": "v2"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "20"
},
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/common-utils:2": {
"configureZshAsDefaultShell": true
},
"ghcr.io/devcontainers/features/sshd:1": {
"version": "latest"
}
},
// Forward ports for development
"forwardPorts": [9001],
"portsAttributes": {
"9001": {
"label": "Superset (via Webpack Dev Server)",
"onAutoForward": "notify",
"visibility": "public"
}
},
// Run commands after container is created
"postCreateCommand": "bash .devcontainer/setup-dev.sh || echo '⚠️ Setup had issues - run .devcontainer/setup-dev.sh manually'",
// Auto-start Superset after ensuring Docker is ready
// Run in foreground to see any errors, but don't block on failures
"postStartCommand": "bash -c 'echo \"Waiting 30s for services to initialize...\"; sleep 30; .devcontainer/start-superset.sh || echo \"⚠️ Auto-start failed - run start-superset manually\"'",
// Set environment variables
"remoteEnv": {
// Removed automatic venv activation to prevent startup issues
// The setup script will handle this
},
// VS Code customizations
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"charliermarsh.ruff",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode"
]
}
}
}

.devcontainer/setup-dev.sh

@@ -1,78 +0,0 @@
#!/bin/bash
# Setup script for Superset Codespaces development environment
echo "🔧 Setting up Superset development environment..."
# System dependencies and uv are now pre-installed in the Docker image
# This speeds up Codespace creation significantly!
# Create virtual environment using uv
echo "🐍 Creating Python virtual environment..."
if ! uv venv; then
echo "❌ Failed to create virtual environment"
exit 1
fi
# Install Python dependencies
echo "📦 Installing Python dependencies..."
if ! uv pip install -r requirements/development.txt; then
echo "❌ Failed to install Python dependencies"
echo "💡 You may need to run this manually after the Codespace starts"
exit 1
fi
# Install pre-commit hooks
echo "🪝 Installing pre-commit hooks..."
if source .venv/bin/activate && pre-commit install; then
echo "✅ Pre-commit hooks installed"
else
echo "⚠️ Pre-commit hooks installation failed (non-critical)"
fi
# Install Claude Code CLI via npm
echo "🤖 Installing Claude Code..."
if npm install -g @anthropic-ai/claude-code; then
echo "✅ Claude Code installed"
else
echo "⚠️ Claude Code installation failed (non-critical)"
fi
# Make the start script executable
chmod +x .devcontainer/start-superset.sh
# Add bashrc additions for automatic venv activation
echo "🔧 Setting up automatic environment activation..."
if [ -f ~/.bashrc ]; then
# Check if we've already added our additions
if ! grep -q "Superset Codespaces environment setup" ~/.bashrc; then
echo "" >> ~/.bashrc
cat .devcontainer/bashrc-additions >> ~/.bashrc
echo "✅ Added automatic venv activation to ~/.bashrc"
else
echo "✅ Bashrc additions already present"
fi
else
# Create bashrc if it doesn't exist
cat .devcontainer/bashrc-additions > ~/.bashrc
echo "✅ Created ~/.bashrc with automatic venv activation"
fi
# Also add to zshrc since that's the default shell
if [ -f ~/.zshrc ] || [ -n "$ZSH_VERSION" ]; then
if ! grep -q "Superset Codespaces environment setup" ~/.zshrc; then
echo "" >> ~/.zshrc
cat .devcontainer/bashrc-additions >> ~/.zshrc
echo "✅ Added automatic venv activation to ~/.zshrc"
fi
fi
echo "✅ Development environment setup complete!"
echo ""
echo "📝 The virtual environment will be automatically activated in new terminals"
echo ""
echo "🔄 To activate in this terminal, run:"
echo " source ~/.bashrc"
echo ""
echo "🚀 To start Superset:"
echo " start-superset"
echo ""

.devcontainer/start-superset.sh

@@ -1,108 +0,0 @@
#!/bin/bash
# Startup script for Superset in Codespaces
# Log to a file for debugging
LOG_FILE="/tmp/superset-startup.log"
echo "[$(date)] Starting Superset startup script" >> "$LOG_FILE"
echo "[$(date)] User: $(whoami), PWD: $(pwd)" >> "$LOG_FILE"
echo "🚀 Starting Superset in Codespaces..."
echo "🌐 Frontend will be available at port 9001"
# Find the workspace directory (Codespaces clones as 'superset', not 'superset-2')
WORKSPACE_DIR=$(find /workspaces -maxdepth 1 -name "superset*" -type d | head -1)
if [ -n "$WORKSPACE_DIR" ]; then
cd "$WORKSPACE_DIR"
echo "📁 Working in: $WORKSPACE_DIR"
else
echo "📁 Using current directory: $(pwd)"
fi
# Wait for Docker to be available
echo "⏳ Waiting for Docker to start..."
echo "[$(date)] Waiting for Docker..." >> "$LOG_FILE"
max_attempts=30
attempt=0
while ! docker info > /dev/null 2>&1; do
if [ $attempt -eq $max_attempts ]; then
echo "❌ Docker failed to start after $max_attempts attempts"
echo "[$(date)] Docker failed to start after $max_attempts attempts" >> "$LOG_FILE"
echo "🔄 Please restart the Codespace or run this script manually later"
exit 1
fi
echo " Attempt $((attempt + 1))/$max_attempts..."
echo "[$(date)] Docker check attempt $((attempt + 1))/$max_attempts" >> "$LOG_FILE"
sleep 2
attempt=$((attempt + 1))
done
echo "✅ Docker is ready!"
echo "[$(date)] Docker is ready" >> "$LOG_FILE"
# Check if Superset containers are already running
if docker ps | grep -q "superset"; then
echo "✅ Superset containers are already running!"
echo ""
echo "🌐 To access Superset:"
echo " 1. Click the 'Ports' tab at the bottom of VS Code"
echo " 2. Find port 9001 and click the globe icon to open"
echo " 3. Wait 10-20 minutes for initial startup"
echo ""
echo "📝 Login credentials: admin/admin"
exit 0
fi
# Clean up any existing containers
echo "🧹 Cleaning up existing containers..."
docker-compose -f docker-compose-light.yml down
# Start services
echo "🏗️ Starting Superset in background (daemon mode)..."
echo ""
# Start in detached mode
docker-compose -f docker-compose-light.yml up -d
echo ""
echo "✅ Docker Compose started successfully!"
echo ""
echo "📋 Important information:"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "⏱️ Initial startup takes 10-20 minutes"
echo "🌐 Check the 'Ports' tab for your Superset URL (port 9001)"
echo "👤 Login: admin / admin"
echo ""
echo "📊 Useful commands:"
echo " docker-compose -f docker-compose-light.yml logs -f # Follow logs"
echo " docker-compose -f docker-compose-light.yml ps # Check status"
echo " docker-compose -f docker-compose-light.yml down # Stop services"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "💤 Keeping terminal open for 60 seconds to test persistence..."
sleep 60
echo "✅ Test complete - check if this terminal is still visible!"
# Show final status
docker-compose -f docker-compose-light.yml ps
EXIT_CODE=$?
# If it failed, provide helpful instructions
if [ $EXIT_CODE -ne 0 ] && [ $EXIT_CODE -ne 130 ]; then # 130 is Ctrl+C
echo ""
echo "❌ Superset startup failed (exit code: $EXIT_CODE)"
echo ""
echo "🔄 To restart Superset, run:"
echo " .devcontainer/start-superset.sh"
echo ""
echo "🔧 For troubleshooting:"
echo " # View logs:"
echo " docker-compose -f docker-compose-light.yml logs"
echo ""
echo " # Clean restart (removes volumes):"
echo " docker-compose -f docker-compose-light.yml down -v"
echo " .devcontainer/start-superset.sh"
echo ""
echo " # Common issues:"
echo " - Network timeouts: Just retry, often transient"
echo " - Port conflicts: Check 'docker ps'"
echo " - Database issues: Try clean restart with -v"
fi

.dockerignore

@@ -1,48 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
**/__pycache__/
**/.git
**/.apache_superset.egg-info
**/.github
**/.mypy_cache
**/.pytest_cache
**/.tox
**/.vscode
**/.idea
**/.coverage
**/.DS_Store
**/.eggs
**/.python-version
**/*.egg-info
**/*.bak
**/*.db
**/*.pyc
**/*.sqllite
**/*.swp
**/.terser-plugin-cache/
**/node_modules/
tests/
docs/
install/
superset-frontend/cypress-base/
superset-frontend/coverage/
superset-frontend/.temp_cache/
superset/static/assets/
superset-websocket/dist/
venv
.venv

.editorconfig

@@ -1,47 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8
# 4 space indentation for Python files
[*.py]
indent_style = space
indent_size = 4
max_line_length=88
# 2 space indentation for Frontend files
[*.{js,jsx,ts,tsx,html,less,css}]
indent_style = space
indent_size = 2
# 2 space indentation for json and yaml files
[*.{json,yml}]
indent_style = space
indent_size = 2
# Tab indentation
[Makefile]
indent_style = tab

.flaskenv

@@ -1,18 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
FLASK_APP="superset.app:create_app()"
FLASK_DEBUG=true

.fossa.yml

@@ -1,36 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated by FOSSA CLI (https://github.com/fossas/fossa-cli)
# Visit https://fossa.com to learn more
version: 2
cli:
server: https://app.fossa.com
fetcher: custom
analyze:
modules:
- name: assets
type: npm
target: superset-frontend
path: superset-frontend
- name: base
type: pip
target: .
path: .
options:
requirements: ./requirements/base.txt

.gitattributes

@@ -1,4 +0,0 @@
docker/**/*.sh text eol=lf
*.svg binary
*.ipynb binary
*.geojson binary

.github/CODEOWNERS

@@ -1,32 +0,0 @@
# Notify all committers of DB migration changes, per SIP-59
# https://github.com/apache/superset/issues/13351
/superset/migrations/ @mistercrunch @michael-s-molina @betodealmeida @eschutho @sadpandajoe
# Notify some committers of changes in the components
/superset-frontend/src/components/Select/ @michael-s-molina @geido @kgabryje
/superset-frontend/src/components/MetadataBar/ @michael-s-molina @geido @kgabryje
/superset-frontend/src/components/DropdownContainer/ @michael-s-molina @geido @kgabryje
# Notify Helm Chart maintainers about changes in it
/helm/superset/ @craig-rueda @dpgaspar @villebro @nytai @michael-s-molina @mistercrunch @rusackas @Antonio-RiveroMartnez
# Notify E2E test maintainers of changes
/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida @mistercrunch
# Notify PMC members of changes to GitHub Actions
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
# Notify PMC members of changes to required GitHub Actions
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar @Antonio-RiveroMartnez
# Maps are a finicky contribution process we care about
**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas


@@ -1,99 +0,0 @@
name: Bug report
description: Report a bug to improve Superset's stability
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
Hello Superset Community member! Please keep things tidy by putting your post in the proper place:
🚨 Reporting a security issue: send an email to security@superset.apache.org. DO NOT USE GITHUB ISSUES TO REPORT SECURITY PROBLEMS.
🐛 Reporting a bug: use this form.
🙏 Asking a question or getting help: post in the [Superset Slack chat](http://bit.ly/join-superset-slack) or [GitHub Discussions](https://github.com/apache/superset/discussions) under "Q&A / Help".
💡 Requesting a new feature: Search [GitHub Discussions](https://github.com/apache/superset/discussions) to see if it exists already. If not, add a new post there under "Ideas".
- type: textarea
id: bug-description
attributes:
label: Bug description
description: A clear description of what the bug is, including reproduction steps and expected behavior.
placeholder: |
The bug is that...
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
validations:
required: true
- type: textarea
id: screenshots-recordings
attributes:
label: Screenshots/recordings
description: If applicable, add screenshots or recordings to help explain your problem.
- type: markdown
attributes:
value: |
### Environment
Please specify your environment. If your environment does not match one of the listed options, upgrade it before submitting the issue, as the bug may already have been fixed. For additional information about releases, see [Release Process](https://github.com/apache/superset/wiki/Release-Process).
- type: dropdown
id: superset-version
attributes:
label: Superset version
options:
- master / latest-dev
- "5.0.0"
- "4.1.3"
validations:
required: true
- type: dropdown
id: python-version
attributes:
label: Python version
options:
- "3.9"
- "3.10"
- "3.11"
- Not applicable
- I don't know
validations:
required: true
- type: dropdown
id: node-version
attributes:
label: Node version
options:
- "16"
- "17"
- "18 or greater"
- Not applicable
- I don't know
validations:
required: true
- type: dropdown
id: browser
attributes:
label: Browser
options:
- Chrome
- Firefox
- Safari
- Not applicable
validations:
required: true
- type: textarea
id: additional-context
attributes:
label: Additional context
description: |
Add any other context about the problem here such as the feature flags that you have enabled, any customizations you have made, the data source you are querying, etc.
- type: checkboxes
id: checklist
attributes:
label: Checklist
description: Make sure to follow these steps before submitting your issue - thank you!
options:
- label: I have searched Superset docs and Slack and didn't find a solution to my problem.
- label: I have searched the GitHub issue tracker and didn't find a similar bug report.
- label: I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the "additional context" section.
validations:
required: true

.github/ISSUE_TEMPLATE/config.yml

@@ -1,12 +0,0 @@
---
blank_issues_enabled: false
contact_links:
- name: Feature Request
url: https://github.com/apache/superset/discussions/new?category=ideas
about: Propose a feature request to the Superset community
- name: Q&A
url: https://github.com/apache/superset/discussions/new?category=q-a-help
about: Open a community Q&A thread on GitHub Discussions
- name: Slack
url: https://bit.ly/join-superset-slack
about: Join the Superset Community on Slack for other discussions and assistance


@@ -1,19 +0,0 @@
---
name: Cosmetic Issue
about: Describe a cosmetic issue with CSS, positioning, layout, labeling, or similar
labels: "cosmetic-issue"
---
## Screenshot
[drag & drop image(s) here!]
## Description
[describe the issue here!]
## Design input
[describe any input/collaboration you'd like from designers, and
tag accordingly. For design review, add the
label `design:review`. If this includes a design proposal,
include the label `design:suggest`]


@@ -1,36 +0,0 @@
---
name: SIP
about: "Superset Improvement Proposal. See SIP-0 (https://github.com/apache/superset/issues/5602) for details. A SIP introduces any major change into Apache Superset's code or process."
labels: sip
title: "[SIP] Your Title Here (do not add SIP number)"
assignees: "apache/superset-committers"
---
*Please make sure you are familiar with the SIP process documented*
[here](https://github.com/apache/superset/issues/5602). The SIP will be numbered by a committer upon acceptance.
## [SIP] Proposal for ...<title>
### Motivation
Description of the problem to be solved.
### Proposed Change
Describe how the feature will be implemented, or the problem will be solved. If possible, include mocks, screenshots, or screencasts (even if from different tools).
### New or Changed Public Interfaces
Describe any new additions to the model, views or `REST` endpoints. Describe any changes to existing visualizations, dashboards and React components. Describe changes that affect the Superset CLI and how Superset is deployed.
### New dependencies
Describe any `npm`/`PyPI` packages that are required. Are they actively maintained? What are their licenses?
### Migration Plan and Compatibility
Describe any database migrations that are necessary, or updates to stored URLs.
### Rejected Alternatives
Describe alternative approaches that were considered and rejected.

.github/PULL_REQUEST_TEMPLATE.md

@@ -1,27 +0,0 @@
<!---
Please write the PR title following the conventions at https://www.conventionalcommits.org/en/v1.0.0/
Example:
fix(dashboard): load charts correctly
-->
### SUMMARY
<!--- Describe the change below, including rationale and design decisions -->
### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF
<!--- Skip this if not applicable -->
### TESTING INSTRUCTIONS
<!--- Required! What steps can be taken to manually verify the changes? -->
### ADDITIONAL INFORMATION
<!--- Check any relevant boxes with "x" -->
<!--- HINT: Include "Fixes #nnn" if you are fixing an existing issue -->
- [ ] Has associated issue:
- [ ] Required feature flags:
- [ ] Changes UI
- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))
- [ ] Migration is atomic, supports rollback & is backwards-compatible
- [ ] Confirm DB migration upgrade and downgrade tested
- [ ] Runtime estimates and downtime expectations provided
- [ ] Introduces new feature or API
- [ ] Removes existing feature or API

.github/SECURITY.md

@@ -1,38 +0,0 @@
# Security Policy
This is a project of the [Apache Software Foundation](https://apache.org) and follows the
ASF [vulnerability handling process](https://apache.org/security/#vulnerability-handling).
## Reporting Vulnerabilities
**⚠️ Please do not file GitHub issues for security vulnerabilities as they are public! ⚠️**
The Apache Software Foundation takes a rigorous approach to eliminating security issues
in its software projects. The Apache Superset team is highly responsive to any issue
pertaining to the project's features and functionality.
If you have any concern or believe you have found a vulnerability in Apache Superset,
please get in touch with the Apache Superset Security Team privately at
e-mail address [security@superset.apache.org](mailto:security@superset.apache.org).
More details can be found on the ASF website at
[ASF vulnerability reporting process](https://apache.org/security/#reporting-a-vulnerability)
We kindly ask you to include the following information in your report:
- Apache Superset version that you are using
- A sanitized copy of your `superset_config.py` file or any config overrides
- Detailed steps to reproduce the vulnerability
Note that Apache Superset is not responsible for third-party dependencies that have
security issues; any vulnerabilities found in those dependencies should be reported to
the maintainers of those projects. Vulnerabilities that security scans flag in the
dependencies shipped with the official Docker image can be remediated at release time
by extending the image itself.
**Your responsible disclosure and collaboration are invaluable.**
## Extra Information
- [Apache Superset documentation](https://superset.apache.org/docs/security)
- [Common Vulnerabilities and Exposures by release](https://superset.apache.org/docs/security/cves)
- [How Security Vulnerabilities are Reported & Handled in Apache Superset (Blog)](https://preset.io/blog/how-security-vulnerabilities-are-reported-and-handled-in-apache-superset/)


@@ -1,31 +0,0 @@
name: 'Change Detector'
description: 'Detects file changes for pull request and push events'
inputs:
token:
description: 'GitHub token for authentication'
required: true
outputs:
python:
description: 'Whether Python-related files were changed'
value: ${{ steps.change-detector.outputs.python }}
frontend:
description: 'Whether frontend-related files were changed'
value: ${{ steps.change-detector.outputs.frontend }}
docker:
description: 'Whether docker-related files were changed'
value: ${{ steps.change-detector.outputs.docker }}
docs:
description: 'Whether docs-related files were changed'
value: ${{ steps.change-detector.outputs.docs }}
runs:
using: 'composite'
steps:
- name: Detect file changes
id: change-detector
run: |
python --version
python scripts/change_detector.py
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.token }}
GITHUB_OUTPUT: ${{ github.output }}


@@ -1,23 +0,0 @@
name: Label Draft PRs
on:
pull_request:
types:
- opened
- converted_to_draft
jobs:
label-draft:
runs-on: ubuntu-latest
steps:
- name: Check if the PR is a draft
id: check-draft
uses: actions/github-script@v6
with:
script: |
const isDraft = context.payload.pull_request.draft;
core.setOutput('isDraft', isDraft);
- name: Add `review:draft` Label
if: steps.check-draft.outputs.isDraft == 'true'
uses: actions-ecosystem/action-add-labels@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
labels: "review:draft"


@@ -1,58 +0,0 @@
name: 'Setup Python Environment'
description: 'Set up Python and install dependencies with optional configurations.'
inputs:
python-version:
description: 'Python version to set up. Accepts a version number, "current", or "next".'
required: true
default: 'current'
cache:
description: 'Cache dependencies. Options: pip'
required: false
default: 'pip'
requirements-type:
description: 'Type of requirements to install. Options: base, dev'
required: false
default: 'dev'
install-superset:
description: 'Whether to install Superset itself. If false, only python is installed'
required: false
default: 'true'
runs:
using: 'composite'
steps:
- name: Interpret Python Version
id: set-python-version
shell: bash
run: |
if [ "${{ inputs.python-version }}" = "current" ]; then
echo "PYTHON_VERSION=3.11" >> $GITHUB_ENV
elif [ "${{ inputs.python-version }}" = "next" ]; then
# currently disabled in GHA matrixes because of library compatibility issues
echo "PYTHON_VERSION=3.12" >> $GITHUB_ENV
elif [ "${{ inputs.python-version }}" = "previous" ]; then
echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV
else
echo "PYTHON_VERSION=${{ inputs.python-version }}" >> $GITHUB_ENV
fi
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ inputs.cache }}
- name: Install dependencies
run: |
if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
pip install --upgrade pip setuptools wheel uv
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
uv pip install --system -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
uv pip install --system -r requirements/base.txt
fi
uv pip install --system -e .
fi
shell: bash


@@ -1,69 +0,0 @@
name: "Setup Docker Environment"
description: "Reusable steps for setting up QEMU, Docker Buildx, DockerHub login, Supersetbot, and optionally Docker Compose"
inputs:
build:
description: "Used for building?"
required: false
default: "false"
dockerhub-user:
description: "DockerHub username"
required: false
dockerhub-token:
description: "DockerHub token"
required: false
install-docker-compose:
description: "Flag to install Docker Compose"
required: false
default: "true"
login-to-dockerhub:
description: "Whether you want to log into dockerhub"
required: false
default: "true"
outputs: {}
runs:
using: "composite"
steps:
- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}
- name: Install Docker Compose
if: ${{ inputs.install-docker-compose == 'true' }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
# Download and save the Docker GPG key in the correct format
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
# Ensure the key file is readable
sudo chmod a+r /etc/apt/keyrings/docker.gpg
# Add the Docker repository using the correct key
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
# Update package lists and install Docker Compose plugin
sudo apt update
sudo apt install -y docker-compose-plugin
- name: Docker Version Info
shell: bash
run: docker info


@@ -1,40 +0,0 @@
name: 'Setup supersetbot'
description: 'Sets up supersetbot npm lib from the repo or npm'
inputs:
from-npm:
description: 'Install from npm instead of local setup'
required: false
default: 'true' # Defaults to installing from npm
runs:
using: 'composite'
steps:
- name: Setup Node Env
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install supersetbot from npm
if: ${{ inputs.from-npm == 'true' }}
shell: bash
run: npm install -g supersetbot
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
if: ${{ inputs.from-npm == 'false' }}
uses: actions/checkout@v4
with:
repository: apache-superset/supersetbot
path: supersetbot
- name: Setup supersetbot from repo
if: ${{ inputs.from-npm == 'false' }}
shell: bash
working-directory: supersetbot
run: |
# simple trick to install globally with dependencies
npm pack
npm install -g ./supersetbot*.tgz
- name: echo supersetbot version
shell: bash
run: supersetbot version

.github/config.yml

@@ -1,15 +0,0 @@
# Configuration for request-info - https://github.com/behaviorbot/request-info
# *Required* Comment to reply with
requestInfoReplyComment: >
We would appreciate it if you could provide us with more info about this issue/PR!
Please do not leave the `title` or `description` empty.
# *OPTIONAL* default titles to check against for lack of descriptiveness
# MUST BE ALL LOWERCASE
requestInfoDefaultTitles:
- update readme.md
- updates
# *OPTIONAL* Label to be added to Issues and Pull Requests with insufficient information given
requestInfoLabelToAdd: "need:more-info"


@@ -1 +0,0 @@
../LLMS.md

.github/dependabot.yml

@@ -1,367 +0,0 @@
version: 2
enable-beta-ecosystems: true
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
- package-ecosystem: "npm"
ignore:
# not until React >= 18.0.0
- dependency-name: "storybook"
- dependency-name: "@storybook*"
# JSDOM v30 doesn't play well with Jest v30
# Source: https://jestjs.io/blog#known-issues
# GH thread: https://github.com/jsdom/jsdom/issues/3492
- dependency-name: "jest-environment-jsdom"
directory: "/superset-frontend/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 30
versioning-strategy: increase
# NOTE: `uv` support is in beta, more details here:
# https://github.com/dependabot/dependabot-core/pull/10040#issuecomment-2696978430
- package-ecosystem: "uv"
directory: "requirements/"
open-pull-requests-limit: 10
schedule:
interval: "weekly"
labels:
- uv
- dependabot
- package-ecosystem: "npm"
directory: ".github/actions"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/docs/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-websocket/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-websocket/utils/client-ws-app/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 10
versioning-strategy: increase
# Now for all of our plugins and packages!
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-calendar/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-histogram/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-partition/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-world-map/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/plugin-chart-pivot-table/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-chord/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-horizon/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-rose/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-preset-chart-deckgl/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/plugin-chart-table/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-country-map/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-map-box/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-sankey/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-preset-chart-nvd3/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/plugin-chart-word-cloud/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-event-flow/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-paired-t-test/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/plugin-chart-echarts/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/preset-chart-xy/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-heatmap/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-parallel-coordinates/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/legacy-plugin-chart-sunburst/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/plugins/plugin-chart-handlebars/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/packages/generator-superset/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/packages/superset-ui-chart-controls/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/packages/superset-ui-core/"
ignore:
# not until React >= 18.0.0
- dependency-name: "react-markdown"
- dependency-name: "remark-gfm"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/packages/superset-ui-demo/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase
- package-ecosystem: "npm"
directory: "/superset-frontend/packages/superset-ui-switchboard/"
schedule:
interval: "monthly"
labels:
- npm
- dependabot
open-pull-requests-limit: 5
versioning-strategy: increase


@@ -1,5 +0,0 @@
# for Issue Label Bot https://github.com/marketplace/issue-label-bot
label-alias:
bug: '#bug'
feature_request: '#enhancement'
question: '#question'

.github/labeler.yml

@@ -1,158 +0,0 @@
# TODO (if we can)
# - Label PRs in need of codeowner review
# - viz:charts:xyz labels
# component/design system areas
# - storybook(s)
# - f/e and b/e test changes?
# - product areas (SQL Lab, Explore, Dashboard, etc.)
# - database areas (SQLAlchemy, labeling DBs by driver, etc.)
############################################
# General workflow warnings
# full list of labels is here: https://github.com/apache/superset/labels
############################################
"risk:db-migration":
- changed-files:
- any-glob-to-any-file:
- 'superset/migrations/**'
############################################
# Dependencies
############################################
"dependencies:python":
- changed-files:
- any-glob-to-any-file:
- 'superset/requirements/**'
- 'superset/translations/requirements.txt'
- 'RELEASING/requirements.txt'
"dependencies:npm":
- changed-files:
- any-glob-to-any-file:
- 'superset-frontend/package.json'
- 'superset-frontend/package-lock.json'
- 'superset-embedded-sdk/package.json'
- 'superset-embedded-sdk/package-lock.json'
- 'superset-websocket/package.json'
- 'superset-websocket/package-lock.json'
- 'superset-frontend/cypress-base/package.json'
- 'superset-frontend/cypress-base/package-lock.json'
- 'superset-frontend/packages/**/package.json'
- 'superset-frontend/plugins/**/package.json'
############################################
# Areas of the main codebase
############################################
"doc":
- changed-files:
- any-glob-to-any-file:
- 'docs/**'
"api":
- changed-files:
- any-glob-to-any-file:
- 'superset/**/api.py'
- 'superset/views/core.py'
"i18n":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/**'
"i18n:brazilian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/pt_BR/**'
"i18n:chinese":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/zh/**'
"i18n:traditional-chinese":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/zh_TW/**'
"i18n:dutch":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/nl/**'
"i18n:french":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/fr/**'
"i18n:italian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/it/**'
"i18n:japanese":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/ja/**'
"i18n:korean":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/ko/**'
"i18n:portuguese":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/pt/**'
"i18n:russian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/ru/**'
"i18n:slovak":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/sk/**'
"i18n:ukrainian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/uk/**'
"i18n:spanish":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/es/**'
"i18n:persian":
- changed-files:
- any-glob-to-any-file:
- 'superset/translations/fa/**'
############################################
# Sub-projects and monorepo packages
############################################
"plugins":
- changed-files:
- any-glob-to-any-file:
- 'superset-frontend/plugins/**'
"packages":
- changed-files:
- any-glob-to-any-file:
- 'superset-frontend/packages/**'
"embedded":
- changed-files:
- any-glob-to-any-file:
- 'superset-embedded-sdk/**'
"github_actions":
- changed-files:
- any-glob-to-any-file:
- '.github/actions/**'
- '.github/workflows/**'

.github/move.yml

@@ -1,27 +0,0 @@
# Configuration for Move Issues - https://github.com/dessant/move-issues
# Delete the command comment when it contains no other content
deleteCommand: true
# Close the source issue after moving
closeSourceIssue: true
# Lock the source issue after moving
lockSourceIssue: false
# Mention issue and comment authors
mentionAuthors: true
# Preserve mentions in the issue content
keepContentMentions: false
# Move labels that also exist on the target repository
moveLabels: true
# Set custom aliases for targets
# aliases:
# r: repo
# or: owner/repo
# Repository to extend settings from
# _extends: repo

.github/stale.yml

@@ -1,19 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- "#SIP"
- ".pinned"
- ".security"
# Label to use when marking an issue as stale
staleLabel: inactive
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs.
Thank you for your contributions. For admin, please label this issue `.pinned`
to prevent stale bot from closing the issue.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

.github/workflows/bashlib.sh

@@ -1,227 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
GITHUB_WORKSPACE=${GITHUB_WORKSPACE:-.}
ASSETS_MANIFEST="$GITHUB_WORKSPACE/superset/static/assets/manifest.json"
# Rounded job start time, used to create a unique Cypress build id for
# parallelization so we can manually rerun a job after 20 minutes
NONCE=$(echo "$(date "+%Y%m%d%H%M") - ($(date +%M)%20)" | bc)
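# Example (illustrative): at 14:37 on 2024-06-01 the pipe evaluates
# "202406011437 - (37%20)" -> 202406011437 - 17 = 202406011420, i.e. the job
# start time rounded down to the nearest 20-minute boundary.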
# Echo only when not in parallel mode
say() {
if [[ $(echo "$INPUT_PARALLEL" | tr '[:lower:]' '[:upper:]') != 'TRUE' ]]; then
echo "$1"
fi
}
pip-upgrade() {
say "::group::Upgrade pip"
pip install --upgrade pip
say "::endgroup::"
}
# prepare (lint and build) frontend code
npm-install() {
cd "$GITHUB_WORKSPACE/superset-frontend"
# cache-restore npm
say "::group::Install npm packages"
echo "npm: $(npm --version)"
echo "node: $(node --version)"
npm ci
say "::endgroup::"
# cache-save npm
}
build-assets() {
cd "$GITHUB_WORKSPACE/superset-frontend"
say "::group::Build static assets"
npm run build
say "::endgroup::"
}
build-instrumented-assets() {
cd "$GITHUB_WORKSPACE/superset-frontend"
say "::group::Build static assets with JS instrumented for test coverage"
cache-restore instrumented-assets
if [[ -f "$ASSETS_MANIFEST" ]]; then
echo 'Skip frontend build because instrumented static assets already exist.'
else
npm run build-instrumented
cache-save instrumented-assets
fi
say "::endgroup::"
}
setup-postgres() {
say "::group::Install dependency for unit tests"
sudo apt-get update && sudo apt-get install --yes libecpg-dev
say "::group::Initialize database"
psql "postgresql://superset:superset@127.0.0.1:15432/superset" <<-EOF
DROP SCHEMA IF EXISTS sqllab_test_db CASCADE;
DROP SCHEMA IF EXISTS admin_database CASCADE;
CREATE SCHEMA sqllab_test_db;
CREATE SCHEMA admin_database;
EOF
say "::endgroup::"
}
setup-mysql() {
say "::group::Initialize database"
mysql -h 127.0.0.1 -P 13306 -u root --password=root <<-EOF
SET GLOBAL transaction_isolation='READ-COMMITTED';
SET GLOBAL TRANSACTION ISOLATION LEVEL READ COMMITTED;
DROP DATABASE IF EXISTS superset;
CREATE DATABASE superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
DROP DATABASE IF EXISTS sqllab_test_db;
CREATE DATABASE sqllab_test_db DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
DROP DATABASE IF EXISTS admin_database;
CREATE DATABASE admin_database DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
CREATE USER 'superset'@'%' IDENTIFIED BY 'superset';
GRANT ALL ON *.* TO 'superset'@'%';
FLUSH PRIVILEGES;
EOF
say "::endgroup::"
}
testdata() {
cd "$GITHUB_WORKSPACE"
say "::group::Load test data"
# must specify PYTHONPATH to make `tests.superset_test_config` importable
export PYTHONPATH="$GITHUB_WORKSPACE"
pip install -e .
superset db upgrade
superset load_test_users
superset load_examples --load-test-data
superset init
say "::endgroup::"
}
celery-worker() {
cd "$GITHUB_WORKSPACE"
say "::group::Start Celery worker"
# must specify PYTHONPATH to make `tests.superset_test_config` importable
export PYTHONPATH="$GITHUB_WORKSPACE"
celery \
--app=superset.tasks.celery_app:app \
worker \
--concurrency=2 \
--detach \
--optimization=fair
say "::endgroup::"
}
cypress-install() {
cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
cache-restore cypress
say "::group::Install Cypress"
npm ci
say "::endgroup::"
cache-save cypress
}
cypress-run-all() {
local USE_DASHBOARD=$1
local APP_ROOT=$2
cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
# Start Flask and run it in background
# --no-debugger means disable the interactive debugger on the 500 page
# so errors can print to stderr.
local flasklog="${HOME}/flask.log"
local port=8081
CYPRESS_BASE_URL="http://localhost:${port}"
if [ -n "$APP_ROOT" ]; then
export SUPERSET_APP_ROOT=$APP_ROOT
CYPRESS_BASE_URL=${CYPRESS_BASE_URL}${APP_ROOT}
fi
export CYPRESS_BASE_URL
nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
local flaskProcessId=$!
USE_DASHBOARD_FLAG=''
if [ "$USE_DASHBOARD" = "true" ]; then
USE_DASHBOARD_FLAG='--use-dashboard'
fi
# UNCOMMENT the next few commands to monitor memory usage
# monitor_memory & # Start memory monitoring in the background
# memoryMonitorPid=$!
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID --group $PARALLEL_ID --retries 5 $USE_DASHBOARD_FLAG
# kill $memoryMonitorPid
# After job is done, print out Flask log for debugging
echo "::group::Flask log for default run"
cat "$flasklog"
echo "::endgroup::"
# make sure the program exits
kill $flaskProcessId
}
eyes-storybook-dependencies() {
say "::group::install eyes-storyook dependencies"
sudo apt-get update -y && sudo apt-get -y install gconf-service ca-certificates libxshmfence-dev fonts-liberation libappindicator3-1 libasound2 libatk-bridge2.0-0 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libglib2.0-0 libgdk-pixbuf2.0-0 libgtk-3-0 libnspr4 libnss3 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 lsb-release xdg-utils libappindicator1
say "::endgroup::"
}
monitor_memory() {
# This is a small utility to monitor memory usage. Useful for debugging memory in GHA.
# To use wrap your command as follows
#
# monitor_memory & # Start memory monitoring in the background
# memoryMonitorPid=$!
# YOUR_COMMAND_HERE
# kill $memoryMonitorPid
while true; do
echo "$(date) - Top 5 memory-consuming processes:"
ps -eo pid,comm,%mem --sort=-%mem | head -n 6 # First line is the header, next 5 are top processes
sleep 2
done
}
cypress-run-applitools() {
cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
local flasklog="${HOME}/flask.log"
local port=8081
local cypress="./node_modules/.bin/cypress run"
local browser=${CYPRESS_BROWSER:-chrome}
export CYPRESS_BASE_URL="http://localhost:${port}"
nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
local flaskProcessId=$!
$cypress --spec "cypress/applitools/**/*" --browser "$browser" --headless
say "::group::Flask log for default run"
cat "$flasklog"
say "::endgroup::"
# make sure the program exits
kill $flaskProcessId
}

View File

@@ -1,77 +0,0 @@
name: Bump Python Package
on:
# Can be triggered manually
workflow_dispatch:
inputs:
package:
required: false
description: The python package to bump (all if empty)
group:
required: false
description: The optional dependency group to bump (as defined in pyproject.toml)
limit:
required: true
description: Max number of PRs to open (0 for no limit)
default: 5
extra-flags:
required: false
default: --only-base
description: Additional flags to pass to the bump-python command
#schedule:
# - cron: '0 0 * * *' # Runs daily at midnight UTC
jobs:
bump-python-package:
runs-on: ubuntu-24.04
permissions:
actions: write
contents: write
pull-requests: write
checks: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: true
ref: master
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install uv
run: pip install uv
- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git config --global user.email "action@github.com"
git config --global user.name "GitHub Action"
PACKAGE_OPT=""
if [ -n "${{ github.event.inputs.package }}" ]; then
PACKAGE_OPT="-p ${{ github.event.inputs.package }}"
fi
GROUP_OPT=""
if [ -n "${{ github.event.inputs.group }}" ]; then
GROUP_OPT="-g ${{ github.event.inputs.group }}"
fi
EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
supersetbot bump-python \
--verbose \
--use-current-repo \
--include-subpackages \
--limit ${{ github.event.inputs.limit }} \
$PACKAGE_OPT \
$GROUP_OPT \
$EXTRA_FLAGS

View File

@@ -1,58 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// always use absolute directory
const workspaceDirectory = process.env.GITHUB_WORKSPACE;
const homeDirectory = process.env.HOME;
const assetsConfig = {
path: [`${workspaceDirectory}/superset/static/assets`],
hashFiles: [
`${workspaceDirectory}/superset-frontend/src/**/*`,
`${workspaceDirectory}/superset-frontend/packages/**/*`,
`${workspaceDirectory}/superset-frontend/plugins/**/*`,
`${workspaceDirectory}/superset-frontend/*.js`,
`${workspaceDirectory}/superset-frontend/*.json`,
],
// don't use restore keys as they may restore an invalid older build
restoreKeys: '',
};
// Multi-layer cache definition
module.exports = {
pip: {
path: [`${homeDirectory}/.cache/pip`],
hashFiles: [`${workspaceDirectory}/requirements/*.txt`],
},
npm: {
path: [`${homeDirectory}/.npm`],
hashFiles: [`${workspaceDirectory}/superset-frontend/package-lock.json`],
},
assets: assetsConfig,
// use separate cache for instrumented JS files and regular assets
// one is built with `npm run build`,
// another is built with `npm run build-instrumented`
'instrumented-assets': assetsConfig,
cypress: {
path: [`${homeDirectory}/.cache/Cypress`],
hashFiles: [
`${workspaceDirectory}/superset-frontend/cypress-base/package-lock.json`,
],
},
};
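// Minimal usage sketch (hypothetical consumer; the real caller is the
// cached-dependencies action): pick a cache entry by name and hand its
// `path`/`hashFiles` globs to actions/cache.
//   const caches = require('./cache-config'); // filename assumed
//   const { path, hashFiles } = caches.npm;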

View File

@@ -1,43 +0,0 @@
name: Cancel Duplicates
on:
workflow_run:
workflows:
- "Miscellaneous"
types:
- requested
jobs:
cancel-duplicate-runs:
name: Cancel duplicate workflow runs
runs-on: ubuntu-24.04
permissions:
actions: write
contents: read
steps:
- name: Check number of queued tasks
id: check_queued
env:
GITHUB_TOKEN: ${{ github.token }}
GITHUB_REPO: ${{ github.repository }}
run: |
get_count() {
echo $(curl -s -H "Authorization: token $GITHUB_TOKEN" \
"https://api.github.com/repos/$GITHUB_REPO/actions/runs?status=$1" | \
jq ".total_count")
}
count=$(( $(get_count queued) + $(get_count in_progress) ))
echo "Found $count unfinished jobs."
echo "count=$count" >> $GITHUB_OUTPUT
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
if: steps.check_queued.outputs.count >= 20
uses: actions/checkout@v4
- name: Cancel duplicate workflow runs
if: steps.check_queued.outputs.count >= 20
env:
GITHUB_TOKEN: ${{ github.token }}
GITHUB_REPOSITORY: ${{ github.repository }}
run: |
pip install click requests typing_extensions python-dateutil
python ./scripts/cancel_github_workflows.py

View File

@@ -1,59 +0,0 @@
name: Check python dependencies
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
fetch-depth: 1
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Run uv
if: steps.check.outputs.python
run: ./scripts/uv-pip-compile.sh
- name: Check for uncommitted changes
if: steps.check.outputs.python
run: |
echo "Full diff (for logging/debugging):"
git diff
echo "Filtered diff (excluding comments and whitespace):"
filtered_diff=$(git diff -U0 | grep '^[-+]' | grep -vE '^[-+]{3}' | grep -vE '^[-+][[:space:]]*#' | grep -vE '^[-+][[:space:]]*$' || true)
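# Example (illustrative): a comment-only change like "+ # bumped for CVE fix"
# and blank "+" lines are filtered out, while a real pin change such as
# "+requests==2.32.0" survives and fails the check below.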
echo "$filtered_diff"
if [[ -n "$filtered_diff" ]]; then
echo
echo "ERROR: The pinned dependencies are not up-to-date."
echo "Please run './scripts/uv-pip-compile.sh' and commit the changes."
echo "More info: https://github.com/apache/superset/tree/master/requirements"
exit 1
else
echo "Pinned dependencies are up-to-date."
fi

View File

@@ -1,75 +0,0 @@
name: Check DB migration conflict
on:
push:
paths:
- "superset/migrations/**"
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
paths:
- "superset/migrations/**"
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
check_db_migration_conflict:
name: Check DB migration conflict
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
- name: Check and notify
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
script: |
// API reference: https://octokit.github.io/rest.js
const currentBranch = context.ref.replace('refs/heads/', '');
// Find all pull requests to current branch
const opts = github.rest.pulls.list.endpoint.merge({
owner: context.repo.owner,
repo: context.repo.repo,
base: context.ref,
state: 'open',
sort: 'updated',
per_page: 100,
});
const pulls = await github.paginate(opts);
if (pulls.length > 0) {
console.log(`Found ${pulls.length} open PRs for base branch "${currentBranch}"`)
}
for (const pull of pulls) {
const listFilesOpts = await github.rest.pulls.listFiles.endpoint.merge({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: pull.number,
});
const files = await github.paginate(listFilesOpts);
if (
files.some(x => x.contents_url.includes('/contents/superset/migrations'))
) {
console.log(`PR #${pull.number} "${pull.title}" also added db migration`)
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pull.number,
body:
`# 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️` +
`❗ @${pull.user.login} Your base branch \`${currentBranch}\` has ` +
'also updated `superset/migrations`.\n' +
'\n' +
'**Please consider rebasing your branch and [resolving potential db migration conflicts](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#merging-db-migrations).**',
});
}
}

View File

@@ -1,82 +0,0 @@
name: Claude PR Assistant
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
jobs:
check-permissions:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude'))
runs-on: ubuntu-latest
outputs:
allowed: ${{ steps.check.outputs.allowed }}
steps:
- name: Check if user is allowed
id: check
run: |
# List of allowed users
ALLOWED_USERS="mistercrunch,rusackas"
# Get the commenter's username
COMMENTER="${{ github.event.comment.user.login }}"
echo "Checking permissions for user: $COMMENTER"
# Check if user is in allowed list
if [[ ",$ALLOWED_USERS," == *",$COMMENTER,"* ]]; then
echo "allowed=true" >> $GITHUB_OUTPUT
echo "✅ User $COMMENTER is allowed to use Claude"
else
echo "allowed=false" >> $GITHUB_OUTPUT
echo "❌ User $COMMENTER is not allowed to use Claude"
fi
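# Note: the wrapping commas above make the match exact; ",mistercrunch,rusackas,"
# contains ",rusackas," but not a partial name like ",rusack,", so substring
# usernames cannot slip through.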
deny-access:
needs: check-permissions
if: needs.check-permissions.outputs.allowed == 'false'
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- name: Comment access denied
uses: actions/github-script@v7
with:
script: |
const message = `👋 Hi @${{ github.event.comment.user.login || github.event.review.user.login || github.event.issue.user.login }}!
Thanks for trying to use Claude Code, but currently only certain team members have access to this feature.
If you believe you should have access, please contact a project maintainer.`;
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: message
});
claude-code-action:
needs: check-permissions
if: needs.check-permissions.outputs.allowed == 'true'
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
issues: write
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude PR Action
uses: anthropics/claude-code-action@beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
timeout_minutes: "60"

View File

@@ -1,58 +0,0 @@
name: "CodeQL"
on:
push:
branches: ["master", "[0-9].[0-9]*"]
pull_request:
# The branches below must be a subset of the branches above
branches: ["master"]
schedule:
- cron: "0 4 * * *"
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
analyze:
name: Analyze
runs-on: ubuntu-24.04
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ["python", "javascript"]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
- name: Perform CodeQL Analysis
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@@ -1,71 +0,0 @@
# Dependency Review Action
#
# This Action scans dependency manifest files that change as part of a Pull Request,
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
# Once installed, if the workflow run is marked as required, PRs introducing
# known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: "Dependency Review"
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
permissions:
contents: read
jobs:
dependency-review:
if: github.event_name == 'pull_request'
runs-on: ubuntu-24.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: "Dependency Review"
uses: actions/dependency-review-action@v4
continue-on-error: true
with:
fail-on-severity: critical
# compatible/incompatible licenses addressed here: https://www.apache.org/legal/resolved.html
# find SPDX identifiers here: https://spdx.org/licenses/
deny-licenses: MS-LPL, BUSL-1.1, QPL-1.0, Sleepycat, SSPL-1.0, CPOL-1.02, AGPL-3.0, GPL-1.0+, BSD-4-Clause-UC, NPL-1.0, NPL-1.1, JSON
# pkg:npm/store2@2.14.2
# adding an exception for an ambiguous license on store2, which has been resolved in
# the latest version. It's MIT: https://github.com/nbubna/store/blob/master/LICENSE-MIT
# pkg:npm/applitools/*
# adding exception for all applitools modules (eyes-cypress and its dependencies),
# which has an explicit OSS license approved by ASF
# license: https://applitools.com/legal/open-source-terms-of-use/
# pkg:npm/node-forge@1.3.1
# selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1
python-dependency-liccheck:
# NOTE: Configuration for liccheck lives in our pyproject.toml.
# You cannot use a liccheck.ini file in this workflow.
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
requirements-type: base
- name: "Set up liccheck"
run: |
uv pip install --system liccheck
- name: "Run liccheck"
run: |
# run the checks
liccheck -R output.txt
# Print the report
cat output.txt

View File

@@ -1,140 +0,0 @@
name: Build & publish docker images
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
branches:
- "master"
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
setup_matrix:
runs-on: ubuntu-24.04
outputs:
matrix_config: ${{ steps.set_matrix.outputs.matrix_config }}
steps:
- id: set_matrix
run: |
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev", "lean"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT
cat $GITHUB_OUTPUT  # debug: show the rendered matrix config
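# Example (illustrative): a pull_request event yields matrix_config=["dev", "lean"],
# so the docker-build matrix below runs two builds instead of six.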
docker-build:
name: docker-build
needs: setup_matrix
runs-on: ubuntu-24.04
strategy:
matrix:
build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}
fail-fast: false
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Docker Environment
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
- name: Setup supersetbot
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: ./.github/actions/setup-supersetbot/
- name: Build Docker Image
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Single platform builds in pull_request context to speed things up
if [ "${{ github.event_name }}" = "push" ]; then
PLATFORM_ARG="--platform linux/arm64 --platform linux/amd64"
# can only --load images in single-platform builds
PUSH_OR_LOAD="--push"
elif [ "${{ github.event_name }}" = "pull_request" ]; then
PLATFORM_ARG="--platform linux/amd64"
PUSH_OR_LOAD="--load"
fi
supersetbot docker \
$PUSH_OR_LOAD \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false --tag $IMAGE_TAG" \
$PLATFORM_ARG
# in the context of push (using multi-platform build), we need to pull the image locally
- name: Docker pull
if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
run: docker pull $IMAGE_TAG
- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: |
echo "SHA: ${{ github.sha }}"
echo "IMAGE: $IMAGE_TAG"
docker images $IMAGE_TAG
docker history $IMAGE_TAG
- name: docker-compose sanity check
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
shell: bash
run: |
export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
# This should reuse the CACHED image built in the previous steps
docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
docker compose up superset-init --exit-code-from superset-init
docker-compose-image-tag:
# Run this job only on pushes to master (not for PRs);
# the goal is to check that building the latest image works, which isn't required for every PR push.
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Docker Environment
if: steps.check.outputs.docker
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "false"
install-docker-compose: "true"
- name: docker-compose sanity check
if: steps.check.outputs.docker
shell: bash
run: |
docker compose -f docker-compose-image-tag.yml up superset-init --exit-code-from superset-init

View File

@@ -1,59 +0,0 @@
{
"containerDefinitions": [
{
"name": "superset-ci",
"image": "apache/superset:latest",
"cpu": 0,
"links": [],
"portMappings": [
{
"containerPort": 8080,
"hostPort": 8080,
"protocol": "tcp"
}
],
"essential": true,
"entryPoint": [],
"command": [],
"environment": [
{
"name": "SUPERSET_LOAD_EXAMPLES",
"value": "yes"
},
{
"name": "SUPERSET_PORT",
"value": "8080"
},
{
"name": "SUPERSET_SECRET_KEY",
"value": "super-secret-for-ephemerals"
},
{
"name": "TALISMAN_ENABLED",
"value": "False"
}
],
"mountPoints": [],
"volumesFrom": [],
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-group": "/ecs/superset-ci",
"awslogs-region": "us-west-2",
"awslogs-stream-prefix": "ecs"
}
}
}
],
"family": "superset-ci",
"taskRoleArn": "ecsTaskExecutionRole",
"executionRoleArn": "ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [],
"placementConstraints": [],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "512",
"memory": "1024"
}

View File

@@ -1,39 +0,0 @@
name: Embedded SDK Release
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.NPM_TOKEN != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
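# The expression above renders to the string "true" when NPM_TOKEN is set and
# to "" otherwise, so `-n` tests for the secret's presence without ever
# printing its value.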
build:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
defaults:
run:
working-directory: superset-embedded-sdk
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: './superset-embedded-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm run ci:release
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -1,28 +0,0 @@
name: Embedded SDK PR Checks
on:
pull_request:
paths:
- "superset-embedded-sdk/**"
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
embedded-sdk-test:
runs-on: ubuntu-24.04
defaults:
run:
working-directory: superset-embedded-sdk
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: './superset-embedded-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npm test
- run: npm run build

View File

@@ -1,75 +0,0 @@
name: Cleanup ephemeral envs (PR close)
on:
pull_request_target:
types: [closed]
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
ephemeral-env-cleanup:
needs: config
if: needs.config.outputs.has-secrets
name: Cleanup ephemeral envs
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Delete ECS service
if: steps.describe-services.outputs.active == 'true'
id: delete-service
run: |
aws ecs delete-service \
--cluster superset-ci \
--service pr-${{ github.event.number }}-service \
--force
- name: Login to Amazon ECR
if: steps.describe-services.outputs.active == 'true'
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Delete ECR image tag
if: steps.describe-services.outputs.active == 'true'
id: delete-image-tag
run: |
aws ecr batch-delete-image \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.number }}
- name: Comment (success)
if: steps.describe-services.outputs.active == 'true'
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: 'Ephemeral environment shutdown and build artifacts deleted.'
})

View File

@@ -1,333 +0,0 @@
name: Ephemeral env workflow
# Example manual trigger:
# gh workflow run ephemeral-env.yml --ref fix_ephemerals --field label_name="testenv-up" --field issue_number=666
on:
pull_request_target:
types:
- labeled
workflow_dispatch:
inputs:
label_name:
description: 'Label name to simulate label-based /testenv trigger'
required: true
default: 'testenv-up'
issue_number:
description: 'Issue or PR number'
required: true
jobs:
ephemeral-env-label:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-label
cancel-in-progress: true
name: Evaluate ephemeral env label trigger
runs-on: ubuntu-24.04
permissions:
pull-requests: write
outputs:
slash-command: ${{ steps.eval-label.outputs.result }}
feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
sha: ${{ steps.get-sha.outputs.sha }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Check for the "testenv-up" label
id: eval-label
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
LABEL_NAME="${{ github.event.inputs.label_name }}"
else
LABEL_NAME="${{ github.event.label.name }}"
fi
echo "Evaluating label: $LABEL_NAME"
if [[ "$LABEL_NAME" == "testenv-up" ]]; then
echo "result=up" >> $GITHUB_OUTPUT
else
echo "result=noop" >> $GITHUB_OUTPUT
fi
- name: Get event SHA
id: get-sha
if: steps.eval-label.outputs.result == 'up'
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let prSha;
// If event is workflow_dispatch, use the issue_number from inputs
if (context.eventName === "workflow_dispatch") {
const prNumber = "${{ github.event.inputs.issue_number }}";
if (!prNumber) {
console.log("No PR number found.");
return;
}
// Fetch PR details using the provided issue_number
const { data: pr } = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
prSha = pr.head.sha;
} else {
// If it's not workflow_dispatch, use the PR head sha from the event
prSha = context.payload.pull_request.head.sha;
}
console.log(`PR SHA: ${prSha}`);
core.setOutput("sha", prSha);
- name: Looking for feature flags in PR description
uses: actions/github-script@v7
id: eval-feature-flags
if: steps.eval-label.outputs.result == 'up'
with:
script: |
const description = context.payload.pull_request
? context.payload.pull_request.body || ''
: context.payload.inputs.pr_description || '';
const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = [];
[...description.matchAll(pattern)].forEach(match => {
const config = {
name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2],
};
results.push(config);
});
return results;
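// Example (illustrative): a PR description containing "FEATURE_DASHBOARD_RBAC=true"
// yields [{ name: 'SUPERSET_FEATURE_DASHBOARD_RBAC', value: 'true' }].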
- name: Reply with confirmation comment
uses: actions/github-script@v7
if: steps.eval-label.outputs.result == 'up'
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const action = '${{ steps.eval-label.outputs.result }}';
const user = context.actor;
const runId = context.runId;
const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
const issueNumber = context.payload.pull_request
? context.payload.pull_request.number
: context.payload.inputs.issue_number;
if (!issueNumber) {
throw new Error("Issue number is not available.");
}
const body = `@${user} Processing your ephemeral environment request [here](${workflowUrl}).` +
` Action: **${action}**.` +
` More information on [how to use or configure ephemeral environments]` +
`(https://superset.apache.org/docs/contributing/howtos/#github-ephemeral-environments)`;
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
ephemeral-docker-build:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}-build
cancel-in-progress: true
needs: ephemeral-env-label
if: needs.ephemeral-env-label.outputs.slash-command == 'up'
name: ephemeral-docker-build
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
uses: actions/checkout@v4
with:
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
persist-credentials: false
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
install-docker-compose: "false"
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
supersetbot docker \
--push \
--load \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE" \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Load, tag and push image to ECR
id: push-image
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ needs.ephemeral-env-label.outputs.sha }}-ci
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-$PR_NUMBER-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY
ephemeral-env-up:
needs: [ephemeral-env-label, ephemeral-docker-build]
if: needs.ephemeral-env-label.outputs.slash-command == 'up'
name: Spin up an ephemeral environment
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Check target image exists in ECR
id: check-image
continue-on-error: true
env:
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-$PR_NUMBER-ci
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
script: |
const errMsg = '@${{ github.actor }} Container image not yet published for this PR. Please try again when the build is complete.';
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.pull_request.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
});
core.setFailed(errMsg);
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-ci
- name: Update env vars in the Amazon ECS task definition
run: |
cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-label.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }}
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
id: create-service
if: steps.describe-services.outputs.active != 'true'
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
PR_NUMBER: ${{ github.event.inputs.issue_number || github.event.pull_request.number }}
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-$PR_NUMBER-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10
- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks[0].attachments[0].details | map(select(.name=="networkInterfaceId"))[0].value')" >> $GITHUB_OUTPUT
- name: Get public IP
id: get-ip
run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const issue_number = context.payload.inputs?.issue_number || context.issue.number;
github.rest.issues.createComment({
issue_number: issue_number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `@${{ github.actor }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are 'admin'/'admin'. Please allow several minutes for bootstrapping and startup.`
});
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
with:
github-token: ${{github.token}}
script: |
const issue_number = context.payload.inputs?.issue_number || context.issue.number;
github.rest.issues.createComment({
issue_number: issue_number,
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.actor }} Ephemeral environment creation failed. Please check the Actions logs for details.'
})

View File

@@ -1,65 +0,0 @@
name: Generate FOSSA report
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.FOSSA_API_KEY != '' ) || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
license_check:
needs: config
if: needs.config.outputs.has-secrets
name: Generate Report
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: "temurin"
java-version: "11"
- name: Generate fossa report
env:
FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
run: |
set -eo pipefail
if [[ "${{github.event_name}}" != "pull_request" ]]; then
./scripts/fossa.sh
exit 0
fi
URL="https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files"
FILES=$(curl -s -X GET -G $URL | jq -r '.[] | .filename')
cat<<EOF
CHANGED FILES:
$FILES
EOF
if [[ "${FILES}" =~ (.*package*\.json|requirements\/[a-z_-]+\.txt|setup\.py) ]]; then
echo "Detected dependency changes... running fossa check"
./scripts/fossa.sh
else
echo "No dependency changes... skiping fossa check"
fi
shell: bash

View File

@@ -1,28 +0,0 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Inspired by https://github.com/mpalmer/action-validator?tab=readme-ov-file#pre-commit-hook-example
echo "Running pre-commit hook for GitHub Actions: https://github.com/mpalmer/action-validator"
for action in $(git ls-files .github/ | grep -E '^\.github/(workflows|actions)/.*\.ya?ml$'); do
if action-validator "$action"; then
echo "✅ $action"
else
echo "❌ $action failed validation"
exit 1
fi
done

View File

@@ -1,28 +0,0 @@
name: Validate All GitHub Actions
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
jobs:
validate-all-ghas:
runs-on: ubuntu-24.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install Dependencies
run: npm install -g @action-validator/core @action-validator/cli --save-dev
- name: Run Script
run: bash .github/workflows/github-action-validator.sh

View File

@@ -1,34 +0,0 @@
name: supersetbot orglabel based on author
on:
issues:
types: [opened, edited]
pull_request:
types: [opened, edited]
jobs:
superbot-orglabel:
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Execute supersetbot orglabel command
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Label the issue with the appropriate org using supersetbot
# - this requires the author to be publicly associated with their org
# - and for the org to be listed in `supersetbot/src/metadata.js`
supersetbot orglabel --issue ${{ github.event.number }} --repo ${{ github.repository }} || true

View File

@@ -1,21 +0,0 @@
name: "Pull Request Labeler"
on:
- pull_request_target
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-24.04
steps:
- uses: actions/labeler@v5
with:
sync-labels: true
# TODO: run scripts based on labels!
# - id: run-translation-scripts
# if: contains(steps.label-the-PR.outputs.all-labels, 'i18n')
# run: |
# echo "Running translation scripts"
# # Generate .pot -> .po -> .json files

View File

@@ -1,37 +0,0 @@
name: Tags
on:
release:
types: [published] # This makes it run only when a new release is published
jobs:
latest-release:
name: Add/update tag to new release
runs-on: ubuntu-24.04
permissions:
contents: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for latest tag
id: latest-tag
run: |
source ./scripts/tag_latest_release.sh "${{ github.event.release.tag_name }}" --dry-run
- name: Configure Git
run: |
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Run latest-tag
uses: ./.github/actions/latest-tag
if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
with:
description: Superset latest release
tag-name: latest
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -1,28 +0,0 @@
name: License Template Check
on:
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
license_check:
name: License Check
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: '11'
- name: Run license check
run: ./scripts/check_license.sh

View File

@@ -1,25 +0,0 @@
name: Hold Label Check
on:
pull_request:
types: [labeled, unlabeled, opened, reopened, synchronize]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
check-hold-label:
runs-on: ubuntu-24.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v7
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const payload = context.payload.pull_request
const holdLabelPresent = !!payload.labels.find(label => label.name.includes('hold'))
if (holdLabelPresent) {
core.setFailed('Hold label is present, merge is blocked.')
}

View File

@@ -1,31 +0,0 @@
name: PR Lint
on:
pull_request:
# By default, a workflow only runs when a pull_request's activity type is opened, synchronize, or reopened. We
# explicitly override here so that PR titles are re-linted when the PR text content is edited.
#
# Possible values: https://help.github.com/en/actions/reference/events-that-trigger-workflows#pull-request-event-pull_request
types: [opened, edited, reopened, synchronize]
jobs:
lint-check:
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- uses: ./.github/actions/pr-lint-action
with:
title-regex: "^(build|chore|ci|docs|feat|fix|perf|refactor|style|test|other)(\\(.+\\))?(\\!)?:\\s.+"
on-failed-regex-fail-action: true
on-failed-regex-request-changes: false
on-failed-regex-create-review: false
on-failed-regex-comment:
"Please format your PR title to match: `%regex%`!"
repo-token: "${{ github.token }}"

View File

@@ -1,85 +0,0 @@
name: pre-commit checks
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
pre-commit:
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["current", "previous", "next"]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
- name: Enable brew and helm-docs
# Add brew to the path - see https://github.com/actions/runner-images/issues/6283
run: |
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "HOMEBREW_PREFIX=$HOMEBREW_PREFIX" >>"${GITHUB_ENV}"
echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}"
echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
brew install norwoodj/tap/helm-docs
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install Frontend Dependencies
run: |
cd superset-frontend
npm ci
- name: Install Docs Dependencies
run: |
cd docs
yarn install --immutable
- name: Cache pre-commit environments
uses: actions/cache@v4
with:
path: ~/.cache/pre-commit
key: pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
pre-commit-v2-${{ runner.os }}-py${{ matrix.python-version }}-
- name: pre-commit
run: |
set +e # Don't exit immediately on failure
export SKIP=eslint-frontend,type-checking-frontend
pre-commit run --all-files
PRE_COMMIT_EXIT_CODE=$?
git diff --quiet --exit-code
GIT_DIFF_EXIT_CODE=$?
if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ] || [ "${GIT_DIFF_EXIT_CODE}" -ne 0 ]; then
if [ "${PRE_COMMIT_EXIT_CODE}" -ne 0 ]; then
echo "❌ Pre-commit check failed (exit code: ${EXIT_CODE})."
else
echo "❌ Git working directory is dirty."
echo "📌 This likely means that pre-commit made changes that were not committed."
echo "🔍 Modified files:"
git diff --name-only
fi
echo "🚒 To prevent/address this CI issue, please install/use pre-commit locally."
echo "📖 More details here: https://superset.apache.org/docs/contributing/development#git-hooks"
exit 1
fi

View File

@@ -1,70 +0,0 @@
name: Prefer TypeScript
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
paths:
- "superset-frontend/src/**"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
paths:
- "superset-frontend/src/**"
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
prefer_typescript:
if: github.ref == 'refs/heads/master' && github.event_name == 'pull_request'
name: Prefer TypeScript
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Get changed files
id: changed
uses: ./.github/actions/file-changes-action
with:
githubToken: ${{ github.token }}
- name: Determine if a .js or .jsx file was added
id: check
run: |
js_files_added() {
jq -r '
map(
select(
endswith(".js") or endswith(".jsx")
)
) | join("\n")
' ${HOME}/files_added.json
}
echo "js_files_added=$(js_files_added)" >> $GITHUB_OUTPUT
- if: steps.check.outputs.js_files_added
name: Add Comment to PR
uses: ./.github/actions/comment-on-pr
continue-on-error: true
env:
GITHUB_TOKEN: ${{ github.token }}
with:
msg: |
### WARNING: Prefer TypeScript
Looks like your PR contains new `.js` or `.jsx` files:
```
${{steps.check.outputs.js_files_added}}
```
As decided in [SIP-36](https://github.com/apache/superset/issues/9101), all new frontend code should be written in TypeScript. Please convert the above files to TypeScript and then re-request review.

View File

@@ -1,108 +0,0 @@
name: release-workflow
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
build:
needs: config
if: needs.config.outputs.has-secrets
name: Bump version and publish package(s)
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
with:
# pulls all commits (needed for lerna / semantic release to correctly version)
fetch-depth: 0
- name: Get tags and filter trigger tags
run: |
if ! git fetch --depth=1 origin "+refs/tags/*:refs/tags/*"; then
echo "::notice title=Workflow skipped::No tags present in repository"
exit
fi
echo "HAS_TAGS=1" >> $GITHUB_ENV"
git fetch --prune --unshallow
git tag -d `git tag | grep -E '^trigger-'`
- name: Install Node.js
if: env.HAS_TAGS
uses: actions/setup-node@v4
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Cache npm
if: env.HAS_TAGS
uses: actions/cache@v4
with:
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.OS }}-node-
${{ runner.OS }}-
- name: Get npm cache directory path
if: env.HAS_TAGS
id: npm-cache-dir-path
run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
- name: Cache npm
if: env.HAS_TAGS
uses: actions/cache@v4
id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.npm-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-npm-
- name: Install dependencies
if: env.HAS_TAGS
working-directory: ./superset-frontend
run: npm ci
- name: Run unit tests
if: env.HAS_TAGS
working-directory: ./superset-frontend
run: npm run test -- plugins packages
- name: Build packages
if: env.HAS_TAGS
working-directory: ./superset-frontend
run: npm run plugins:build
- name: Configure npm and git
if: env.HAS_TAGS
run: |
echo "@superset-ui:registry=https://registry.npmjs.org/" > .npmrc
echo "registry=https://registry.npmjs.org/" >> .npmrc
echo "//registry.npmjs.org/:_authToken=\${NPM_TOKEN}" >> $HOME/.npmrc 2> /dev/null
npm whoami
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
GITHUB_TOKEN: ${{ github.token }}
- name: Bump version and publish package(s)
if: env.HAS_TAGS
working-directory: ./superset-frontend
run: |
git tag -d `git tag | grep -E '^trigger-'`
npm run plugins:release-from-tag
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
GITHUB_TOKEN: ${{ github.token }}
GH_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}

View File

@@ -1,91 +0,0 @@
name: Applitools Cypress
on:
schedule:
- cron: "0 1 * * *"
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.APPLITOOLS_API_KEY != '' && secrets.APPLITOOLS_API_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
cypress-applitools:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
browser: ["chrome"]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
APPLITOOLS_APP_NAME: Superset
APPLITOOLS_API_KEY: ${{ secrets.APPLITOOLS_API_KEY }}
APPLITOOLS_BATCH_ID: ${{ github.sha }}
APPLITOOLS_BATCH_NAME: Superset Cypress
services:
postgres:
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
- 15432:5432
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
ref: master
- name: Setup Python
uses: ./.github/actions/setup-backend/
- name: Import test data
uses: ./.github/actions/cached-dependencies
with:
run: testdata
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
uses: ./.github/actions/cached-dependencies
with:
run: npm-install
- name: Build javascript packages
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Setup Postgres
uses: ./.github/actions/cached-dependencies
with:
run: setup-postgres
- name: Install cypress
uses: ./.github/actions/cached-dependencies
with:
run: cypress-install
- name: Run Cypress
uses: ./.github/actions/cached-dependencies
env:
CYPRESS_BROWSER: ${{ matrix.browser }}
with:
run: cypress-run-applitools

View File

@@ -1,52 +0,0 @@
name: Applitools Storybook
on:
schedule:
- cron: "0 0 * * *"
env:
APPLITOOLS_APP_NAME: Superset
APPLITOOLS_API_KEY: ${{ secrets.APPLITOOLS_API_KEY }}
APPLITOOLS_BATCH_ID: ${{ github.sha }}
APPLITOOLS_BATCH_NAME: Superset Storybook
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.APPLITOOLS_API_KEY != '' && secrets.APPLITOOLS_API_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
cron:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
ref: master
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install eyes-storybook dependencies
uses: ./.github/actions/cached-dependencies
with:
run: eyes-storybook-dependencies
- name: Install NPM dependencies
uses: ./.github/actions/cached-dependencies
with:
run: npm-install
- name: Run Applitools Eyes-Storybook
working-directory: ./superset-frontend
run: npx eyes-storybook -u https://superset-storybook.netlify.app/

View File

@@ -1,67 +0,0 @@
name: Superset CLI tests
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
test-load-examples:
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
REDIS_PORT: 16379
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
services:
postgres:
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
# Use custom ports for services to avoid accidentally connecting to
# GitHub action runner's default installations
- 15432:5432
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Setup Postgres
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: setup-postgres
- name: superset init
if: steps.check.outputs.python
run: |
pip install -e .
superset db upgrade
superset load_test_users
- name: superset load_examples
if: steps.check.outputs.python
run: |
# load examples without test data
superset load_examples --load-big-data

View File

@@ -1,75 +0,0 @@
name: Docs Deployment
on:
push:
paths:
- "docs/**"
- "README.md"
branches:
- "master"
workflow_dispatch: {}
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
build-deploy:
needs: config
if: needs.config.outputs.has-secrets
name: Build & Deploy
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: './docs/.nvmrc'
- name: Setup Python
uses: ./.github/actions/setup-backend/
- uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '21'
- name: Install Graphviz
run: sudo apt-get install -y graphviz
- name: Compute Entity Relationship diagram (ERD)
env:
SUPERSET_SECRET_KEY: not-a-secret
run: |
python scripts/erd/erd.py
curl -L http://sourceforge.net/projects/plantuml/files/1.2023.7/plantuml.1.2023.7.jar/download > ~/plantuml.jar
java -jar ~/plantuml.jar -v -tsvg -r -o "${{ github.workspace }}/docs/static/img/" "${{ github.workspace }}/scripts/erd/erd.puml"
- name: yarn install
working-directory: docs
run: |
yarn install --check-cache
- name: yarn build
working-directory: docs
run: |
yarn build
- name: deploy docs
uses: ./.github/actions/github-action-push-to-another-repository
env:
API_TOKEN_GITHUB: ${{ secrets.SUPERSET_SITE_BUILD }}
with:
source-directory: "./docs/build"
destination-github-username: "apache"
destination-repository-name: "superset-site"
target-branch: "asf-site"
commit-message: "deploying docs: ${{ github.event.head_commit.message }} (apache/superset@${{ github.sha }})"
user-email: dev@superset.apache.org

View File

@@ -1,75 +0,0 @@
name: Docs Testing
on:
pull_request:
paths:
- "docs/**"
- ".github/workflows/superset-docs-verify.yml"
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
linkinator:
# See docs here: https://github.com/marketplace/actions/linkinator
name: Link Checking
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# Do not bump this linkinator-action version without opening
# an ASF Infra ticket to allow the new version first!
- uses: JustinBeckwith/linkinator-action@v1.11.0
continue-on-error: true # This will make the job advisory (non-blocking, no red X)
with:
paths: "**/*.md, **/*.mdx, !superset-frontend/CHANGELOG.md"
linksToSkip: >-
^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+,
http://localhost:8088/,
http://127.0.0.1:3000/,
http://localhost:9001/,
https://charts.bitnami.com/bitnami,
https://www.li.me/,
https://www.fanatics.com/,
https://tails.com/gb/,
https://www.techaudit.info/,
https://avetilearning.com/,
https://www.udemy.com/,
https://trustmedis.com/,
http://theiconic.com.au/,
https://dev.mysql.com/doc/refman/5.7/en/innodb-limits.html,
^https://img\.shields\.io/.*,
https://vkusvill.ru/,
https://www.linkedin.com/in/mark-thomas-b16751158/,
https://theiconic.com.au/,
https://wattbewerb.de/,
https://timbr.ai/,
https://opensource.org/license/apache-2-0,
https://www.plaidcloud.com/
build-deploy:
name: Build & Deploy
runs-on: ubuntu-24.04
defaults:
run:
working-directory: docs
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: './docs/.nvmrc'
- name: yarn install
run: |
yarn install --check-cache
- name: yarn typecheck
run: |
yarn typecheck
- name: yarn build
run: |
yarn build

View File

@@ -1,153 +0,0 @@
name: E2E
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
workflow_dispatch:
inputs:
use_dashboard:
description: 'Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]. You MUST provide a branch and/or PR number below for this to work.'
required: false
default: 'false'
ref:
description: 'The branch or tag to checkout'
required: false
default: ''
pr_id:
description: 'The pull request ID to checkout'
required: false
default: ''
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
cypress-matrix:
# One test flakes on ubuntu-24.04 for unknown reasons, so this is the only GHA job still on 22.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: read
strategy:
# When one test fails, DO NOT cancel the other parallel_id jobs,
# because that would kill their Cypress processes and leave the
# Dashboard hanging: https://github.com/cypress-io/github-action/issues/48
fail-fast: false
matrix:
parallel_id: [0, 1, 2, 3, 4, 5]
browser: ["chrome"]
app_root: ["", "/app/prefix"]
env:
SUPERSET_ENV: development
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
# Only use dashboard when explicitly requested via workflow_dispatch
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard == 'true' || 'false' }}
services:
postgres:
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
- 15432:5432
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
# -------------------------------------------------------
# Conditional checkout based on context
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
uses: actions/checkout@v4
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
uses: actions/checkout@v4
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
submodules: recursive
# -------------------------------------------------------
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python || steps.check.outputs.frontend
- name: Setup postgres
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: setup-postgres
- name: Import test data
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: testdata
- name: Setup Node.js
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install npm dependencies
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: npm-install
- name: Build javascript packages
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Install cypress
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: cypress-install
- name: Run Cypress
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
env:
CYPRESS_BROWSER: ${{ matrix.browser }}
PARALLEL_ID: ${{ matrix.parallel_id }}
PARALLELISM: 6
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
NODE_OPTIONS: "--max-old-space-size=4096"
with:
run: cypress-run-all ${{ env.USE_DASHBOARD }} ${{ matrix.app_root }}
- name: Set safe app root
if: failure()
id: set-safe-app-root
run: |
APP_ROOT="${{ matrix.app_root }}"
SAFE_APP_ROOT=${APP_ROOT//\//_}
echo "safe_app_root=$SAFE_APP_ROOT" >> $GITHUB_OUTPUT
- name: Upload Artifacts
uses: actions/upload-artifact@v4
if: failure()
with:
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
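The matrix fans each browser run out across six runners, and the actual spec partitioning happens inside the cached-dependencies action's cypress-run-all script, driven by PARALLEL_ID and PARALLELISM. For illustration only, a round-robin split could look like this sketch; the find root and spec glob are assumptions, not the action's real logic:

- name: Select spec slice (hypothetical sketch)
  shell: bash
  env:
    PARALLEL_ID: ${{ matrix.parallel_id }}
    PARALLELISM: 6
  run: |
    # Runner k takes every PARALLELISM-th spec file, offset by PARALLEL_ID.
    # The real partitioning lives inside the cached-dependencies action.
    find superset-frontend/cypress-base/cypress -name '*.test.*' | sort |
      awk -v id="$PARALLEL_ID" -v n="$PARALLELISM" 'NR % n == id'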

View File

@@ -1,169 +0,0 @@
name: "Frontend Build CI (unit tests, linting & sanity checks)"
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
env:
TAG: apache/superset:GHA-${{ github.run_id }}
jobs:
frontend-build:
runs-on: ubuntu-24.04
outputs:
should-run: ${{ steps.check.outputs.frontend }}
steps:
- name: Checkout Code
uses: actions/checkout@v4
with:
persist-credentials: false
fetch-depth: 0
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
- name: Check for File Changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
if: steps.check.outputs.frontend
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "git rev-parse --short HEAD"
git rev-parse --short HEAD
echo "git show -s --format=raw HEAD"
git show -s --format=raw HEAD
docker buildx build \
-t $TAG \
--cache-from=type=registry,ref=apache/superset-cache:3.10-slim-bookworm \
--target superset-node-ci \
.
- name: Save Docker Image as Artifact
if: steps.check.outputs.frontend
run: |
docker save $TAG | gzip > docker-image.tar.gz
- name: Upload Docker Image Artifact
if: steps.check.outputs.frontend
uses: actions/upload-artifact@v4
with:
name: docker-image
path: docker-image.tar.gz
sharded-jest-tests:
needs: frontend-build
if: needs.frontend-build.outputs.should-run == 'true'
strategy:
matrix:
shard: [1, 2, 3, 4, 5, 6, 7, 8]
fail-fast: false
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v4
with:
name: docker-image
- name: Load Docker Image
run: docker load < docker-image.tar.gz
- name: npm run test with coverage
run: |
mkdir -p ${{ github.workspace }}/superset-frontend/coverage
docker run \
-v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \
--rm $TAG \
bash -c \
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
- name: Upload Coverage Artifact
uses: actions/upload-artifact@v4
with:
name: coverage-artifacts-${{ matrix.shard }}
path: superset-frontend/coverage
report-coverage:
needs: [frontend-build, sharded-jest-tests]
if: needs.frontend-build.outputs.should-run == 'true'
runs-on: ubuntu-24.04
steps:
- name: Download Coverage Artifacts
uses: actions/download-artifact@v4
with:
pattern: coverage-artifacts-*
path: coverage/
- name: Show Files
run: find coverage/
- name: Merge Code Coverage
run: npx nyc merge coverage/ merged-output/coverage-summary.json
- name: Upload Code Coverage
uses: codecov/codecov-action@v5
with:
flags: javascript
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
files: merged-output/coverage-summary.json
slug: apache/superset
lint-frontend:
needs: frontend-build
if: needs.frontend-build.outputs.should-run == 'true'
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v4
with:
name: docker-image
- name: Load Docker Image
run: |
docker load < docker-image.tar.gz
- name: eslint
run: |
docker run --rm $TAG bash -c \
"npm i && npm run eslint -- . --quiet"
- name: tsc
run: |
docker run --rm $TAG bash -c \
"npm run type"
validate-frontend:
needs: frontend-build
if: needs.frontend-build.outputs.should-run == 'true'
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v4
with:
name: docker-image
- name: Load Docker Image
run: docker load < docker-image.tar.gz
- name: Build Plugins Packages
run: |
docker run --rm $TAG bash -c \
"npm run plugins:build"
- name: Build Plugins Storybook
run: |
docker run --rm $TAG bash -c \
"npm run plugins:build-storybook"

View File

@@ -1,55 +0,0 @@
name: "Helm: lint and test charts"
on:
pull_request:
types: [opened, edited, reopened, synchronize]
paths:
- "helm/**"
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
lint-test:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4
with:
version: v3.16.4
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
install-superset: 'false'
- name: Set up chart-testing
uses: ./.github/actions/chart-testing-action
- name: Run chart-testing (list-changed)
id: list-changed
run: |
changed=$(ct list-changed --print-config)
if [[ -n "$changed" ]]; then
echo "changed=true" >> $GITHUB_OUTPUT
fi
env:
CT_CHART_DIRS: helm
CT_SINCE: HEAD
- name: Run chart-testing (lint)
run: ct lint --print-config
env:
CT_CHART_DIRS: helm
CT_LINT_CONF: lintconf.yaml
CT_SINCE: HEAD
CT_CHART_REPOS: bitnami=https://charts.bitnami.com/bitnami
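chart-testing is configured here entirely through CT_* environment variables rather than flags. A sketch of the same invocation collapsed into a single step, assuming ct is on the PATH (as the chart-testing-action arranges):

- name: Chart-testing invocation (sketch)
  shell: bash
  run: |
    # Same configuration the workflow passes via per-step env vars.
    export CT_CHART_DIRS=helm
    export CT_SINCE=HEAD
    export CT_LINT_CONF=lintconf.yaml
    export CT_CHART_REPOS=bitnami=https://charts.bitnami.com/bitnami
    ct list-changed --print-config
    ct lint --print-config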

View File

@@ -1,125 +0,0 @@
# This workflow automates the release process for Helm charts.
# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
# allowing the changes to be reviewed and merged manually.
name: "Helm: release charts"
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
paths:
- "helm/**"
workflow_dispatch:
inputs:
ref:
description: "The branch, tag, or commit SHA to check out"
required: false
default: "master"
jobs:
release:
runs-on: ubuntu-24.04
permissions:
contents: write
pull-requests: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
submodules: recursive
fetch-depth: 0
- name: Configure Git
run: |
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Install Helm
uses: azure/setup-helm@v4
with:
version: v3.5.4
- name: Add bitnami repo dependency
run: helm repo add bitnami https://charts.bitnami.com/bitnami
- name: Fetch/list all tags
run: |
# Debugging tags
git fetch --tags --force
git tag -d superset-helm-chart-0.13.4 || true
echo "DEBUG TAGS"
git show-ref --tags
- name: Create unique pages branch name
id: vars
run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
- name: Force recreate branch from gh-pages
run: |
# Ensure a clean working directory
git reset --hard
git clean -fdx
git checkout -b local_gha_temp
git submodule update
# Fetch the latest gh-pages branch
git fetch origin gh-pages
# Check out and reset the target branch based on gh-pages
git checkout -B ${{ env.branch_name }} origin/gh-pages
# Remove submodules from the branch
git submodule deinit -f --all
# Force push to the remote branch
git push origin ${{ env.branch_name }} --force
# Return to the original branch
git checkout local_gha_temp
- name: Restore submodules
run: |
git submodule update
cat .github/actions/chart-releaser-action/action.yml
- name: Run chart-releaser
uses: ./.github/actions/chart-releaser-action
with:
version: v1.6.0
charts_dir: helm
mark_as_latest: false
pages_branch: ${{ env.branch_name }}
env:
CR_TOKEN: "${{ github.token }}"
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
- name: Open Pull Request
uses: actions/github-script@v7
with:
script: |
const branchName = '${{ env.branch_name }}';
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
if (!branchName) {
throw new Error("Branch name is not defined.");
}
const pr = await github.rest.pulls.create({
owner,
repo,
title: `Helm chart release for ${branchName}`,
head: branchName,
base: "gh-pages", // Adjust if the target branch is different
body: `This PR releases Helm charts to the gh-pages branch.`,
});
core.info(`Pull request created: ${pr.data.html_url}`);
env:
BRANCH_NAME: ${{ env.branch_name }}
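As the header comment notes, publishing works by force-recreating a short-lived branch from gh-pages, letting chart-releaser commit there, and opening a PR for manual review. Stripped to its git essentials, the branch dance is roughly the following sketch (the branch name is a placeholder):

- name: Recreate publish branch from gh-pages (sketch)
  shell: bash
  run: |
    # Start from a clean tree, then base a throwaway publish branch on gh-pages.
    git reset --hard && git clean -fdx
    git fetch origin gh-pages
    git checkout -B helm-publish-example origin/gh-pages
    git push origin helm-publish-example --force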

View File

@@ -1,188 +0,0 @@
# Python integration tests
name: Python-Integration
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
test-mysql:
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
REDIS_PORT: 16379
SUPERSET__SQLALCHEMY_DATABASE_URI: |
mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
services:
mysql:
image: mysql:8.0
env:
MYSQL_ROOT_PASSWORD: root
ports:
- 13306:3306
options: >-
--health-cmd="mysqladmin ping --silent"
--health-interval=10s
--health-timeout=5s
--health-retries=5
redis:
image: redis:7-alpine
options: --entrypoint redis-server
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
- name: Setup MySQL
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: setup-mysql
- name: Start Celery worker
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: celery-worker
- name: Python integration tests (MySQL)
if: steps.check.outputs.python
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:
flags: python,mysql
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-postgres:
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["current", "previous", "next"]
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
REDIS_PORT: 16379
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
services:
postgres:
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
# Use custom ports for services to avoid accidentally connecting to
# GitHub action runner's default installations
- 15432:5432
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
with:
python-version: ${{ matrix.python-version }}
- name: Setup Postgres
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: |
setup-postgres
- name: Start Celery worker
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: celery-worker
- name: Python integration tests (PostgreSQL)
if: steps.check.outputs.python
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:
flags: python,postgres
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-sqlite:
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
REDIS_PORT: 16379
SUPERSET__SQLALCHEMY_DATABASE_URI: |
sqlite:///${{ github.workspace }}/.temp/superset.db?check_same_thread=true
SUPERSET__SQLALCHEMY_EXAMPLES_URI: |
sqlite:///${{ github.workspace }}/.temp/examples.db?check_same_thread=true
services:
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
- name: Install dependencies
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: |
# sqlite needs this working directory
mkdir ${{ github.workspace }}/.temp
- name: Start Celery worker
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: celery-worker
- name: Python integration tests (SQLite)
if: steps.check.outputs.python
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:
flags: python,sqlite
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -1,152 +0,0 @@
# Python Presto/Hive unit tests
name: Python Presto/Hive
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
test-postgres-presto:
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
REDIS_PORT: 16379
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
services:
postgres:
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
# Use custom ports for services to avoid accidentally connecting to
# GitHub action runner's default installations
- 15432:5432
presto:
image: starburstdata/presto:350-e.6
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
# Use custom ports for services to avoid accidentally connecting to
# GitHub action runner's default installations
- 15433:8080
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python == 'true'
- name: Setup Postgres
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: |
echo "${{ steps.check.outputs.python }}"
setup-postgres
- name: Start Celery worker
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: celery-worker
- name: Python unit tests (PostgreSQL)
if: steps.check.outputs.python
run: |
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:
flags: python,presto
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-postgres-hive:
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
REDIS_PORT: 16379
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
SUPERSET__SQLALCHEMY_EXAMPLES_URI: hive://localhost:10000/default
UPLOAD_FOLDER: /tmp/.superset/uploads/
services:
postgres:
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
ports:
# Use custom ports for services to avoid accidentally connecting to
# GitHub action runner's default installations
- 15432:5432
redis:
image: redis:7-alpine
ports:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Create csv upload directory
if: steps.check.outputs.python
run: sudo mkdir -p /tmp/.superset/uploads
- name: Give write access to the csv upload directory
if: steps.check.outputs.python
run: sudo chown -R $USER:$USER /tmp/.superset
- name: Start hadoop and hive
if: steps.check.outputs.python
run: docker compose -f scripts/databases/hive/docker-compose.yml up -d
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
- name: Setup Postgres
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: setup-postgres
- name: Start Celery worker
if: steps.check.outputs.python
uses: ./.github/actions/cached-dependencies
with:
run: celery-worker
- name: Python unit tests (PostgreSQL)
if: steps.check.outputs.python
run: |
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:
flags: python,hive
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -1,60 +0,0 @@
# Python unit tests
name: Python-Unit
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
unit-tests:
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["previous", "current", "next"]
env:
PYTHONPATH: ${{ github.workspace }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
uses: ./.github/actions/setup-backend/
if: steps.check.outputs.python
with:
python-version: ${{ matrix.python-version }}
- name: Python unit tests
if: steps.check.outputs.python
env:
SUPERSET_TESTENV: true
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear --maxfail=50
- name: Python 100% coverage unit tests
if: steps.check.outputs.python
env:
SUPERSET_TESTENV: true
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
uses: codecov/codecov-action@v5
with:
flags: python,unit
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -1,67 +0,0 @@
name: Translations
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
frontend-check-translations:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Node.js
if: steps.check.outputs.frontend
uses: actions/setup-node@v4
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install dependencies
if: steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: npm-install
- name: lint
if: steps.check.outputs.frontend
working-directory: ./superset-frontend
run: |
npm run build-translation
babel-extract:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Test babel extraction
if: steps.check.outputs.python
run: ./scripts/translations/babel_update.sh

View File

@@ -1,44 +0,0 @@
name: WebSocket server
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
paths:
- "superset-websocket/**"
pull_request:
paths:
- "superset-websocket/**"
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
app-checks:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install dependencies
working-directory: ./superset-websocket
run: npm ci
- name: eslint
working-directory: ./superset-websocket
run: npm run eslint -- . --quiet
- name: typescript checks
working-directory: ./superset-websocket
run: npm run type
- name: prettier
working-directory: ./superset-websocket
run: npm run prettier-check
- name: unit tests
working-directory: ./superset-websocket
run: npm run test
- name: build
working-directory: ./superset-websocket
run: npm run build

View File

@@ -1,56 +0,0 @@
name: SupersetBot Workflow
on:
issue_comment:
types: [created, edited]
# Making the workflow testable since `issue_comment` only triggers on
# the default branch
workflow_dispatch:
inputs:
comment_body:
description: 'Comment Body'
required: true
type: string
jobs:
supersetbot:
runs-on: ubuntu-24.04
if: >
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: Quickly add thumbs up!
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
uses: actions/github-script@v7
with:
script: |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
await github.rest.reactions.createForIssueComment({
owner,
repo,
comment_id: context.payload.comment.id,
content: '+1'
});
- name: "Checkout ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Execute custom Node.js script
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
COMMENT_BODY: ${{ github.event.comment.body }}${{ github.event.inputs.comment_body }}
run: |
supersetbot run "$COMMENT_BODY"

View File

@@ -1,134 +0,0 @@
name: Publish a Release
on:
release:
types: [published, edited]
# Can be triggered manually
workflow_dispatch:
inputs:
release:
required: true
description: The version to generate
git-ref:
required: true
description: The git reference to checkout prior to running the docker build
force-latest:
required: true
type: choice
default: 'false'
description: Whether to force a latest tag on the release
options:
- 'true'
- 'false'
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
docker-release:
needs: config
if: needs.config.outputs.has-secrets
name: docker-release
runs-on: ubuntu-24.04
strategy:
matrix:
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
fail-fast: false
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
install-docker-compose: "false"
build: "true"
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Execute custom Node.js script
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE="${{ github.event.release.tag_name }}"
FORCE_LATEST=""
EVENT="${{github.event_name}}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
RELEASE="${{ github.event.inputs.release }}"
if [ "${{ github.event.inputs.force-latest }}" = "true" ]; then
FORCE_LATEST="--force-latest"
fi
git checkout "${{ github.event.inputs.git-ref }}"
EVENT="release"
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--platform "linux/arm64" \
--platform "linux/amd64"
# Returning to master to support closing setup-supersetbot
git checkout master
update-prs-with-release-info:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Label the PRs with the right release-related labels
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
export GITHUB_ACTOR=""
git fetch --all --tags
git checkout master
RELEASE="${{ github.event.release.tag_name }}"
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
# in the case of a manually-triggered run, read release from input
RELEASE="${{ github.event.inputs.release }}"
fi
supersetbot release-label $RELEASE

View File

@@ -1,47 +0,0 @@
name: Upload Technical Debt Metrics to Google Sheets
on:
push:
branches:
- master
- "[0-9].[0-9]*"
jobs:
config:
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.GSHEET_KEY != '' ) || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
process-and-upload:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
name: Generate Reports
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: './superset-frontend/.nvmrc'
- name: Install Dependencies
run: npm ci
working-directory: ./superset-frontend
- name: Run Script
env:
SPREADSHEET_ID: "1oABNnzxJYzwUrHjr_c9wfYEq9dFL1ScVof9LlaAdxvo"
SERVICE_ACCOUNT_KEY: ${{ secrets.GSHEET_KEY }}
run: npm run lint-stats
continue-on-error: true
working-directory: ./superset-frontend

View File

@@ -1,22 +0,0 @@
name: Welcome New Contributor
on:
pull_request_target:
types: [opened]
jobs:
welcome:
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:
- name: Welcome Message
uses: actions/first-interaction@v2
continue-on-error: true
with:
repo-token: ${{ github.token }}
pr-message: |-
Congrats on making your first PR and thank you for contributing to Superset! :tada: :heart:
We hope to see you in our [Slack](https://apache-superset.slack.com/) community too! Not signed up? Use our [Slack App](http://bit.ly/join-superset-slack) to self-register.

147
.gitignore vendored
View File

@@ -1,136 +1,33 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
*.bak
*.db
*.pyc
*.sqllite
*.swp
__pycache__
.aider*
.local
.cache
.bento*
.cache-loader
.coverage
cover
yarn-error.log
_modules
superset/assets/coverage/*
changelog.sh
.DS_Store
.eggs
.env
.envrc
.idea
.mypy_cache
.python-version
.tox
.vscode
.coverage
_build
_static
_images
_modules
_static
build
app.db
apache_superset.egg-info/
changelog.sh
dist
dump.rdb
env
venv*
env_py3
envpy3
local_config.py
/superset_config.py
/superset_text.yml
superset.egg-info/
superset/bin/supersetc
env_py3
.eggs
build
*.db
tmp
rat-results.txt
superset/app/
superset-websocket/config.json
superset_config.py
local_config.py
env
dist
superset.egg-info/
app.db
*.bak
.idea
*.sqllite
# Node.js, webpack artifacts, storybook
# Node.js, webpack artifacts
*.entry.js
*.js.map
node_modules
npm-debug.log*
superset/static/assets/*
!superset/static/assets/.gitkeep
superset/static/uploads/*
!superset/static/uploads/.gitkeep
superset/static/version_info.json
superset-frontend/**/esm/*
superset-frontend/**/lib/*
superset-frontend/**/storybook-static/*
superset-frontend/migration-storybook.log
yarn-error.log
*.map
*.min.js
test-changelog.md
*.tsbuildinfo
# Ignore package-lock in packages
plugins/*/package-lock.json
packages/*/package-lock.json
# For country map geojson conversion script
.ipynb_checkpoints/
scripts/*.zip
# IntelliJ
*.iml
venv
.venv
@eaDir/
# PyCharm
.run
# Test data
celery_results.sqlite
celerybeat-schedule
celerydb.sqlite
celerybeat.pid
geckodriver.log
ghostdriver.log
testCSV.csv
.terser-plugin-cache/
apache-superset-*.tar.gz*
apache_superset-*.tar.gz*
release.json
# Translation-related files
# these json files are generated by ./scripts/po2json.sh
superset/translations/**/messages.json
# these mo binary files are generated by `pybabel compile`
superset/translations/**/messages.mo
docker/requirements-local.txt
cache/
docker/*local*
.temp_cache
# Jest test report
test-report.html
superset/static/stats/statistics.html
# LLM-related
CLAUDE.local.md
.aider*
.claude_rc*
.env.local
PROJECT.md
npm-debug.log
yarn.lock

41
.gitmodules vendored
View File

@@ -1,41 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[submodule ".github/actions/latest-tag"]
path = .github/actions/latest-tag
url = https://github.com/EndBug/latest-tag
[submodule ".github/actions/pr-lint-action"]
path = .github/actions/pr-lint-action
url = https://github.com/morrisoncole/pr-lint-action
[submodule ".github/actions/file-changes-action"]
path = .github/actions/file-changes-action
url = https://github.com/trilom/file-changes-action
[submodule ".github/actions/cached-dependencies"]
path = .github/actions/cached-dependencies
url = https://github.com/apache-superset/cached-dependencies
[submodule ".github/actions/comment-on-pr"]
path = .github/actions/comment-on-pr
url = https://github.com/unsplash/comment-on-pr
[submodule ".github/actions/chart-testing-action"]
path = .github/actions/chart-testing-action
url = https://github.com/helm/chart-testing-action
[submodule ".github/actions/chart-releaser-action"]
path = .github/actions/chart-releaser-action
url = https://github.com/helm/chart-releaser-action
[submodule ".github/actions/github-action-push-to-another-repository"]
path = .github/actions/github-action-push-to-another-repository
url = https://github.com/cpina/github-action-push-to-another-repository

23
.landscape.yml Normal file
View File

@@ -0,0 +1,23 @@
doc-warnings: yes
test-warnings: no
strictness: medium
max-line-length: 90
uses:
- flask
autodetect: yes
pylint:
disable:
- cyclic-import
- invalid-name
- logging-format-interpolation
options:
docstring-min-length: 10
pep8:
full: true
ignore-paths:
- docs
- superset/migrations/env.py
- superset/ascii_art.py
ignore-patterns:
- ^example/doc_.*\.py$
- (^|/)docs(/|$)

View File

@@ -1,4 +0,0 @@
{
"no-bare-urls": false,
"line-length": false
}

View File

@@ -1,114 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
repos:
- repo: https://github.com/MarcoGorelli/auto-walrus
rev: 0.3.4
hooks:
- id: auto-walrus
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.15.0
hooks:
- id: mypy
args: [--check-untyped-defs]
additional_dependencies: [
types-simplejson,
types-python-dateutil,
types-requests,
# types-redis 4.6.0.5 is failing mypy
# because of https://github.com/python/typeshed/pull/10531
types-redis==4.6.0.4,
types-pytz,
types-croniter,
types-PyYAML,
types-setuptools,
types-paramiko,
types-Markdown,
]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-docstring-first
- id: check-added-large-files
exclude: ^.*\.(geojson)$|^docs/static/img/screenshots/.*|^superset-frontend/CHANGELOG\.md$
- id: check-yaml
exclude: ^helm/superset/templates/
- id: debug-statements
- id: end-of-file-fixer
exclude: .*/lerna\.json$
- id: trailing-whitespace
exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"]
- repo: local
hooks:
- id: eslint-frontend
name: eslint (frontend)
entry: ./scripts/eslint.sh
language: system
pass_filenames: true
files: ^superset-frontend/.*\.(js|jsx|ts|tsx)$
- id: eslint-docs
name: eslint (docs)
entry: bash -c 'cd docs && FILES=$(echo "$@" | sed "s|docs/||g") && yarn eslint --fix --ext .js,.jsx,.ts,.tsx --quiet $FILES' --
language: system
pass_filenames: true
files: ^docs/.*\.(js|jsx|ts|tsx)$
- id: type-checking-frontend
name: Type-Checking (Frontend)
entry: ./scripts/check-type.js package=superset-frontend excludeDeclarationDir=cypress-base
language: system
files: ^superset-frontend\/.*\.(js|jsx|ts|tsx)$
exclude: ^superset-frontend/cypress-base\/
require_serial: true
# blacklist unsafe functions like make_url (see #19526)
- repo: https://github.com/skorokithakis/blacklist-pre-commit-hook
rev: e2f070289d8eddcaec0b580d3bde29437e7c8221
hooks:
- id: blacklist
args: ["--blacklisted-names=make_url", "--ignore=tests/"]
- repo: https://github.com/norwoodj/helm-docs
rev: v1.14.2
hooks:
- id: helm-docs
files: helm
verbose: false
args: ["--log-level", "error"]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.7
hooks:
- id: ruff-format
- id: ruff
args: [--fix]
- repo: local
hooks:
- id: pylint
name: pylint with custom Superset plugins
entry: bash
language: system
types: [python]
exclude: ^(tests/|superset/migrations/|scripts/|RELEASING/|docker/)
args:
- -c
- |
# Lint only the Python files under superset/ that changed relative to the
# merge-base with the target branch (GITHUB_BASE_REF in CI, master locally).
TARGET_BRANCH=${GITHUB_BASE_REF:-master}
git fetch origin "$TARGET_BRANCH"
BASE=$(git merge-base origin/"$TARGET_BRANCH" HEAD)
files=$(git diff --name-only --diff-filter=ACM "$BASE"..HEAD | grep '^superset/.*\.py$' || true)
if [ -n "$files" ]; then
pylint --rcfile=.pylintrc --load-plugins=superset.extensions.pylint --reports=no $files
else
echo "No Python files to lint."
fi

2
.pycodestyle Normal file
View File

@@ -0,0 +1,2 @@
[pycodestyle]
max-line-length = 90

355
.pylintrc
View File

@@ -1,355 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,migrations
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=superset.extensions.pylint
# Use multiple processes to speed up Pylint.
jobs=2
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=pyarrow
[MESSAGES CONTROL]
disable=all
enable=disallowed-json-import,disallowed-sql-import,consider-using-transaction
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x,y
# Bad variable names which should always be refused, separated by a comma
bad-names=bar,baz,db,fd,foo,sesh,session,tata,toto,tutu
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=
abc.abstractproperty,
sqlalchemy.ext.hybrid.hybrid_property
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{1,30}$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct constant names
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=10
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=5
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=contextlib.closing,optparse.Values,thread._local,_thread._local
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
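Illustrative sketch: names matching dummy-variables-rgx are treated as intentionally unused, so neither loop variable below draws an unused-variable warning.

def print_records(records):
    for _index, record in enumerate(records):  # _index matches (_+[a-zA-Z0-9]*?$)
        print(record)


for _ in range(3):  # a bare underscore is the conventional dummy name
    print("ping")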
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
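A minimal sketch, assuming a hypothetical test case: because setUp is listed in defining-attr-methods, attributes first assigned there do not trigger attribute-defined-outside-init, and class methods must name their first argument cls per valid-classmethod-first-arg.

import unittest


class CacheTest(unittest.TestCase):
    def setUp(self):
        self.cache = {}  # allowed: setUp is in defining-attr-methods

    @classmethod
    def build_suite(cls):  # first argument named "cls" as required
        return unittest.TestLoader().loadTestsFromTestCase(cls)

    def test_starts_empty(self):
        self.assertEqual(self.cache, {})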
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Defaults to names
# with a leading underscore.
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield statements for function / method body
max-returns=10
# Maximum number of branches for function / method body
max-branches=15
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=8
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in an if statement
max-bool-expr=5
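Illustrative sketch (hypothetical signatures): a sixth positional argument exceeds max-args=5 and draws too-many-arguments (R0913); bundling related parameters keeps the signature within the limit.

def render_chart(title, data, width, height, theme, legend):  # 6 args: flagged
    pass


def render_chart_compact(title, data, layout, theme):  # layout bundles width/height/legend
    pass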
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse
# Create a graph of all (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Analyse import fallback blocks. This can be used to support code compatible
# with both Python 2 and 3, where a block might contain code that exists only
# in one interpreter or the other, leading to false positives when analysed.
analyse-fallback-blocks=no
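For illustration: importing optparse under this config draws a deprecated-module warning (W0402); argparse is the standard-library replacement. The flag below is hypothetical.

import argparse

parser = argparse.ArgumentParser(description="Load fixture data")
parser.add_argument("--path", default="fixtures.json")
args = parser.parse_args()
print(args.path)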
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=builtins.Exception
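A closing sketch (hypothetical parsing code): catching the listed Exception base class is reported as overly broad, while a specific exception type passes cleanly.

raw_value = "42"

try:
    value = int(raw_value)
except ValueError:   # specific exception: fine
    value = 0

try:
    value = int(raw_value)
except Exception:    # listed in overgeneral-exceptions: flagged as too broad
    value = 0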

86
.rat-excludes
View File

@@ -1,86 +0,0 @@
# Note: these patterns are applied to single files or directories, not full paths
.gitignore
.gitattributes
.gitkeep
.coverage
.coveragerc
.codecov.yml
.eslintrc
.eslintignore
.flake8
.nvmrc
.prettierrc
.rat-excludes
.*log
.*pyc
.*lock
.*geojson
DISCLAIMER
licenses/*
node_modules/*
rat-results.txt
babel-node
dist
superset/static/*
build
superset.egg-info
apache_superset.egg-info
.idea
.*sql
.*zip
.*lock
# json and csv in general cannot have comments
.*json
.*csv
# Generated doc files
env/*
docs/.htaccess*
.nojekyll
_build/*
_static/*
.buildinfo
searchindex.js
# auto generated
requirements/*
# vendorized
vendor/*
# github configuration
.github/*
.*mdx
# skip license check in superset-ui
tmp/*
lib/*
esm/*
tsconfig.tsbuildinfo
.*ipynb
.*yml
.*iml
.esprintrc
.prettierignore
generator-superset/*
temporary_superset_ui/*
# skip license checks for auto-generated test snapshots
.*snap
# docs overrides for third party logos we don't have the rights to
google-big-query.svg
google-sheets.svg
ibm-db2.svg
postgresql.svg
snowflake.svg
ydb.svg
# docs-related
erd.puml
erd.svg
intro_header.txt
# for LLMs
llm-context.md
LLMS.md
CLAUDE.md
CURSOR.md
GEMINI.md
GPT.md

38
.travis.yml Normal file
View File

@@ -0,0 +1,38 @@
language: python
addons:
code_climate:
repo_token: 5f3a06c425eef7be4b43627d7d07a3e46c45bdc07155217825ff7c49cb6a470c
apt:
sources:
- deadsnakes
packages:
- python3.5
cache:
directories:
- $HOME/.wheelhouse/
env:
global:
- TRAVIS_CACHE=$HOME/.travis_cache/
- TRAVIS_NODE_VERSION="5.11"
matrix:
- TOX_ENV=javascript
- TOX_ENV=py34-postgres
- TOX_ENV=py34-sqlite
- TOX_ENV=py27-mysql
- TOX_ENV=py27-sqlite
before_install:
- npm install -g npm@'>=3.9.5'
before_script:
- mysql -e 'drop database if exists superset; create database superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci' -u root
- mysql -u root -e "CREATE USER 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluserpassword';"
- mysql -u root -e "GRANT ALL ON superset.* TO 'mysqluser'@'localhost';"
- psql -c 'create database superset;' -U postgres
- psql -c "CREATE USER postgresuser WITH PASSWORD 'pguserpassword';" -U postgres
- export PATH=${PATH}:/tmp/hive/bin
install:
- pip install --upgrade pip
- pip install tox tox-travis
- rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION
- npm install
script: tox -e $TOX_ENV

View File

@@ -1,63 +0,0 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl"?>
<rdf:RDF xml:lang="en"
xmlns="http://usefulinc.com/ns/doap#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:asfext="http://projects.apache.org/ns/asfext#"
xmlns:foaf="http://xmlns.com/foaf/0.1/">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<Project rdf:about="https://superset.apache.org">
<created>2024-04-10</created>
<license rdf:resource="https://spdx.org/licenses/Apache-2.0" />
<name>Apache Superset</name>
<homepage rdf:resource="https://superset.apache.org" />
<asfext:pmc rdf:resource="https://superset.apache.org" />
<shortdesc>Apache Superset™ is an open-source modern data exploration and visualization platform.</shortdesc>
<description>Superset is a fast, lightweight, intuitive business intelligence platform. Loaded with options, Superset makes it easy for users of all skill sets to explore and visualize their data, from simple line charts to highly detailed geospatial charts.
* Powerful yet easy to use:
Superset makes it easy to explore your data, using either our simple no-code viz builder or state-of-the-art SQL IDE.
* Integrates with modern databases
Superset can connect to any SQL-based database, including modern cloud-native databases and engines at petabyte scale.
* Modern architecture
Superset is lightweight and highly scalable, leveraging the power of your existing data infrastructure without requiring yet another ingestion layer.
* Rich visualizations and dashboards
Superset ships with 40+ pre-installed visualization types. Our plug-in architecture makes it easy to build custom visualizations.
</description>
<bug-database rdf:resource="https://github.com/apache/superset/issues" />
<mailing-list rdf:resource="https://lists.apache.org/list.html?dev@superset.apache.org" />
<download-page rdf:resource="https://dist.apache.org/repos/dist/release/superset/" />
<programming-language>JavaScript</programming-language>
<programming-language>Python</programming-language>
<programming-language>Typescript</programming-language>
<category rdf:resource="https://projects.apache.org/category/big-data" />
<category rdf:resource="https://projects.apache.org/category/database" />
<category rdf:resource="https://projects.apache.org/category/data-engineering" />
<category rdf:resource="https://projects.apache.org/category/geospatial" />
<repository>
<GitRepository>
<location rdf:resource="https://github.com/apache/superset.git"/>
<browse rdf:resource="https://github.com/apache/superset/"/>
</GitRepository>
</repository>
</Project>
</rdf:RDF>

Some files were not shown because too many files have changed in this diff.