Compare commits

..

2 Commits

Author SHA1 Message Date
Maxime Beauchemin
29fd4a3094 clarifying messages, removing some useless output 2024-12-15 23:17:33 -08:00
Maxime Beauchemin
cd6cc34735 chore: set dev env logging level to INFO (from DEBUG)
Using a higher logging default in dev mode to clean up output. Also setting some TMI messages to debug to prevent not-so-useful information from showing up on startup.

Devs can easily switch the logging level to DEBUG if/when needed.
2024-12-15 22:55:47 -08:00
494 changed files with 2539 additions and 4870 deletions

View File

@@ -72,7 +72,6 @@ github:
- cypress-matrix (3, chrome)
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- dependency-review
- frontend-build
- pre-commit (current)
- pre-commit (next)

View File

@@ -42,7 +42,6 @@ docs/
install/
superset-frontend/cypress-base/
superset-frontend/coverage/
superset-frontend/.temp_cache/
superset/static/assets/
superset-websocket/dist/
venv

8
.github/CODEOWNERS vendored
View File

@@ -16,17 +16,17 @@
# Notify E2E test maintainers of changes
/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida
/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida
# Notify PMC members of changes to GitHub Actions
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
# Notify PMC members of changes to required GitHub Actions
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
# Maps are a finicky contribution process we care about
# Maps are a finnicky contribution process we care about
**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas

View File

@@ -44,13 +44,10 @@ runs:
if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
pip install --upgrade pip setuptools wheel uv
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
uv pip install --system -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
uv pip install --system -r requirements/base.txt
fi
uv pip install --system -e .
fi
shell: bash

View File

@@ -1,69 +0,0 @@
name: "Setup Docker Environment"
description: "Reusable steps for setting up QEMU, Docker Buildx, DockerHub login, Supersetbot, and optionally Docker Compose"
inputs:
build:
description: "Used for building?"
required: false
default: "false"
dockerhub-user:
description: "DockerHub username"
required: false
dockerhub-token:
description: "DockerHub token"
required: false
install-docker-compose:
description: "Flag to install Docker Compose"
required: false
default: "true"
login-to-dockerhub:
description: "Whether you want to log into dockerhub"
required: false
default: "true"
outputs: {}
runs:
using: "composite"
steps:
- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}
- name: Install Docker Compose
if: ${{ inputs.install-docker-compose == 'true' }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
# Download and save the Docker GPG key in the correct format
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
# Ensure the key file is readable
sudo chmod a+r /etc/apt/keyrings/docker.gpg
# Add the Docker repository using the correct key
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
# Update package lists and install Docker Compose plugin
sudo apt update
sudo apt install -y docker-compose-plugin
- name: Docker Version Info
shell: bash
run: docker info

View File

@@ -22,7 +22,8 @@ updates:
# - package-ecosystem: "pip"
# NOTE: as dependabot isn't compatible with our usage of `uv pip compile` we're using
# NOTE: as dependabot isn't compatible with our python
# dependency setup (pip-compile-multi), we'll be using
# `supersetbot` instead
- package-ecosystem: "npm"

View File

@@ -23,7 +23,7 @@ on:
jobs:
bump-python-package:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: write
contents: write
@@ -45,8 +45,8 @@ jobs:
with:
python-version: "3.10"
- name: Install uv
run: pip install uv
- name: Install pip-compile-multi
run: pip install pip-compile-multi
- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
env:

View File

@@ -9,7 +9,7 @@ on:
jobs:
cancel-duplicate-runs:
name: Cancel duplicate workflow runs
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: write
contents: read

View File

@@ -1,44 +0,0 @@
name: Check python dependencies
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
depth: 1
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Run uv
if: steps.check.outputs.python
run: ./scripts/uv-pip-compile.sh
- name: Check for uncommitted changes
run: |
if [[ -n "$(git diff)" ]]; then
echo "ERROR: The pinned dependencies are not up-to-date."
echo "Please run './scripts/uv-pip-compile.sh' and commit the changes."
exit 1
else
echo "Pinned dependencies are up-to-date."
fi

View File

@@ -19,7 +19,7 @@ concurrency:
jobs:
check_db_migration_conflict:
name: Check DB migration conflict
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -17,7 +17,7 @@ concurrency:
jobs:
analyze:
name: Analyze
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
actions: read
contents: read

View File

@@ -5,26 +5,14 @@
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: "Dependency Review"
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
on: [pull_request]
permissions:
contents: read
jobs:
dependency-review:
if: github.event_name == 'pull_request'
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
@@ -45,24 +33,3 @@ jobs:
# pkg:npm/node-forge@1.3.1
# selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1
python-dependency-liccheck:
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
requirements-type: base
- name: "Set up liccheck"
run: |
uv pip install --system liccheck
- name: "Run liccheck"
run: |
# run the checks
liccheck -R output.txt
# Print the report
cat output.txt

View File

@@ -15,20 +15,20 @@ concurrency:
jobs:
setup_matrix:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
outputs:
matrix_config: ${{ steps.set_matrix.outputs.matrix_config }}
steps:
- id: set_matrix
run: |
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev", "lean"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT
echo $GITHUB_OUTPUT
docker-build:
name: docker-build
needs: setup_matrix
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}
@@ -50,13 +50,21 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Docker Environment
- name: Set up QEMU
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: ./.github/actions/setup-docker
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
continue-on-error: true
uses: docker/login-action@v3
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Setup supersetbot
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
@@ -76,30 +84,7 @@ jobs:
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false" \
$PLATFORM_ARG
- name: Docker pull
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: docker pull apache/superset:GHA-${GITHUB_RUN_ID}
- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: |
IMAGE_ID=$(docker images --filter "label=sha=${{ github.sha }}" --format "{{.ID}}" | head -n 1)
echo "SHA: ${{ github.sha }}"
echo "IMAGE: $IMAGE_ID"
docker images $IMAGE_ID
docker history $IMAGE_ID
- name: docker-compose sanity check
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
shell: bash
run: |
export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
docker compose up superset-init --exit-code-from superset-init

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -23,7 +23,7 @@ jobs:
build:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: superset-embedded-sdk

View File

@@ -13,7 +13,7 @@ concurrency:
jobs:
embedded-sdk-test:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: superset-embedded-sdk

View File

@@ -6,7 +6,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -22,7 +22,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Cleanup ephemeral envs
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write
steps:

View File

@@ -21,15 +21,12 @@ jobs:
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true
name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write
outputs:
slash-command: ${{ steps.eval-body.outputs.result }}
feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Debug
@@ -115,7 +112,7 @@ jobs:
needs: ephemeral-env-comment
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: ephemeral-docker-build
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Get Info from comment
uses: actions/github-script@v7
@@ -142,24 +139,31 @@ jobs:
ref: ${{ steps.get-sha.outputs.sha }}
persist-credentials: false
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
install-docker-compose: "false"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Try to login to DockerHub
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
run: |
supersetbot docker \
--push \
--load \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE" \
@@ -190,7 +194,7 @@ jobs:
needs: [ephemeral-env-comment, ephemeral-docker-build]
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: Spin up an ephemeral environment
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -24,7 +24,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Generate Report
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -11,7 +11,7 @@ on:
jobs:
validate-all-ghas:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4

View File

@@ -9,7 +9,7 @@ on:
jobs:
superbot-orglabel:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -7,7 +7,7 @@ jobs:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- uses: actions/labeler@v5
with:

View File

@@ -6,7 +6,7 @@ on:
jobs:
latest-release:
name: Add/update tag to new release
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: write

View File

@@ -12,7 +12,7 @@ concurrency:
jobs:
license_check:
name: License Check
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -11,7 +11,7 @@ concurrency:
jobs:
check-hold-label:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v7

View File

@@ -10,7 +10,7 @@ on:
jobs:
lint-check:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
pre-commit:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next", "previous"]

View File

@@ -21,7 +21,7 @@ jobs:
prefer_typescript:
if: github.ref == 'ref/heads/master' && github.event_name == 'pull_request'
name: Prefer TypeScript
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -25,7 +25,7 @@ jobs:
if: needs.config.outputs.has-secrets
name: Bump version and publish package(s)
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:

View File

@@ -6,7 +6,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -21,7 +21,7 @@ jobs:
cypress-applitools:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:

View File

@@ -12,7 +12,7 @@ env:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -27,7 +27,7 @@ jobs:
cron:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
node: [20]

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
test-load-examples:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -12,7 +12,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -28,7 +28,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Build & Deploy
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -51,7 +51,7 @@ jobs:
https://www.plaidcloud.com/
build-deploy:
name: Build & Deploy
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
defaults:
run:
working-directory: docs

View File

@@ -28,7 +28,6 @@ concurrency:
jobs:
cypress-matrix:
# Somehow one test flakes on 24.04 for unknown reasons, this is the only GHA left on 22.04
runs-on: ubuntu-22.04
permissions:
contents: read

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
frontend-build:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -13,7 +13,7 @@ concurrency:
jobs:
lint-test:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -20,7 +20,7 @@ on:
jobs:
release:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: write
pull-requests: write

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
test-mysql:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -74,7 +74,7 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-postgres:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next", "previous"]
@@ -136,7 +136,7 @@ jobs:
verbose: true
test-sqlite:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -16,7 +16,7 @@ concurrency:
jobs:
test-postgres-presto:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -84,7 +84,7 @@ jobs:
verbose: true
test-postgres-hive:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -16,7 +16,7 @@ concurrency:
jobs:
unit-tests:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ["current", "next"]

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
frontend-check-translations:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -46,7 +46,7 @@ jobs:
npm run build-translation
babel-extract:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -18,7 +18,7 @@ concurrency:
jobs:
app-checks:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -15,7 +15,7 @@ on:
jobs:
supersetbot:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
if: >
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))

View File

@@ -23,7 +23,7 @@ on:
- 'false'
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -39,26 +39,23 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: docker-release
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
matrix:
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
fail-fast: false
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
install-docker-compose: "false"
build: "true"
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
@@ -67,6 +64,13 @@ jobs:
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Try to login to DockerHub
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Execute custom Node.js script
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@@ -87,7 +91,6 @@ jobs:
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
@@ -100,7 +103,7 @@ jobs:
update-prs-with-release-info:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: ubuntu-24.04
runs-on: "ubuntu-22.04"
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -23,7 +23,7 @@ jobs:
process-and-upload:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
name: Generate Reports
steps:
- name: Checkout Repository

View File

@@ -6,7 +6,7 @@ on:
jobs:
welcome:
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
permissions:
pull-requests: write

View File

@@ -38,6 +38,10 @@ repos:
types-paramiko,
types-Markdown,
]
- repo: https://github.com/peterdemin/pip-compile-multi
rev: v2.6.4
hooks:
- id: pip-compile-multi-verify
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:

View File

@@ -52,11 +52,12 @@ WORKDIR /app/superset-frontend
RUN mkdir -p /app/superset/static/assets \
/app/superset/translations
# Copy translation files
COPY superset/translations /app/superset/translations
# Mount package files and install dependencies if not in dev mode
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
--mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
--mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/root/.npm \
if [ "$DEV_MODE" = "false" ]; then \
npm ci; \
else \
@@ -67,24 +68,16 @@ RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.j
COPY superset-frontend /app/superset-frontend
# Build the frontend if not in dev mode
RUN --mount=type=cache,target=/app/superset-frontend/.temp_cache \
--mount=type=cache,target=/root/.npm \
if [ "$DEV_MODE" = "false" ]; then \
RUN if [ "$DEV_MODE" = "false" ]; then \
echo "Running 'npm run ${BUILD_CMD}'"; \
if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
fi; \
npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi;
# Copy translation files
COPY superset/translations /app/superset/translations
# Build the frontend if not in dev mode
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
fi; \
rm -rf /app/superset/translations/*/*/*.po; \
rm -rf /app/superset/translations/*/*/*.mo;
fi && \
rm -rf /app/superset/translations/*/*/*.po
######################################################################
@@ -110,7 +103,7 @@ RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash
# Some bash scripts needed throughout the layers
COPY --chmod=755 docker/*.sh /app/docker/
RUN pip install --no-cache-dir --upgrade uv
RUN pip install --no-cache-dir --upgrade setuptools pip uv
# Using uv as it's faster/simpler than pip
RUN uv venv /app/.venv
@@ -119,9 +112,9 @@ ENV PATH="/app/.venv/bin:${PATH}"
# Install Playwright and optionally setup headless browsers
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=/root/.cache/uv\
RUN --mount=type=cache,target=/root/.cache/pip \
if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
uv pip install playwright && \
pip install playwright && \
playwright install-deps && \
if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
@@ -136,15 +129,12 @@ FROM python-base AS python-translation-compiler
# Install Python dependencies using docker/pip-install.sh
COPY requirements/translations.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
RUN --mount=type=cache,target=/root/.cache/pip \
/app/docker/pip-install.sh -r requirements/translations.txt
COPY superset/translations/ /app/translations_mo/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
pybabel compile -d /app/translations_mo | true; \
fi; \
rm -f /app/translations_mo/*/*/*.po; \
rm -f /app/translations_mo/*/*/*.json;
RUN pybabel compile -d /app/translations_mo | true && \
rm -f /app/translations_mo/*/*/*.po
######################################################################
# Python APP common layer
@@ -185,11 +175,6 @@ RUN /app/docker/apt-install.sh \
# Copy compiled things from previous stages
COPY --from=superset-node /app/superset/static/assets superset/static/assets
# TODO, when the next version comes out, use --exclude superset/translations
COPY superset superset
# TODO in the meantime, remove the .po files
RUN rm superset/translations/*/*/*.po
# Merging translations from backend and frontend stages
COPY --from=superset-node /app/superset/translations superset/translations
COPY --from=python-translation-compiler /app/translations_mo superset/translations
@@ -202,13 +187,12 @@ EXPOSE ${SUPERSET_PORT}
# Final lean image...
######################################################################
FROM python-common AS lean
COPY superset superset
# Install Python dependencies using docker/pip-install.sh
COPY requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
RUN --mount=type=cache,target=/root/.cache/pip \
/app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt && \
uv pip install .
RUN python -m compileall /app/superset
@@ -219,6 +203,7 @@ USER superset
# Dev image...
######################################################################
FROM python-common AS dev
COPY superset superset
# Debian libs needed for dev
RUN /app/docker/apt-install.sh \
@@ -229,10 +214,8 @@ RUN /app/docker/apt-install.sh \
# Copy development requirements and install them
COPY requirements/*.txt requirements/
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
RUN --mount=type=cache,target=/root/.cache/pip \
/app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt && \
uv pip install .
RUN python -m compileall /app/superset

View File

@@ -272,14 +272,14 @@ class GitLogs:
@staticmethod
def _git_get_current_head() -> str:
output = os.popen("git status | head -1").read() # noqa: S605, S607
output = os.popen("git status | head -1").read()
match = re.match("(?:HEAD detached at|On branch) (.*)", output)
if not match:
return ""
return match.group(1)
def _git_checkout(self, git_ref: str) -> None:
os.popen(f"git checkout {git_ref}").read() # noqa: S605
os.popen(f"git checkout {git_ref}").read()
current_head = self._git_get_current_head()
if current_head != git_ref:
print(f"Could not checkout {git_ref}")
@@ -290,7 +290,7 @@ class GitLogs:
current_git_ref = self._git_get_current_head()
self._git_checkout(self._git_ref)
output = (
os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"') # noqa: S605, S607
os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')
.read()
.split("\n")
)

View File

@@ -31,7 +31,7 @@ except ModuleNotFoundError:
RECEIVER_EMAIL = "dev@superset.apache.org"
PROJECT_NAME = "Superset"
PROJECT_MODULE = "superset"
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application." # noqa: E501
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."
def string_comma_to_list(message: str) -> list[str]:

View File

@@ -23,12 +23,12 @@ from typing import Optional
import requests
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512` # noqa: E501
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`
def get_sha512_hash(filename: str) -> str:
"""Run the shasum command on the file and return the SHA512 hash."""
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE) # noqa: S603, S607
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)
sha512_hash = result.stdout.decode().split()[0]
return sha512_hash
@@ -43,7 +43,7 @@ def read_sha512_file(filename: str) -> str:
def verify_sha512(filename: str) -> str:
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file.""" # noqa: E501
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""
sha512_hash = get_sha512_hash(filename)
sha512_file_content = read_sha512_file(filename)
@@ -53,15 +53,14 @@ def verify_sha512(filename: str) -> str:
return "SHA failed"
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file # noqa: E501
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file
def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
"""Run the GPG verify command and extract RSA key and email address."""
asc_filename = filename + ".asc"
result = subprocess.run( # noqa: S603
["gpg", "--verify", asc_filename, filename], # noqa: S607
capture_output=True, # noqa: S607
result = subprocess.run(
["gpg", "--verify", asc_filename, filename], capture_output=True
)
output = result.stderr.decode()
@@ -91,7 +90,7 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
def verify_key(key: str, email: Optional[str]) -> str:
"""Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
url = "https://downloads.apache.org/superset/KEYS"
response = requests.get(url) # noqa: S113
response = requests.get(url)
if response.status_code == 200:
if key not in response.text:
return "RSA/EDDSA key not found on KEYS page"

View File

@@ -79,7 +79,7 @@ Join our growing community!
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]

View File

@@ -35,14 +35,11 @@ x-superset-volumes: &superset-volumes
x-common-build: &common-build
context: .
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
target: dev
cache_from:
- apache/superset-cache:3.10-slim-bookworm
args:
DEV_MODE: "true"
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
services:
nginx:
@@ -160,7 +157,6 @@ services:
# and build it on startup while firing docker-frontend.sh in dev mode, where
# it'll mount and watch local files and rebuild as you update them
DEV_MODE: "true"
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
environment:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!

View File

@@ -20,7 +20,8 @@ set -eo pipefail
# Make python interactive
if [ "$DEV_MODE" == "true" ]; then
echo "Reinstalling the app in editable mode"
echo "[DEV_MODE detected] Setting the superset package to be in editable mode"
echo "RUN: uv pip install -e ."
uv pip install -e .
fi
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
@@ -30,18 +31,12 @@ if [ "$CYPRESS_CONFIG" == "true" ]; then
export SUPERSET_TESTENV=true
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
fi
if [[ "$DATABASE_DIALECT" == postgres* ]] ; then
echo "Installing postgres requirements"
uv pip install -e .[postgres]
fi
#
# Make sure we have dev requirements installed
#
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
echo "Installing python packages specified at ${REQUIREMENTS_LOCAL}"
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
echo "Skipping local overrides"
fi
case "${1}" in

View File

@@ -35,7 +35,7 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Running \`npm install\`"
npm install
echo "Start webpack dev server"
echo "Running frontend"
npm run dev
else

View File

@@ -30,15 +30,9 @@ fi
echo_step() {
cat <<EOF
######################################################################
Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
Docker Init Step ${1}/${STEP_CNT} [${2}] -- ${3}
######################################################################
EOF
}
ADMIN_PASSWORD="${ADMIN_PASSWORD:-admin}"
@@ -52,7 +46,6 @@ fi
# Initialize the database
echo_step "1" "Starting" "Applying DB migrations"
superset db upgrade
echo_step "1" "Complete" "Applying DB migrations"
# Create an admin user
echo_step "2" "Starting" "Setting up admin user ( admin / $ADMIN_PASSWORD )"
@@ -62,11 +55,9 @@ superset fab create-admin \
--lastname Admin \
--email admin@superset.com \
--password "$ADMIN_PASSWORD"
echo_step "2" "Complete" "Setting up admin user"
# Create default roles and permissions
echo_step "3" "Starting" "Setting up roles and perms"
superset init
echo_step "3" "Complete" "Setting up roles and perms"
if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
# Load some data to play with
@@ -78,5 +69,4 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
else
superset load_examples --force
fi
echo_step "4" "Complete" "Loading examples"
fi

View File

@@ -99,10 +99,11 @@ CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ # noqa: E501
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True
LOG_LEVEL = logging.INFO
#
# Optionally import superset_config_docker.py (which will have been included on

View File

@@ -63,7 +63,6 @@
"Fiji",
"Finland",
"France",
"France (with overseas)",
"France (regions)",
"French Polynesia",
"Gabon",

View File

@@ -72,19 +72,6 @@ documentation.
configured to be secure.
:::
### Supported environment variables
Affecting the Docker build process:
- **SUPERSET_BUILD_TARGET (default=dev):** which --target to build, either `lean` or `dev` are commonly used
- **INCLUDE_FIREFOX (default=false):** whether to include the Firefox headless browser in the build
- **INCLUDE_CHROMIUM (default=false):** whether to include the Chromium headless browser in the build
- **BUILD_TRANSLATIONS (default=false):** whether to compile the translations from the .po files available
For more env vars that affect your configuration, see this
[superset_config.py](https://github.com/apache/superset/blob/master/docker/pythonpath_dev/superset_config.py)
used in the `docker compose` context to assign env vars to the superset configuration.
### Nuking the postgres database
At times, it's possible to end up with your development database in a bad state, it's
@@ -255,19 +242,19 @@ If you add a new requirement or update an existing requirement (per the `install
$ python3 -m venv venv
$ source venv/bin/activate
$ python3 -m pip install -r requirements/development.txt
$ ./scripts/uv-pip-compile.sh
$ pip-compile-multi --no-upgrade
```
When upgrading the version number of a single package, you should run `./scripts/uv-pip-compile.sh` with the `-P` flag:
When upgrading the version number of a single package, you should run `pip-compile-multi` with the `-P` flag:
```bash
$ ./scripts/uv-pip-compile.sh -P some-package-to-upgrade
$ pip-compile-multi -P my-package
```
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run `./scripts/uv-pip-compile.sh --upgrade`
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run `pip-compile-multi` without any flags:
```bash
$ ./scripts/uv-pip-compile.sh --upgrade
$ pip-compile-multi
```
This should be done periodically, but it is recommended to do thorough manual testing of the application to ensure no breaking changes have been introduced that aren't caught by the unit and integration tests.

View File

@@ -24,7 +24,7 @@ name = "apache-superset"
description = "A modern, enterprise-ready business intelligence web application"
readme = "README.md"
dynamic = ["version", "scripts", "entry-points"]
requires-python = ">=3.9"
requires-python = "~=3.9"
license = { file="LICENSE.txt" }
authors = [
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
@@ -191,6 +191,7 @@ development = [
"grpcio>=1.55.3",
"openapi-spec-validator",
"parameterized",
"pip-compile-multi",
"pre-commit",
"progress>=1.5,<2",
"psutil",
@@ -215,7 +216,7 @@ combine_as_imports = true
include_trailing_comma = true
line_length = 88
known_first_party = "superset"
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, sqlparse, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, pkg_resources, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, sqlparse, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
multi_line_output = 3
order_by_type = false
@@ -276,8 +277,8 @@ exclude = [
line-length = 88
indent-width = 4
# Assume Python 3.9
target-version = "py39"
# Assume Python 3.10
target-version = "py310"
[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
@@ -290,24 +291,22 @@ select = [
"E9",
"PT009",
"TRY201",
"B",
"C",
"E",
"F",
"F",
"I",
"N",
"PT",
"Q",
"S",
"T",
"W",
# TODO add these rules in follow up PR
# "B",
# "C",
# "E",
# "F",
#"F",
# "I",
# "N",
# "PT",
# "Q",
# "S",
# "T",
#"W",
]
ignore = [
"S101",
"PT006",
"T201",
"N999",
]
extend-select = ["I"]
@@ -361,36 +360,3 @@ docstring-code-format = false
# This only has an effect when the `docstring-code-format` setting is
# enabled.
docstring-code-line-length = "dynamic"
[tool.liccheck]
requirement_txt_file = "requirements/base.txt"
authorized_licenses = [
"academic free license (afl)",
"apache license 2.0",
"apache software",
"apache software, bsd",
"bsd",
"isc license (iscl)",
"isc license",
"mit",
"mozilla public license 2.0 (mpl 2.0)",
"osi approved",
"osi approved",
"python software foundation",
"the unlicense (unlicense)",
"the unlicense",
]
[tool.liccheck.authorized_packages]
# --------------------------------------------------------------
# These are ok, checked manually
# Seems ok, might need legal review
# https://github.com/urschrei/pypolyline/blob/master/LICENSE.md
polyline = "2"
# Apache 2.0 https://github.com/hkwi/python-geohash
python-geohash = "0"
# --------------------------------------------------------------
# TODO REMOVE THESE DEPS FROM CODEBASE
func-timeout = "4" # AGPL
paramiko = "3" # GPL
pyxlsb = "1" # GPL

View File

@@ -16,6 +16,7 @@
# specific language governing permissions and limitations
# under the License.
#
-e file:.
urllib3>=1.26.18
werkzeug>=3.0.1
numexpr>=2.9.0

View File

@@ -1,10 +1,17 @@
# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt
# SHA1:04f7e0860829f18926ea238354e6d4a6ab823d50
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-e file:.
# via -r requirements/base.in
alembic==1.14.0
# via flask-migrate
amqp==5.3.1
# via kombu
apispec==6.3.0
apispec[yaml]==6.3.0
# via flask-appbuilder
apsw==3.46.0.0
# via shillelagh
@@ -20,7 +27,7 @@ attrs==24.2.0
babel==2.16.0
# via flask-babel
backoff==2.2.1
# via apache-superset (pyproject.toml)
# via apache-superset
bcrypt==4.2.1
# via paramiko
billiard==4.2.1
@@ -28,7 +35,7 @@ billiard==4.2.1
blinker==1.9.0
# via flask
bottleneck==1.4.2
# via apache-superset (pyproject.toml)
# via apache-superset
brotli==1.1.0
# via flask-compress
cachelib==0.9.0
@@ -40,7 +47,7 @@ cachetools==5.5.0
cattrs==24.1.2
# via requests-cache
celery==5.4.0
# via apache-superset (pyproject.toml)
# via apache-superset
certifi==2024.8.30
# via requests
cffi==1.17.1
@@ -51,7 +58,7 @@ charset-normalizer==3.4.0
# via requests
click==8.1.7
# via
# apache-superset (pyproject.toml)
# apache-superset
# celery
# click-didyoumean
# click-option-group
@@ -62,22 +69,22 @@ click==8.1.7
click-didyoumean==0.3.1
# via celery
click-option-group==0.5.6
# via apache-superset (pyproject.toml)
# via apache-superset
click-plugins==1.1.1
# via celery
click-repl==0.3.0
# via celery
colorama==0.4.6
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
cron-descriptor==1.4.5
# via apache-superset (pyproject.toml)
# via apache-superset
croniter==5.0.1
# via apache-superset (pyproject.toml)
# via apache-superset
cryptography==43.0.3
# via
# apache-superset (pyproject.toml)
# apache-superset
# paramiko
# pyopenssl
defusedxml==0.7.1
@@ -85,7 +92,7 @@ defusedxml==0.7.1
deprecated==1.2.15
# via limits
deprecation==2.1.0
# via apache-superset (pyproject.toml)
# via apache-superset
dnspython==2.7.0
# via email-validator
email-validator==2.2.0
@@ -96,7 +103,7 @@ exceptiongroup==1.2.2
# via cattrs
flask==2.3.3
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
@@ -109,66 +116,66 @@ flask==2.3.3
# flask-sqlalchemy
# flask-wtf
flask-appbuilder==4.5.2
# via apache-superset (pyproject.toml)
# via apache-superset
flask-babel==2.0.0
# via flask-appbuilder
flask-caching==2.3.0
# via apache-superset (pyproject.toml)
# via apache-superset
flask-compress==1.17
# via apache-superset (pyproject.toml)
# via apache-superset
flask-jwt-extended==4.7.1
# via flask-appbuilder
flask-limiter==3.8.0
# via flask-appbuilder
flask-login==0.6.3
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
flask-migrate==3.1.0
# via apache-superset (pyproject.toml)
# via apache-superset
flask-session==0.8.0
# via apache-superset (pyproject.toml)
# via apache-superset
flask-sqlalchemy==2.5.1
# via
# flask-appbuilder
# flask-migrate
flask-talisman==1.1.0
# via apache-superset (pyproject.toml)
# via apache-superset
flask-wtf==1.2.2
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
func-timeout==4.3.5
# via apache-superset (pyproject.toml)
# via apache-superset
geographiclib==2.0
# via geopy
geopy==2.4.1
# via apache-superset (pyproject.toml)
# via apache-superset
google-auth==2.36.0
# via shillelagh
greenlet==3.0.3
# via
# -r requirements/base.in
# apache-superset (pyproject.toml)
# apache-superset
# shillelagh
gunicorn==23.0.0
# via apache-superset (pyproject.toml)
# via apache-superset
hashids==1.3.1
# via apache-superset (pyproject.toml)
# via apache-superset
holidays==0.25
# via apache-superset (pyproject.toml)
# via apache-superset
humanize==4.11.0
# via apache-superset (pyproject.toml)
# via apache-superset
idna==3.10
# via
# email-validator
# requests
importlib-metadata==8.5.0
# via apache-superset (pyproject.toml)
# via apache-superset
importlib-resources==6.4.5
# via limits
isodate==0.7.2
# via apache-superset (pyproject.toml)
# via apache-superset
itsdangerous==2.2.0
# via
# flask
@@ -178,7 +185,7 @@ jinja2==3.1.4
# flask
# flask-babel
jsonpath-ng==1.7.0
# via apache-superset (pyproject.toml)
# via apache-superset
jsonschema==4.17.3
# via flask-appbuilder
kombu==5.4.2
@@ -189,10 +196,10 @@ limits==3.13.0
# via flask-limiter
mako==1.3.6
# via
# apache-superset (pyproject.toml)
# alembic
# apache-superset
markdown==3.7
# via apache-superset (pyproject.toml)
# via apache-superset
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
@@ -210,16 +217,16 @@ marshmallow-sqlalchemy==0.28.2
mdurl==0.1.2
# via markdown-it-py
msgpack==1.0.8
# via apache-superset (pyproject.toml)
# via apache-superset
msgspec==0.18.6
# via flask-session
nh3==0.2.19
# via apache-superset (pyproject.toml)
# via apache-superset
numexpr==2.10.2
# via -r requirements/base.in
numpy==1.23.5
# via
# apache-superset (pyproject.toml)
# apache-superset
# bottleneck
# numexpr
# pandas
@@ -232,7 +239,7 @@ ordered-set==4.1.0
# via flask-limiter
packaging==24.2
# via
# apache-superset (pyproject.toml)
# apache-superset
# apispec
# deprecation
# gunicorn
@@ -240,28 +247,28 @@ packaging==24.2
# marshmallow
# marshmallow-sqlalchemy
# shillelagh
pandas==2.0.3
# via apache-superset (pyproject.toml)
pandas[excel]==2.0.3
# via apache-superset
paramiko==3.5.0
# via
# apache-superset (pyproject.toml)
# apache-superset
# sshtunnel
parsedatetime==2.6
# via apache-superset (pyproject.toml)
# via apache-superset
pgsanity==0.2.9
# via apache-superset (pyproject.toml)
# via apache-superset
platformdirs==3.8.1
# via requests-cache
ply==3.11
# via jsonpath-ng
polyline==2.0.2
# via apache-superset (pyproject.toml)
# via apache-superset
prison==0.2.1
# via flask-appbuilder
prompt-toolkit==3.0.48
# via click-repl
pyarrow==14.0.2
# via apache-superset (pyproject.toml)
# via apache-superset
pyasn1==0.6.1
# via
# pyasn1-modules
@@ -274,7 +281,7 @@ pygments==2.18.0
# via rich
pyjwt==2.10.1
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
# flask-jwt-extended
pynacl==1.5.0
@@ -282,12 +289,12 @@ pynacl==1.5.0
pyopenssl==24.2.1
# via shillelagh
pyparsing==3.2.0
# via apache-superset (pyproject.toml)
# via apache-superset
pyrsistent==0.20.0
# via jsonschema
python-dateutil==2.9.0.post0
# via
# apache-superset (pyproject.toml)
# apache-superset
# celery
# croniter
# flask-appbuilder
@@ -295,9 +302,9 @@ python-dateutil==2.9.0.post0
# pandas
# shillelagh
python-dotenv==1.0.1
# via apache-superset (pyproject.toml)
# via apache-superset
python-geohash==0.8.5
# via apache-superset (pyproject.toml)
# via apache-superset
pytz==2024.2
# via
# croniter
@@ -307,10 +314,10 @@ pyxlsb==1.0.10
# via pandas
pyyaml==6.0.2
# via
# apache-superset (pyproject.toml)
# apache-superset
# apispec
redis==4.6.0
# via apache-superset (pyproject.toml)
# via apache-superset
requests==2.32.2
# via
# requests-cache
@@ -322,13 +329,13 @@ rich==13.9.4
rsa==4.9
# via google-auth
selenium==3.141.0
# via apache-superset (pyproject.toml)
shillelagh==1.2.18
# via apache-superset (pyproject.toml)
# via apache-superset
shillelagh[gsheetsapi]==1.2.18
# via apache-superset
shortid==0.1.2
# via apache-superset (pyproject.toml)
# via apache-superset
simplejson==3.19.3
# via apache-superset (pyproject.toml)
# via apache-superset
six==1.16.0
# via
# prison
@@ -336,11 +343,11 @@ six==1.16.0
# url-normalize
# wtforms-json
slack-sdk==3.33.4
# via apache-superset (pyproject.toml)
# via apache-superset
sqlalchemy==1.4.54
# via
# apache-superset (pyproject.toml)
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
@@ -348,20 +355,20 @@ sqlalchemy==1.4.54
# sqlalchemy-utils
sqlalchemy-utils==0.38.3
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
sqlglot==25.24.5
# via apache-superset (pyproject.toml)
# via apache-superset
sqlparse==0.5.2
# via apache-superset (pyproject.toml)
# via apache-superset
sshtunnel==0.4.0
# via apache-superset (pyproject.toml)
# via apache-superset
tabulate==0.8.10
# via apache-superset (pyproject.toml)
# via apache-superset
typing-extensions==4.12.2
# via
# apache-superset (pyproject.toml)
# alembic
# apache-superset
# cattrs
# flask-limiter
# limits
@@ -398,17 +405,17 @@ wrapt==1.17.0
# via deprecated
wtforms==3.2.1
# via
# apache-superset (pyproject.toml)
# apache-superset
# flask-appbuilder
# flask-wtf
# wtforms-json
wtforms-json==0.3.5
# via apache-superset (pyproject.toml)
# via apache-superset
xlrd==2.0.1
# via pandas
xlsxwriter==3.0.9
# via
# apache-superset (pyproject.toml)
# apache-superset
# pandas
zipp==3.21.0
# via importlib-metadata

View File

@@ -16,4 +16,5 @@
# specific language governing permissions and limitations
# under the License.
#
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,postgres,presto,prophet,trino,thumbnails]
-r base.in
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,playwright,postgres,presto,prophet,trino,thumbnails]

View File

@@ -1,294 +1,54 @@
# This file was autogenerated by uv via the following command:
# uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt
-e .
# via -r requirements/development.in
alembic==1.14.0
# SHA1:dc767a7288b56c785b0cd3c38e95e7b5e66be1ac
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-r base.txt
-e file:.
# via
# -c requirements/base.txt
# flask-migrate
amqp==5.3.1
# via
# -c requirements/base.txt
# kombu
apispec==6.3.0
# via
# -c requirements/base.txt
# flask-appbuilder
apsw==3.46.0.0
# via
# -c requirements/base.txt
# shillelagh
async-timeout==4.0.3
# via
# -c requirements/base.txt
# redis
attrs==24.2.0
# via
# -c requirements/base.txt
# cattrs
# jsonschema
# requests-cache
babel==2.16.0
# via
# -c requirements/base.txt
# flask-babel
backoff==2.2.1
# via
# -c requirements/base.txt
# apache-superset
bcrypt==4.2.1
# via
# -c requirements/base.txt
# paramiko
billiard==4.2.1
# via
# -c requirements/base.txt
# celery
blinker==1.9.0
# via
# -c requirements/base.txt
# flask
bottleneck==1.4.2
# via
# -c requirements/base.txt
# apache-superset
brotli==1.1.0
# via
# -c requirements/base.txt
# flask-compress
cachelib==0.9.0
# via
# -c requirements/base.txt
# flask-caching
# flask-session
cachetools==5.5.0
# via
# -c requirements/base.txt
# google-auth
cattrs==24.1.2
# via
# -c requirements/base.txt
# requests-cache
celery==5.4.0
# via
# -c requirements/base.txt
# apache-superset
certifi==2024.8.30
# via
# -c requirements/base.txt
# requests
cffi==1.17.1
# via
# -c requirements/base.txt
# cryptography
# pynacl
# -r /Users/max/code/superset/requirements/base.in
# -r requirements/development.in
build==1.2.1
# via pip-tools
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.4.0
# via
# -c requirements/base.txt
# requests
click==8.1.7
# via
# -c requirements/base.txt
# apache-superset
# celery
# click-didyoumean
# click-option-group
# click-plugins
# click-repl
# flask
# flask-appbuilder
click-didyoumean==0.3.1
# via
# -c requirements/base.txt
# celery
click-option-group==0.5.6
# via
# -c requirements/base.txt
# apache-superset
click-plugins==1.1.1
# via
# -c requirements/base.txt
# celery
click-repl==0.3.0
# via
# -c requirements/base.txt
# celery
cmdstanpy==1.1.0
# via prophet
colorama==0.4.6
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
contourpy==1.0.7
# via matplotlib
coverage==7.6.8
coverage[toml]==7.6.8
# via pytest-cov
cron-descriptor==1.4.5
# via
# -c requirements/base.txt
# apache-superset
croniter==5.0.1
# via
# -c requirements/base.txt
# apache-superset
cryptography==43.0.3
# via
# -c requirements/base.txt
# apache-superset
# paramiko
# pyopenssl
cycler==0.12.1
# via matplotlib
db-dtypes==1.3.1
# via pandas-gbq
defusedxml==0.7.1
# via
# -c requirements/base.txt
# odfpy
deprecated==1.2.15
# via
# -c requirements/base.txt
# limits
deprecation==2.1.0
# via
# -c requirements/base.txt
# apache-superset
distlib==0.3.8
# via virtualenv
dnspython==2.7.0
# via
# -c requirements/base.txt
# email-validator
docker==7.0.0
# via apache-superset
email-validator==2.2.0
# via
# -c requirements/base.txt
# flask-appbuilder
et-xmlfile==2.0.0
# via
# -c requirements/base.txt
# openpyxl
exceptiongroup==1.2.2
# via
# -c requirements/base.txt
# cattrs
# pytest
filelock==3.12.2
# via virtualenv
flask==2.3.3
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
# flask-compress
# flask-cors
# flask-jwt-extended
# flask-limiter
# flask-login
# flask-migrate
# flask-session
# flask-sqlalchemy
# flask-testing
# flask-wtf
flask-appbuilder==4.5.2
# via
# -c requirements/base.txt
# apache-superset
flask-babel==2.0.0
# via
# -c requirements/base.txt
# flask-appbuilder
flask-caching==2.3.0
# via
# -c requirements/base.txt
# apache-superset
flask-compress==1.17
# via
# -c requirements/base.txt
# apache-superset
flask-cors==4.0.0
# via apache-superset
flask-jwt-extended==4.7.1
# via
# -c requirements/base.txt
# flask-appbuilder
flask-limiter==3.8.0
# via
# -c requirements/base.txt
# flask-appbuilder
flask-login==0.6.3
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
flask-migrate==3.1.0
# via
# -c requirements/base.txt
# apache-superset
flask-session==0.8.0
# via
# -c requirements/base.txt
# apache-superset
flask-sqlalchemy==2.5.1
# via
# -c requirements/base.txt
# flask-appbuilder
# flask-migrate
flask-talisman==1.1.0
# via
# -c requirements/base.txt
# apache-superset
flask-testing==0.8.1
# via apache-superset
flask-wtf==1.2.2
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
fonttools==4.55.0
# via matplotlib
freezegun==1.5.1
# via apache-superset
func-timeout==4.3.5
# via
# -c requirements/base.txt
# apache-superset
future==1.0.0
# via pyhive
geographiclib==2.0
# via
# -c requirements/base.txt
# geopy
geopy==2.4.1
# via
# -c requirements/base.txt
# apache-superset
gevent==24.2.1
# via apache-superset
google-api-core==2.23.0
google-api-core[grpc]==2.23.0
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
# google-cloud-core
# pandas-gbq
# sqlalchemy-bigquery
google-auth==2.36.0
# via
# -c requirements/base.txt
# google-api-core
# google-auth-oauthlib
# google-cloud-bigquery
# google-cloud-core
# pandas-gbq
# pydata-google-auth
# shillelagh
# sqlalchemy-bigquery
google-auth-oauthlib==1.2.1
# via
# pandas-gbq
@@ -310,12 +70,6 @@ googleapis-common-protos==1.66.0
# via
# google-api-core
# grpcio-status
greenlet==3.0.3
# via
# -c requirements/base.txt
# apache-superset
# gevent
# shillelagh
grpcio==1.68.0
# via
# apache-superset
@@ -323,241 +77,50 @@ grpcio==1.68.0
# grpcio-status
grpcio-status==1.60.1
# via google-api-core
gunicorn==23.0.0
# via
# -c requirements/base.txt
# apache-superset
hashids==1.3.1
# via
# -c requirements/base.txt
# apache-superset
holidays==0.25
# via
# -c requirements/base.txt
# apache-superset
# prophet
humanize==4.11.0
# via
# -c requirements/base.txt
# apache-superset
identify==2.5.36
# via pre-commit
idna==3.10
# via
# -c requirements/base.txt
# email-validator
# requests
importlib-metadata==8.5.0
# via
# -c requirements/base.txt
# apache-superset
importlib-resources==6.4.5
# via
# -c requirements/base.txt
# limits
# prophet
iniconfig==2.0.0
# via pytest
isodate==0.7.2
# via
# -c requirements/base.txt
# apache-superset
itsdangerous==2.2.0
# via
# -c requirements/base.txt
# flask
# flask-wtf
jinja2==3.1.4
# via
# -c requirements/base.txt
# flask
# flask-babel
jsonpath-ng==1.7.0
# via
# -c requirements/base.txt
# apache-superset
jsonschema==4.17.3
# via
# -c requirements/base.txt
# flask-appbuilder
# jsonschema-spec
# openapi-schema-validator
# openapi-spec-validator
jsonschema-spec==0.1.6
# via openapi-spec-validator
kiwisolver==1.4.7
# via matplotlib
kombu==5.4.2
# via
# -c requirements/base.txt
# celery
korean-lunar-calendar==0.3.1
# via
# -c requirements/base.txt
# holidays
lazy-object-proxy==1.10.0
# via openapi-spec-validator
limits==3.13.0
# via
# -c requirements/base.txt
# flask-limiter
mako==1.3.6
# via
# -c requirements/base.txt
# alembic
# apache-superset
markdown==3.7
# via
# -c requirements/base.txt
# apache-superset
markdown-it-py==3.0.0
# via
# -c requirements/base.txt
# rich
markupsafe==3.0.2
# via
# -c requirements/base.txt
# jinja2
# mako
# werkzeug
# wtforms
marshmallow==3.23.1
# via
# -c requirements/base.txt
# flask-appbuilder
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==0.28.2
# via
# -c requirements/base.txt
# flask-appbuilder
matplotlib==3.9.0
# via prophet
mdurl==0.1.2
# via
# -c requirements/base.txt
# markdown-it-py
msgpack==1.0.8
# via
# -c requirements/base.txt
# apache-superset
msgspec==0.18.6
# via
# -c requirements/base.txt
# flask-session
mysqlclient==2.2.6
# via apache-superset
nh3==0.2.19
# via
# -c requirements/base.txt
# apache-superset
nodeenv==1.8.0
# via pre-commit
numpy==1.23.5
# via
# -c requirements/base.txt
# apache-superset
# bottleneck
# cmdstanpy
# contourpy
# db-dtypes
# matplotlib
# pandas
# pandas-gbq
# prophet
# pyarrow
oauthlib==3.2.2
# via requests-oauthlib
odfpy==1.4.1
# via
# -c requirements/base.txt
# pandas
openapi-schema-validator==0.4.4
# via openapi-spec-validator
openapi-spec-validator==0.5.6
# via apache-superset
openpyxl==3.1.5
# via
# -c requirements/base.txt
# pandas
ordered-set==4.1.0
# via
# -c requirements/base.txt
# flask-limiter
packaging==24.2
# via
# -c requirements/base.txt
# apache-superset
# apispec
# db-dtypes
# deprecation
# docker
# google-cloud-bigquery
# gunicorn
# limits
# marshmallow
# marshmallow-sqlalchemy
# matplotlib
# pytest
# shillelagh
# sqlalchemy-bigquery
pandas==2.0.3
# via
# -c requirements/base.txt
# apache-superset
# cmdstanpy
# db-dtypes
# pandas-gbq
# prophet
pandas-gbq==0.19.1
# via apache-superset
parameterized==0.9.0
# via apache-superset
paramiko==3.5.0
# via
# -c requirements/base.txt
# apache-superset
# sshtunnel
parsedatetime==2.6
# via
# -c requirements/base.txt
# apache-superset
pathable==0.4.3
# via jsonschema-spec
pgsanity==0.2.9
# via
# -c requirements/base.txt
# apache-superset
pillow==10.3.0
# via
# apache-superset
# matplotlib
platformdirs==3.8.1
# via
# -c requirements/base.txt
# requests-cache
# virtualenv
pip-compile-multi==2.6.3
# via apache-superset
pip-tools==7.4.1
# via pip-compile-multi
playwright==1.42.0
# via apache-superset
pluggy==1.5.0
# via pytest
ply==3.11
# via
# -c requirements/base.txt
# jsonpath-ng
polyline==2.0.2
# via
# -c requirements/base.txt
# apache-superset
pre-commit==4.0.1
# via apache-superset
prison==0.2.1
# via
# -c requirements/base.txt
# flask-appbuilder
progress==1.6
# via apache-superset
prompt-toolkit==3.0.48
# via
# -c requirements/base.txt
# click-repl
prophet==1.1.5
# via apache-superset
proto-plus==1.25.0
@@ -575,64 +138,22 @@ psutil==6.1.0
# via apache-superset
psycopg2-binary==2.9.6
# via apache-superset
pyarrow==14.0.2
# via
# -c requirements/base.txt
# apache-superset
# db-dtypes
# pandas-gbq
pyasn1==0.6.1
# via
# -c requirements/base.txt
# pyasn1-modules
# python-ldap
# rsa
pyasn1-modules==0.4.1
# via
# -c requirements/base.txt
# google-auth
# python-ldap
pycparser==2.22
# via
# -c requirements/base.txt
# cffi
pydata-google-auth==1.9.0
# via pandas-gbq
pydruid==0.6.9
# via apache-superset
pyee==11.0.1
# via playwright
pyfakefs==5.3.5
# via apache-superset
pygments==2.18.0
# via
# -c requirements/base.txt
# rich
pyhive==0.7.0
pyhive[presto]==0.7.0
# via apache-superset
pyinstrument==4.4.0
# via apache-superset
pyjwt==2.10.1
pyproject-hooks==1.2.0
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
# flask-jwt-extended
pynacl==1.5.0
# via
# -c requirements/base.txt
# paramiko
pyopenssl==24.2.1
# via
# -c requirements/base.txt
# shillelagh
pyparsing==3.2.0
# via
# -c requirements/base.txt
# apache-superset
# matplotlib
pyrsistent==0.20.0
# via
# -c requirements/base.txt
# jsonschema
# build
# pip-tools
pytest==7.4.4
# via
# apache-superset
@@ -642,248 +163,45 @@ pytest-cov==6.0.0
# via apache-superset
pytest-mock==3.10.0
# via apache-superset
python-dateutil==2.9.0.post0
# via
# -c requirements/base.txt
# apache-superset
# celery
# croniter
# flask-appbuilder
# freezegun
# google-cloud-bigquery
# holidays
# matplotlib
# pandas
# pyhive
# shillelagh
# trino
python-dotenv==1.0.1
# via
# -c requirements/base.txt
# apache-superset
python-geohash==0.8.5
# via
# -c requirements/base.txt
# apache-superset
python-ldap==3.4.4
# via apache-superset
pytz==2024.2
# via
# -c requirements/base.txt
# croniter
# flask-babel
# pandas
# trino
pyxlsb==1.0.10
# via
# -c requirements/base.txt
# pandas
pyyaml==6.0.2
# via
# -c requirements/base.txt
# apache-superset
# apispec
# jsonschema-spec
# pre-commit
redis==4.6.0
# via
# -c requirements/base.txt
# apache-superset
requests==2.32.2
# via
# -c requirements/base.txt
# docker
# google-api-core
# google-cloud-bigquery
# jsonschema-spec
# pydruid
# pyhive
# requests-cache
# requests-oauthlib
# shillelagh
# trino
requests-cache==1.2.0
# via
# -c requirements/base.txt
# shillelagh
requests-oauthlib==2.0.0
# via google-auth-oauthlib
rfc3339-validator==0.1.4
# via openapi-schema-validator
rich==13.9.4
# via
# -c requirements/base.txt
# flask-limiter
rsa==4.9
# via
# -c requirements/base.txt
# google-auth
ruff==0.8.0
# via apache-superset
selenium==3.141.0
# via
# -c requirements/base.txt
# apache-superset
setuptools==75.6.0
# via
# nodeenv
# pandas-gbq
# pydata-google-auth
# zope-event
# zope-interface
shillelagh==1.2.18
# via
# -c requirements/base.txt
# apache-superset
shortid==0.1.2
# via
# -c requirements/base.txt
# apache-superset
simplejson==3.19.3
# via
# -c requirements/base.txt
# apache-superset
six==1.16.0
# via
# -c requirements/base.txt
# prison
# python-dateutil
# rfc3339-validator
# url-normalize
# wtforms-json
slack-sdk==3.33.4
# via
# -c requirements/base.txt
# apache-superset
sqlalchemy==1.4.54
# via
# -c requirements/base.txt
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
# shillelagh
# sqlalchemy-bigquery
# sqlalchemy-utils
sqlalchemy-bigquery==1.12.0
# via apache-superset
sqlalchemy-utils==0.38.3
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
sqlglot==25.24.5
# via
# -c requirements/base.txt
# apache-superset
sqloxide==0.1.51
# via apache-superset
sqlparse==0.5.2
# via
# -c requirements/base.txt
# apache-superset
sshtunnel==0.4.0
# via
# -c requirements/base.txt
# apache-superset
statsd==4.0.1
# via apache-superset
tabulate==0.8.10
# via
# -c requirements/base.txt
# apache-superset
tomli==2.2.1
tomli==2.1.0
# via
# build
# coverage
# pip-tools
# pytest
toposort==1.10
# via pip-compile-multi
tqdm==4.67.1
# via
# cmdstanpy
# prophet
trino==0.330.0
# via apache-superset
typing-extensions==4.12.2
# via
# -c requirements/base.txt
# alembic
# apache-superset
# cattrs
# flask-limiter
# limits
# rich
# shillelagh
tzdata==2024.2
# via
# -c requirements/base.txt
# celery
# kombu
# pandas
tzlocal==5.2
# via trino
url-normalize==1.4.3
# via
# -c requirements/base.txt
# requests-cache
urllib3==1.26.18
# via
# -c requirements/base.txt
# docker
# requests
# requests-cache
# selenium
vine==5.1.0
# via
# -c requirements/base.txt
# amqp
# celery
# kombu
virtualenv==20.23.1
# via pre-commit
wcwidth==0.2.13
# via
# -c requirements/base.txt
# prompt-toolkit
werkzeug==3.1.3
# via
# -c requirements/base.txt
# flask
# flask-appbuilder
# flask-jwt-extended
# flask-login
wrapt==1.17.0
# via
# -c requirements/base.txt
# deprecated
wtforms==3.2.1
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
# flask-wtf
# wtforms-json
wtforms-json==0.3.5
# via
# -c requirements/base.txt
# apache-superset
xlrd==2.0.1
# via
# -c requirements/base.txt
# pandas
xlsxwriter==3.0.9
# via
# -c requirements/base.txt
# apache-superset
# pandas
zipp==3.21.0
# via
# -c requirements/base.txt
# importlib-metadata
wheel==0.45.1
# via pip-tools
zope-event==5.0
# via gevent
zope-interface==5.4.0
# via gevent
zstandard==0.23.0
# via
# -c requirements/base.txt
# flask-compress
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools

View File

@@ -1,4 +1,9 @@
# This file was autogenerated by uv via the following command:
# uv pip compile requirements/translations.in -o requirements/translations.txt
# SHA1:cad160f3d4cd7c33896f42a479eeaa1b5bedc5fb
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
babel==2.16.0
# via -r requirements/translations.in

View File

@@ -70,7 +70,7 @@ def extract_modified_tables(module: ModuleType) -> set[str]:
return tables
def find_models(module: ModuleType) -> list[type[Model]]: # noqa: C901
def find_models(module: ModuleType) -> list[type[Model]]:
"""
Find all models in a migration script.
"""
@@ -94,7 +94,7 @@ def find_models(module: ModuleType) -> list[type[Model]]: # noqa: C901
# downgrade
sqlalchemy_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
engine = create_engine(sqlalchemy_uri)
Base = automap_base() # noqa: N806
Base = automap_base()
Base.prepare(engine, reflect=True)
seen = set()
while tables:
@@ -138,7 +138,7 @@ def find_models(module: ModuleType) -> list[type[Model]]: # noqa: C901
@click.option("--limit", default=1000, help="Maximum number of entities.")
@click.option("--force", is_flag=True, help="Do not prompt for confirmation.")
@click.option("--no-auto-cleanup", is_flag=True, help="Do not remove created models.")
def main( # noqa: C901
def main(
filepath: str, limit: int = 1000, force: bool = False, no_auto_cleanup: bool = False
) -> None:
auto_cleanup = not no_auto_cleanup

View File

@@ -49,7 +49,7 @@ github_repo = os.environ.get("GITHUB_REPOSITORY", "apache/superset")
def request(
method: Literal["GET", "POST", "DELETE", "PUT"], endpoint: str, **kwargs: Any
) -> dict[str, Any]:
resp = requests.request( # noqa: S113
resp = requests.request(
method,
f"https://api.github.com/{endpoint.lstrip('/')}",
headers={"Authorization": f"Bearer {github_token}"},
@@ -152,7 +152,7 @@ Date: {date_str}
help="Whether to also cancel running workflows.",
)
@click.argument("branch_or_pull", required=False)
def cancel_github_workflows( # noqa: C901
def cancel_github_workflows(
branch_or_pull: Optional[str],
repo: str,
event: list[str],

View File

@@ -51,12 +51,12 @@ GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
def fetch_files_github_api(url: str): # type: ignore
"""Fetches data using GitHub API."""
req = Request(url) # noqa: S310
req = Request(url)
req.add_header("Authorization", f"Bearer {GITHUB_TOKEN}")
req.add_header("Accept", "application/vnd.github.v3+json")
print(f"Fetching from {url}")
with urlopen(req) as response: # noqa: S310
with urlopen(req) as response:
body = response.read()
return json.loads(body)
@@ -130,7 +130,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
)
# Output results
output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt" # noqa: S108
output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt"
with open(output_path, "a") as f:
for check, changed in changes_detected.items():
if changed:
@@ -139,8 +139,8 @@ def main(event_type: str, sha: str, repo: str) -> None:
def get_git_sha() -> str:
return os.getenv("GITHUB_SHA") or subprocess.check_output( # noqa: S603
["git", "rev-parse", "HEAD"] # noqa: S607
return os.getenv("GITHUB_SHA") or subprocess.check_output(
["git", "rev-parse", "HEAD"]
).strip().decode("utf-8")

View File

@@ -47,7 +47,7 @@ class Requirement:
def get_version(self) -> Optional[str]:
try:
version = subprocess.check_output(self.command, shell=True).decode().strip() # noqa: S602
version = subprocess.check_output(self.command, shell=True).decode().strip()
if self.version_post_process:
version = self.version_post_process(version)
return version.split()[-1]
@@ -76,7 +76,7 @@ class Requirement:
def format_result(self) -> str:
ideal_range_str = f"{self.ideal_range[0]} - {self.ideal_range[1]}"
supported_range_str = f"{self.supported_range[0]} - {self.supported_range[1]}"
return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}" # noqa: E501
return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}"
def check_memory(min_gb: int) -> str:
@@ -101,9 +101,8 @@ def get_cpu_info() -> str:
def get_docker_platform() -> str:
try:
output = (
subprocess.check_output( # noqa: S602
"docker info --format '{{.OperatingSystem}}'", # noqa: S607
shell=True, # noqa: S607
subprocess.check_output(
"docker info --format '{{.OperatingSystem}}'", shell=True
)
.decode()
.strip()
@@ -118,7 +117,7 @@ def get_docker_platform() -> str:
@click.command(
help="""
This script checks the local environment for various software versions and other requirements, providing feedback on whether they are ideal, supported, or unsupported.
""" # noqa: E501
"""
)
@click.option(
"--docker", is_flag=True, help="Check Docker and Docker Compose requirements"
@@ -129,7 +128,7 @@ This script checks the local environment for various software versions and other
help="Check frontend requirements (npm, Node.js, memory)",
)
@click.option("--backend", is_flag=True, help="Check backend requirements (Python)")
def main(docker: bool, frontend: bool, backend: bool) -> None: # noqa: C901
def main(docker: bool, frontend: bool, backend: bool) -> None:
requirements = [
Requirement(
"python",

View File

@@ -74,7 +74,7 @@ def run_cypress_for_test_file(
print(f"DRY RUN: {cmd}")
return 0
process = subprocess.Popen( # noqa: S602
process = subprocess.Popen(
cmd,
shell=True,
stdout=subprocess.PIPE,

View File

@@ -171,7 +171,7 @@ def generate_erd(file_path: str) -> None:
"""
data = introspect_models()
templates_path = os.path.dirname(__file__)
env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path)) # noqa: S701
env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path))
# Load the template
template = env.get_template("erd.template.puml")

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
ADDITIONAL_ARGS="$@"
# Generate the requirements/base.txt file
uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt $ADDITIONAL_ARGS
# Generate the requirements/development.txt file, making sure requirements/base.txt is a constraint to keep the versions in sync
uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt $ADDITIONAL_ARGS
uv pip compile requirements/translations.in -o requirements/translations.txt $ADDITIONAL_ARGS

View File

@@ -30,7 +30,7 @@ with open(PACKAGE_JSON) as package_file:
def get_git_sha() -> str:
try:
output = subprocess.check_output(["git", "rev-parse", "HEAD"]) # noqa: S603, S607
output = subprocess.check_output(["git", "rev-parse", "HEAD"])
return output.decode().strip()
except Exception: # pylint: disable=broad-except
return ""
@@ -58,7 +58,7 @@ setup(
zip_safe=False,
entry_points={
"console_scripts": ["superset=superset.cli.main:superset"],
# the `postgres` and `postgres+psycopg2://` schemes were removed in SQLAlchemy 1.4 # noqa: E501
# the `postgres` and `postgres+psycopg2://` schemes were removed in SQLAlchemy 1.4
# add an alias here to prevent breaking existing databases
"sqlalchemy.dialects": [
"postgres.psycopg2 = sqlalchemy.dialects.postgresql:dialect",

View File

@@ -57,7 +57,7 @@ const drillBy = (targetDrillByColumn: string, isLegacy = false) => {
cy.get('.ant-dropdown:not(.ant-dropdown-hidden)')
.first()
.find("[role='menu'] [role='menuitem'] [title='Drill by']")
.trigger('mouseover', { force: true });
.trigger('mouseover');
cy.get(
'.ant-dropdown-menu-submenu:not(.ant-dropdown-menu-hidden) [data-test="drill-by-submenu"]',
)

View File

@@ -51,7 +51,7 @@ function openProperties() {
cy.getBySel('header-actions-menu')
.contains('Edit properties')
.click({ force: true });
cy.get('.antd5-modal-body').should('be.visible');
cy.get('.ant-modal-body').should('be.visible');
});
}
@@ -60,7 +60,7 @@ function openExploreProperties() {
cy.get('.ant-dropdown-menu')
.contains('Edit chart properties')
.click({ force: true });
cy.get('.antd5-modal-body').should('be.visible');
cy.get('.ant-modal-body').should('be.visible');
}
function assertMetadata(text: string) {
@@ -77,7 +77,7 @@ function assertMetadata(text: string) {
}
function openAdvancedProperties() {
cy.get('.antd5-modal-body')
cy.get('.ant-modal-body')
.contains('Advanced')
.should('be.visible')
.click({ force: true });
@@ -1093,14 +1093,14 @@ describe('Dashboard edit', () => {
applyChanges();
});
it.skip('should not accept an invalid color scheme', () => {
it('should not accept an invalid color scheme', () => {
openAdvancedProperties();
clearMetadata();
// allow console error
cy.allowConsoleErrors(['Error: A valid color scheme is required']);
writeMetadata('{"color_scheme":"wrongcolorscheme"}');
applyChanges();
cy.get('.antd5-modal-body')
cy.get('.ant-modal-body')
.contains('A valid color scheme is required')
.should('be.visible');
});

View File

@@ -56,7 +56,7 @@ describe('Datasource control', () => {
cy.focused().type(`${newMetricName}{enter}`);
cy.get('[data-test="datasource-modal-save"]').click();
cy.get('.antd5-modal-confirm-btns button').contains('OK').click();
cy.get('.ant-modal-confirm-btns button').contains('OK').click();
// select new metric
cy.get('[data-test=metrics]')
.contains('Drop columns/metrics here or click')
@@ -68,7 +68,7 @@ describe('Datasource control', () => {
// delete metric
cy.get('[data-test="datasource-menu-trigger"]').click();
cy.get('[data-test="edit-dataset"]').click();
cy.get('.antd5-modal-content').within(() => {
cy.get('.ant-modal-content').within(() => {
cy.get('[data-test="collection-tab-Metrics"]')
.contains('Metrics')
.click();
@@ -78,7 +78,7 @@ describe('Datasource control', () => {
.find('[data-test="crud-delete-icon"]')
.click();
cy.get('[data-test="datasource-modal-save"]').click();
cy.get('.antd5-modal-confirm-btns button').contains('OK').click();
cy.get('.ant-modal-confirm-btns button').contains('OK').click();
cy.get('[data-test="metrics"]').contains(newMetricName).should('not.exist');
});
});
@@ -121,7 +121,7 @@ describe('VizType control', () => {
cy.contains('View all charts').click();
cy.get('.antd5-modal-content').within(() => {
cy.get('.ant-modal-content').within(() => {
cy.get('button').contains('KPI').click(); // change categories
cy.get('[role="button"]').contains('Big Number').click();
cy.get('button').contains('Select').click();

View File

@@ -42,8 +42,8 @@ describe('Test explore links', () => {
cy.wait('@chartData').then(() => {
cy.get('code');
});
cy.get('.antd5-modal-content').within(() => {
cy.get('button.antd5-modal-close').first().click({ force: true });
cy.get('.ant-modal-content').within(() => {
cy.get('button.ant-modal-close').first().click({ force: true });
});
});

View File

@@ -97,8 +97,8 @@ export const databasesPage = {
infoAlert: '.antd5-alert',
serviceAccountInput: '[name="credentials_info"]',
connectionStep: {
modal: '.antd5-modal-content',
modalBody: '.antd5-modal-body',
modal: '.ant-modal-content',
modalBody: '.ant-modal-body',
stepTitle: '.css-7x6kk > h4',
helperBottom: '.helper-bottom',
postgresDatabase: '[name="database"]',
@@ -150,7 +150,7 @@ export const sqlLabView = {
sqlEditor: '#brace-editor textarea',
saveAsButton: '.SaveQuery > .ant-btn',
saveAsModal: {
footer: '.antd5-modal-footer',
footer: '.ant-modal-footer',
queryNameInput: 'input[class^="ant-input"]',
},
sqlToolbar: {
@@ -199,12 +199,12 @@ export const annotationLayersView = {
},
modal: {
content: {
content: '.antd5-modal-body',
title: '.antd5-modal-body > :nth-child(2) > input',
content: '.ant-modal-body',
title: '.ant-modal-body > :nth-child(2) > input',
description: "[name='descr']",
},
footer: {
footer: '.antd5-modal-footer',
footer: '.ant-modal-footer',
addButton: dataTestLocator('modal-confirm-button'),
cancelButton: dataTestLocator('modal-cancel-button'),
},
@@ -216,7 +216,7 @@ export const datasetsList = {
newDatasetModal: {
inputField: '[class="section"]',
addButton: dataTestLocator('modal-confirm-button'),
body: '.antd5-modal-body',
body: '.ant-modal-body',
},
table: {
tableRow: {
@@ -261,7 +261,7 @@ export const datasetsList = {
},
},
deleteDatasetModal: {
modal: '.antd5-modal-content',
modal: '.ant-modal-content',
deleteInput: dataTestLocator('delete-modal-input'),
deleteButton: dataTestLocator('modal-confirm-button'),
text: '.css-kxmt87',
@@ -318,8 +318,8 @@ export const chartListView = {
};
export const nativeFilters = {
modal: {
container: '.antd5-modal',
footer: '.antd5-modal-footer',
container: '.ant-modal',
footer: '.ant-modal-footer',
saveButton: dataTestLocator('native-filter-modal-save-button'),
cancelButton: dataTestLocator('native-filter-modal-cancel-button'),
confirmCancelButton: dataTestLocator(
@@ -476,15 +476,15 @@ export const exploreView = {
},
chartAreaItem: '.nv-legend-text',
viewQueryModal: {
container: '.antd5-modal-content',
closeButton: 'button.antd5-modal-close',
container: '.ant-modal-content',
closeButton: 'button.ant-modal-close',
},
embedCodeModal: {
container: dataTestLocator('embed-code-popover'),
textfield: dataTestLocator('embed-code-textarea'),
},
saveModal: {
modal: '.antd5-modal-content',
modal: '.ant-modal-content',
chartNameInput: dataTestLocator('new-chart-name'),
dashboardNameInput: '.ant-select-selection-search-input',
addToDashboardInput: dataTestLocator(
@@ -580,7 +580,7 @@ export const exploreView = {
},
},
editDatasetModal: {
container: '.antd5-modal-content',
container: '.ant-modal-content',
datasetTabsContainer: dataTestLocator('edit-dataset-tabs'),
saveButton: dataTestLocator('datasource-modal-save'),
metricsTab: {
@@ -588,7 +588,7 @@ export const exploreView = {
rowsContainer: dataTestLocator('table-content-rows'),
},
confirmModal: {
okButton: '.antd5-modal-confirm-btns .ant-btn-primary',
okButton: '.ant-modal-confirm-btns .ant-btn-primary',
},
},
visualizationTypeModal: {
@@ -619,12 +619,12 @@ export const dashboardView = {
closeButton: dataTestLocator('close-button'),
},
saveModal: {
modal: '.antd5-modal-content',
modal: '.ant-modal-content',
dashboardNameInput: '.ant-input',
saveButton: dataTestLocator('modal-save-dashboard-button'),
},
dashboardProperties: {
modal: '.antd5-modal-content',
modal: '.ant-modal-content',
dashboardTitleInput: dataTestLocator('dashboard-title-input'),
modalButton: '[type="button"]',
},

View File

@@ -221,7 +221,6 @@
"babel-plugin-dynamic-import-node": "^2.3.3",
"babel-plugin-jsx-remove-data-test-id": "^3.0.0",
"babel-plugin-lodash": "^3.3.4",
"compression-webpack-plugin": "^11.1.0",
"copy-webpack-plugin": "^12.0.2",
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
@@ -284,7 +283,6 @@
"speed-measure-webpack-plugin": "^1.5.0",
"storybook": "8.1.11",
"style-loader": "^4.0.0",
"terser-webpack-plugin": "^5.3.11",
"thread-loader": "^4.0.2",
"ts-loader": "^9.5.1",
"typescript": "^4.8.4",
@@ -19747,79 +19745,6 @@
"node": ">= 0.8.0"
}
},
"node_modules/compression-webpack-plugin": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/compression-webpack-plugin/-/compression-webpack-plugin-11.1.0.tgz",
"integrity": "sha512-zDOQYp10+upzLxW+VRSjEpRRwBXJdsb5lBMlRxx1g8hckIFBpe3DTI0en2w7h+beuq89576RVzfiXrkdPGrHhA==",
"dev": true,
"dependencies": {
"schema-utils": "^4.2.0",
"serialize-javascript": "^6.0.2"
},
"engines": {
"node": ">= 18.12.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
},
"peerDependencies": {
"webpack": "^5.1.0"
}
},
"node_modules/compression-webpack-plugin/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/compression-webpack-plugin/node_modules/ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"dev": true,
"dependencies": {
"fast-deep-equal": "^3.1.3"
},
"peerDependencies": {
"ajv": "^8.8.2"
}
},
"node_modules/compression-webpack-plugin/node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true
},
"node_modules/compression-webpack-plugin/node_modules/schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"dev": true,
"dependencies": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
},
"engines": {
"node": ">= 10.13.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
}
},
"node_modules/compute-gcd": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/compute-gcd/-/compute-gcd-1.2.1.tgz",
@@ -51131,9 +51056,8 @@
}
},
"node_modules/terser": {
"version": "5.37.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.37.0.tgz",
"integrity": "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA==",
"version": "5.27.0",
"license": "BSD-2-Clause",
"dependencies": {
"@jridgewell/source-map": "^0.3.3",
"acorn": "^8.8.2",
@@ -51148,16 +51072,15 @@
}
},
"node_modules/terser-webpack-plugin": {
"version": "5.3.11",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.11.tgz",
"integrity": "sha512-RVCsMfuD0+cTt3EwX8hSl2Ks56EbFHWmhluwcqoPKtBnfjiT6olaq7PRIRfhyU8nnC2MrnDrBLfrD/RGE+cVXQ==",
"version": "5.3.10",
"devOptional": true,
"license": "MIT",
"dependencies": {
"@jridgewell/trace-mapping": "^0.3.25",
"@jridgewell/trace-mapping": "^0.3.20",
"jest-worker": "^27.4.5",
"schema-utils": "^4.3.0",
"serialize-javascript": "^6.0.2",
"terser": "^5.31.1"
"schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.1",
"terser": "^5.26.0"
},
"engines": {
"node": ">= 10.13.0"
@@ -51181,34 +51104,6 @@
}
}
},
"node_modules/terser-webpack-plugin/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"devOptional": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/terser-webpack-plugin/node_modules/ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"devOptional": true,
"dependencies": {
"fast-deep-equal": "^3.1.3"
},
"peerDependencies": {
"ajv": "^8.8.2"
}
},
"node_modules/terser-webpack-plugin/node_modules/has-flag": {
"version": "4.0.0",
"devOptional": true,
@@ -51230,31 +51125,6 @@
"node": ">= 10.13.0"
}
},
"node_modules/terser-webpack-plugin/node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"devOptional": true
},
"node_modules/terser-webpack-plugin/node_modules/schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"devOptional": true,
"dependencies": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
},
"engines": {
"node": ">= 10.13.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
}
},
"node_modules/terser-webpack-plugin/node_modules/supports-color": {
"version": "8.1.1",
"devOptional": true,
@@ -73618,57 +73488,6 @@
"vary": "~1.1.2"
}
},
"compression-webpack-plugin": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/compression-webpack-plugin/-/compression-webpack-plugin-11.1.0.tgz",
"integrity": "sha512-zDOQYp10+upzLxW+VRSjEpRRwBXJdsb5lBMlRxx1g8hckIFBpe3DTI0en2w7h+beuq89576RVzfiXrkdPGrHhA==",
"dev": true,
"requires": {
"schema-utils": "^4.2.0",
"serialize-javascript": "^6.0.2"
},
"dependencies": {
"ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"requires": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
}
},
"ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"dev": true,
"requires": {
"fast-deep-equal": "^3.1.3"
}
},
"json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true
},
"schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"dev": true,
"requires": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
}
}
}
},
"compute-gcd": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/compute-gcd/-/compute-gcd-1.2.1.tgz",
@@ -93486,9 +93305,7 @@
"dev": true
},
"terser": {
"version": "5.37.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.37.0.tgz",
"integrity": "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA==",
"version": "5.27.0",
"requires": {
"@jridgewell/source-map": "^0.3.3",
"acorn": "^8.8.2",
@@ -93497,39 +93314,16 @@
}
},
"terser-webpack-plugin": {
"version": "5.3.11",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.11.tgz",
"integrity": "sha512-RVCsMfuD0+cTt3EwX8hSl2Ks56EbFHWmhluwcqoPKtBnfjiT6olaq7PRIRfhyU8nnC2MrnDrBLfrD/RGE+cVXQ==",
"version": "5.3.10",
"devOptional": true,
"requires": {
"@jridgewell/trace-mapping": "^0.3.25",
"@jridgewell/trace-mapping": "^0.3.20",
"jest-worker": "^27.4.5",
"schema-utils": "^4.3.0",
"serialize-javascript": "^6.0.2",
"terser": "^5.31.1"
"schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.1",
"terser": "^5.26.0"
},
"dependencies": {
"ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"devOptional": true,
"requires": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
}
},
"ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"devOptional": true,
"requires": {
"fast-deep-equal": "^3.1.3"
}
},
"has-flag": {
"version": "4.0.0",
"devOptional": true
@@ -93543,24 +93337,6 @@
"supports-color": "^8.0.0"
}
},
"json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"devOptional": true
},
"schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"devOptional": true,
"requires": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
}
},
"supports-color": {
"version": "8.1.1",
"devOptional": true,

View File

@@ -288,7 +288,6 @@
"babel-plugin-dynamic-import-node": "^2.3.3",
"babel-plugin-jsx-remove-data-test-id": "^3.0.0",
"babel-plugin-lodash": "^3.3.4",
"compression-webpack-plugin": "^11.1.0",
"copy-webpack-plugin": "^12.0.2",
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
@@ -351,7 +350,6 @@
"speed-measure-webpack-plugin": "^1.5.0",
"storybook": "8.1.11",
"style-loader": "^4.0.0",
"terser-webpack-plugin": "^5.3.11",
"thread-loader": "^4.0.2",
"ts-loader": "^9.5.1",
"typescript": "^4.8.4",

View File

@@ -80,7 +80,6 @@ import ethiopia from './countries/ethiopia.geojson';
import fiji from './countries/fiji.geojson';
import finland from './countries/finland.geojson';
import france from './countries/france.geojson';
import france_overseas from './countries/france_overseas.geojson';
import france_regions from './countries/france_regions.geojson';
import french_polynesia from './countries/french_polynesia.geojson';
import gabon from './countries/gabon.geojson';
@@ -281,7 +280,6 @@ export const countries = {
fiji,
finland,
france,
france_overseas,
france_regions,
french_polynesia,
gabon,
@@ -429,9 +427,6 @@ export const countryOptions = Object.keys(countries).map(x => {
if (x === 'france_regions') {
return [x, 'France (regions)'];
}
if (x === 'france_overseas') {
return [x, 'France (with overseas)'];
}
if (x === 'turkey_regions') {
return [x, 'Turkey (regions)'];
}

View File

@@ -188,11 +188,7 @@ export default function transformProps(
showTotal,
sliceId,
} = formData;
const {
currencyFormats = {},
columnFormats = {},
verboseMap = {},
} = datasource;
const { currencyFormats = {}, columnFormats = {} } = datasource;
const refs: Refs = {};
const primaryValueFormatter = getValueFormatter(
metric,
@@ -338,10 +334,8 @@ export default function transformProps(
secondaryValueFormatter,
colorByCategory,
totalValue,
metricLabel: verboseMap[metricLabel] || metricLabel,
secondaryMetricLabel: secondaryMetricLabel
? verboseMap[secondaryMetricLabel] || secondaryMetricLabel
: undefined,
metricLabel,
secondaryMetricLabel,
}),
},
series: [

View File

@@ -39,37 +39,34 @@ export const GlobalStyles = () => (
.echarts-tooltip[style*='visibility: hidden'] {
display: none !important;
}
.antd5-dropdown,
.ant-dropdown {
z-index: ${theme.zIndex.max};
}
// TODO: Remove when buttons have been upgraded to Ant Design 5.
// TODO: Remove when on Ant Design 5.
// Check src/components/Modal for more info.
.ant-modal-confirm {
button {
border: none;
border-radius: ${theme.borderRadius}px;
line-height: 1.5715;
font-size: ${theme.typography.sizes.s}px;
font-weight: ${theme.typography.weights.bold};
.modal-functions-ok-button {
border-radius: ${theme.borderRadius}px;
background: ${theme.colors.primary.base};
border: none;
color: ${theme.colors.grayscale.light5};
line-height: 1.5715;
font-size: ${theme.typography.sizes.s}px;
font-weight: ${theme.typography.weights.bold};
&:hover {
background: ${theme.colors.primary.dark1};
}
.ant-btn-primary:not(.btn-danger) {
background: ${theme.colors.primary.base};
color: ${theme.colors.grayscale.light5};
&:hover {
background: ${theme.colors.primary.dark1};
}
}
.ant-btn-default:not(.btn-danger) {
background: ${theme.colors.primary.light4};
color: ${theme.colors.primary.dark1};
&:hover {
background: ${mix(
0.1,
theme.colors.primary.base,
theme.colors.primary.light4,
)};
}
}
.modal-functions-cancel-button {
border-radius: ${theme.borderRadius}px;
background: ${theme.colors.primary.light4};
border: none;
color: ${theme.colors.primary.dark1};
line-height: 1.5715;
font-size: ${theme.typography.sizes.s}px;
font-weight: ${theme.typography.weights.bold};
&:hover {
background: ${mix(
0.1,
theme.colors.primary.base,
theme.colors.primary.light4,
)};
}
}
.column-config-popover {

View File

@@ -89,11 +89,11 @@ const SqlLabStyles = styled.div`
}
}
.ResultsModal .antd5-modal-body {
.ResultsModal .ant-modal-body {
min-height: ${theme.gridUnit * 140}px;
}
.antd5-modal-body {
.ant-modal-body {
overflow: auto;
}
}

View File

@@ -73,10 +73,10 @@ describe('SaveDatasetModal', () => {
const inputField = screen.getByRole('textbox');
const inputFieldText = screen.getByDisplayValue(/unimportant/i);
expect(saveRadioBtn).toBeInTheDocument();
expect(fieldLabel).toBeInTheDocument();
expect(inputField).toBeInTheDocument();
expect(inputFieldText).toBeInTheDocument();
expect(saveRadioBtn).toBeVisible();
expect(fieldLabel).toBeVisible();
expect(inputField).toBeVisible();
expect(inputFieldText).toBeVisible();
});
it('renders an "Overwrite existing" field', () => {
@@ -89,23 +89,23 @@ describe('SaveDatasetModal', () => {
const inputField = screen.getByRole('combobox');
const placeholderText = screen.getByText(/select or type dataset name/i);
expect(overwriteRadioBtn).toBeInTheDocument();
expect(fieldLabel).toBeInTheDocument();
expect(inputField).toBeInTheDocument();
expect(placeholderText).toBeInTheDocument();
expect(overwriteRadioBtn).toBeVisible();
expect(fieldLabel).toBeVisible();
expect(inputField).toBeVisible();
expect(placeholderText).toBeVisible();
});
it('renders a close button', () => {
render(<SaveDatasetModal {...mockedProps} />, { useRedux: true });
expect(screen.getByRole('button', { name: /close/i })).toBeInTheDocument();
expect(screen.getByRole('button', { name: /close/i })).toBeVisible();
});
it('renders a save button when "Save as new" is selected', () => {
render(<SaveDatasetModal {...mockedProps} />, { useRedux: true });
// "Save as new" is selected when the modal opens by default
expect(screen.getByRole('button', { name: /save/i })).toBeInTheDocument();
expect(screen.getByRole('button', { name: /save/i })).toBeVisible();
});
it('renders an overwrite button when "Overwrite existing" is selected', () => {
@@ -117,9 +117,7 @@ describe('SaveDatasetModal', () => {
});
userEvent.click(overwriteRadioBtn);
expect(
screen.getByRole('button', { name: /overwrite/i }),
).toBeInTheDocument();
expect(screen.getByRole('button', { name: /overwrite/i })).toBeVisible();
});
it('renders the overwrite button as disabled until an existing dataset is selected', async () => {
@@ -183,16 +181,14 @@ describe('SaveDatasetModal', () => {
userEvent.click(overwriteConfirmationBtn);
// Overwrite screen text
expect(screen.getByText(/save or overwrite dataset/i)).toBeInTheDocument();
expect(screen.getByText(/save or overwrite dataset/i)).toBeVisible();
expect(
screen.getByText(/are you sure you want to overwrite this dataset\?/i),
).toBeInTheDocument();
).toBeVisible();
// Overwrite screen buttons
expect(screen.getByRole('button', { name: /close/i })).toBeInTheDocument();
expect(screen.getByRole('button', { name: /back/i })).toBeInTheDocument();
expect(
screen.getByRole('button', { name: /overwrite/i }),
).toBeInTheDocument();
expect(screen.getByRole('button', { name: /close/i })).toBeVisible();
expect(screen.getByRole('button', { name: /back/i })).toBeVisible();
expect(screen.getByRole('button', { name: /overwrite/i })).toBeVisible();
});
it('sends the schema when creating the dataset', async () => {

View File

@@ -104,7 +104,7 @@ describe('SavedQuery', () => {
name: /save query/i,
});
expect(saveQueryModalHeader).toBeInTheDocument();
expect(saveQueryModalHeader).toBeVisible();
});
it('renders the save query modal UI', () => {
@@ -129,17 +129,17 @@ describe('SavedQuery', () => {
const saveBtns = screen.getAllByRole('button', { name: /save/i });
const cancelBtn = screen.getByRole('button', { name: /cancel/i });
expect(closeBtn).toBeInTheDocument();
expect(saveQueryModalHeader).toBeInTheDocument();
expect(nameLabel).toBeInTheDocument();
expect(descriptionLabel).toBeInTheDocument();
expect(closeBtn).toBeVisible();
expect(saveQueryModalHeader).toBeVisible();
expect(nameLabel).toBeVisible();
expect(descriptionLabel).toBeVisible();
expect(textBoxes.length).toBe(2);
expect(nameTextbox).toBeInTheDocument();
expect(descriptionTextbox).toBeInTheDocument();
expect(nameTextbox).toBeVisible();
expect(descriptionTextbox).toBeVisible();
expect(saveBtns.length).toBe(2);
expect(saveBtns[0]).toBeInTheDocument();
expect(saveBtns[1]).toBeInTheDocument();
expect(cancelBtn).toBeInTheDocument();
expect(saveBtns[0]).toBeVisible();
expect(saveBtns[1]).toBeVisible();
expect(cancelBtn).toBeVisible();
});
it('renders a "save as new" and "update" button if query already exists', () => {
@@ -163,8 +163,8 @@ describe('SavedQuery', () => {
const saveAsNewBtn = screen.getByRole('button', { name: /save as new/i });
const updateBtn = screen.getByRole('button', { name: /update/i });
expect(saveAsNewBtn).toBeInTheDocument();
expect(updateBtn).toBeInTheDocument();
expect(saveAsNewBtn).toBeVisible();
expect(updateBtn).toBeVisible();
});
it('renders a split save button when allows_virtual_table_explore is enabled', async () => {
@@ -188,15 +188,17 @@ describe('SavedQuery', () => {
store: mockStore(mockState),
});
const caretBtn = await screen.findByRole('button', { name: /caret-down/i });
userEvent.click(caretBtn);
await waitFor(() => {
const caretBtn = screen.getByRole('button', { name: /caret-down/i });
userEvent.click(caretBtn);
const saveDatasetMenuItem = await screen.findByText(/save dataset/i);
userEvent.click(saveDatasetMenuItem);
const saveDatasetMenuItem = screen.getByText(/save dataset/i);
userEvent.click(saveDatasetMenuItem);
});
const saveDatasetHeader = screen.getByText(/save or overwrite dataset/i);
expect(saveDatasetHeader).toBeInTheDocument();
expect(saveDatasetHeader).toBeVisible();
});
it('renders the save dataset modal UI', async () => {
@@ -205,11 +207,13 @@ describe('SavedQuery', () => {
store: mockStore(mockState),
});
const caretBtn = await screen.findByRole('button', { name: /caret-down/i });
userEvent.click(caretBtn);
await waitFor(() => {
const caretBtn = screen.getByRole('button', { name: /caret-down/i });
userEvent.click(caretBtn);
const saveDatasetMenuItem = await screen.findByText(/save dataset/i);
userEvent.click(saveDatasetMenuItem);
const saveDatasetMenuItem = screen.getByText(/save dataset/i);
userEvent.click(saveDatasetMenuItem);
});
const closeBtn = screen.getByRole('button', { name: /close/i });
const saveDatasetHeader = screen.getByText(/save or overwrite dataset/i);
@@ -227,14 +231,14 @@ describe('SavedQuery', () => {
/select or type dataset name/i,
);
expect(saveDatasetHeader).toBeInTheDocument();
expect(closeBtn).toBeInTheDocument();
expect(saveRadio).toBeInTheDocument();
expect(saveLabel).toBeInTheDocument();
expect(saveTextbox).toBeInTheDocument();
expect(overwriteRadio).toBeInTheDocument();
expect(overwriteLabel).toBeInTheDocument();
expect(overwriteCombobox).toBeInTheDocument();
expect(overwritePlaceholderText).toBeInTheDocument();
expect(saveDatasetHeader).toBeVisible();
expect(closeBtn).toBeVisible();
expect(saveRadio).toBeVisible();
expect(saveLabel).toBeVisible();
expect(saveTextbox).toBeVisible();
expect(overwriteRadio).toBeVisible();
expect(overwriteLabel).toBeVisible();
expect(overwriteCombobox).toBeVisible();
expect(overwritePlaceholderText).toBeVisible();
});
});

View File

@@ -428,7 +428,7 @@ export default function DrillByModal({
return (
<Modal
css={css`
.antd5-modal-footer {
.ant-modal-footer {
border-top: none;
}
`}

View File

@@ -117,13 +117,12 @@ const expectDrillToDetailModal = async (
filters: BinaryQueryObjectFilterClause[] = [],
) => {
const button = screen.getByRole('menuitem', { name: buttonName });
userEvent.click(button);
const modal = await screen.findByRole('dialog', {
name: `Drill to detail: ${chartName}`,
});
expect(modal).toBeInTheDocument();
expect(modal).toBeVisible();
expect(screen.getByTestId('modal-filters')).toHaveTextContent(
JSON.stringify(filters),
);

View File

@@ -118,7 +118,7 @@ export default function DrillDetailModal({
show={showModal}
onHide={onHideModal ?? (() => null)}
css={css`
.antd5-modal-body {
.ant-modal-body {
display: flex;
flex-direction: column;
}

View File

@@ -74,8 +74,8 @@ interface ChangeDatasourceModalProps {
show: boolean;
}
const CustomStyledModal = styled(StyledModal)`
.antd5-modal-body {
const Modal = styled(StyledModal)`
.ant-modal-body {
display: flex;
flex-direction: column;
}
@@ -255,7 +255,7 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
};
return (
<CustomStyledModal
<Modal
show={show}
onHide={onHide}
responsive
@@ -323,7 +323,7 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
)}
{confirmChange && <>{CONFIRM_WARNING_MESSAGE}</>}
</>
</CustomStyledModal>
</Modal>
);
};

View File

@@ -30,9 +30,9 @@ test('Must display title and content', () => {
};
render(<DeleteModal {...props} />);
expect(screen.getByTestId('test-title')).toBeInTheDocument();
expect(screen.getByTestId('test-title')).toBeInTheDocument();
expect(screen.getByTestId('test-description')).toBeInTheDocument();
expect(screen.getByTestId('test-title')).toBeVisible();
expect(screen.getByTestId('test-description')).toBeInTheDocument();
expect(screen.getByTestId('test-description')).toBeVisible();
});
test('Calling "onHide"', () => {
@@ -53,7 +53,7 @@ test('Calling "onHide"', () => {
expect(screen.getByTestId('delete-modal-input')).toHaveValue('del');
// close the modal
expect(screen.getByText('×')).toBeInTheDocument();
expect(screen.getByText('×')).toBeVisible();
userEvent.click(screen.getByText('×'));
expect(props.onHide).toHaveBeenCalledTimes(1);
expect(props.onConfirm).toHaveBeenCalledTimes(0);
@@ -73,7 +73,7 @@ test('Calling "onConfirm" only after typing "delete" in the input', () => {
render(<DeleteModal {...props} />);
expect(props.onHide).toHaveBeenCalledTimes(0);
expect(props.onConfirm).toHaveBeenCalledTimes(0);
expect(screen.getByTestId('delete-modal-input')).toBeInTheDocument();
expect(screen.getByTestId('delete-modal-input')).toBeVisible();
expect(props.onConfirm).toHaveBeenCalledTimes(0);
// do not execute "onConfirm" if you have not typed "delete"

View File

@@ -90,9 +90,8 @@ const StyleMenuItem = styled(Menu.Item)<{ divider?: boolean }>`
const StyleSubmenuItem = styled.div`
display: flex;
justify-content: space-between;
width: 100%;
> div {
flex-grow: 1;
> span {
width: 100%;
}
`;
@@ -103,7 +102,7 @@ export default (props: DropDownSelectableProps) => {
() => (label: string | ReactNode, key: string, divider?: boolean) => (
<StyleMenuItem key={key} divider={divider}>
<StyleSubmenuItem>
{label}
<span>{label}</span>
{selectedKeys?.includes(key) && (
<Icons.Check
iconColor={theme.colors.primary.base}

View File

@@ -70,7 +70,7 @@ const ErrorModal = styled(Modal)<{ level: ErrorLevel }>`
color: ${({ level, theme }) => theme.colors[level].dark2};
overflow-wrap: break-word;
.antd5-modal-header {
.ant-modal-header {
background-color: ${({ level, theme }) => theme.colors[level].light2};
padding: ${({ theme }) => 4 * theme.gridUnit}px;
}

View File

@@ -16,8 +16,8 @@
* specific language governing permissions and limitations
* under the License.
*/
import Modal, { ModalProps, ModalFuncProps } from '.';
import { ModalFuncProps } from 'antd/lib/modal';
import Modal, { ModalProps } from '.';
import Button from '../Button';
export default {
@@ -37,7 +37,6 @@ InteractiveModal.args = {
title: "I'm a modal!",
resizable: false,
draggable: false,
width: 500,
};
InteractiveModal.argTypes = {
@@ -56,8 +55,4 @@ export const ModalFunctions = (props: ModalFuncProps) => (
ModalFunctions.args = {
title: 'Modal title',
content: 'Modal content',
keyboard: true,
okText: 'Test',
maskClosable: true,
mask: true,
};

View File

@@ -26,13 +26,10 @@ import {
useState,
} from 'react';
import { isNil } from 'lodash';
import { ModalFuncProps } from 'antd/lib/modal';
import { styled, t } from '@superset-ui/core';
import { css } from '@emotion/react';
import {
Modal as AntdModal,
ModalProps as AntdModalProps,
ModalFuncProps,
} from 'antd-v5';
import { AntdModal, AntdModalProps } from 'src/components';
import Button from 'src/components/Button';
import { Resizable, ResizableProps } from 're-resizable';
import Draggable, {
@@ -83,8 +80,6 @@ interface StyledModalProps {
resizable?: boolean;
}
export type { ModalFuncProps };
const MODAL_HEADER_HEIGHT = 55;
const MODAL_MIN_CONTENT_HEIGHT = 54;
const MODAL_FOOTER_HEIGHT = 65;
@@ -94,7 +89,7 @@ const RESIZABLE_MIN_WIDTH = '380px';
const RESIZABLE_MAX_HEIGHT = '100vh';
const RESIZABLE_MAX_WIDTH = '100vw';
export const BaseModal = (props: AntdModalProps) => (
const BaseModal = (props: AntdModalProps) => (
// Removes mask animation. Fixed in 4.6.0.
// https://github.com/ant-design/ant-design/issues/27192
<AntdModal {...props} maskTransitionName="" />
@@ -111,45 +106,30 @@ export const StyledModal = styled(BaseModal)<StyledModalProps>`
top: 0;
`}
.antd5-modal-content {
.ant-modal-content {
display: flex;
flex-direction: column;
max-height: ${({ theme }) => `calc(100vh - ${theme.gridUnit * 8}px)`};
margin-bottom: ${({ theme }) => theme.gridUnit * 4}px;
margin-top: ${({ theme }) => theme.gridUnit * 4}px;
padding: 0;
}
.antd5-modal-header {
.ant-modal-header {
flex: 0 0 auto;
background-color: ${({ theme }) => theme.colors.grayscale.light4};
border-radius: ${({ theme }) => theme.borderRadius}px
${({ theme }) => theme.borderRadius}px 0 0;
padding: ${({ theme }) => theme.gridUnit * 4}px
${({ theme }) => theme.gridUnit * 6}px;
padding-left: ${({ theme }) => theme.gridUnit * 4}px;
padding-right: ${({ theme }) => theme.gridUnit * 4}px;
.antd5-modal-title {
font-weight: ${({ theme }) => theme.typography.weights.medium};
}
.antd5-modal-title h4 {
.ant-modal-title h4 {
display: flex;
margin: 0;
align-items: center;
}
}
.antd5-modal-close {
width: ${({ theme }) => theme.gridUnit * 14}px;
height: ${({ theme }) => theme.gridUnit * 14}px;
top: 0;
right: 0;
}
.antd5-modal-close:hover {
background: transparent;
}
.antd5-modal-close-x {
.ant-modal-close-x {
display: flex;
align-items: center;
@@ -162,18 +142,17 @@ export const StyledModal = styled(BaseModal)<StyledModalProps>`
}
}
.antd5-modal-body {
.ant-modal-body {
flex: 0 1 auto;
padding: ${({ theme }) => theme.gridUnit * 4}px;
overflow: auto;
${({ resizable, height }) => !resizable && height && `height: ${height};`}
}
.antd5-modal-footer {
.ant-modal-footer {
flex: 0 0 1;
border-top: ${({ theme }) => theme.gridUnit / 4}px solid
${({ theme }) => theme.colors.grayscale.light2};
padding: ${({ theme }) => theme.gridUnit * 4}px;
margin-top: 0;
.btn {
font-size: 12px;
@@ -191,14 +170,14 @@ export const StyledModal = styled(BaseModal)<StyledModalProps>`
margin-top: -${({ theme }) => theme.gridUnit * 4}px;
}
&.no-content-padding .antd5-modal-body {
&.no-content-padding .ant-modal-body {
padding: 0;
}
${({ draggable, theme }) =>
draggable &&
`
.antd5-modal-header {
.ant-modal-header {
padding: 0;
.draggable-trigger {
cursor: move;
@@ -218,10 +197,10 @@ export const StyledModal = styled(BaseModal)<StyledModalProps>`
height: 100%;
}
.antd5-modal-content {
.ant-modal-content {
height: 100%;
.antd5-modal-body {
.ant-modal-body {
/* 100% - header height - footer height */
height: ${
hideFooter
@@ -233,7 +212,6 @@ export const StyledModal = styled(BaseModal)<StyledModalProps>`
}
`}
`;
const defaultResizableConfig = (hideFooter: boolean | undefined) => ({
maxHeight: RESIZABLE_MAX_HEIGHT,
maxWidth: RESIZABLE_MAX_WIDTH,
@@ -355,7 +333,7 @@ const CustomModal = ({
width={modalWidth}
maxWidth={maxWidth}
responsive={responsive}
open={show}
visible={show}
title={<ModalTitle />}
closeIcon={
<span className="close" aria-hidden="true">
@@ -399,13 +377,26 @@ const CustomModal = ({
};
CustomModal.displayName = 'Modal';
// Ant Design 4 does not allow overriding Modal's buttons when
// using one of the pre-defined functions. Ant Design 5 Modal introduced
// the footer property that will allow that. Meanwhile, we're replicating
// Button style using global CSS in src/GlobalStyles.tsx.
// TODO: Replace this logic when on Ant Design 5.
const buttonProps = {
okButtonProps: { className: 'modal-functions-ok-button' },
cancelButtonProps: { className: 'modal-functions-cancel-button' },
};
// TODO: in another PR, rename this to CompatibilityModal
// and demote it as the default export.
// We should start using AntD component interfaces going forward.
const Modal = Object.assign(CustomModal, {
error: AntdModal.error,
warning: AntdModal.warning,
confirm: AntdModal.confirm,
error: (config: ModalFuncProps) =>
AntdModal.error({ ...config, ...buttonProps }),
warning: (config: ModalFuncProps) =>
AntdModal.warning({ ...config, ...buttonProps }),
confirm: (config: ModalFuncProps) =>
AntdModal.confirm({ ...config, ...buttonProps }),
useModal: AntdModal.useModal,
});

View File

@@ -62,6 +62,7 @@ export {
Dropdown as AntdDropdown,
Form as AntdForm,
Input as AntdInput,
Modal as AntdModal,
Select as AntdSelect,
Slider as AntdSlider,
Tabs as AntdTabs,
@@ -70,5 +71,6 @@ export {
// Exported types
export type { FormInstance } from 'antd/lib/form';
export type { ModalProps as AntdModalProps } from 'antd/lib/modal';
export type { DropDownProps as AntdDropdownProps } from 'antd/lib/dropdown';
export type { RadioChangeEvent } from 'antd/lib/radio';

View File

@@ -302,7 +302,6 @@ const PropertiesModal = ({
content: t('A valid color scheme is required'),
okButtonProps: { danger: true, className: 'btn-danger' },
});
onHide();
throw new Error('A valid color scheme is required');
}

View File

@@ -118,12 +118,10 @@ test('is valid', () => {
test('renders refresh interval modal', async () => {
render(setup(editModeOnProps));
expect(screen.queryByText('Refresh Interval')).not.toBeInTheDocument();
await openRefreshIntervalModal();
// Assert that modal exists by checking for the modal title
expect(screen.getByText('Refresh interval')).toBeInTheDocument();
expect(screen.getByText('Refresh interval')).toBeVisible();
});
test('renders refresh interval options', async () => {

Some files were not shown because too many files have changed in this diff Show More