Compare commits

...

34 Commits

Author SHA1 Message Date
Evan Rusackas
343a627c21 Parallelization 2024-12-19 09:40:53 -07:00
alexandrusoare
f362c6f508 refactor(Modal): Upgrade Modal component to Antd5 (#31420)
Co-authored-by: Diego Pucci <diegopucci.me@gmail.com>
2024-12-19 17:22:11 +01:00
Beto Dealmeida
7458c4bbd5 chore: rename apply_post_process (#31511) 2024-12-19 10:13:28 -05:00
Maxime Beauchemin
531f1b6aa4 chore(gha): bump ubuntu to latest fresh release (#31390) 2024-12-18 23:32:10 -08:00
Maxime Beauchemin
723ef591a5 fix: add various recent issues on master CI (#31561) 2024-12-18 21:03:01 -08:00
Maxime Beauchemin
e51b95ffa8 chore: enforce more ruff rules (#31447)
Co-authored-by: Elizabeth Thompson <eschutho@gmail.com>
2024-12-18 17:41:34 -08:00
Maxime Beauchemin
9da65d6bfd chore: deprecate pip-compile-multi in favor or uv (#31313) 2024-12-18 17:40:58 -08:00
Maxime Beauchemin
88cde7225e chore: deprecate fossa in favor of liccheck to validate python licenses (#31515) 2024-12-18 17:03:25 -08:00
Maxime Alay-Eddine
e788b858d0 feat(country-map): add map for France with all overseas territories (#31037)
Co-authored-by: Maxime ALAY-EDDINE <maxime@galeax.com>
2024-12-17 15:34:39 -07:00
Evan Rusackas
c0feb99f0e chore(code owners): Update CODEOWNERS file to remove a couple inactive contributors (#31501) 2024-12-17 15:26:12 -07:00
Samra Hanif
567380ffe1 docs: Update new user for Careem to user's list (#31496) 2024-12-17 11:26:35 -07:00
Maxime Beauchemin
a5e36c9aab fix: master docker builds fail because of multi-platform builds can't --load (#31493) 2024-12-17 09:41:13 -08:00
Kamil Gabryjelski
4c380b48e7 fix: Card component background color (#31483) 2024-12-17 14:54:46 +01:00
Maxime Beauchemin
3375e65486 feat(gha): various docker / docker-compose build improvements (#31386) 2024-12-16 17:50:15 -08:00
Michael Gerber
a1adb7f31c fix(sunburst): Use metric label from verbose map (#31480) 2024-12-16 10:44:33 -07:00
Kamil Gabryjelski
9b28a6eed6 fix: Tooltip covers the date selector in native filters (#31472) 2024-12-16 18:29:10 +01:00
Kamil Gabryjelski
8be69aa647 fix(explore): Styling issue in Search Metrics input field (#31473) 2024-12-16 18:28:38 +01:00
Evan Rusackas
48510d2ffb fix(filter options): full size list item targets (#31449) 2024-12-16 10:22:14 -07:00
Shane Zarechian
04077ce934 fix(api): typo api.py (#31458) 2024-12-15 13:37:57 -08:00
Maxime Beauchemin
092faa019b chore: remove numba and llvmlite deps as they are large and we don't use them (#31451) 2024-12-13 21:17:52 -08:00
Maxime Beauchemin
1f17b975d6 fix: docker refactor (#31385) 2024-12-13 16:17:44 -08:00
Michael Gerber
15ede02c25 chore(translations): German translation update (#30605) 2024-12-13 16:52:43 -07:00
Maxime Beauchemin
4bccf36375 chore: deprecate pylint in favor of ruff (#31262)
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
2024-12-13 12:53:14 -08:00
Vitor Avila
21e794a66f fix(database import): Gracefully handle error to get catalog schemas (#31437) 2024-12-13 12:31:10 -03:00
Geido
e1f98e246f fix(Dashboard): Sync color configuration via dedicated endpoint (#31374) 2024-12-13 15:58:02 +02:00
Ville Brofeldt
bf56a327f4 fix(tags): clean up bulk create api and schema (#31427) 2024-12-12 17:54:10 -08:00
Elizabeth Thompson
cd200f07a5 fix: prevent multiple pvm errors on migration (#31332) 2024-12-12 16:26:36 -08:00
Maxime Beauchemin
4ff9aac1fa feat(sqllab): giving the query history pane a facelift (#31316) 2024-12-12 16:30:50 -07:00
Daniel Vaz Gaspar
988da2c477 docs: CVEs fixed on 4.1.0 v2 (#31422) 2024-12-12 18:48:54 +00:00
Maxime Beauchemin
f510f42b96 fix: pkg_resources is getting deprecated (#31411) 2024-12-12 09:20:52 -08:00
Vitor Avila
43314dc8db fix(Pivot Table): Fix column width to respect currency config (#31414) 2024-12-12 10:44:07 -03:00
Luiz Otavio
423a0fefa5 feat: Adds helper functions for migrations (#31303) 2024-12-11 10:50:56 -03:00
Enzo Martellucci
fd57fce977 refactor: Migrate AdhocFilterEditPopoverSqlTabContent to TypeScript (#31268)
Co-authored-by: JUST.in DO IT <justin.park@airbnb.com>
2024-12-11 14:26:18 +02:00
Maxime Beauchemin
d8fbaa4cbe fix: don't include chromium on ephemeral envs (#31391) 2024-12-10 18:33:56 -08:00
534 changed files with 11931 additions and 5347 deletions

View File

@@ -72,6 +72,7 @@ github:
- cypress-matrix (3, chrome)
- cypress-matrix (4, chrome)
- cypress-matrix (5, chrome)
- dependency-review
- frontend-build
- pre-commit (current)
- pre-commit (next)

View File

@@ -42,6 +42,8 @@ docs/
install/
superset-frontend/cypress-base/
superset-frontend/coverage/
superset-frontend/.temp_cache/
superset/static/assets/
superset-websocket/dist/
venv
.venv

8
.github/CODEOWNERS vendored
View File

@@ -16,17 +16,17 @@
# Notify E2E test maintainers of changes
/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida
/superset-frontend/cypress-base/ @sadpandajoe @geido @eschutho @rusackas @betodealmeida
# Notify PMC members of changes to GitHub Actions
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
# Notify PMC members of changes to required GitHub Actions
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar
/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @kgabryje @dpgaspar
# Maps are a finnicky contribution process we care about
# Maps are a finicky contribution process we care about
**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas

View File

@@ -44,10 +44,13 @@ runs:
if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
pip install --upgrade pip setuptools wheel uv
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
uv pip install --system -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
uv pip install --system -r requirements/base.txt
fi
uv pip install --system -e .
fi
shell: bash

69
.github/actions/setup-docker/action.yml vendored Normal file
View File

@@ -0,0 +1,69 @@
name: "Setup Docker Environment"
description: "Reusable steps for setting up QEMU, Docker Buildx, DockerHub login, Supersetbot, and optionally Docker Compose"
inputs:
build:
description: "Whether image-building tooling (QEMU, Buildx) should be set up"
required: false
default: "false"
dockerhub-user:
description: "DockerHub username"
required: false
dockerhub-token:
description: "DockerHub token"
required: false
install-docker-compose:
description: "Flag to install Docker Compose"
required: false
default: "true"
login-to-dockerhub:
description: "Whether you want to log into dockerhub"
required: false
default: "true"
outputs: {}
runs:
using: "composite"
steps:
- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}
- name: Install Docker Compose
if: ${{ inputs.install-docker-compose == 'true' }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
# Download and save the Docker GPG key in the correct format
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
# Ensure the key file is readable
sudo chmod a+r /etc/apt/keyrings/docker.gpg
# Add the Docker repository using the correct key
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
# Update package lists and install Docker Compose plugin
sudo apt update
sudo apt install -y docker-compose-plugin
- name: Docker Version Info
shell: bash
run: docker info

View File

@@ -22,8 +22,7 @@ updates:
# - package-ecosystem: "pip"
# NOTE: as dependabot isn't compatible with our python
# dependency setup (pip-compile-multi), we'll be using
# NOTE: as dependabot isn't compatible with our usage of `uv pip compile` we're using
# `supersetbot` instead
- package-ecosystem: "npm"

View File

@@ -23,7 +23,7 @@ on:
jobs:
bump-python-package:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
actions: write
contents: write
@@ -45,8 +45,8 @@ jobs:
with:
python-version: "3.10"
- name: Install pip-compile-multi
run: pip install pip-compile-multi
- name: Install uv
run: pip install uv
- name: supersetbot bump-python -p "${{ github.event.inputs.package }}"
env:

View File

@@ -9,7 +9,7 @@ on:
jobs:
cancel-duplicate-runs:
name: Cancel duplicate workflow runs
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
actions: write
contents: read

44
.github/workflows/check-python-deps.yml vendored Normal file
View File

@@ -0,0 +1,44 @@
name: Check python dependencies
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
fetch-depth: 1
- name: Setup Python
if: steps.check.outputs.python
uses: ./.github/actions/setup-backend/
- name: Run uv
if: steps.check.outputs.python
run: ./scripts/uv-pip-compile.sh
- name: Check for uncommitted changes
run: |
if [[ -n "$(git diff)" ]]; then
echo "ERROR: The pinned dependencies are not up-to-date."
echo "Please run './scripts/uv-pip-compile.sh' and commit the changes."
exit 1
else
echo "Pinned dependencies are up-to-date."
fi

View File

@@ -19,7 +19,7 @@ concurrency:
jobs:
check_db_migration_conflict:
name: Check DB migration conflict
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write

View File

@@ -17,7 +17,7 @@ concurrency:
jobs:
analyze:
name: Analyze
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
actions: read
contents: read

View File

@@ -5,14 +5,26 @@
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: "Dependency Review"
on: [pull_request]
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
permissions:
contents: read
jobs:
dependency-review:
runs-on: ubuntu-22.04
if: github.event_name == 'pull_request'
runs-on: ubuntu-24.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
@@ -33,3 +45,24 @@ jobs:
# pkg:npm/node-forge@1.3.1
# selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache
allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1, pkg:npm/rgbcolor, pkg:npm/jszip@3.10.1
python-dependency-liccheck:
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
requirements-type: base
- name: "Set up liccheck"
run: |
uv pip install --system liccheck
- name: "Run liccheck"
run: |
# run the checks
liccheck -R output.txt
# Print the report
cat output.txt

View File

@@ -15,20 +15,20 @@ concurrency:
jobs:
setup_matrix:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
outputs:
matrix_config: ${{ steps.set_matrix.outputs.matrix_config }}
steps:
- id: set_matrix
run: |
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev", "lean"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize", "py311"]'; fi)
echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT
echo $GITHUB_OUTPUT
docker-build:
name: docker-build
needs: setup_matrix
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}}
@@ -50,21 +50,13 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
- name: Setup Docker Environment
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
uses: docker/setup-buildx-action@v3
- name: Try to login to DockerHub
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
continue-on-error: true
uses: docker/login-action@v3
uses: ./.github/actions/setup-docker
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
- name: Setup supersetbot
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
@@ -84,7 +76,30 @@ jobs:
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false" \
$PLATFORM_ARG
- name: Docker pull
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: docker pull apache/superset:GHA-${GITHUB_RUN_ID}
- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
run: |
IMAGE_ID=$(docker images --filter "label=sha=${{ github.sha }}" --format "{{.ID}}" | head -n 1)
echo "SHA: ${{ github.sha }}"
echo "IMAGE: $IMAGE_ID"
docker images $IMAGE_ID
docker history $IMAGE_ID
- name: docker-compose sanity check
if: (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker) && (matrix.build_preset == 'dev' || matrix.build_preset == 'lean')
shell: bash
run: |
export SUPERSET_BUILD_TARGET=${{ matrix.build_preset }}
docker compose build superset-init --build-arg DEV_MODE=false --build-arg INCLUDE_CHROMIUM=false
docker compose up superset-init --exit-code-from superset-init

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -23,7 +23,7 @@ jobs:
build:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
defaults:
run:
working-directory: superset-embedded-sdk

View File

@@ -13,7 +13,7 @@ concurrency:
jobs:
embedded-sdk-test:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
defaults:
run:
working-directory: superset-embedded-sdk

View File

@@ -6,7 +6,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -22,7 +22,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Cleanup ephemeral envs
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
pull-requests: write
steps:

View File

@@ -21,12 +21,15 @@ jobs:
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true
name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
pull-requests: write
outputs:
slash-command: ${{ steps.eval-body.outputs.result }}
feature-flags: ${{ steps.eval-feature-flags.outputs.result }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Debug
@@ -112,7 +115,7 @@ jobs:
needs: ephemeral-env-comment
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: ephemeral-docker-build
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Get Info from comment
uses: actions/github-script@v7
@@ -139,11 +142,13 @@ jobs:
ref: ${{ steps.get-sha.outputs.sha }}
persist-credentials: false
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
build: "true"
install-docker-compose: "false"
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
@@ -153,9 +158,12 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
supersetbot docker \
--push \
--load \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE"
--context-ref "$RELEASE" \
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
@@ -182,7 +190,7 @@ jobs:
needs: [ephemeral-env-comment, ephemeral-docker-build]
if: needs.ephemeral-env-comment.outputs.slash-command == 'up'
name: Spin up an ephemeral environment
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -24,7 +24,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Generate Report
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -11,7 +11,7 @@ on:
jobs:
validate-all-ghas:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4

View File

@@ -9,7 +9,7 @@ on:
jobs:
superbot-orglabel:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write

View File

@@ -7,7 +7,7 @@ jobs:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- uses: actions/labeler@v5
with:

View File

@@ -6,7 +6,7 @@ on:
jobs:
latest-release:
name: Add/update tag to new release
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: write

View File

@@ -12,7 +12,7 @@ concurrency:
jobs:
license_check:
name: License Check
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -11,7 +11,7 @@ concurrency:
jobs:
check-hold-label:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v7

View File

@@ -10,7 +10,7 @@ on:
jobs:
lint-check:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
pre-commit:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["current", "next", "previous"]

View File

@@ -21,7 +21,7 @@ jobs:
prefer_typescript:
if: github.ref == 'refs/heads/master' && github.event_name == 'pull_request'
name: Prefer TypeScript
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -25,7 +25,7 @@ jobs:
if: needs.config.outputs.has-secrets
name: Bump version and publish package(s)
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:

View File

@@ -6,7 +6,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -21,7 +21,7 @@ jobs:
cypress-applitools:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:

View File

@@ -12,7 +12,7 @@ env:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -27,7 +27,7 @@ jobs:
cron:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
node: [20]

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
test-load-examples:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -12,7 +12,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -28,7 +28,7 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: Build & Deploy
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -51,7 +51,7 @@ jobs:
https://www.plaidcloud.com/
build-deploy:
name: Build & Deploy
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
defaults:
run:
working-directory: docs

View File

@@ -28,6 +28,7 @@ concurrency:
jobs:
cypress-matrix:
# Somehow one test flakes on 24.04 for unknown reasons, this is the only GHA left on 22.04
runs-on: ubuntu-22.04
permissions:
contents: read

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
frontend-build:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -13,7 +13,7 @@ concurrency:
jobs:
lint-test:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -20,7 +20,7 @@ on:
jobs:
release:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: write
pull-requests: write

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
test-mysql:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -74,7 +74,7 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
test-postgres:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["current", "next", "previous"]
@@ -136,7 +136,7 @@ jobs:
verbose: true
test-sqlite:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -16,7 +16,7 @@ concurrency:
jobs:
test-postgres-presto:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config
@@ -84,7 +84,7 @@ jobs:
verbose: true
test-postgres-hive:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
env:
PYTHONPATH: ${{ github.workspace }}
SUPERSET_CONFIG: tests.integration_tests.superset_test_config

View File

@@ -16,7 +16,7 @@ concurrency:
jobs:
unit-tests:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["current", "next"]

View File

@@ -15,7 +15,7 @@ concurrency:
jobs:
frontend-check-translations:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
@@ -46,7 +46,7 @@ jobs:
npm run build-translation
babel-extract:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -18,7 +18,7 @@ concurrency:
jobs:
app-checks:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4

View File

@@ -15,7 +15,7 @@ on:
jobs:
supersetbot:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
if: >
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot'))

View File

@@ -23,7 +23,7 @@ on:
- 'false'
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -39,23 +39,26 @@ jobs:
needs: config
if: needs.config.outputs.has-secrets
name: docker-release
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
fail-fast: false
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Docker Environment
uses: ./.github/actions/setup-docker
with:
dockerhub-user: ${{ secrets.DOCKERHUB_USER }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
install-docker-compose: "false"
build: "true"
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
@@ -64,13 +67,6 @@ jobs:
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Try to login to DockerHub
continue-on-error: true
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Execute custom Node.js script
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@@ -91,6 +87,7 @@ jobs:
fi
supersetbot docker \
--push \
--preset ${{ matrix.build_preset }} \
--context "$EVENT" \
--context-ref "$RELEASE" $FORCE_LATEST \
@@ -103,7 +100,7 @@ jobs:
update-prs-with-release-info:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
contents: read
pull-requests: write

View File

@@ -8,7 +8,7 @@ on:
jobs:
config:
runs-on: "ubuntu-22.04"
runs-on: ubuntu-24.04
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
@@ -23,7 +23,7 @@ jobs:
process-and-upload:
needs: config
if: needs.config.outputs.has-secrets
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
name: Generate Reports
steps:
- name: Checkout Repository

View File

@@ -6,7 +6,7 @@ on:
jobs:
welcome:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
permissions:
pull-requests: write

View File

@@ -38,10 +38,6 @@ repos:
types-paramiko,
types-Markdown,
]
- repo: https://github.com/peterdemin/pip-compile-multi
rev: v2.6.4
hooks:
- id: pip-compile-multi-verify
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
@@ -81,17 +77,3 @@ repos:
- id: ruff
args: [ --fix ]
- id: ruff-format
- repo: local
hooks:
- id: pylint
name: pylint
entry: pylint
language: system
types: [python]
exclude: ^(tests/|superset/migrations/|scripts/|RELEASING/|docker/)
args:
[
"-rn", # Only display messages
"-sn", # Don't display the score
"--rcfile=.pylintrc",
]

380
.pylintrc
View File

@@ -1,380 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,migrations
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=superset.extensions.pylint
# Use multiple processes to speed up Pylint.
jobs=2
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=pyarrow
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
useless-suppression,
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=
cyclic-import, # re-enable once this no longer raises false positives
missing-docstring,
duplicate-code,
line-too-long,
unspecified-encoding,
too-many-instance-attributes # re-enable once this no longer raises false positives
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x,y
# Bad variable names which should always be refused, separated by a comma
bad-names=bar,baz,db,fd,foo,sesh,session,tata,toto,tutu
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=
abc.abstractproperty,
sqlalchemy.ext.hybrid.hybrid_property
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{1,30}$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct constant names
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=10
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=5
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=numpy,pandas,alembic.op,sqlalchemy,alembic.context,flask_appbuilder.security.sqla.PermissionView.role,flask_appbuilder.Model.metadata,flask_appbuilder.Base.metadata
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=contextlib.closing,optparse.Values,thread._local,_thread._local
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=10
# Maximum number of branch for function / method body
max-branches=15
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=8
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=builtins.Exception

View File

@@ -22,26 +22,29 @@ ARG PY_VER=3.10-slim-bookworm
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
######################################################################
# superset-node used for building frontend assets
######################################################################
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}
COPY docker/ /app/docker/
# Arguments for build configuration
ARG NPM_BUILD_CMD="build"
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ARG DEV_MODE="false" # Skip frontend build in dev mode
ARG INCLUDE_CHROMIUM="true" # Include headless Chromium for alerts & reports
ARG INCLUDE_FIREFOX="false" # Include headless Firefox if enabled
# Install system dependencies required for node-gyp
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/apt-install.sh build-essential python3 zstd
RUN /app/docker/apt-install.sh build-essential python3 zstd
# Define environment variables for frontend build
ENV BUILD_CMD=${NPM_BUILD_CMD} \
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
# Run the frontend memory monitoring script
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/frontend-mem-nag.sh
RUN /app/docker/frontend-mem-nag.sh
WORKDIR /app/superset-frontend
@@ -52,6 +55,8 @@ RUN mkdir -p /app/superset/static/assets \
# Mount package files and install dependencies if not in dev mode
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
--mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
--mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/root/.npm \
if [ "$DEV_MODE" = "false" ]; then \
npm ci; \
else \
@@ -61,41 +66,36 @@ RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.j
# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
# Build the frontend if not in dev mode
RUN --mount=type=cache,target=/app/superset-frontend/.temp_cache \
--mount=type=cache,target=/root/.npm \
if [ "$DEV_MODE" = "false" ]; then \
echo "Running 'npm run ${BUILD_CMD}'"; \
npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi;
# Copy translation files
COPY superset/translations /app/superset/translations
# Build the frontend if not in dev mode
RUN if [ "$DEV_MODE" = "false" ]; then \
BUILD_TRANSLATIONS=$BUILD_TRANSLATIONS npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi
# Compile .json files from .po translations (if required) and clean up .po files
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
else \
echo "Skipping translations as requested by build flag"; \
fi \
# removing translations files regardless
&& rm -rf /app/superset/translations/*/LC_MESSAGES/*.po \
/app/superset/translations/messages.pot
fi; \
rm -rf /app/superset/translations/*/*/*.po; \
rm -rf /app/superset/translations/*/*/*.mo;
# Transition to Python base image
######################################################################
# Base python layer
######################################################################
FROM python:${PY_VER} AS python-base
RUN pip install --no-cache-dir --upgrade setuptools pip uv
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS}
ARG DEV_MODE="false" # Skip frontend build in dev mode
ENV DEV_MODE=${DEV_MODE}
######################################################################
# Final lean image...
######################################################################
FROM python-base AS lean
# Build argument for including translations
ARG BUILD_TRANSLATIONS="false"
WORKDIR /app
ENV LANG=C.UTF-8 \
LC_ALL=C.UTF-8 \
SUPERSET_ENV=production \
@@ -104,126 +104,138 @@ ENV LANG=C.UTF-8 \
SUPERSET_HOME="/app/superset_home" \
SUPERSET_PORT=8088
RUN useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset
# Some bash scripts needed throughout the layers
COPY --chmod=755 docker/*.sh /app/docker/
RUN pip install --no-cache-dir --upgrade uv
# Using uv as it's faster/simpler than pip
RUN uv venv /app/.venv
ENV PATH="/app/.venv/bin:${PATH}"
# Install Playwright and optionally setup headless browsers
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
RUN --mount=type=cache,target=/root/.cache/uv\
if [ "$INCLUDE_CHROMIUM" = "true" ] || [ "$INCLUDE_FIREFOX" = "true" ]; then \
uv pip install playwright && \
playwright install-deps && \
if [ "$INCLUDE_CHROMIUM" = "true" ]; then playwright install chromium; fi && \
if [ "$INCLUDE_FIREFOX" = "true" ]; then playwright install firefox; fi; \
else \
echo "Skipping browser installation"; \
fi
######################################################################
# Python translation compiler layer
######################################################################
FROM python-base AS python-translation-compiler
# Install Python dependencies using docker/pip-install.sh
COPY requirements/translations.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/translations.txt
COPY superset/translations/ /app/translations_mo/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
pybabel compile -d /app/translations_mo | true; \
fi; \
rm -f /app/translations_mo/*/*/*.po; \
rm -f /app/translations_mo/*/*/*.json;
######################################################################
# Python APP common layer
######################################################################
FROM python-base AS python-common
# Copy the entrypoints, make them executable in userspace
COPY --chmod=755 docker/entrypoints /app/docker/entrypoints
WORKDIR /app
# Set up necessary directories and user
RUN --mount=type=bind,source=./docker,target=/docker \
mkdir -p ${PYTHONPATH} \
RUN mkdir -p \
${SUPERSET_HOME} \
${PYTHONPATH} \
superset/static \
requirements \
superset-frontend \
apache_superset.egg-info \
requirements \
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& /docker/apt-install.sh \
curl \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libpq-dev \
libecpg-dev \
libldap2-dev \
&& touch superset/static/version_info.json \
&& chown -R superset:superset ./* \
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
&& touch superset/static/version_info.json
# Copy required files for Python build
COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
COPY --chown=superset:superset requirements/base.txt requirements/
COPY --chown=superset:superset scripts/check-env.py scripts/
COPY pyproject.toml setup.py MANIFEST.in README.md ./
COPY superset-frontend/package.json superset-frontend/
COPY scripts/check-env.py scripts/
# keeping for backward compatibility
COPY --chmod=755 ./docker/entrypoints/run-server.sh /usr/bin/
# Some debian libs
RUN /app/docker/apt-install.sh \
curl \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libpq-dev \
libecpg-dev \
libldap2-dev
# Copy compiled things from previous stages
COPY --from=superset-node /app/superset/static/assets superset/static/assets
# TODO, when the next version comes out, use --exclude superset/translations
COPY superset superset
# TODO in the meantime, remove the .po files
RUN rm superset/translations/*/*/*.po
# Merging translations from backend and frontend stages
COPY --from=superset-node /app/superset/translations superset/translations
COPY --from=python-translation-compiler /app/translations_mo superset/translations
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
CMD ["/app/docker/entrypoints/run-server.sh"]
EXPOSE ${SUPERSET_PORT}
######################################################################
# Final lean image...
######################################################################
FROM python-common AS lean
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=bind,source=./docker,target=/docker \
--mount=type=cache,target=/root/.cache/pip \
/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
COPY requirements/base.txt requirements/
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install .
# Copy the compiled frontend assets from the node image
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
RUN python -m compileall /app/superset
# Copy the main Superset source code
COPY --chown=superset:superset superset superset
# Install Superset itself using docker/pip-install.sh
RUN --mount=type=bind,source=./docker,target=/docker \
--mount=type=cache,target=/root/.cache/pip \
/docker/pip-install.sh -e .
# Copy .json translations from the node image
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations
# Compile backend translations and clean up
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations; \
fi \
&& rm -rf superset/translations/messages.pot \
superset/translations/*/LC_MESSAGES/*.po
# Add server run script
COPY --chmod=755 ./docker/run-server.sh /usr/bin/
# Set user and healthcheck
USER superset
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
# Expose port and set CMD
EXPOSE ${SUPERSET_PORT}
CMD ["/usr/bin/run-server.sh"]
######################################################################
# Dev image...
######################################################################
FROM lean AS dev
FROM python-common AS dev
USER root
# Install dev dependencies
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/apt-install.sh \
libnss3 \
libdbus-glib-1-2 \
libgtk-3-0 \
libx11-xcb1 \
libasound2 \
libxtst6 \
git \
pkg-config
# Install Playwright and its dependencies
RUN --mount=type=cache,target=/root/.cache/pip \
uv pip install --system playwright \
&& playwright install-deps
# Optionally install Chromium
RUN if [ "$INCLUDE_CHROMIUM" = "true" ]; then \
playwright install chromium; \
else \
echo "Skipping Chromium installation in dev mode"; \
fi
# Install GeckoDriver WebDriver and Firefox (if required)
ARG GECKODRIVER_VERSION=v0.34.0
ARG FIREFOX_VERSION=125.0.3
RUN --mount=type=bind,source=./docker,target=/docker \
if [ "$INCLUDE_FIREFOX" = "true" ]; then \
/docker/apt-install.sh wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* /var/cache/apt/archives/*; \
else \
echo "Skipping Firefox installation in dev mode"; \
fi
# Install MySQL client dependencies
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/apt-install.sh default-libmysqlclient-dev
# Debian libs needed for dev
RUN /app/docker/apt-install.sh \
git \
pkg-config \
default-libmysqlclient-dev
# Copy development requirements and install them
COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=bind,source=./docker,target=/docker \
--mount=type=cache,target=/root/.cache/pip \
/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
COPY requirements/*.txt requirements/
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=cache,target=/root/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
# Install the superset package
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install .
RUN python -m compileall /app/superset
USER superset
@@ -232,6 +244,4 @@ USER superset
######################################################################
FROM lean AS ci
COPY --chown=superset:superset --chmod=755 ./docker/*.sh /app/docker/
CMD ["/app/docker/docker-ci.sh"]
CMD ["/app/docker/entrypoints/docker-ci.sh"]

View File

@@ -87,9 +87,6 @@ format: py-format js-format
py-format: pre-commit
pre-commit run black --all-files
py-lint: pre-commit
pylint -j 0 superset
js-format:
cd superset-frontend; npm run prettier

View File

@@ -272,14 +272,14 @@ class GitLogs:
@staticmethod
def _git_get_current_head() -> str:
output = os.popen("git status | head -1").read()
output = os.popen("git status | head -1").read() # noqa: S605, S607
match = re.match("(?:HEAD detached at|On branch) (.*)", output)
if not match:
return ""
return match.group(1)
def _git_checkout(self, git_ref: str) -> None:
os.popen(f"git checkout {git_ref}").read()
os.popen(f"git checkout {git_ref}").read() # noqa: S605
current_head = self._git_get_current_head()
if current_head != git_ref:
print(f"Could not checkout {git_ref}")
@@ -290,7 +290,7 @@ class GitLogs:
current_git_ref = self._git_get_current_head()
self._git_checkout(self._git_ref)
output = (
os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"')
os.popen('git --no-pager log --pretty=format:"%h|%an|%ae|%ad|%s|"') # noqa: S605, S607
.read()
.split("\n")
)

View File

@@ -31,7 +31,7 @@ except ModuleNotFoundError:
RECEIVER_EMAIL = "dev@superset.apache.org"
PROJECT_NAME = "Superset"
PROJECT_MODULE = "superset"
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application."
PROJECT_DESCRIPTION = "Apache Superset is a modern, enterprise-ready business intelligence web application." # noqa: E501
def string_comma_to_list(message: str) -> list[str]:

View File

@@ -23,12 +23,12 @@ from typing import Optional
import requests
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512`
# Part 1: Verify SHA512 hash - this is the same as running `shasum -a 512 {release}` and comparing it against `{release}.sha512` # noqa: E501
def get_sha512_hash(filename: str) -> str:
"""Run the shasum command on the file and return the SHA512 hash."""
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE)
result = subprocess.run(["shasum", "-a", "512", filename], stdout=subprocess.PIPE) # noqa: S603, S607
sha512_hash = result.stdout.decode().split()[0]
return sha512_hash
@@ -43,7 +43,7 @@ def read_sha512_file(filename: str) -> str:
def verify_sha512(filename: str) -> str:
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file."""
"""Verify if the SHA512 hash of the file matches with the hash in the .sha512 file.""" # noqa: E501
sha512_hash = get_sha512_hash(filename)
sha512_file_content = read_sha512_file(filename)
@@ -53,14 +53,15 @@ def verify_sha512(filename: str) -> str:
return "SHA failed"
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file
# Part 2: Verify RSA key - this is the same as running `gpg --verify {release}.asc {release}` and comparing the RSA key and email address against the KEYS file # noqa: E501
def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
"""Run the GPG verify command and extract RSA key and email address."""
asc_filename = filename + ".asc"
result = subprocess.run(
["gpg", "--verify", asc_filename, filename], capture_output=True
result = subprocess.run( # noqa: S603
["gpg", "--verify", asc_filename, filename], # noqa: S607
capture_output=True, # noqa: S607
)
output = result.stderr.decode()
@@ -90,7 +91,7 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]:
def verify_key(key: str, email: Optional[str]) -> str:
"""Fetch the KEYS file and verify if the RSA/EDDSA key and email match."""
url = "https://downloads.apache.org/superset/KEYS"
response = requests.get(url)
response = requests.get(url) # noqa: S113
if response.status_code == 200:
if key not in response.text:
return "RSA/EDDSA key not found on KEYS page"

View File

@@ -79,7 +79,7 @@ Join our growing community!
- [Astronomer](https://www.astronomer.io) [@ryw]
- [Avesta Technologies](https://avestatechnologies.com/) [@TheRum]
- [Caizin](https://caizin.com/) [@tejaskatariya]
- [Careem](https://www.careem.com/) [@SamraHanifCareem]
- [Careem](https://www.careem.com/) [@samraHanif0340]
- [Cloudsmith](https://cloudsmith.io) [@alancarson]
- [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch]
- [Deepomatic](https://deepomatic.com/) [@Zanoellia]

View File

@@ -28,7 +28,9 @@ assists people when migrating to a new version.
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat/docker-build-time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
- [31262](https://github.com/apache/superset/pull/31262) NOTE: deprecated `pylint` in favor of `ruff` as our only python linter. This only affects development workflows (not the release itself). It should cover the most important rules and be much faster, but some linting rules that were enforced before may not be enforced in exactly the same way as before.
- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context.
- [31385](https://github.com/apache/superset/pull/31385) Significant docker refactor, reducing access levels for the `superset` user, streamlining layer building, ...
### Potential Downtime

View File

@@ -35,11 +35,14 @@ x-superset-volumes: &superset-volumes
x-common-build: &common-build
context: .
target: dev
target: ${SUPERSET_BUILD_TARGET:-dev} # can use `dev` (default) or `lean`
cache_from:
- apache/superset-cache:3.10-slim-bookworm
args:
DEV_MODE: "true"
INCLUDE_CHROMIUM: ${INCLUDE_CHROMIUM:-false}
INCLUDE_FIREFOX: ${INCLUDE_FIREFOX:-false}
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
services:
nginx:
@@ -157,6 +160,7 @@ services:
# and build it on startup while firing docker-frontend.sh in dev mode, where
# it'll mount and watch local files and rebuild as you update them
DEV_MODE: "true"
BUILD_TRANSLATIONS: ${BUILD_TRANSLATIONS:-false}
environment:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!

View File

@@ -17,6 +17,7 @@
COMPOSE_PROJECT_NAME=superset
DEV_MODE=true
# database configurations (do not modify)
DATABASE_DB=superset

View File

@@ -18,6 +18,11 @@
set -eo pipefail
# Make python interactive
if [ "$DEV_MODE" == "true" ]; then
echo "Reinstalling the app in editable mode"
uv pip install -e .
fi
REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
# If Cypress run overwrite the password for admin and export env variables
if [ "$CYPRESS_CONFIG" == "true" ]; then
@@ -25,12 +30,16 @@ if [ "$CYPRESS_CONFIG" == "true" ]; then
export SUPERSET_TESTENV=true
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@db:5432/superset
fi
if [[ "$DATABASE_DIALECT" == postgres* ]] ; then
echo "Installing postgres requirements"
uv pip install -e .[postgres]
fi
#
# Make sure we have dev requirements installed
#
if [ -f "${REQUIREMENTS_LOCAL}" ]; then
echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
else
echo "Skipping local overrides"
fi

View File

@@ -35,7 +35,7 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Running `npm install`"
npm install
echo "Running frontend"
echo "Start webpack dev server"
npm run dev
else

View File

@@ -47,10 +47,10 @@ fi
# Choose whether to use pip cache
if $USE_CACHE; then
echo "Using pip cache..."
uv pip install --system "${ARGS[@]}"
uv pip install "${ARGS[@]}"
else
echo "Disabling pip cache..."
uv pip install --system --no-cache-dir "${ARGS[@]}"
uv pip install --no-cache-dir "${ARGS[@]}"
fi
# Remove build-essential if it was installed

View File

@@ -99,7 +99,7 @@ CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/
WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl should be http://superset_app:8088/ # noqa: E501
# The base URL for the email report hyperlinks.
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True

View File

@@ -63,6 +63,7 @@
"Fiji",
"Finland",
"France",
"France (with overseas)",
"France (regions)",
"French Polynesia",
"Gabon",

View File

@@ -72,6 +72,19 @@ documentation.
configured to be secure.
:::
### Supported environment variables
Affecting the Docker build process:
- **SUPERSET_BUILD_TARGET (default=dev):** which --target to build, either `lean` or `dev` are commonly used
- **INCLUDE_FIREFOX (default=false):** whether to include the Firefox headless browser in the build
- **INCLUDE_CHROMIUM (default=false):** whether to include the Chromium headless browser in the build
- **BUILD_TRANSLATIONS (default=false):** whether to compile the translations from the .po files available
For more env vars that affect your configuration, see this
[superset_config.py](https://github.com/apache/superset/blob/master/docker/pythonpath_dev/superset_config.py)
used in the `docker compose` context to assign env vars to the superset configuration.
### Nuking the postgres database
At times, it's possible to end up with your development database in a bad state, it's
@@ -242,19 +255,19 @@ If you add a new requirement or update an existing requirement (per the `install
$ python3 -m venv venv
$ source venv/bin/activate
$ python3 -m pip install -r requirements/development.txt
$ pip-compile-multi --no-upgrade
$ ./scripts/uv-pip-compile.sh
```
When upgrading the version number of a single package, you should run `pip-compile-multi` with the `-P` flag:
When upgrading the version number of a single package, you should run `./scripts/uv-pip-compile.sh` with the `-P` flag:
```bash
$ pip-compile-multi -P my-package
$ ./scripts/uv-pip-compile.sh -P some-package-to-upgrade
```
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run pip-compile-multi` without any flags:
To bring all dependencies up to date as per the restrictions defined in `setup.py` and `requirements/*.in`, run `./scripts/uv-pip-compile.sh --upgrade`
```bash
$ pip-compile-multi
$ ./scripts/uv-pip-compile.sh --upgrade
```
This should be done periodically, but it is recommended to do thorough manual testing of the application to ensure no breaking changes have been introduced that aren't caught by the unit and integration tests.
@@ -478,37 +491,7 @@ A series of checks will now run when you make a git commit.
## Linting
### Python
We use [Pylint](https://pylint.org/) for linting which can be invoked via:
```bash
pylint
```
In terms of best practices, please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or at the top level of a file, albeit with a few exceptions. Disabling should occur inline, as it prevents masking issues and provides context as to why said message is disabled.
Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html)
### TypeScript
```bash
cd superset-frontend
npm ci
# run eslint checks
npm run eslint -- .
# run tsc (typescript) checks
npm run type
```
If using the eslint extension with vscode, put the following in your workspace `settings.json` file:
```json
"eslint.workingDirectories": [
"superset-frontend"
]
```
See [how tos](/docs/contributing/howtos#linting)
## GitHub Actions and `act`

View File

@@ -571,13 +571,9 @@ pybabel compile -d superset/translations
### Python
We use [Pylint](https://pylint.org/) and [ruff](https://github.com/astral-sh/ruff)
for linting which can be invoked via:
We use [ruff](https://github.com/astral-sh/ruff) for linting which can be invoked via:
```
# Run pylint
pylint superset/
# auto-reformat using ruff
ruff format
@@ -588,11 +584,8 @@ ruff check
ruff check --fix
```
In terms of best practices please avoid blanket disabling of Pylint messages globally
(via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions.
Disabling should occur inline as it prevents masking issues and provides context as to why
said message is disabled.
Ruff configuration is located in our
[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml) file.
All this is configured to run in pre-commit hooks, which we encourage you to set up
with `pre-commit install`.

View File

@@ -9,6 +9,7 @@ sidebar_position: 2
| CVE-2024-53947 | Improper SQL authorisation, parse for specific postgres functions | < 4.1.0 |
| CVE-2024-53948 | Error verbosity exposes metadata in analytics databases | < 4.1.0 |
| CVE-2024-53949 | Lower privilege users are able to create Role when FAB_ADD_SECURITY_API is enabled | < 4.1.0 |
| CVE-2024-55633 | SQLLab Improper readonly query validation allows unauthorized write access | < 4.1.0 |
#### Version 4.0.2

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,7 @@ name = "apache-superset"
description = "A modern, enterprise-ready business intelligence web application"
readme = "README.md"
dynamic = ["version", "scripts", "entry-points"]
requires-python = "~=3.9"
requires-python = ">=3.9"
license = { file="LICENSE.txt" }
authors = [
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
@@ -70,7 +70,11 @@ dependencies = [
"nh3>=0.2.11, <0.3",
"numpy==1.23.5",
"packaging",
"pandas[excel,performance]>=2.0.3, <2.1",
# --------------------------
# pandas and related (wanting pandas[performance] without numba as it's 100+MB and not needed)
"pandas[excel]>=2.0.3, <2.1",
"bottleneck",
# --------------------------
"parsedatetime",
"paramiko>=3.4.0",
"pgsanity",
@@ -187,13 +191,11 @@ development = [
"grpcio>=1.55.3",
"openapi-spec-validator",
"parameterized",
"pip-compile-multi",
"pre-commit",
"progress>=1.5,<2",
"psutil",
"pyfakefs",
"pyinstrument>=4.0.2,<5",
"pylint",
"pytest<8.0.0", # hairy issue with pytest >=8 where current_app proxies are not set in time
"pytest-cov",
"pytest-mock",
@@ -213,7 +215,7 @@ combine_as_imports = true
include_trailing_comma = true
line_length = 88
known_first_party = "superset"
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, pkg_resources, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, sqlparse, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, sqlparse, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
multi_line_output = 3
order_by_type = false
@@ -274,8 +276,8 @@ exclude = [
line-length = 88
indent-width = 4
# Assume Python 3.8
target-version = "py310"
# Assume Python 3.9
target-version = "py39"
[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
@@ -286,11 +288,27 @@ select = [
"E4",
"E7",
"E9",
"F",
"PT009",
"TRY201",
"B",
"C",
"E",
"F",
"F",
"I",
"N",
"PT",
"Q",
"S",
"T",
"W",
]
ignore = [
"S101",
"PT006",
"T201",
"N999",
]
ignore = []
extend-select = ["I"]
@@ -343,3 +361,36 @@ docstring-code-format = false
# This only has an effect when the `docstring-code-format` setting is
# enabled.
docstring-code-line-length = "dynamic"
[tool.liccheck]
requirement_txt_file = "requirements/base.txt"
authorized_licenses = [
"academic free license (afl)",
"apache license 2.0",
"apache software",
"apache software, bsd",
"bsd",
"isc license (iscl)",
"isc license",
"mit",
"mozilla public license 2.0 (mpl 2.0)",
"osi approved",
"osi approved",
"python software foundation",
"the unlicense (unlicense)",
"the unlicense",
]
[tool.liccheck.authorized_packages]
# --------------------------------------------------------------
# These are ok, checked manually
# Seems ok, might need legal review
# https://github.com/urschrei/pypolyline/blob/master/LICENSE.md
polyline = "2"
# Apache 2.0 https://github.com/hkwi/python-geohash
python-geohash = "0"
# --------------------------------------------------------------
# TODO REMOVE THESE DEPS FROM CODEBASE
func-timeout = "4" # AGPL
paramiko = "3" # GPL
pyxlsb = "1" # GPL

View File

@@ -16,7 +16,6 @@
# specific language governing permissions and limitations
# under the License.
#
-e file:.
urllib3>=1.26.18
werkzeug>=3.0.1
numexpr>=2.9.0

View File

@@ -1,17 +1,10 @@
# SHA1:04f7e0860829f18926ea238354e6d4a6ab823d50
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-e file:.
# via -r requirements/base.in
# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt
alembic==1.14.0
# via flask-migrate
amqp==5.3.1
# via kombu
apispec[yaml]==6.3.0
apispec==6.3.0
# via flask-appbuilder
apsw==3.46.0.0
# via shillelagh
@@ -27,7 +20,7 @@ attrs==24.2.0
babel==2.16.0
# via flask-babel
backoff==2.2.1
# via apache-superset
# via apache-superset (pyproject.toml)
bcrypt==4.2.1
# via paramiko
billiard==4.2.1
@@ -35,7 +28,7 @@ billiard==4.2.1
blinker==1.9.0
# via flask
bottleneck==1.4.2
# via pandas
# via apache-superset (pyproject.toml)
brotli==1.1.0
# via flask-compress
cachelib==0.9.0
@@ -47,7 +40,7 @@ cachetools==5.5.0
cattrs==24.1.2
# via requests-cache
celery==5.4.0
# via apache-superset
# via apache-superset (pyproject.toml)
certifi==2024.8.30
# via requests
cffi==1.17.1
@@ -58,7 +51,7 @@ charset-normalizer==3.4.0
# via requests
click==8.1.7
# via
# apache-superset
# apache-superset (pyproject.toml)
# celery
# click-didyoumean
# click-option-group
@@ -69,22 +62,22 @@ click==8.1.7
click-didyoumean==0.3.1
# via celery
click-option-group==0.5.6
# via apache-superset
# via apache-superset (pyproject.toml)
click-plugins==1.1.1
# via celery
click-repl==0.3.0
# via celery
colorama==0.4.6
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
cron-descriptor==1.4.5
# via apache-superset
# via apache-superset (pyproject.toml)
croniter==5.0.1
# via apache-superset
# via apache-superset (pyproject.toml)
cryptography==43.0.3
# via
# apache-superset
# apache-superset (pyproject.toml)
# paramiko
# pyopenssl
defusedxml==0.7.1
@@ -92,7 +85,7 @@ defusedxml==0.7.1
deprecated==1.2.15
# via limits
deprecation==2.1.0
# via apache-superset
# via apache-superset (pyproject.toml)
dnspython==2.7.0
# via email-validator
email-validator==2.2.0
@@ -103,7 +96,7 @@ exceptiongroup==1.2.2
# via cattrs
flask==2.3.3
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
# flask-babel
# flask-caching
@@ -116,66 +109,66 @@ flask==2.3.3
# flask-sqlalchemy
# flask-wtf
flask-appbuilder==4.5.2
# via apache-superset
# via apache-superset (pyproject.toml)
flask-babel==2.0.0
# via flask-appbuilder
flask-caching==2.3.0
# via apache-superset
# via apache-superset (pyproject.toml)
flask-compress==1.17
# via apache-superset
# via apache-superset (pyproject.toml)
flask-jwt-extended==4.7.1
# via flask-appbuilder
flask-limiter==3.8.0
# via flask-appbuilder
flask-login==0.6.3
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
flask-migrate==3.1.0
# via apache-superset
# via apache-superset (pyproject.toml)
flask-session==0.8.0
# via apache-superset
# via apache-superset (pyproject.toml)
flask-sqlalchemy==2.5.1
# via
# flask-appbuilder
# flask-migrate
flask-talisman==1.1.0
# via apache-superset
# via apache-superset (pyproject.toml)
flask-wtf==1.2.2
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
func-timeout==4.3.5
# via apache-superset
# via apache-superset (pyproject.toml)
geographiclib==2.0
# via geopy
geopy==2.4.1
# via apache-superset
# via apache-superset (pyproject.toml)
google-auth==2.36.0
# via shillelagh
greenlet==3.0.3
# via
# -r requirements/base.in
# apache-superset
# apache-superset (pyproject.toml)
# shillelagh
gunicorn==23.0.0
# via apache-superset
# via apache-superset (pyproject.toml)
hashids==1.3.1
# via apache-superset
# via apache-superset (pyproject.toml)
holidays==0.25
# via apache-superset
# via apache-superset (pyproject.toml)
humanize==4.11.0
# via apache-superset
# via apache-superset (pyproject.toml)
idna==3.10
# via
# email-validator
# requests
importlib-metadata==8.5.0
# via apache-superset
# via apache-superset (pyproject.toml)
importlib-resources==6.4.5
# via limits
isodate==0.7.2
# via apache-superset
# via apache-superset (pyproject.toml)
itsdangerous==2.2.0
# via
# flask
@@ -185,7 +178,7 @@ jinja2==3.1.4
# flask
# flask-babel
jsonpath-ng==1.7.0
# via apache-superset
# via apache-superset (pyproject.toml)
jsonschema==4.17.3
# via flask-appbuilder
kombu==5.4.2
@@ -194,14 +187,12 @@ korean-lunar-calendar==0.3.1
# via holidays
limits==3.13.0
# via flask-limiter
llvmlite==0.43.0
# via numba
mako==1.3.6
# via
# apache-superset (pyproject.toml)
# alembic
# apache-superset
markdown==3.7
# via apache-superset
# via apache-superset (pyproject.toml)
markdown-it-py==3.0.0
# via rich
markupsafe==3.0.2
@@ -219,22 +210,17 @@ marshmallow-sqlalchemy==0.28.2
mdurl==0.1.2
# via markdown-it-py
msgpack==1.0.8
# via apache-superset
# via apache-superset (pyproject.toml)
msgspec==0.18.6
# via flask-session
nh3==0.2.19
# via apache-superset
numba==0.60.0
# via pandas
# via apache-superset (pyproject.toml)
numexpr==2.10.2
# via
# -r requirements/base.in
# pandas
# via -r requirements/base.in
numpy==1.23.5
# via
# apache-superset
# apache-superset (pyproject.toml)
# bottleneck
# numba
# numexpr
# pandas
# pyarrow
@@ -246,7 +232,7 @@ ordered-set==4.1.0
# via flask-limiter
packaging==24.2
# via
# apache-superset
# apache-superset (pyproject.toml)
# apispec
# deprecation
# gunicorn
@@ -254,28 +240,28 @@ packaging==24.2
# marshmallow
# marshmallow-sqlalchemy
# shillelagh
pandas[excel,performance]==2.0.3
# via apache-superset
pandas==2.0.3
# via apache-superset (pyproject.toml)
paramiko==3.5.0
# via
# apache-superset
# apache-superset (pyproject.toml)
# sshtunnel
parsedatetime==2.6
# via apache-superset
# via apache-superset (pyproject.toml)
pgsanity==0.2.9
# via apache-superset
# via apache-superset (pyproject.toml)
platformdirs==3.8.1
# via requests-cache
ply==3.11
# via jsonpath-ng
polyline==2.0.2
# via apache-superset
# via apache-superset (pyproject.toml)
prison==0.2.1
# via flask-appbuilder
prompt-toolkit==3.0.48
# via click-repl
pyarrow==14.0.2
# via apache-superset
# via apache-superset (pyproject.toml)
pyasn1==0.6.1
# via
# pyasn1-modules
@@ -288,7 +274,7 @@ pygments==2.18.0
# via rich
pyjwt==2.10.1
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
# flask-jwt-extended
pynacl==1.5.0
@@ -296,12 +282,12 @@ pynacl==1.5.0
pyopenssl==24.2.1
# via shillelagh
pyparsing==3.2.0
# via apache-superset
# via apache-superset (pyproject.toml)
pyrsistent==0.20.0
# via jsonschema
python-dateutil==2.9.0.post0
# via
# apache-superset
# apache-superset (pyproject.toml)
# celery
# croniter
# flask-appbuilder
@@ -309,9 +295,9 @@ python-dateutil==2.9.0.post0
# pandas
# shillelagh
python-dotenv==1.0.1
# via apache-superset
# via apache-superset (pyproject.toml)
python-geohash==0.8.5
# via apache-superset
# via apache-superset (pyproject.toml)
pytz==2024.2
# via
# croniter
@@ -321,10 +307,10 @@ pyxlsb==1.0.10
# via pandas
pyyaml==6.0.2
# via
# apache-superset
# apache-superset (pyproject.toml)
# apispec
redis==4.6.0
# via apache-superset
# via apache-superset (pyproject.toml)
requests==2.32.2
# via
# requests-cache
@@ -336,13 +322,13 @@ rich==13.9.4
rsa==4.9
# via google-auth
selenium==3.141.0
# via apache-superset
shillelagh[gsheetsapi]==1.2.18
# via apache-superset
# via apache-superset (pyproject.toml)
shillelagh==1.2.18
# via apache-superset (pyproject.toml)
shortid==0.1.2
# via apache-superset
# via apache-superset (pyproject.toml)
simplejson==3.19.3
# via apache-superset
# via apache-superset (pyproject.toml)
six==1.16.0
# via
# prison
@@ -350,11 +336,11 @@ six==1.16.0
# url-normalize
# wtforms-json
slack-sdk==3.33.4
# via apache-superset
# via apache-superset (pyproject.toml)
sqlalchemy==1.4.54
# via
# apache-superset (pyproject.toml)
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
@@ -362,20 +348,20 @@ sqlalchemy==1.4.54
# sqlalchemy-utils
sqlalchemy-utils==0.38.3
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
sqlglot==25.24.5
# via apache-superset
# via apache-superset (pyproject.toml)
sqlparse==0.5.2
# via apache-superset
# via apache-superset (pyproject.toml)
sshtunnel==0.4.0
# via apache-superset
# via apache-superset (pyproject.toml)
tabulate==0.8.10
# via apache-superset
# via apache-superset (pyproject.toml)
typing-extensions==4.12.2
# via
# apache-superset (pyproject.toml)
# alembic
# apache-superset
# cattrs
# flask-limiter
# limits
@@ -412,17 +398,17 @@ wrapt==1.17.0
# via deprecated
wtforms==3.2.1
# via
# apache-superset
# apache-superset (pyproject.toml)
# flask-appbuilder
# flask-wtf
# wtforms-json
wtforms-json==0.3.5
# via apache-superset
# via apache-superset (pyproject.toml)
xlrd==2.0.1
# via pandas
xlsxwriter==3.0.9
# via
# apache-superset
# apache-superset (pyproject.toml)
# pandas
zipp==3.21.0
# via importlib-metadata

View File

@@ -16,5 +16,4 @@
# specific language governing permissions and limitations
# under the License.
#
-r base.in
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,playwright,postgres,presto,prophet,trino,thumbnails]
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,postgres,presto,prophet,trino,thumbnails]

View File

@@ -1,58 +1,294 @@
# SHA1:dc767a7288b56c785b0cd3c38e95e7b5e66be1ac
#
# This file is autogenerated by pip-compile-multi
# To update, run:
#
# pip-compile-multi
#
-r base.txt
-e file:.
# This file was autogenerated by uv via the following command:
# uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt
-e .
# via -r requirements/development.in
alembic==1.14.0
# via
# -r requirements/base.in
# -r requirements/development.in
astroid==3.1.0
# via pylint
build==1.2.1
# via pip-tools
# -c requirements/base.txt
# flask-migrate
amqp==5.3.1
# via
# -c requirements/base.txt
# kombu
apispec==6.3.0
# via
# -c requirements/base.txt
# flask-appbuilder
apsw==3.46.0.0
# via
# -c requirements/base.txt
# shillelagh
async-timeout==4.0.3
# via
# -c requirements/base.txt
# redis
attrs==24.2.0
# via
# -c requirements/base.txt
# cattrs
# jsonschema
# requests-cache
babel==2.16.0
# via
# -c requirements/base.txt
# flask-babel
backoff==2.2.1
# via
# -c requirements/base.txt
# apache-superset
bcrypt==4.2.1
# via
# -c requirements/base.txt
# paramiko
billiard==4.2.1
# via
# -c requirements/base.txt
# celery
blinker==1.9.0
# via
# -c requirements/base.txt
# flask
bottleneck==1.4.2
# via
# -c requirements/base.txt
# apache-superset
brotli==1.1.0
# via
# -c requirements/base.txt
# flask-compress
cachelib==0.9.0
# via
# -c requirements/base.txt
# flask-caching
# flask-session
cachetools==5.5.0
# via
# -c requirements/base.txt
# google-auth
cattrs==24.1.2
# via
# -c requirements/base.txt
# requests-cache
celery==5.4.0
# via
# -c requirements/base.txt
# apache-superset
certifi==2024.8.30
# via
# -c requirements/base.txt
# requests
cffi==1.17.1
# via
# -c requirements/base.txt
# cryptography
# pynacl
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.4.0
# via
# -c requirements/base.txt
# requests
click==8.1.7
# via
# -c requirements/base.txt
# apache-superset
# celery
# click-didyoumean
# click-option-group
# click-plugins
# click-repl
# flask
# flask-appbuilder
click-didyoumean==0.3.1
# via
# -c requirements/base.txt
# celery
click-option-group==0.5.6
# via
# -c requirements/base.txt
# apache-superset
click-plugins==1.1.1
# via
# -c requirements/base.txt
# celery
click-repl==0.3.0
# via
# -c requirements/base.txt
# celery
cmdstanpy==1.1.0
# via prophet
colorama==0.4.6
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
contourpy==1.0.7
# via matplotlib
coverage[toml]==7.6.8
coverage==7.6.8
# via pytest-cov
cron-descriptor==1.4.5
# via
# -c requirements/base.txt
# apache-superset
croniter==5.0.1
# via
# -c requirements/base.txt
# apache-superset
cryptography==43.0.3
# via
# -c requirements/base.txt
# apache-superset
# paramiko
# pyopenssl
cycler==0.12.1
# via matplotlib
db-dtypes==1.3.1
# via pandas-gbq
dill==0.3.9
# via pylint
defusedxml==0.7.1
# via
# -c requirements/base.txt
# odfpy
deprecated==1.2.15
# via
# -c requirements/base.txt
# limits
deprecation==2.1.0
# via
# -c requirements/base.txt
# apache-superset
distlib==0.3.8
# via virtualenv
dnspython==2.7.0
# via
# -c requirements/base.txt
# email-validator
docker==7.0.0
# via apache-superset
email-validator==2.2.0
# via
# -c requirements/base.txt
# flask-appbuilder
et-xmlfile==2.0.0
# via
# -c requirements/base.txt
# openpyxl
exceptiongroup==1.2.2
# via
# -c requirements/base.txt
# cattrs
# pytest
filelock==3.12.2
# via virtualenv
flask==2.3.3
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
# flask-compress
# flask-cors
# flask-jwt-extended
# flask-limiter
# flask-login
# flask-migrate
# flask-session
# flask-sqlalchemy
# flask-testing
# flask-wtf
flask-appbuilder==4.5.2
# via
# -c requirements/base.txt
# apache-superset
flask-babel==2.0.0
# via
# -c requirements/base.txt
# flask-appbuilder
flask-caching==2.3.0
# via
# -c requirements/base.txt
# apache-superset
flask-compress==1.17
# via
# -c requirements/base.txt
# apache-superset
flask-cors==4.0.0
# via apache-superset
flask-jwt-extended==4.7.1
# via
# -c requirements/base.txt
# flask-appbuilder
flask-limiter==3.8.0
# via
# -c requirements/base.txt
# flask-appbuilder
flask-login==0.6.3
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
flask-migrate==3.1.0
# via
# -c requirements/base.txt
# apache-superset
flask-session==0.8.0
# via
# -c requirements/base.txt
# apache-superset
flask-sqlalchemy==2.5.1
# via
# -c requirements/base.txt
# flask-appbuilder
# flask-migrate
flask-talisman==1.1.0
# via
# -c requirements/base.txt
# apache-superset
flask-testing==0.8.1
# via apache-superset
flask-wtf==1.2.2
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
fonttools==4.55.0
# via matplotlib
freezegun==1.5.1
# via apache-superset
func-timeout==4.3.5
# via
# -c requirements/base.txt
# apache-superset
future==1.0.0
# via pyhive
geographiclib==2.0
# via
# -c requirements/base.txt
# geopy
geopy==2.4.1
# via
# -c requirements/base.txt
# apache-superset
gevent==24.2.1
# via apache-superset
google-api-core[grpc]==2.23.0
google-api-core==2.23.0
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
# google-cloud-core
# pandas-gbq
# sqlalchemy-bigquery
google-auth==2.36.0
# via
# -c requirements/base.txt
# google-api-core
# google-auth-oauthlib
# google-cloud-bigquery
# google-cloud-core
# pandas-gbq
# pydata-google-auth
# shillelagh
# sqlalchemy-bigquery
google-auth-oauthlib==1.2.1
# via
# pandas-gbq
@@ -74,6 +310,12 @@ googleapis-common-protos==1.66.0
# via
# google-api-core
# grpcio-status
greenlet==3.0.3
# via
# -c requirements/base.txt
# apache-superset
# gevent
# shillelagh
grpcio==1.68.0
# via
# apache-superset
@@ -81,54 +323,241 @@ grpcio==1.68.0
# grpcio-status
grpcio-status==1.60.1
# via google-api-core
gunicorn==23.0.0
# via
# -c requirements/base.txt
# apache-superset
hashids==1.3.1
# via
# -c requirements/base.txt
# apache-superset
holidays==0.25
# via
# -c requirements/base.txt
# apache-superset
# prophet
humanize==4.11.0
# via
# -c requirements/base.txt
# apache-superset
identify==2.5.36
# via pre-commit
idna==3.10
# via
# -c requirements/base.txt
# email-validator
# requests
importlib-metadata==8.5.0
# via
# -c requirements/base.txt
# apache-superset
importlib-resources==6.4.5
# via
# -c requirements/base.txt
# limits
# prophet
iniconfig==2.0.0
# via pytest
isort==5.12.0
# via pylint
isodate==0.7.2
# via
# -c requirements/base.txt
# apache-superset
itsdangerous==2.2.0
# via
# -c requirements/base.txt
# flask
# flask-wtf
jinja2==3.1.4
# via
# -c requirements/base.txt
# flask
# flask-babel
jsonpath-ng==1.7.0
# via
# -c requirements/base.txt
# apache-superset
jsonschema==4.17.3
# via
# -c requirements/base.txt
# flask-appbuilder
# jsonschema-spec
# openapi-schema-validator
# openapi-spec-validator
jsonschema-spec==0.1.6
# via openapi-spec-validator
kiwisolver==1.4.7
# via matplotlib
kombu==5.4.2
# via
# -c requirements/base.txt
# celery
korean-lunar-calendar==0.3.1
# via
# -c requirements/base.txt
# holidays
lazy-object-proxy==1.10.0
# via openapi-spec-validator
limits==3.13.0
# via
# -c requirements/base.txt
# flask-limiter
mako==1.3.6
# via
# -c requirements/base.txt
# alembic
# apache-superset
markdown==3.7
# via
# -c requirements/base.txt
# apache-superset
markdown-it-py==3.0.0
# via
# -c requirements/base.txt
# rich
markupsafe==3.0.2
# via
# -c requirements/base.txt
# jinja2
# mako
# werkzeug
# wtforms
marshmallow==3.23.1
# via
# -c requirements/base.txt
# flask-appbuilder
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==0.28.2
# via
# -c requirements/base.txt
# flask-appbuilder
matplotlib==3.9.0
# via prophet
mccabe==0.7.0
# via pylint
mdurl==0.1.2
# via
# -c requirements/base.txt
# markdown-it-py
msgpack==1.0.8
# via
# -c requirements/base.txt
# apache-superset
msgspec==0.18.6
# via
# -c requirements/base.txt
# flask-session
mysqlclient==2.2.6
# via apache-superset
nh3==0.2.19
# via
# -c requirements/base.txt
# apache-superset
nodeenv==1.8.0
# via pre-commit
numpy==1.23.5
# via
# -c requirements/base.txt
# apache-superset
# bottleneck
# cmdstanpy
# contourpy
# db-dtypes
# matplotlib
# pandas
# pandas-gbq
# prophet
# pyarrow
oauthlib==3.2.2
# via requests-oauthlib
odfpy==1.4.1
# via
# -c requirements/base.txt
# pandas
openapi-schema-validator==0.4.4
# via openapi-spec-validator
openapi-spec-validator==0.5.6
# via apache-superset
openpyxl==3.1.5
# via
# -c requirements/base.txt
# pandas
ordered-set==4.1.0
# via
# -c requirements/base.txt
# flask-limiter
packaging==24.2
# via
# -c requirements/base.txt
# apache-superset
# apispec
# db-dtypes
# deprecation
# docker
# google-cloud-bigquery
# gunicorn
# limits
# marshmallow
# marshmallow-sqlalchemy
# matplotlib
# pytest
# shillelagh
# sqlalchemy-bigquery
pandas==2.0.3
# via
# -c requirements/base.txt
# apache-superset
# cmdstanpy
# db-dtypes
# pandas-gbq
# prophet
pandas-gbq==0.19.1
# via apache-superset
parameterized==0.9.0
# via apache-superset
paramiko==3.5.0
# via
# -c requirements/base.txt
# apache-superset
# sshtunnel
parsedatetime==2.6
# via
# -c requirements/base.txt
# apache-superset
pathable==0.4.3
# via jsonschema-spec
pgsanity==0.2.9
# via
# -c requirements/base.txt
# apache-superset
pillow==10.3.0
# via
# apache-superset
# matplotlib
pip-compile-multi==2.6.3
# via apache-superset
pip-tools==7.4.1
# via pip-compile-multi
playwright==1.42.0
# via apache-superset
platformdirs==3.8.1
# via
# -c requirements/base.txt
# requests-cache
# virtualenv
pluggy==1.5.0
# via pytest
ply==3.11
# via
# -c requirements/base.txt
# jsonpath-ng
polyline==2.0.2
# via
# -c requirements/base.txt
# apache-superset
pre-commit==4.0.1
# via apache-superset
prison==0.2.1
# via
# -c requirements/base.txt
# flask-appbuilder
progress==1.6
# via apache-superset
prompt-toolkit==3.0.48
# via
# -c requirements/base.txt
# click-repl
prophet==1.1.5
# via apache-superset
proto-plus==1.25.0
@@ -146,24 +575,64 @@ psutil==6.1.0
# via apache-superset
psycopg2-binary==2.9.6
# via apache-superset
pyarrow==14.0.2
# via
# -c requirements/base.txt
# apache-superset
# db-dtypes
# pandas-gbq
pyasn1==0.6.1
# via
# -c requirements/base.txt
# pyasn1-modules
# python-ldap
# rsa
pyasn1-modules==0.4.1
# via
# -c requirements/base.txt
# google-auth
# python-ldap
pycparser==2.22
# via
# -c requirements/base.txt
# cffi
pydata-google-auth==1.9.0
# via pandas-gbq
pydruid==0.6.9
# via apache-superset
pyee==11.0.1
# via playwright
pyfakefs==5.3.5
# via apache-superset
pyhive[presto]==0.7.0
pygments==2.18.0
# via
# -c requirements/base.txt
# rich
pyhive==0.7.0
# via apache-superset
pyinstrument==4.4.0
# via apache-superset
pylint==3.1.0
# via apache-superset
pyproject-hooks==1.2.0
pyjwt==2.10.1
# via
# build
# pip-tools
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
# flask-jwt-extended
pynacl==1.5.0
# via
# -c requirements/base.txt
# paramiko
pyopenssl==24.2.1
# via
# -c requirements/base.txt
# shillelagh
pyparsing==3.2.0
# via
# -c requirements/base.txt
# apache-superset
# matplotlib
pyrsistent==0.20.0
# via
# -c requirements/base.txt
# jsonschema
pytest==7.4.4
# via
# apache-superset
@@ -173,48 +642,248 @@ pytest-cov==6.0.0
# via apache-superset
pytest-mock==3.10.0
# via apache-superset
python-dateutil==2.9.0.post0
# via
# -c requirements/base.txt
# apache-superset
# celery
# croniter
# flask-appbuilder
# freezegun
# google-cloud-bigquery
# holidays
# matplotlib
# pandas
# pyhive
# shillelagh
# trino
python-dotenv==1.0.1
# via
# -c requirements/base.txt
# apache-superset
python-geohash==0.8.5
# via
# -c requirements/base.txt
# apache-superset
python-ldap==3.4.4
# via apache-superset
pytz==2024.2
# via
# -c requirements/base.txt
# croniter
# flask-babel
# pandas
# trino
pyxlsb==1.0.10
# via
# -c requirements/base.txt
# pandas
pyyaml==6.0.2
# via
# -c requirements/base.txt
# apache-superset
# apispec
# jsonschema-spec
# pre-commit
redis==4.6.0
# via
# -c requirements/base.txt
# apache-superset
requests==2.32.2
# via
# -c requirements/base.txt
# docker
# google-api-core
# google-cloud-bigquery
# jsonschema-spec
# pydruid
# pyhive
# requests-cache
# requests-oauthlib
# shillelagh
# trino
requests-cache==1.2.0
# via
# -c requirements/base.txt
# shillelagh
requests-oauthlib==2.0.0
# via google-auth-oauthlib
rfc3339-validator==0.1.4
# via openapi-schema-validator
rich==13.9.4
# via
# -c requirements/base.txt
# flask-limiter
rsa==4.9
# via
# -c requirements/base.txt
# google-auth
ruff==0.8.0
# via apache-superset
selenium==3.141.0
# via
# -c requirements/base.txt
# apache-superset
setuptools==75.6.0
# via
# nodeenv
# pandas-gbq
# pydata-google-auth
# zope-event
# zope-interface
shillelagh==1.2.18
# via
# -c requirements/base.txt
# apache-superset
shortid==0.1.2
# via
# -c requirements/base.txt
# apache-superset
simplejson==3.19.3
# via
# -c requirements/base.txt
# apache-superset
six==1.16.0
# via
# -c requirements/base.txt
# prison
# python-dateutil
# rfc3339-validator
# url-normalize
# wtforms-json
slack-sdk==3.33.4
# via
# -c requirements/base.txt
# apache-superset
sqlalchemy==1.4.54
# via
# -c requirements/base.txt
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
# shillelagh
# sqlalchemy-bigquery
# sqlalchemy-utils
sqlalchemy-bigquery==1.12.0
# via apache-superset
sqlalchemy-utils==0.38.3
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
sqlglot==25.24.5
# via
# -c requirements/base.txt
# apache-superset
sqloxide==0.1.51
# via apache-superset
sqlparse==0.5.2
# via
# -c requirements/base.txt
# apache-superset
sshtunnel==0.4.0
# via
# -c requirements/base.txt
# apache-superset
statsd==4.0.1
# via apache-superset
tomli==2.1.0
tabulate==0.8.10
# via
# -c requirements/base.txt
# apache-superset
tomli==2.2.1
# via
# build
# coverage
# pip-tools
# pylint
# pytest
tomlkit==0.13.2
# via pylint
toposort==1.10
# via pip-compile-multi
tqdm==4.67.1
# via
# cmdstanpy
# prophet
trino==0.330.0
# via apache-superset
typing-extensions==4.12.2
# via
# -c requirements/base.txt
# alembic
# apache-superset
# cattrs
# flask-limiter
# limits
# rich
# shillelagh
tzdata==2024.2
# via
# -c requirements/base.txt
# celery
# kombu
# pandas
tzlocal==5.2
# via trino
url-normalize==1.4.3
# via
# -c requirements/base.txt
# requests-cache
urllib3==1.26.18
# via
# -c requirements/base.txt
# docker
# requests
# requests-cache
# selenium
vine==5.1.0
# via
# -c requirements/base.txt
# amqp
# celery
# kombu
virtualenv==20.23.1
# via pre-commit
wheel==0.45.1
# via pip-tools
wcwidth==0.2.13
# via
# -c requirements/base.txt
# prompt-toolkit
werkzeug==3.1.3
# via
# -c requirements/base.txt
# flask
# flask-appbuilder
# flask-jwt-extended
# flask-login
wrapt==1.17.0
# via
# -c requirements/base.txt
# deprecated
wtforms==3.2.1
# via
# -c requirements/base.txt
# apache-superset
# flask-appbuilder
# flask-wtf
# wtforms-json
wtforms-json==0.3.5
# via
# -c requirements/base.txt
# apache-superset
xlrd==2.0.1
# via
# -c requirements/base.txt
# pandas
xlsxwriter==3.0.9
# via
# -c requirements/base.txt
# apache-superset
# pandas
zipp==3.21.0
# via
# -c requirements/base.txt
# importlib-metadata
zope-event==5.0
# via gevent
zope-interface==5.4.0
# via gevent
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools
zstandard==0.23.0
# via
# -c requirements/base.txt
# flask-compress

View File

@@ -0,0 +1 @@
babel

View File

@@ -0,0 +1,4 @@
# This file was autogenerated by uv via the following command:
# uv pip compile requirements/translations.in -o requirements/translations.txt
babel==2.16.0
# via -r requirements/translations.in

View File

@@ -70,7 +70,7 @@ def extract_modified_tables(module: ModuleType) -> set[str]:
return tables
def find_models(module: ModuleType) -> list[type[Model]]:
def find_models(module: ModuleType) -> list[type[Model]]: # noqa: C901
"""
Find all models in a migration script.
"""
@@ -94,7 +94,7 @@ def find_models(module: ModuleType) -> list[type[Model]]:
# downgrade
sqlalchemy_uri = current_app.config["SQLALCHEMY_DATABASE_URI"]
engine = create_engine(sqlalchemy_uri)
Base = automap_base()
Base = automap_base() # noqa: N806
Base.prepare(engine, reflect=True)
seen = set()
while tables:
@@ -138,7 +138,7 @@ def find_models(module: ModuleType) -> list[type[Model]]:
@click.option("--limit", default=1000, help="Maximum number of entities.")
@click.option("--force", is_flag=True, help="Do not prompt for confirmation.")
@click.option("--no-auto-cleanup", is_flag=True, help="Do not remove created models.")
def main(
def main( # noqa: C901
filepath: str, limit: int = 1000, force: bool = False, no_auto_cleanup: bool = False
) -> None:
auto_cleanup = not no_auto_cleanup

View File

@@ -49,7 +49,7 @@ github_repo = os.environ.get("GITHUB_REPOSITORY", "apache/superset")
def request(
method: Literal["GET", "POST", "DELETE", "PUT"], endpoint: str, **kwargs: Any
) -> dict[str, Any]:
resp = requests.request(
resp = requests.request( # noqa: S113
method,
f"https://api.github.com/{endpoint.lstrip('/')}",
headers={"Authorization": f"Bearer {github_token}"},
@@ -152,7 +152,7 @@ Date: {date_str}
help="Whether to also cancel running workflows.",
)
@click.argument("branch_or_pull", required=False)
def cancel_github_workflows(
def cancel_github_workflows( # noqa: C901
branch_or_pull: Optional[str],
repo: str,
event: list[str],

View File

@@ -51,12 +51,12 @@ GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
def fetch_files_github_api(url: str): # type: ignore
"""Fetches data using GitHub API."""
req = Request(url)
req = Request(url) # noqa: S310
req.add_header("Authorization", f"Bearer {GITHUB_TOKEN}")
req.add_header("Accept", "application/vnd.github.v3+json")
print(f"Fetching from {url}")
with urlopen(req) as response:
with urlopen(req) as response: # noqa: S310
body = response.read()
return json.loads(body)
@@ -130,7 +130,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
)
# Output results
output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt"
output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt" # noqa: S108
with open(output_path, "a") as f:
for check, changed in changes_detected.items():
if changed:
@@ -139,8 +139,8 @@ def main(event_type: str, sha: str, repo: str) -> None:
def get_git_sha() -> str:
return os.getenv("GITHUB_SHA") or subprocess.check_output(
["git", "rev-parse", "HEAD"]
return os.getenv("GITHUB_SHA") or subprocess.check_output( # noqa: S603
["git", "rev-parse", "HEAD"] # noqa: S607
).strip().decode("utf-8")

View File

@@ -47,7 +47,7 @@ class Requirement:
def get_version(self) -> Optional[str]:
try:
version = subprocess.check_output(self.command, shell=True).decode().strip()
version = subprocess.check_output(self.command, shell=True).decode().strip() # noqa: S602
if self.version_post_process:
version = self.version_post_process(version)
return version.split()[-1]
@@ -76,7 +76,7 @@ class Requirement:
def format_result(self) -> str:
ideal_range_str = f"{self.ideal_range[0]} - {self.ideal_range[1]}"
supported_range_str = f"{self.supported_range[0]} - {self.supported_range[1]}"
return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}"
return f"{self.status.split()[0]} {self.name:<25} {self.version or 'N/A':<25} {ideal_range_str:<25} {supported_range_str:<25}" # noqa: E501
def check_memory(min_gb: int) -> str:
@@ -101,8 +101,9 @@ def get_cpu_info() -> str:
def get_docker_platform() -> str:
try:
output = (
subprocess.check_output(
"docker info --format '{{.OperatingSystem}}'", shell=True
subprocess.check_output( # noqa: S602
"docker info --format '{{.OperatingSystem}}'", # noqa: S607
shell=True, # noqa: S607
)
.decode()
.strip()
@@ -117,7 +118,7 @@ def get_docker_platform() -> str:
@click.command(
help="""
This script checks the local environment for various software versions and other requirements, providing feedback on whether they are ideal, supported, or unsupported.
"""
""" # noqa: E501
)
@click.option(
"--docker", is_flag=True, help="Check Docker and Docker Compose requirements"
@@ -128,7 +129,7 @@ This script checks the local environment for various software versions and other
help="Check frontend requirements (npm, Node.js, memory)",
)
@click.option("--backend", is_flag=True, help="Check backend requirements (Python)")
def main(docker: bool, frontend: bool, backend: bool) -> None:
def main(docker: bool, frontend: bool, backend: bool) -> None: # noqa: C901
requirements = [
Requirement(
"python",

View File

@@ -74,7 +74,7 @@ def run_cypress_for_test_file(
print(f"DRY RUN: {cmd}")
return 0
process = subprocess.Popen(
process = subprocess.Popen( # noqa: S602
cmd,
shell=True,
stdout=subprocess.PIPE,

View File

@@ -171,7 +171,7 @@ def generate_erd(file_path: str) -> None:
"""
data = introspect_models()
templates_path = os.path.dirname(__file__)
env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path))
env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_path)) # noqa: S701
# Load the template
template = env.get_template("erd.template.puml")

30
scripts/uv-pip-compile.sh Executable file
View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
ADDITIONAL_ARGS="$@"
# Generate the requirements/base.txt file
uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt $ADDITIONAL_ARGS
# Generate the requirements/development.txt file, making sure requirements/base.txt is a constraint to keep the versions in sync
uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt $ADDITIONAL_ARGS
uv pip compile requirements/translations.in -o requirements/translations.txt $ADDITIONAL_ARGS

View File

@@ -30,7 +30,7 @@ with open(PACKAGE_JSON) as package_file:
def get_git_sha() -> str:
try:
output = subprocess.check_output(["git", "rev-parse", "HEAD"])
output = subprocess.check_output(["git", "rev-parse", "HEAD"]) # noqa: S603, S607
return output.decode().strip()
except Exception: # pylint: disable=broad-except
return ""
@@ -58,7 +58,7 @@ setup(
zip_safe=False,
entry_points={
"console_scripts": ["superset=superset.cli.main:superset"],
# the `postgres` and `postgres+psycopg2://` schemes were removed in SQLAlchemy 1.4
# the `postgres` and `postgres+psycopg2://` schemes were removed in SQLAlchemy 1.4 # noqa: E501
# add an alias here to prevent breaking existing databases
"sqlalchemy.dialects": [
"postgres.psycopg2 = sqlalchemy.dialects.postgresql:dialect",

View File

@@ -57,7 +57,7 @@ const drillBy = (targetDrillByColumn: string, isLegacy = false) => {
cy.get('.ant-dropdown:not(.ant-dropdown-hidden)')
.first()
.find("[role='menu'] [role='menuitem'] [title='Drill by']")
.trigger('mouseover');
.trigger('mouseover', { force: true });
cy.get(
'.ant-dropdown-menu-submenu:not(.ant-dropdown-menu-hidden) [data-test="drill-by-submenu"]',
)

View File

@@ -51,7 +51,7 @@ function openProperties() {
cy.getBySel('header-actions-menu')
.contains('Edit properties')
.click({ force: true });
cy.get('.ant-modal-body').should('be.visible');
cy.get('.antd5-modal-body').should('be.visible');
});
}
@@ -60,7 +60,7 @@ function openExploreProperties() {
cy.get('.ant-dropdown-menu')
.contains('Edit chart properties')
.click({ force: true });
cy.get('.ant-modal-body').should('be.visible');
cy.get('.antd5-modal-body').should('be.visible');
}
function assertMetadata(text: string) {
@@ -77,7 +77,7 @@ function assertMetadata(text: string) {
}
function openAdvancedProperties() {
cy.get('.ant-modal-body')
cy.get('.antd5-modal-body')
.contains('Advanced')
.should('be.visible')
.click({ force: true });
@@ -1093,14 +1093,14 @@ describe('Dashboard edit', () => {
applyChanges();
});
it('should not accept an invalid color scheme', () => {
it.skip('should not accept an invalid color scheme', () => {
openAdvancedProperties();
clearMetadata();
// allow console error
cy.allowConsoleErrors(['Error: A valid color scheme is required']);
writeMetadata('{"color_scheme":"wrongcolorscheme"}');
applyChanges();
cy.get('.ant-modal-body')
cy.get('.antd5-modal-body')
.contains('A valid color scheme is required')
.should('be.visible');
});

View File

@@ -56,7 +56,7 @@ describe('Datasource control', () => {
cy.focused().type(`${newMetricName}{enter}`);
cy.get('[data-test="datasource-modal-save"]').click();
cy.get('.ant-modal-confirm-btns button').contains('OK').click();
cy.get('.antd5-modal-confirm-btns button').contains('OK').click();
// select new metric
cy.get('[data-test=metrics]')
.contains('Drop columns/metrics here or click')
@@ -68,7 +68,7 @@ describe('Datasource control', () => {
// delete metric
cy.get('[data-test="datasource-menu-trigger"]').click();
cy.get('[data-test="edit-dataset"]').click();
cy.get('.ant-modal-content').within(() => {
cy.get('.antd5-modal-content').within(() => {
cy.get('[data-test="collection-tab-Metrics"]')
.contains('Metrics')
.click();
@@ -78,7 +78,7 @@ describe('Datasource control', () => {
.find('[data-test="crud-delete-icon"]')
.click();
cy.get('[data-test="datasource-modal-save"]').click();
cy.get('.ant-modal-confirm-btns button').contains('OK').click();
cy.get('.antd5-modal-confirm-btns button').contains('OK').click();
cy.get('[data-test="metrics"]').contains(newMetricName).should('not.exist');
});
});
@@ -121,7 +121,7 @@ describe('VizType control', () => {
cy.contains('View all charts').click();
cy.get('.ant-modal-content').within(() => {
cy.get('.antd5-modal-content').within(() => {
cy.get('button').contains('KPI').click(); // change categories
cy.get('[role="button"]').contains('Big Number').click();
cy.get('button').contains('Select').click();

View File

@@ -42,8 +42,8 @@ describe('Test explore links', () => {
cy.wait('@chartData').then(() => {
cy.get('code');
});
cy.get('.ant-modal-content').within(() => {
cy.get('button.ant-modal-close').first().click({ force: true });
cy.get('.antd5-modal-content').within(() => {
cy.get('button.antd5-modal-close').first().click({ force: true });
});
});

View File

@@ -97,8 +97,8 @@ export const databasesPage = {
infoAlert: '.antd5-alert',
serviceAccountInput: '[name="credentials_info"]',
connectionStep: {
modal: '.ant-modal-content',
modalBody: '.ant-modal-body',
modal: '.antd5-modal-content',
modalBody: '.antd5-modal-body',
stepTitle: '.css-7x6kk > h4',
helperBottom: '.helper-bottom',
postgresDatabase: '[name="database"]',
@@ -150,7 +150,7 @@ export const sqlLabView = {
sqlEditor: '#brace-editor textarea',
saveAsButton: '.SaveQuery > .ant-btn',
saveAsModal: {
footer: '.ant-modal-footer',
footer: '.antd5-modal-footer',
queryNameInput: 'input[class^="ant-input"]',
},
sqlToolbar: {
@@ -199,12 +199,12 @@ export const annotationLayersView = {
},
modal: {
content: {
content: '.ant-modal-body',
title: '.ant-modal-body > :nth-child(2) > input',
content: '.antd5-modal-body',
title: '.antd5-modal-body > :nth-child(2) > input',
description: "[name='descr']",
},
footer: {
footer: '.ant-modal-footer',
footer: '.antd5-modal-footer',
addButton: dataTestLocator('modal-confirm-button'),
cancelButton: dataTestLocator('modal-cancel-button'),
},
@@ -216,7 +216,7 @@ export const datasetsList = {
newDatasetModal: {
inputField: '[class="section"]',
addButton: dataTestLocator('modal-confirm-button'),
body: '.ant-modal-body',
body: '.antd5-modal-body',
},
table: {
tableRow: {
@@ -261,7 +261,7 @@ export const datasetsList = {
},
},
deleteDatasetModal: {
modal: '.ant-modal-content',
modal: '.antd5-modal-content',
deleteInput: dataTestLocator('delete-modal-input'),
deleteButton: dataTestLocator('modal-confirm-button'),
text: '.css-kxmt87',
@@ -318,8 +318,8 @@ export const chartListView = {
};
export const nativeFilters = {
modal: {
container: '.ant-modal',
footer: '.ant-modal-footer',
container: '.antd5-modal',
footer: '.antd5-modal-footer',
saveButton: dataTestLocator('native-filter-modal-save-button'),
cancelButton: dataTestLocator('native-filter-modal-cancel-button'),
confirmCancelButton: dataTestLocator(
@@ -476,15 +476,15 @@ export const exploreView = {
},
chartAreaItem: '.nv-legend-text',
viewQueryModal: {
container: '.ant-modal-content',
closeButton: 'button.ant-modal-close',
container: '.antd5-modal-content',
closeButton: 'button.antd5-modal-close',
},
embedCodeModal: {
container: dataTestLocator('embed-code-popover'),
textfield: dataTestLocator('embed-code-textarea'),
},
saveModal: {
modal: '.ant-modal-content',
modal: '.antd5-modal-content',
chartNameInput: dataTestLocator('new-chart-name'),
dashboardNameInput: '.ant-select-selection-search-input',
addToDashboardInput: dataTestLocator(
@@ -580,7 +580,7 @@ export const exploreView = {
},
},
editDatasetModal: {
container: '.ant-modal-content',
container: '.antd5-modal-content',
datasetTabsContainer: dataTestLocator('edit-dataset-tabs'),
saveButton: dataTestLocator('datasource-modal-save'),
metricsTab: {
@@ -588,7 +588,7 @@ export const exploreView = {
rowsContainer: dataTestLocator('table-content-rows'),
},
confirmModal: {
okButton: '.ant-modal-confirm-btns .ant-btn-primary',
okButton: '.antd5-modal-confirm-btns .ant-btn-primary',
},
},
visualizationTypeModal: {
@@ -619,12 +619,12 @@ export const dashboardView = {
closeButton: dataTestLocator('close-button'),
},
saveModal: {
modal: '.ant-modal-content',
modal: '.antd5-modal-content',
dashboardNameInput: '.ant-input',
saveButton: dataTestLocator('modal-save-dashboard-button'),
},
dashboardProperties: {
modal: '.ant-modal-content',
modal: '.antd5-modal-content',
dashboardTitleInput: dataTestLocator('dashboard-title-input'),
modalButton: '[type="button"]',
},

View File

@@ -221,6 +221,7 @@
"babel-plugin-dynamic-import-node": "^2.3.3",
"babel-plugin-jsx-remove-data-test-id": "^3.0.0",
"babel-plugin-lodash": "^3.3.4",
"compression-webpack-plugin": "^11.1.0",
"copy-webpack-plugin": "^12.0.2",
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
@@ -283,6 +284,7 @@
"speed-measure-webpack-plugin": "^1.5.0",
"storybook": "8.1.11",
"style-loader": "^4.0.0",
"terser-webpack-plugin": "^5.3.11",
"thread-loader": "^4.0.2",
"ts-loader": "^9.5.1",
"typescript": "^4.8.4",
@@ -19745,6 +19747,79 @@
"node": ">= 0.8.0"
}
},
"node_modules/compression-webpack-plugin": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/compression-webpack-plugin/-/compression-webpack-plugin-11.1.0.tgz",
"integrity": "sha512-zDOQYp10+upzLxW+VRSjEpRRwBXJdsb5lBMlRxx1g8hckIFBpe3DTI0en2w7h+beuq89576RVzfiXrkdPGrHhA==",
"dev": true,
"dependencies": {
"schema-utils": "^4.2.0",
"serialize-javascript": "^6.0.2"
},
"engines": {
"node": ">= 18.12.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
},
"peerDependencies": {
"webpack": "^5.1.0"
}
},
"node_modules/compression-webpack-plugin/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/compression-webpack-plugin/node_modules/ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"dev": true,
"dependencies": {
"fast-deep-equal": "^3.1.3"
},
"peerDependencies": {
"ajv": "^8.8.2"
}
},
"node_modules/compression-webpack-plugin/node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true
},
"node_modules/compression-webpack-plugin/node_modules/schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"dev": true,
"dependencies": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
},
"engines": {
"node": ">= 10.13.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
}
},
"node_modules/compute-gcd": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/compute-gcd/-/compute-gcd-1.2.1.tgz",
@@ -51056,8 +51131,9 @@
}
},
"node_modules/terser": {
"version": "5.27.0",
"license": "BSD-2-Clause",
"version": "5.37.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.37.0.tgz",
"integrity": "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA==",
"dependencies": {
"@jridgewell/source-map": "^0.3.3",
"acorn": "^8.8.2",
@@ -51072,15 +51148,16 @@
}
},
"node_modules/terser-webpack-plugin": {
"version": "5.3.10",
"version": "5.3.11",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.11.tgz",
"integrity": "sha512-RVCsMfuD0+cTt3EwX8hSl2Ks56EbFHWmhluwcqoPKtBnfjiT6olaq7PRIRfhyU8nnC2MrnDrBLfrD/RGE+cVXQ==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"@jridgewell/trace-mapping": "^0.3.20",
"@jridgewell/trace-mapping": "^0.3.25",
"jest-worker": "^27.4.5",
"schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.1",
"terser": "^5.26.0"
"schema-utils": "^4.3.0",
"serialize-javascript": "^6.0.2",
"terser": "^5.31.1"
},
"engines": {
"node": ">= 10.13.0"
@@ -51104,6 +51181,34 @@
}
}
},
"node_modules/terser-webpack-plugin/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"devOptional": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/terser-webpack-plugin/node_modules/ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"devOptional": true,
"dependencies": {
"fast-deep-equal": "^3.1.3"
},
"peerDependencies": {
"ajv": "^8.8.2"
}
},
"node_modules/terser-webpack-plugin/node_modules/has-flag": {
"version": "4.0.0",
"devOptional": true,
@@ -51125,6 +51230,31 @@
"node": ">= 10.13.0"
}
},
"node_modules/terser-webpack-plugin/node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"devOptional": true
},
"node_modules/terser-webpack-plugin/node_modules/schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"devOptional": true,
"dependencies": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
},
"engines": {
"node": ">= 10.13.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
}
},
"node_modules/terser-webpack-plugin/node_modules/supports-color": {
"version": "8.1.1",
"devOptional": true,
@@ -73488,6 +73618,57 @@
"vary": "~1.1.2"
}
},
"compression-webpack-plugin": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/compression-webpack-plugin/-/compression-webpack-plugin-11.1.0.tgz",
"integrity": "sha512-zDOQYp10+upzLxW+VRSjEpRRwBXJdsb5lBMlRxx1g8hckIFBpe3DTI0en2w7h+beuq89576RVzfiXrkdPGrHhA==",
"dev": true,
"requires": {
"schema-utils": "^4.2.0",
"serialize-javascript": "^6.0.2"
},
"dependencies": {
"ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"requires": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
}
},
"ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"dev": true,
"requires": {
"fast-deep-equal": "^3.1.3"
}
},
"json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true
},
"schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"dev": true,
"requires": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
}
}
}
},
"compute-gcd": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/compute-gcd/-/compute-gcd-1.2.1.tgz",
@@ -93305,7 +93486,9 @@
"dev": true
},
"terser": {
"version": "5.27.0",
"version": "5.37.0",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.37.0.tgz",
"integrity": "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA==",
"requires": {
"@jridgewell/source-map": "^0.3.3",
"acorn": "^8.8.2",
@@ -93314,16 +93497,39 @@
}
},
"terser-webpack-plugin": {
"version": "5.3.10",
"version": "5.3.11",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.11.tgz",
"integrity": "sha512-RVCsMfuD0+cTt3EwX8hSl2Ks56EbFHWmhluwcqoPKtBnfjiT6olaq7PRIRfhyU8nnC2MrnDrBLfrD/RGE+cVXQ==",
"devOptional": true,
"requires": {
"@jridgewell/trace-mapping": "^0.3.20",
"@jridgewell/trace-mapping": "^0.3.25",
"jest-worker": "^27.4.5",
"schema-utils": "^3.1.1",
"serialize-javascript": "^6.0.1",
"terser": "^5.26.0"
"schema-utils": "^4.3.0",
"serialize-javascript": "^6.0.2",
"terser": "^5.31.1"
},
"dependencies": {
"ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"devOptional": true,
"requires": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
}
},
"ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"devOptional": true,
"requires": {
"fast-deep-equal": "^3.1.3"
}
},
"has-flag": {
"version": "4.0.0",
"devOptional": true
@@ -93337,6 +93543,24 @@
"supports-color": "^8.0.0"
}
},
"json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"devOptional": true
},
"schema-utils": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz",
"integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==",
"devOptional": true,
"requires": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
}
},
"supports-color": {
"version": "8.1.1",
"devOptional": true,

View File

@@ -288,6 +288,7 @@
"babel-plugin-dynamic-import-node": "^2.3.3",
"babel-plugin-jsx-remove-data-test-id": "^3.0.0",
"babel-plugin-lodash": "^3.3.4",
"compression-webpack-plugin": "^11.1.0",
"copy-webpack-plugin": "^12.0.2",
"cross-env": "^7.0.3",
"css-loader": "^6.8.1",
@@ -350,6 +351,7 @@
"speed-measure-webpack-plugin": "^1.5.0",
"storybook": "8.1.11",
"style-loader": "^4.0.0",
"terser-webpack-plugin": "^5.3.11",
"thread-loader": "^4.0.2",
"ts-loader": "^9.5.1",
"typescript": "^4.8.4",

View File

@@ -80,6 +80,7 @@ import ethiopia from './countries/ethiopia.geojson';
import fiji from './countries/fiji.geojson';
import finland from './countries/finland.geojson';
import france from './countries/france.geojson';
import france_overseas from './countries/france_overseas.geojson';
import france_regions from './countries/france_regions.geojson';
import french_polynesia from './countries/french_polynesia.geojson';
import gabon from './countries/gabon.geojson';
@@ -280,6 +281,7 @@ export const countries = {
fiji,
finland,
france,
france_overseas,
france_regions,
french_polynesia,
gabon,
@@ -427,6 +429,9 @@ export const countryOptions = Object.keys(countries).map(x => {
if (x === 'france_regions') {
return [x, 'France (regions)'];
}
if (x === 'france_overseas') {
return [x, 'France (with overseas)'];
}
if (x === 'turkey_regions') {
return [x, 'Turkey (regions)'];
}

File diff suppressed because one or more lines are too long

View File

@@ -188,7 +188,11 @@ export default function transformProps(
showTotal,
sliceId,
} = formData;
const { currencyFormats = {}, columnFormats = {} } = datasource;
const {
currencyFormats = {},
columnFormats = {},
verboseMap = {},
} = datasource;
const refs: Refs = {};
const primaryValueFormatter = getValueFormatter(
metric,
@@ -334,8 +338,10 @@ export default function transformProps(
secondaryValueFormatter,
colorByCategory,
totalValue,
metricLabel,
secondaryMetricLabel,
metricLabel: verboseMap[metricLabel] || metricLabel,
secondaryMetricLabel: secondaryMetricLabel
? verboseMap[secondaryMetricLabel] || secondaryMetricLabel
: undefined,
}),
},
series: [

View File

@@ -51,6 +51,7 @@ const Styles = styled.div<PivotTableStylesProps>`
width: ${
typeof width === 'string' ? parseInt(width, 10) : width - margin * 2
}px;
white-space: nowrap;
`}
`;

View File

@@ -39,34 +39,37 @@ export const GlobalStyles = () => (
.echarts-tooltip[style*='visibility: hidden'] {
display: none !important;
}
// TODO: Remove when on Ant Design 5.
// Check src/components/Modal for more info.
.modal-functions-ok-button {
border-radius: ${theme.borderRadius}px;
background: ${theme.colors.primary.base};
border: none;
color: ${theme.colors.grayscale.light5};
line-height: 1.5715;
font-size: ${theme.typography.sizes.s}px;
font-weight: ${theme.typography.weights.bold};
&:hover {
background: ${theme.colors.primary.dark1};
}
.antd5-dropdown,
.ant-dropdown {
z-index: ${theme.zIndex.max};
}
.modal-functions-cancel-button {
border-radius: ${theme.borderRadius}px;
background: ${theme.colors.primary.light4};
border: none;
color: ${theme.colors.primary.dark1};
line-height: 1.5715;
font-size: ${theme.typography.sizes.s}px;
font-weight: ${theme.typography.weights.bold};
&:hover {
background: ${mix(
0.1,
theme.colors.primary.base,
theme.colors.primary.light4,
)};
// TODO: Remove when buttons have been upgraded to Ant Design 5.
// Check src/components/Modal for more info.
.ant-modal-confirm {
button {
border: none;
border-radius: ${theme.borderRadius}px;
line-height: 1.5715;
font-size: ${theme.typography.sizes.s}px;
font-weight: ${theme.typography.weights.bold};
}
.ant-btn-primary:not(.btn-danger) {
background: ${theme.colors.primary.base};
color: ${theme.colors.grayscale.light5};
&:hover {
background: ${theme.colors.primary.dark1};
}
}
.ant-btn-default:not(.btn-danger) {
background: ${theme.colors.primary.light4};
color: ${theme.colors.primary.dark1};
&:hover {
background: ${mix(
0.1,
theme.colors.primary.base,
theme.colors.primary.light4,
)};
}
}
}
.column-config-popover {

View File

@@ -421,9 +421,7 @@ export function postStopQuery(query) {
})
.then(() => dispatch(stopQuery(query)))
.then(() => dispatch(addSuccessToast(t('Query was stopped.'))))
.catch(() =>
dispatch(addDangerToast(t('Failed at stopping query. %s', query.id))),
);
.catch(() => dispatch(addDangerToast(t('Failed to stop query.'))));
};
}

View File

@@ -89,11 +89,11 @@ const SqlLabStyles = styled.div`
}
}
.ResultsModal .ant-modal-body {
.ResultsModal .antd5-modal-body {
min-height: ${theme.gridUnit * 140}px;
}
.ant-modal-body {
.antd5-modal-body {
overflow: auto;
}
}

View File

@@ -16,11 +16,10 @@
* specific language governing permissions and limitations
* under the License.
*/
import { useMemo } from 'react';
import { useMemo, ReactNode } from 'react';
import moment from 'moment';
import Card from 'src/components/Card';
import ProgressBar from 'src/components/ProgressBar';
import Label from 'src/components/Label';
import { t, useTheme, QueryResponse } from '@superset-ui/core';
import { useDispatch, useSelector } from 'react-redux';
@@ -35,6 +34,7 @@ import TableView from 'src/components/TableView';
import Button from 'src/components/Button';
import { fDuration } from 'src/utils/dates';
import Icons from 'src/components/Icons';
import Label from 'src/components/Label';
import { Tooltip } from 'src/components/Tooltip';
import { SqlLabRootState } from 'src/SqlLab/types';
import ModalTrigger from 'src/components/ModalTrigger';
@@ -44,11 +44,16 @@ import HighlightedSql from '../HighlightedSql';
import { StaticPosition, verticalAlign, StyledTooltip } from './styles';
interface QueryTableQuery
extends Omit<QueryResponse, 'state' | 'sql' | 'progress' | 'results'> {
extends Omit<
QueryResponse,
'state' | 'sql' | 'progress' | 'results' | 'duration' | 'started'
> {
state?: Record<string, any>;
sql?: Record<string, any>;
progress?: Record<string, any>;
results?: Record<string, any>;
duration?: ReactNode;
started?: ReactNode;
}
interface QueryTableProps {
@@ -125,55 +130,95 @@ const QueryTable = ({
const statusAttributes = {
success: {
config: {
icon: <Icons.Check iconColor={theme.colors.success.base} />,
icon: (
<Icons.CheckOutlined
iconColor={theme.colors.success.base}
iconSize="m"
/>
),
// icon: <Icons.Edit iconSize="xl" />,
label: t('Success'),
},
},
failed: {
config: {
icon: <Icons.XSmall iconColor={theme.colors.error.base} />,
icon: (
<Icons.CloseOutlined
iconColor={theme.colors.error.base}
iconSize="m"
/>
),
label: t('Failed'),
},
},
stopped: {
config: {
icon: <Icons.XSmall iconColor={theme.colors.error.base} />,
icon: (
<Icons.CloseOutlined
iconColor={theme.colors.error.base}
iconSize="m"
/>
),
label: t('Failed'),
},
},
running: {
config: {
icon: <Icons.Running iconColor={theme.colors.primary.base} />,
icon: (
<Icons.LoadingOutlined
iconColor={theme.colors.primary.base}
iconSize="m"
/>
),
label: t('Running'),
},
},
fetching: {
config: {
icon: <Icons.Queued iconColor={theme.colors.primary.base} />,
icon: (
<Icons.LoadingOutlined
iconColor={theme.colors.primary.base}
iconSize="m"
/>
),
label: t('Fetching'),
},
},
timed_out: {
config: {
icon: <Icons.Offline iconColor={theme.colors.grayscale.light1} />,
icon: (
<Icons.Clock iconColor={theme.colors.error.base} iconSize="m" />
),
label: t('Offline'),
},
},
scheduled: {
config: {
icon: <Icons.Queued iconColor={theme.colors.grayscale.base} />,
icon: (
<Icons.LoadingOutlined
iconColor={theme.colors.warning.base}
iconSize="m"
/>
),
label: t('Scheduled'),
},
},
pending: {
config: {
icon: <Icons.Queued iconColor={theme.colors.grayscale.base} />,
icon: (
<Icons.LoadingOutlined
iconColor={theme.colors.warning.base}
iconSize="m"
/>
),
label: t('Scheduled'),
},
},
error: {
config: {
icon: <Icons.Error iconColor={theme.colors.error.base} />,
icon: (
<Icons.Error iconColor={theme.colors.error.base} iconSize="m" />
),
label: t('Unknown Status'),
},
},
@@ -187,16 +232,10 @@ const QueryTable = ({
const status = statusAttributes[state] || statusAttributes.error;
if (q.endDttm) {
q.duration = fDuration(q.startDttm, q.endDttm);
q.duration = (
<Label monospace>{fDuration(q.startDttm, q.endDttm)}</Label>
);
}
const time = moment(q.startDttm).format().split('T');
q.time = (
<div>
<span>
{time[0]} <br /> {time[1]}
</span>
</div>
);
q.user = (
<Button
buttonSize="small"
@@ -215,7 +254,9 @@ const QueryTable = ({
{q.db}
</Button>
);
q.started = moment(q.startDttm).format('L HH:mm:ss');
q.started = (
<Label monospace>{moment(q.startDttm).format('L HH:mm:ss')}</Label>
);
q.querylink = (
<Button
buttonSize="small"
@@ -241,9 +282,9 @@ const QueryTable = ({
<ModalTrigger
className="ResultsModal"
triggerNode={
<Label type="info" className="pointer">
<Button buttonSize="xsmall" buttonStyle="tertiary">
{t('View')}
</Label>
</Button>
}
modalTitle={t('Data preview')}
beforeOpen={() => openAsyncResults(query, displayLimit)}
@@ -275,9 +316,7 @@ const QueryTable = ({
<ProgressBar percent={parseInt(progress.toFixed(0), 10)} striped />
);
q.state = (
<Tooltip title={status.config.label} placement="bottom">
<span>{status.config.icon}</span>
</Tooltip>
<Tooltip title={status.config.label}>{status.config.icon}</Tooltip>
);
q.actions = (
<div>
@@ -287,6 +326,7 @@ const QueryTable = ({
'Overwrite text in the editor with a query on this table',
)}
placement="top"
className="pointer"
>
<Icons.Edit iconSize="xl" />
</StyledTooltip>
@@ -294,6 +334,7 @@ const QueryTable = ({
onClick={() => openQueryInNewTab(query)}
tooltip={t('Run query in a new tab')}
placement="top"
className="pointer"
>
<Icons.PlusCircleOutlined iconSize="xl" css={verticalAlign} />
</StyledTooltip>
@@ -301,6 +342,7 @@ const QueryTable = ({
<StyledTooltip
tooltip={t('Remove query from log')}
onClick={() => dispatch(removeQuery(query))}
className="pointer"
>
<Icons.Trash iconSize="xl" />
</StyledTooltip>

View File

@@ -73,10 +73,10 @@ describe('SaveDatasetModal', () => {
const inputField = screen.getByRole('textbox');
const inputFieldText = screen.getByDisplayValue(/unimportant/i);
expect(saveRadioBtn).toBeVisible();
expect(fieldLabel).toBeVisible();
expect(inputField).toBeVisible();
expect(inputFieldText).toBeVisible();
expect(saveRadioBtn).toBeInTheDocument();
expect(fieldLabel).toBeInTheDocument();
expect(inputField).toBeInTheDocument();
expect(inputFieldText).toBeInTheDocument();
});
it('renders an "Overwrite existing" field', () => {
@@ -89,23 +89,23 @@ describe('SaveDatasetModal', () => {
const inputField = screen.getByRole('combobox');
const placeholderText = screen.getByText(/select or type dataset name/i);
expect(overwriteRadioBtn).toBeVisible();
expect(fieldLabel).toBeVisible();
expect(inputField).toBeVisible();
expect(placeholderText).toBeVisible();
expect(overwriteRadioBtn).toBeInTheDocument();
expect(fieldLabel).toBeInTheDocument();
expect(inputField).toBeInTheDocument();
expect(placeholderText).toBeInTheDocument();
});
it('renders a close button', () => {
render(<SaveDatasetModal {...mockedProps} />, { useRedux: true });
expect(screen.getByRole('button', { name: /close/i })).toBeVisible();
expect(screen.getByRole('button', { name: /close/i })).toBeInTheDocument();
});
it('renders a save button when "Save as new" is selected', () => {
render(<SaveDatasetModal {...mockedProps} />, { useRedux: true });
// "Save as new" is selected when the modal opens by default
expect(screen.getByRole('button', { name: /save/i })).toBeVisible();
expect(screen.getByRole('button', { name: /save/i })).toBeInTheDocument();
});
it('renders an overwrite button when "Overwrite existing" is selected', () => {
@@ -117,7 +117,9 @@ describe('SaveDatasetModal', () => {
});
userEvent.click(overwriteRadioBtn);
expect(screen.getByRole('button', { name: /overwrite/i })).toBeVisible();
expect(
screen.getByRole('button', { name: /overwrite/i }),
).toBeInTheDocument();
});
it('renders the overwrite button as disabled until an existing dataset is selected', async () => {
@@ -181,14 +183,16 @@ describe('SaveDatasetModal', () => {
userEvent.click(overwriteConfirmationBtn);
// Overwrite screen text
expect(screen.getByText(/save or overwrite dataset/i)).toBeVisible();
expect(screen.getByText(/save or overwrite dataset/i)).toBeInTheDocument();
expect(
screen.getByText(/are you sure you want to overwrite this dataset\?/i),
).toBeVisible();
).toBeInTheDocument();
// Overwrite screen buttons
expect(screen.getByRole('button', { name: /close/i })).toBeVisible();
expect(screen.getByRole('button', { name: /back/i })).toBeVisible();
expect(screen.getByRole('button', { name: /overwrite/i })).toBeVisible();
expect(screen.getByRole('button', { name: /close/i })).toBeInTheDocument();
expect(screen.getByRole('button', { name: /back/i })).toBeInTheDocument();
expect(
screen.getByRole('button', { name: /overwrite/i }),
).toBeInTheDocument();
});
it('sends the schema when creating the dataset', async () => {

Some files were not shown because too many files have changed in this diff Show More