Compare commits

..

5 Commits

Author SHA1 Message Date
Beto Dealmeida
754807b87d WIP 2024-11-25 16:43:20 -05:00
Beto Dealmeida
782f94fe8d Another fix 2024-11-24 18:53:14 -05:00
Beto Dealmeida
8e0c00a82e Move integration tests to unit tests 2024-11-24 18:44:51 -05:00
Beto Dealmeida
c0c8802de9 Small fix 2024-11-24 18:03:20 -05:00
Beto Dealmeida
cd3209a600 chore (SIP-117): remove more sqlparse 2024-11-22 12:04:19 -05:00
374 changed files with 6816 additions and 7560 deletions

View File

@@ -53,9 +53,6 @@ github:
merge: false
rebase: false
ghp_branch: gh-pages
ghp_path: /
protected_branches:
master:
required_status_checks:
@@ -91,10 +88,3 @@ github:
required_approving_review_count: 1
required_signatures: false
gh-pages:
required_pull_request_reviews:
dismiss_stale_reviews: false
require_code_owner_reviews: true
required_approving_review_count: 1
required_signatures: false

1
.gitattributes vendored
View File

@@ -1,3 +1,2 @@
docker/**/*.sh text eol=lf
*.svg binary
*.ipynb binary

View File

@@ -41,8 +41,8 @@ body:
label: Superset version
options:
- master / latest-dev
- "4.1.1"
- "4.0.2"
- "4.1.0"
- "3.1.3"
validations:
required: true
- type: dropdown

View File

@@ -43,11 +43,11 @@ runs:
run: |
if [ "${{ inputs.install-superset }}" = "true" ]; then
sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
pip install --upgrade pip setuptools wheel uv
pip install --upgrade pip setuptools wheel
if [ "${{ inputs.requirements-type }}" = "dev" ]; then
uv pip install --system -r requirements/development.txt
pip install -r requirements/development.txt
elif [ "${{ inputs.requirements-type }}" = "base" ]; then
uv pip install --system -r requirements/base.txt
pip install -r requirements/base.txt
fi
fi
shell: bash

View File

@@ -14,12 +14,6 @@ on:
required: true
description: Max number of PRs to open (0 for no limit)
default: 5
extra-flags:
required: false
default: --only-base
description: Additional flags to pass to the bump-python command
#schedule:
# - cron: '0 0 * * *' # Runs daily at midnight UTC
jobs:
bump-python-package:
@@ -65,13 +59,10 @@ jobs:
GROUP_OPT="-g ${{ github.event.inputs.group }}"
fi
EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
supersetbot bump-python \
--verbose \
--use-current-repo \
--include-subpackages \
--limit ${{ github.event.inputs.limit }} \
$PACKAGE_OPT \
$GROUP_OPT \
$EXTRA_FLAGS
$GROUP_OPT

View File

@@ -1,25 +1,30 @@
name: Ephemeral env workflow
# Example manual trigger: gh workflow run ephemeral-env.yml --ref fix_ephemerals --field comment_body="/testenv up" --field issue_number=666
on:
issue_comment:
types: [created]
workflow_dispatch:
inputs:
comment_body:
description: 'Comment body to simulate /testenv command'
required: true
default: '/testenv up'
issue_number:
description: 'Issue or PR number'
required: true
jobs:
config:
runs-on: "ubuntu-22.04"
if: github.event.issue.pull_request
outputs:
has-secrets: ${{ steps.check.outputs.has-secrets }}
steps:
- name: "Check for secrets"
id: check
shell: bash
run: |
if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
echo "has-secrets=1" >> "$GITHUB_OUTPUT"
fi
ephemeral-env-comment:
concurrency:
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment
cancel-in-progress: true
needs: config
if: needs.config.outputs.has-secrets
name: Evaluate ephemeral env comment trigger (/testenv)
runs-on: ubuntu-22.04
permissions:
@@ -39,18 +44,18 @@ jobs:
with:
result-encoding: string
script: |
const pattern = /^\/testenv (up|down)/;
const result = pattern.exec('${{ github.event.inputs.comment_body || github.event.comment.body }}');
return result === null ? 'noop' : result[1];
const pattern = /^\/testenv (up|down)/
const result = pattern.exec(context.payload.comment.body)
return result === null ? 'noop' : result[1]
- name: Looking for feature flags
- name: Eval comment body for feature flags
uses: actions/github-script@v7
id: eval-feature-flags
with:
script: |
const pattern = /FEATURE_(\w+)=(\w+)/g;
let results = [];
[...'${{ github.event.inputs.comment_body || github.event.comment.body }}'.matchAll(pattern)].forEach(match => {
[...context.payload.comment.body.matchAll(pattern)].forEach(match => {
const config = {
name: `SUPERSET_FEATURE_${match[1]}`,
value: match[2],
@@ -62,48 +67,24 @@ jobs:
- name: Limit to committers
if: >
steps.eval-body.outputs.result != 'noop' &&
github.event_name == 'issue_comment' &&
github.event.comment.author_association != 'MEMBER' &&
github.event.comment.author_association != 'OWNER'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
github-token: ${{github.token}}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.';
const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.'
github.rest.issues.createComment({
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
});
core.setFailed(errMsg);
- name: Reply with confirmation comment
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const issueNumber = ${{ github.event.inputs.issue_number || github.event.issue.number }};
const user = '${{ github.event.comment.user.login || github.actor }}';
const action = '${{ steps.eval-body.outputs.result }}';
const runId = context.runId;
const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
const body = action === 'noop'
? `@${user} No ephemeral environment action detected. Please use '/testenv up' or '/testenv down'. [View workflow run](${workflowUrl}).`
: `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;
if (action !== 'noop') {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
}
})
core.setFailed(errMsg)
ephemeral-docker-build:
concurrency:
group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-build
group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build
cancel-in-progress: true
needs: ephemeral-env-comment
name: ephemeral-docker-build
@@ -117,9 +98,9 @@ jobs:
const request = {
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
};
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`);
pull_number: ${{ github.event.issue.number }},
}
core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`)
const pr = await github.rest.pulls.get(request);
return pr.data;
@@ -140,17 +121,12 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Setup supersetbot
uses: ./.github/actions/setup-supersetbot/
- name: Build ephemeral env image
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
supersetbot docker \
--preset ci \
--platform linux/amd64 \
--context-ref "$RELEASE"
./scripts/build_docker.py \
"ci" \
"pull_request" \
--build_context_ref ${{ github.event.issue.number }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
@@ -170,7 +146,7 @@ jobs:
ECR_REPOSITORY: superset-ci
IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
run: |
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci
docker push -a $ECR_REGISTRY/$ECR_REPOSITORY
ephemeral-env-up:
@@ -205,22 +181,22 @@ jobs:
aws ecr describe-images \
--registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
--repository-name superset-ci \
--image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
--image-ids imageTag=pr-${{ github.event.issue.number }}-ci
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
github-token: ${{github.token}}
script: |
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.'
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: errMsg
});
core.setFailed(errMsg);
})
core.setFailed(errMsg)
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
@@ -228,7 +204,7 @@ jobs:
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci
- name: Update env vars in the Amazon ECS task definition
run: |
@@ -237,29 +213,30 @@ jobs:
- name: Describe ECS service
id: describe-services
run: |
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
- name: Create ECS service
id: create-service
if: steps.describe-services.outputs.active != 'true'
id: create-service
env:
ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
run: |
aws ecs create-service \
--cluster superset-ci \
--service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
--service-name pr-${{ github.event.issue.number }}-service \
--task-definition superset-ci \
--launch-type FARGATE \
--desired-count 1 \
--platform-version LATEST \
--network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
--tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
--tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
service: pr-${{ github.event.issue.number }}-service
cluster: superset-ci
wait-for-service-stability: true
wait-for-minutes: 10
@@ -267,15 +244,18 @@ jobs:
- name: List tasks
id: list-tasks
run: |
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
- name: Get network interface
id: get-eni
run: |
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name==\"networkInterfaceId\")) | .[0] | .value')" >> $GITHUB_OUTPUT
echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT
- name: Get public IP
id: get-ip
run: |
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
@@ -283,11 +263,12 @@ jobs:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
})
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
@@ -295,8 +276,8 @@ jobs:
github-token: ${{github.token}}
script: |
github.rest.issues.createComment({
issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
issue_number: ${{ github.event.issue.number }},
owner: context.repo.owner,
repo: context.repo.repo,
body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
})

View File

@@ -73,7 +73,7 @@ jobs:
working-directory: ./superset-frontend/packages/generator-superset
run: npm run test
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: javascript
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,4 +1,4 @@
name: "Helm: lint and test charts"
name: Lint and Test Charts
on:
pull_request:

View File

@@ -1,8 +1,4 @@
# This workflow automates the release process for Helm charts.
# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
# allowing the changes to be reviewed and merged manually.
name: "Helm: release charts"
name: Release Charts
on:
push:
@@ -11,28 +7,18 @@ on:
- "[0-9].[0-9]*"
paths:
- "helm/**"
workflow_dispatch:
inputs:
ref:
description: "The branch, tag, or commit SHA to check out"
required: false
default: "master"
jobs:
release:
runs-on: ubuntu-22.04
permissions:
contents: write
pull-requests: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout code
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
persist-credentials: false
submodules: recursive
fetch-depth: 0
@@ -49,77 +35,11 @@ jobs:
- name: Add bitnami repo dependency
run: helm repo add bitnami https://charts.bitnami.com/bitnami
- name: Fetch/list all tags
run: |
# Debugging tags
git fetch --tags --force
git tag -d superset-helm-chart-0.13.4 || true
echo "DEBUG TAGS"
git show-ref --tags
- name: Create unique pages branch name
id: vars
run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
- name: Force recreate branch from gh-pages
run: |
# Ensure a clean working directory
git reset --hard
git clean -fdx
git checkout -b local_gha_temp
git submodule update
# Fetch the latest gh-pages branch
git fetch origin gh-pages
# Check out and reset the target branch based on gh-pages
git checkout -B ${{ env.branch_name }} origin/gh-pages
# Remove submodules from the branch
git submodule deinit -f --all
# Force push to the remote branch
git push origin ${{ env.branch_name }} --force
# Return to the original branch
git checkout local_gha_temp
- name: Fetch/list all tags
run: |
git submodule update
cat .github/actions/chart-releaser-action/action.yml
- name: Run chart-releaser
uses: ./.github/actions/chart-releaser-action
with:
version: v1.6.0
charts_dir: helm
mark_as_latest: false
pages_branch: ${{ env.branch_name }}
env:
CR_TOKEN: "${{ github.token }}"
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
- name: Open Pull Request
uses: actions/github-script@v7
with:
script: |
const branchName = '${{ env.branch_name }}';
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
if (!branchName) {
throw new Error("Branch name is not defined.");
}
const pr = await github.rest.pulls.create({
owner,
repo,
title: `Helm chart release for ${branchName}`,
head: branchName,
base: "gh-pages", // Adjust if the target branch is different
body: `This PR releases Helm charts to the gh-pages branch.`,
});
core.info(`Pull request created: ${pr.data.html_url}`);
env:
BRANCH_NAME: ${{ env.branch_name }}

View File

@@ -68,7 +68,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,mysql
token: ${{ secrets.CODECOV_TOKEN }}
@@ -129,7 +129,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,postgres
token: ${{ secrets.CODECOV_TOKEN }}
@@ -181,7 +181,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,sqlite
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -77,7 +77,7 @@ jobs:
run: |
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,presto
token: ${{ secrets.CODECOV_TOKEN }}
@@ -142,10 +142,9 @@ jobs:
- name: Python unit tests (PostgreSQL)
if: steps.check.outputs.python
run: |
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,hive
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -46,7 +46,7 @@ jobs:
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: python,unit
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -16,11 +16,11 @@
#
repos:
- repo: https://github.com/MarcoGorelli/auto-walrus
rev: 0.3.4
rev: v0.2.2
hooks:
- id: auto-walrus
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.13.0
rev: v1.3.0
hooks:
- id: mypy
args: [--check-untyped-defs]
@@ -39,11 +39,11 @@ repos:
types-Markdown,
]
- repo: https://github.com/peterdemin/pip-compile-multi
rev: v2.6.4
rev: v2.6.2
hooks:
- id: pip-compile-multi-verify
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v4.4.0
hooks:
- id: check-docstring-first
- id: check-added-large-files
@@ -56,7 +56,7 @@ repos:
exclude: ^.*\.(snap)
args: ["--markdown-linebreak-ext=md"]
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
rev: v3.1.0 # Use the sha or tag you want to point at
hooks:
- id: prettier
additional_dependencies:
@@ -70,12 +70,12 @@ repos:
- id: blacklist
args: ["--blacklisted-names=make_url", "--ignore=tests/"]
- repo: https://github.com/norwoodj/helm-docs
rev: v1.14.2
rev: v1.11.0
hooks:
- id: helm-docs
files: helm
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.0
rev: v0.4.0
hooks:
- id: ruff
args: [ --fix ]

View File

@@ -70,7 +70,6 @@ google-sheets.svg
ibm-db2.svg
postgresql.svg
snowflake.svg
ydb.svg
# docs-related
erd.puml

View File

@@ -20,38 +20,44 @@
######################################################################
ARG PY_VER=3.10-slim-bookworm
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node
# Arguments for build configuration
ARG NPM_BUILD_CMD="build"
ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
ARG DEV_MODE="false" # Skip frontend build in dev mode
ARG INCLUDE_CHROMIUM="true" # Include headless Chromium for alerts & reports
ARG INCLUDE_FIREFOX="false" # Include headless Firefox if enabled
# Install system dependencies required for node-gyp
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/apt-install.sh build-essential python3 zstd
# Include translations in the final build. The default supports en only to
# reduce complexity and weight for those only using en
ARG BUILD_TRANSLATIONS="false"
# Used by docker-compose to skip the frontend build,
# in dev we mount the repo and build the frontend inside docker
ARG DEV_MODE="false"
# Include headless browsers? Allows for alerts, reports & thumbnails, but bloats the images
ARG INCLUDE_CHROMIUM="true"
ARG INCLUDE_FIREFOX="false"
# Somehow we need python3 + build-essential on this side of the house to install node-gyp
RUN apt-get update -qq \
&& apt-get install \
-yqq --no-install-recommends \
build-essential \
python3 \
zstd
# Define environment variables for frontend build
ENV BUILD_CMD=${NPM_BUILD_CMD} \
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
# NPM ci first, as to NOT invalidate previous steps except for when package.json changes
# Run the frontend memory monitoring script
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/frontend-mem-nag.sh
RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \
/frontend-mem-nag.sh
WORKDIR /app/superset-frontend
# Create necessary folders to avoid errors in subsequent steps
RUN mkdir -p /app/superset/static/assets \
/app/superset/translations
# Mount package files and install dependencies if not in dev mode
RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
--mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
# Creating empty folders to avoid errors when running COPY later on
RUN mkdir -p /app/superset/static/assets
RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
--mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
if [ "$DEV_MODE" = "false" ]; then \
npm ci; \
else \
@@ -60,39 +66,33 @@ RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.j
# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
# Copy translation files
# This copies the .po files needed for translation
RUN mkdir -p /app/superset/translations
COPY superset/translations /app/superset/translations
# Build the frontend if not in dev mode
RUN if [ "$DEV_MODE" = "false" ]; then \
BUILD_TRANSLATIONS=$BUILD_TRANSLATIONS npm run ${BUILD_CMD}; \
else \
echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
fi
# Compile .json files from .po translations (if required) and clean up .po files
# Compiles .json files from the .po files, then deletes the .po files
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
npm run build-translation; \
else \
echo "Skipping translations as requested by build flag"; \
fi \
# removing translations files regardless
&& rm -rf /app/superset/translations/*/LC_MESSAGES/*.po \
/app/superset/translations/messages.pot
fi
RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
RUN rm /app/superset/translations/messages.pot
# Transition to Python base image
FROM python:${PY_VER} AS python-base
RUN pip install --no-cache-dir --upgrade setuptools pip uv
######################################################################
# Final lean image...
######################################################################
FROM python-base AS lean
# Build argument for including translations
# Include translations in the final build. The default supports en only to
# reduce complexity and weight for those only using en
ARG BUILD_TRANSLATIONS="false"
WORKDIR /app
@@ -104,16 +104,9 @@ ENV LANG=C.UTF-8 \
SUPERSET_HOME="/app/superset_home" \
SUPERSET_PORT=8088
# Set up necessary directories and user
RUN --mount=type=bind,source=./docker,target=/docker \
mkdir -p ${PYTHONPATH} \
superset/static \
requirements \
superset-frontend \
apache_superset.egg-info \
requirements \
RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \
&& useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
&& /docker/apt-install.sh \
&& apt-get update -qq && apt-get install -yqq --no-install-recommends \
curl \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
@@ -122,53 +115,51 @@ RUN --mount=type=bind,source=./docker,target=/docker \
libldap2-dev \
&& touch superset/static/version_info.json \
&& chown -R superset:superset ./* \
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
&& rm -rf /var/lib/apt/lists/*
# Copy required files for Python build
COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
COPY --chown=superset:superset requirements/base.txt requirements/
COPY --chown=superset:superset scripts/check-env.py scripts/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --no-cache-dir --upgrade setuptools pip \
&& pip install --no-cache-dir -r requirements/base.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=bind,source=./docker,target=/docker \
--mount=type=cache,target=/root/.cache/pip \
/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Copy the compiled frontend assets from the node image
# Copy the compiled frontend assets
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets
# Copy the main Superset source code
## Lastly, let's install superset itself
COPY --chown=superset:superset superset superset
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir -e .
# Install Superset itself using docker/pip-install.sh
RUN --mount=type=bind,source=./docker,target=/docker \
--mount=type=cache,target=/root/.cache/pip \
/docker/pip-install.sh -e .
# Copy .json translations from the node image
# Copy the .json translations from the frontend layer
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations
# Compile backend translations and clean up
# Compile translations for the backend - this generates .mo files, then deletes the .po files
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
./scripts/translations/generate_mo_files.sh \
&& chown -R superset:superset superset/translations; \
fi \
&& rm -rf superset/translations/messages.pot \
superset/translations/*/LC_MESSAGES/*.po
&& chown -R superset:superset superset/translations \
&& rm superset/translations/messages.pot \
&& rm superset/translations/*/LC_MESSAGES/*.po; \
else \
echo "Skipping translations as requested by build flag"; \
fi
# Add server run script
COPY --chmod=755 ./docker/run-server.sh /usr/bin/
# Set user and healthcheck
USER superset
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"
# Expose port and set CMD
EXPOSE ${SUPERSET_PORT}
CMD ["/usr/bin/run-server.sh"]
CMD ["/usr/bin/run-server.sh"]
######################################################################
# Dev image...
@@ -176,10 +167,8 @@ CMD ["/usr/bin/run-server.sh"]
FROM lean AS dev
USER root
# Install dev dependencies
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/apt-install.sh \
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends \
libnss3 \
libdbus-glib-1-2 \
libgtk-3-0 \
@@ -187,46 +176,46 @@ RUN --mount=type=bind,source=./docker,target=/docker \
libasound2 \
libxtst6 \
git \
pkg-config
pkg-config \
&& rm -rf /var/lib/apt/lists/*
# Install Playwright and its dependencies
RUN --mount=type=cache,target=/root/.cache/pip \
uv pip install --system playwright \
&& playwright install-deps
pip install --no-cache-dir playwright
RUN playwright install-deps
# Optionally install Chromium
RUN if [ "$INCLUDE_CHROMIUM" = "true" ]; then \
playwright install chromium; \
else \
echo "Skipping Chromium installation in dev mode"; \
echo "Skipping translations in dev mode"; \
fi
# Install GeckoDriver WebDriver and Firefox (if required)
ARG GECKODRIVER_VERSION=v0.34.0
ARG FIREFOX_VERSION=125.0.3
RUN --mount=type=bind,source=./docker,target=/docker \
if [ "$INCLUDE_FIREFOX" = "true" ]; then \
/docker/apt-install.sh wget bzip2 \
# Install GeckoDriver WebDriver
ARG GECKODRIVER_VERSION=v0.34.0 \
FIREFOX_VERSION=125.0.3
RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
apt-get update -qq \
&& apt-get install -yqq --no-install-recommends wget bzip2 \
&& wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
&& wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
&& ln -s /opt/firefox/firefox /usr/local/bin/firefox \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* /var/cache/apt/archives/*; \
else \
echo "Skipping Firefox installation in dev mode"; \
&& apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \
fi
# Install MySQL client dependencies
RUN --mount=type=bind,source=./docker,target=/docker \
/docker/apt-install.sh default-libmysqlclient-dev
# Installing mysql client os-level dependencies in dev image only because GPL
RUN apt-get install -yqq --no-install-recommends \
default-libmysqlclient-dev \
&& rm -rf /var/lib/apt/lists/*
# Copy development requirements and install them
COPY --chown=superset:superset requirements/development.txt requirements/
RUN --mount=type=bind,source=./docker,target=/docker \
--mount=type=cache,target=/root/.cache/pip \
/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install --no-cache-dir -r requirements/development.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*
USER superset
######################################################################
# CI image...
######################################################################

View File

@@ -136,7 +136,6 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
</p>
**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).

View File

@@ -24,7 +24,6 @@ assists people when migrating to a new version.
## Next
- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the intial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no long includes firefox binaries, only Chromium to reduce bloat/docker-build-time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSACTION=true`.

View File

@@ -25,6 +25,7 @@ x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
- db
- redis
- superset-checks
x-superset-volumes: &superset-volumes
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@@ -130,6 +131,23 @@ services:
- REDIS_PORT=6379
- REDIS_SSL=false
superset-checks:
build:
context: .
target: python-base
cache_from:
- apache/superset-cache:3.10-slim-bookworm
container_name: superset_checks
command: ["/app/scripts/check-env.py"]
env_file:
- path: docker/.env # default
required: true
- path: docker/.env-local # optional override
required: false
user: *superset-user
healthcheck:
disable: true
superset-init:
build:
<<: *common-build
@@ -161,7 +179,6 @@ services:
# set this to false if you have perf issues running the npm i; npm run dev in-docker
# if you do so, you have to run this manually on the host, which should perform better!
BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
NPM_RUN_PRUNE: false
SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
container_name: superset_node
command: ["/app/docker/docker-frontend.sh"]

View File

@@ -1,51 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail
# Ensure this script is run as root
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" >&2
exit 1
fi
# Check for required arguments
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <package1> [<package2> ...]" >&2
exit 1
fi
# Colors for better logging (optional)
GREEN='\033[0;32m'
RED='\033[0;31m'
RESET='\033[0m'
# Install packages with clean-up
echo -e "${GREEN}Updating package lists...${RESET}"
apt-get update -qq
echo -e "${GREEN}Installing packages: $@${RESET}"
apt-get install -yqq --no-install-recommends "$@"
echo -e "${GREEN}Autoremoving unnecessary packages...${RESET}"
apt-get autoremove -y
echo -e "${GREEN}Cleaning up package cache and metadata...${RESET}"
apt-get clean
rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* /tmp/* /var/tmp/*
echo -e "${GREEN}Installation and cleanup complete.${RESET}"

View File

@@ -27,11 +27,6 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
echo "Building Superset frontend in dev mode inside docker container"
cd /app/superset-frontend
if [ "$NPM_RUN_PRUNE" = "true" ]; then
echo "Running `npm run prune`"
npm run prune
fi
echo "Running `npm install`"
npm install

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail
# Default flag
REQUIRES_BUILD_ESSENTIAL=false
USE_CACHE=true
# Filter arguments
ARGS=()
for arg in "$@"; do
case "$arg" in
--requires-build-essential)
REQUIRES_BUILD_ESSENTIAL=true
;;
--no-cache)
USE_CACHE=false
;;
*)
ARGS+=("$arg")
;;
esac
done
# Install build-essential if required
if $REQUIRES_BUILD_ESSENTIAL; then
echo "Installing build-essential for package builds..."
apt-get update -qq \
&& apt-get install -yqq --no-install-recommends build-essential
fi
# Choose whether to use pip cache
if $USE_CACHE; then
echo "Using pip cache..."
uv pip install --system "${ARGS[@]}"
else
echo "Disabling pip cache..."
uv pip install --system --no-cache-dir "${ARGS[@]}"
fi
# Remove build-essential if it was installed
if $REQUIRES_BUILD_ESSENTIAL; then
echo "Removing build-essential to keep the image lean..."
apt-get autoremove -yqq --purge build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
fi
echo "Python packages installed successfully."

View File

@@ -53,14 +53,11 @@ To send alerts and reports to Slack channels, you need to create a new Slack App
- `incoming-webhook`
- `files:write`
- `chat:write`
- `channels:read`
- `groups:read`
4. At the top of the "OAuth and Permissions" section, click "install to workspace".
5. Select a default channel for your app and continue.
(You can post to any channel by inviting your Superset app into that channel).
6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token in the `SLACK_API_TOKEN` variable of your `superset_config.py`.
7. Ensure the feature flag `ALERT_REPORT_SLACK_V2` is set to True in `superset_config.py`
8. Restart the service (or run `superset init`) to pull in the new configuration.
7. Restart the service (or run `superset init`) to pull in the new configuration.
Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#' e.g. use `alerts` instead of `#alerts`.

View File

@@ -81,7 +81,6 @@ are compatible with Superset.
| [TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
---
@@ -1538,78 +1537,6 @@ Other parameters:
- Load Balancer - Backup Host
#### YDB
The recommended connector library for [YDB](https://ydb.tech/) is
[ydb-sqlalchemy](https://pypi.org/project/ydb-sqlalchemy/).
##### Connection String
The connection string for YDB looks like this:
```
ydb://{host}:{port}/{database_name}
```
##### Protocol
You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:
```
{
"protocol": "grpcs"
}
```
Default is `grpc`.
##### Authentication Methods
###### Static Credentials
To use `Static Credentials` you should provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`:
```
{
"credentials": {
"username": "...",
"password": "..."
}
}
```
###### Access Token Credentials
To use `Access Token Credentials` you should provide `token` in the `Secure Extra` field at `Advanced / Security`:
```
{
"credentials": {
"token": "...",
}
}
```
##### Service Account Credentials
To use Service Account Credentials, you should provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`:
```
{
"credentials": {
"service_account_json": {
"id": "...",
"service_account_id": "...",
"created_at": "...",
"key_algorithm": "...",
"public_key": "...",
"private_key": "..."
}
}
}
```
#### YugabyteDB
[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL.

View File

@@ -32,9 +32,7 @@ cd superset
Setting things up to squeeze a "hello world" into any part of Superset should be as simple as
```bash
# getting docker compose to fire up services, and rebuilding if some docker layers have changed
# using the `--build` suffix may be slower and optional if layers like py dependencies haven't changed
docker compose up --build
docker compose up
```
Note that:
@@ -72,24 +70,6 @@ documentation.
configured to be secure.
:::
### Nuking the postgres database
At times, it's possible to end up with your development database in a bad state, it's
common while switching branches that contain migrations for instance, where the database
version stamp that `alembic` manages is no longer available after switching branch.
In that case, the easy solution is to nuke the postgres db and start fresh. Note that the full
state of the database will be gone after doing this, so be cautious.
```bash
# first stop docker-compose if it's running
docker-compose down
# delete the volume containing the database
docker volume rm superset_db_home
# restart docker-compose, which will init a fresh database and load examples
docker-compose up
```
## Installing Development Tools
:::note
@@ -475,6 +455,17 @@ pre-commit install
A series of checks will now run when you make a git commit.
Alternatively, it is possible to run pre-commit via tox:
```bash
tox -e pre-commit
```
Or by running pre-commit manually:
```bash
pre-commit run --all-files
```
## Linting
@@ -483,7 +474,8 @@ A series of checks will now run when you make a git commit.
We use [Pylint](https://pylint.org/) for linting which can be invoked via:
```bash
pylint
# for python
tox -e pylint
```
In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
@@ -510,108 +502,39 @@ If using the eslint extension with vscode, put the following in your workspace `
]
```
## GitHub Actions and `act`
:::tip
`act` compatibility of Superset's GHAs is not fully tested. Running `act` locally may or may not
work for different actions, and may require fine tunning and local secret-handling.
For those more intricate GHAs that are tricky to run locally, we recommend iterating
directly on GHA's infrastructure, by pushing directly on a branch and monitoring GHA logs.
For more targetted iteration, see the `gh workflow run --ref {BRANCH}` subcommand of the GitHub CLI.
:::
For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
can find all of the workflows and other assets under the `.github/` folder. This includes:
- running the backend unit test suites (`tests/`)
- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
- linting the codebase, including all Python, Typescript and Javascript, yaml and beyond
- checking for all sorts of other rules conventions
When you open a pull request (PR), the appropriate GitHub Actions (GHA) workflows will
automatically run depending on the changes in your branch. It's perfectly reasonable
(and required!) to rely on this automation. However, the downside is that it's mostly an
all-or-nothing approach and doesn't provide much control to target specific tests or
iterate quickly.
At times, it may be more convenient to run GHA workflows locally. For that purpose
we use [act](https://github.com/nektos/act), a tool that allows you to run GitHub Actions (GHA)
workflows locally. It simulates the GitHub Actions environment, enabling developers to
test and debug workflows on their local machines before pushing changes to the repository. More
on how to use it in the next section.
:::note
In both GHA and `act`, we can run a more complex matrix for our tests, executing against different
database engines (PostgreSQL, MySQL, SQLite) and different versions of Python.
This enables us to ensure compatibility and stability across various environments.
:::
### Using `act`
First, install `act` -> https://nektosact.com/
To list the workflows, simply:
```bash
act --list
```
To run a specific workflow:
```bash
act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
```
In the example above, notice that:
- we target a specific workflow, using `--job`
- we pass a secret using `--secret`, as many jobs require read access (public) to the repo
- we simulate a `pull_request` event by specifying it as the first arg,
similarly, we could simulate a `push` event or something else
- we specify `--container-architecture`, which tends to emulate GHA more reliably
:::note
`act` is a rich tool that offers all sorts of features, allowing you to simulate different
events (pull_request, push, ...), semantics around passing secrets where required and much
more. For more information, refer to [act's documentation](https://nektosact.com/)
:::
:::note
Some jobs require secrets to interact with external systems and accounts that you may
not have in your possession. In those cases you may have to rely on remote CI or parameterize the
job further to target a different environment/sandbox or your own alongside the related
secrets.
:::
---
## Testing
### Python Testing
#### Unit Tests
For unit tests located in `tests/unit_tests/`, it's usually easy to simply run the script locally using:
All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html)
a standardized testing framework.
All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
```bash
pytest tests/unit_tests/*
tox -e <environment>
```
#### Integration Tests
For more complex pytest-defined integration tests (not to be confused with our end-to-end Cypress tests), many tests will require having a working test environment. Some tests require a database, Celery, and potentially other services or libraries installed.
### Running Tests with `act`
To run integration tests locally using `act`, ensure you have followed the setup instructions from the [GitHub Actions and `act`](#github-actions-and-act) section. You can run specific workflows or jobs that include integration tests. For example:
For example,
```bash
act --job test-python-38 --secret GITHUB_TOKEN=$GITHUB_TOKEN --event pull_request --container-architecture linux/amd64
tox -e py38
```
#### Running locally using a test script
Alternatively, you can run all tests in a single file via,
There is also a utility script included in the Superset codebase to run Python integration tests. The [readme can be found here](https://github.com/apache/superset/tree/master/scripts/tests).
```bash
tox -e <environment> -- tests/test_file.py
```
or for a specific test via,
```bash
tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
```
Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
commands are invoked.
There is also a utility script included in the Superset codebase to run python integration tests. The [readme can be
found here](https://github.com/apache/superset/tree/master/scripts/tests)
@@ -622,7 +545,7 @@ To run all integration tests, for example, run this script from the root directo
scripts/tests/run.sh
```
You can run unit tests found in `./tests/unit_tests` with pytest. It is a simple way to run an isolated test that doesn't need any database setup:
You can run unit tests found in './tests/unit_tests' for example with pytest. It is a simple way to run an isolated test that doesn't need any database setup
```bash
pytest ./link_to_test.py
@@ -645,7 +568,7 @@ npm run test -- path/to/file.js
### Integration Testing
We use [Cypress](https://www.cypress.io/) for integration tests. To open Cypress and explore tests first setup and run test server:
We use [Cypress](https://www.cypress.io/) for integration tests. Tests can be run by `tox -e cypress`. To open Cypress and explore tests first setup and run test server:
```bash
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
@@ -696,7 +619,7 @@ If you already have launched Docker environment please use the following command
Launch environment:
`CYPRESS_CONFIG=true docker compose up --build`
`CYPRESS_CONFIG=true docker compose up`
It will serve the backend and frontend on port 8088.
@@ -764,7 +687,7 @@ superset:
Start Superset as usual
```bash
docker compose up --build
docker compose up
```
Install the required libraries and packages to the docker container

View File

@@ -170,10 +170,31 @@ npm run dev-server
### Python Testing
`pytest`, backend by docker-compose is how we recommend running tests locally.
All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html)
a standardized testing framework.
All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
For a more complex test matrix (against different database backends, python versions, ...) you
can rely on our GitHub Actions by simply opening a draft pull request.
```bash
tox -e <environment>
```
For example,
```bash
tox -e py38
```
Alternatively, you can run all tests in a single file via,
```bash
tox -e <environment> -- tests/test_file.py
```
or for a specific test via,
```bash
tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
```
Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
@@ -225,7 +246,13 @@ npm run test -- path/to/file.js
### e2e Integration Testing
For e2e testing, we recommend that you use a `docker-compose` backed-setup
We use [Cypress](https://www.cypress.io/) for end-to-end integration
tests. One easy option to get started quickly is to leverage `tox` to
run the whole suite in an isolated environment.
```bash
tox -e cypress
```
Alternatively, you can go lower level and set things up in your
development environment by following these steps:
@@ -571,31 +598,17 @@ pybabel compile -d superset/translations
### Python
We use [Pylint](https://pylint.org/) and [ruff](https://github.com/astral-sh/ruff)
for linting which can be invoked via:
We use [Pylint](https://pylint.org/) for linting which can be invoked via:
```
# Run pylint
pylint superset/
# auto-reformat using ruff
ruff format
# lint check with ruff
ruff check
# lint fix with ruff
ruff check --fix
```bash
# for python
tox -e pylint
```
In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
In terms of best practices please avoid blanket disabling of Pylint messages globally
(via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions.
Disabling should occur inline as it prevents masking issues and provides context as to why
said message is disabled.
All this is configured to run in pre-commit hooks, which we encourage you to setup
with `pre-commit install`
Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html)
### TypeScript

View File

@@ -29,7 +29,7 @@ We have a set of build "presets" that each represent a combination of
parameters for the build, mostly pointing to either different target layer
for the build, and/or base image.
Here are the build presets that are exposed through the `supersetbot docker` utility:
Here are the build presets that are exposed through the `build_docker.py` script:
- `lean`: The default Docker image, including both frontend and backend. Tags
without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean`
@@ -62,8 +62,8 @@ Here are the build presets that are exposed through the `supersetbot docker` uti
For insights or modifications to the build matrix and tagging conventions,
check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
check the [build_docker.py](https://github.com/apache/superset/blob/master/scripts/build_docker.py)
script and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
GitHub action.
## Key ARGs in Dockerfile

View File

@@ -76,8 +76,7 @@ on latest base images using `docker compose build --pull`. In most cases though,
### Option #1 - for an interactive development environment
```bash
# The --build argument insures all the layers are up-to-date
docker compose up --build
docker compose up
```
:::tip
@@ -96,14 +95,6 @@ perform those operations. In this case, we recommend you set the env var
Simply trigger `npm i && npm run dev`, this should be MUCH faster.
:::
:::tip
Sometimes, your npm-related state can get out-of-wack, running `npm run prune` from
the `superset-frontend/` folder will nuke the various' packages `node_module/` folders
and help you start fresh. In the context of `docker compose` setting
`export NPM_RUN_PRUNE=true` prior to running `docker compose up` will trigger that
from within docker. This will slow down the startup, but will fix various npm-related issues.
:::
### Option #2 - build a set of immutable images from the local branch
```bash
@@ -236,11 +227,3 @@ may want to find the exact hostname you want to use, for that you can do `ifconf
Docker for you. Alternately if you don't even see the `docker0` interface try (if needed with sudo)
`docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP
address.
## 4. To build or not to build
When running `docker compose up`, docker will build what is required behind the scene, but
may use the docker cache if assets already exist. Running `docker compose build` prior to
`docker compose up` or the equivalent shortcut `docker compose up --build` ensures that your
docker images matche the definition in the repository. This should only apply to the main
docker-compose.yml file (default) and not to the alternative methods defined above.

View File

@@ -77,6 +77,10 @@ versions officially supported by Superset. We'd recommend using a Python version
like [pyenv](https://github.com/pyenv/pyenv)
(and also [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)).
:::tip
To identify the Python version used by the official docker image, see the [Dockerfile](https://github.com/apache/superset/blob/master/Dockerfile). Additional docker images published for newer versions of Python can be found in [this file](https://github.com/apache/superset/blob/master/scripts/build_docker.py).
:::
Let's also make sure we have the latest version of `pip` and `setuptools`:
```bash
@@ -130,22 +134,21 @@ First, start by installing `apache-superset`:
pip install apache-superset
```
Then, define mandatory configurations, SECRET_KEY and FLASK_APP:
```bash
export SUPERSET_SECRET_KEY=YOUR-SECRET-KEY
export FLASK_APP=superset
```
Then, you need to initialize the database:
```bash
superset db upgrade
```
:::tip
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of SECRET_KEY. Please see [Configuring Superset](/docs/configuration/configuring-superset).
:::
Finish installing by running through the following commands:
```bash
# Create an admin user in your metadata database (use `admin` as username to be able to load the examples)
export FLASK_APP=superset
superset fab create-admin
# Load some data to play with

View File

@@ -17,8 +17,8 @@
"typecheck": "tsc"
},
"dependencies": {
"@algolia/client-search": "^5.15.0",
"@ant-design/icons": "^5.5.2",
"@algolia/client-search": "^5.12.0",
"@ant-design/icons": "^5.4.0",
"@docsearch/react": "^3.6.3",
"@docusaurus/core": "^3.5.2",
"@docusaurus/plugin-client-redirects": "^3.5.2",
@@ -34,7 +34,7 @@
"clsx": "^2.1.1",
"docusaurus-plugin-less": "^2.0.2",
"file-loader": "^6.2.0",
"less": "^4.2.1",
"less": "^4.2.0",
"less-loader": "^11.0.0",
"prism-react-renderer": "^2.4.0",
"react": "^18.3.1",
@@ -42,14 +42,14 @@
"react-github-btn": "^1.4.0",
"react-svg-pan-zoom": "^3.13.1",
"stream": "^0.0.3",
"swagger-ui-react": "^5.18.2",
"swagger-ui-react": "^5.17.14",
"url-loader": "^4.1.1"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "^3.6.3",
"@docusaurus/tsconfig": "^3.6.3",
"@docusaurus/module-type-aliases": "^3.5.2",
"@docusaurus/tsconfig": "^3.5.2",
"@types/react": "^18.3.12",
"typescript": "^5.7.2",
"typescript": "^5.6.3",
"webpack": "^5.96.1"
},
"browserslist": {

View File

@@ -1,20 +0,0 @@
<svg width="753" height="274" viewBox="0 0 753 274" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_28_1297)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M5 53.8669C5 37.6466 29.6243 29 60 29C90.3757 29 115 37.6466 115 53.8669V138.133C115 154.353 90.3757 163 60 163C29.6243 163 5 154.353 5 138.133V53.8669Z" fill="#2399FF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M175 53.8669C175 37.6466 199.624 29 230 29C260.376 29 285 37.6466 285 53.8669V138.133C285 154.353 260.376 163 230 163C199.624 163 175 154.353 175 138.133V53.8669Z" fill="#2399FF"/>
<path d="M177 85H113V103H177V85Z" fill="#2399FF"/>
<path d="M173 157H115L81 111H59L105 173H183L229 111H207L173 157Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M89 145.867C89 129.647 113.624 121 144 121C174.376 121 199 129.647 199 145.867V230.133C199 246.353 174.376 255 144 255C113.624 255 89 246.353 89 230.133V145.867Z" fill="#2399FF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M108.783 136.779C100.111 140.552 99 144.237 99 146C99 147.763 100.111 151.448 108.783 155.221C117.076 158.829 129.435 161 144 161C158.565 161 170.924 158.829 179.217 155.221C187.889 151.448 189 147.763 189 146C189 144.237 187.889 140.552 179.218 136.779C170.924 133.171 158.565 131 144 131C129.435 131 117.076 133.171 108.783 136.779Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M24.7825 44.7792C16.1105 48.5515 15 52.2365 15 54C15 55.7635 16.1105 59.4485 24.7825 63.2208C33.0763 66.8287 45.4354 69 60 69C74.5646 69 86.9237 66.8287 95.2175 63.2208C103.889 59.4485 105 55.7635 105 54C105 52.2365 103.889 48.5515 95.2175 44.7792C86.9237 41.1713 74.5646 39 60 39C45.4354 39 33.0763 41.1713 24.7825 44.7792Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M194.783 44.7792C186.111 48.5515 185 52.2365 185 54C185 55.7635 186.111 59.4485 194.783 63.2208C203.076 66.8287 215.435 69 230 69C244.565 69 256.924 66.8287 265.217 63.2208C273.889 59.4485 275 55.7635 275 54C275 52.2365 273.889 48.5515 265.218 44.7792C256.924 41.1713 244.565 39 230 39C215.435 39 203.076 41.1713 194.783 44.7792Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M694.131 64H634.75V210H705.026C730.974 210 750.243 191.821 750.243 166.963C750.243 150.15 740.93 137.39 726.201 130.891C733.027 124.143 737.168 115.224 737.168 104.858C737.168 81.2033 718.875 64 694.131 64ZM660.899 85.791V123.925H691.951C702.482 123.925 711.019 115.389 711.019 104.858C711.019 94.3277 702.482 85.791 691.951 85.791H660.899ZM660.899 188.209V145.716H702.847C714.581 145.716 724.093 155.229 724.093 166.963C724.093 178.697 714.581 188.209 702.847 188.209H660.899Z" fill="black"/>
<path d="M352.716 64.0039H382.134L419.179 128.287L456.223 64.0039H485.641L432.308 155.472V210.004H406.049V155.472L352.716 64.0039Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M496.008 64.0039H546.127C589.713 64.0039 619.127 92.3289 619.127 137.004C619.127 181.679 589.713 210.004 546.127 210.004H496.008V64.0039ZM522.157 188.213V85.7949H543.948C573.32 85.7949 592.978 104.364 592.978 137.004C592.978 169.644 573.32 188.213 543.948 188.213H522.157Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_28_1297">
<rect width="753" height="274" fill="white"/>
</clipPath>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 3.2 KiB

File diff suppressed because it is too large Load Diff

View File

@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.13.4
version: 0.13.3
dependencies:
- name: postgresql
version: 12.1.6

View File

@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.13.4](https://img.shields.io/badge/Version-0.13.4-informational?style=flat-square)
![Version: 0.13.3](https://img.shields.io/badge/Version-0.13.3-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application

View File

@@ -19,8 +19,7 @@
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
# A README is automatically generated from this file to document it,
# using helm-docs (see https://github.com/norwoodj/helm-docs)
# A README is automatically generated from this file to document it, using helm-docs (see https://github.com/norwoodj/helm-docs)
# To update it, install helm-docs and run helm-docs from the root of this chart
# -- Provide a name to override the name of the chart

View File

@@ -55,7 +55,6 @@ dependencies = [
"flask-wtf>=1.1.0, <2.0",
"func_timeout",
"geopy",
"greenlet>=3.0.3, <=3.1.1",
"gunicorn>=22.0.0; sys_platform != 'win32'",
"hashids>=1.3.1, <2",
# known issue with holidays 0.26.0 and above related to prophet lib #25017
@@ -70,7 +69,7 @@ dependencies = [
"nh3>=0.2.11, <0.3",
"numpy==1.23.5",
"packaging",
"pandas[excel,performance]>=2.0.3, <2.1",
"pandas[performance]>=2.0.3, <2.1",
"parsedatetime",
"paramiko>=3.4.0",
"pgsanity",
@@ -91,9 +90,7 @@ dependencies = [
"slack_sdk>=3.19.0, <4",
"sqlalchemy>=1.4, <2",
"sqlalchemy-utils>=0.38.3, <0.39",
# known breaking changes in sqlglot 25.25.0
#https://github.com/tobymao/sqlglot/blob/main/CHANGELOG.md#v25250---2024-10-14
"sqlglot>=25.24.0,<25.25.0",
"sqlglot>=25.24.0,<26",
"sqlparse>=0.5.0",
"tabulate>=0.8.9, <0.9",
"typing-extensions>=4, <5",
@@ -138,6 +135,7 @@ gevent = ["gevent>=23.9.1"]
gsheets = ["shillelagh[gsheetsapi]>=1.2.18, <2"]
hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"]
hive = [
"boto3",
"pyhive[hive]>=0.6.5;python_version<'3.11'",
"pyhive[hive_pure_sasl]>=0.7.0",
"tableschema",
@@ -160,7 +158,7 @@ pinot = ["pinotdb>=5.0.0, <6.0.0"]
playwright = ["playwright>=1.37.0, <2"]
postgres = ["psycopg2-binary==2.9.6"]
presto = ["pyhive[presto]>=0.6.5"]
trino = ["trino>=0.328.0"]
trino = ["boto3", "trino>=0.328.0"]
prophet = ["prophet>=1.1.5, <2"]
redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"]
rockset = ["rockset-sqlalchemy>=0.0.1, <1"]
@@ -179,11 +177,11 @@ netezza = ["nzalchemy>=11.0.2"]
starrocks = ["starrocks>=1.0.0"]
doris = ["pydoris>=1.0.0, <2.0.0"]
oceanbase = ["oceanbase_py>=0.0.1"]
ydb = ["ydb-sqlalchemy>=0.1.2"]
development = [
"docker",
"flask-testing",
"freezegun",
"greenlet>=2.0.2",
"grpcio>=1.55.3",
"openapi-spec-validator",
"parameterized",
@@ -201,6 +199,7 @@ development = [
"ruff",
"sqloxide",
"statsd",
"tox",
]
[project.urls]
@@ -237,10 +236,172 @@ disallow_untyped_calls = false
disallow_untyped_defs = false
disable_error_code = "annotation-unchecked"
[tool.tox]
legacy_tox_ini = """
# Remember to start celery workers to run celery tests, e.g.
# celery --app=superset.tasks.celery_app:app worker -Ofair -c 2
[testenv]
basepython = python3.10
ignore_basepython_conflict = true
commands =
superset db upgrade
superset init
superset load-test-users
# use -s to be able to use break pointers.
# no args or tests/* can be passed as an argument to run all tests
pytest -s {posargs}
deps =
-rrequirements/development.txt
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
mysql: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
postgres: SUPERSET__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://superset:superset@localhost/test
sqlite: SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db
sqlite: SUPERSET__SQLALCHEMY_EXAMPLES_URI = sqlite:////{envtmpdir}/examples.db
mysql-presto: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
# docker run -p 8080:8080 --name presto starburstdata/presto
mysql-presto: SUPERSET__SQLALCHEMY_EXAMPLES_URI = presto://localhost:8080/memory/default
# based on https://github.com/big-data-europe/docker-hadoop
# clone the repo & run docker compose up -d to test locally
mysql-hive: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
mysql-hive: SUPERSET__SQLALCHEMY_EXAMPLES_URI = hive://localhost:10000/default
# make sure that directory is accessible by docker
hive: UPLOAD_FOLDER = /tmp/.superset/app/static/uploads/
usedevelop = true
allowlist_externals =
npm
pkill
[testenv:cypress]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-dashboard]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh dashboard
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-explore]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh explore
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-sqllab]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh sqllab
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:cypress-sqllab-backend-persist]
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_TESTENV = true
SUPERSET_CONFIG = tests.integration_tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset-frontend/cypress_build.sh sqllab
commands_post =
pkill -if "python {envbindir}/flask"
[testenv:eslint]
changedir = {toxinidir}/superset-frontend
commands =
npm run lint
deps =
[testenv:fossa]
commands =
{toxinidir}/scripts/fossa.sh
deps =
passenv = *
[testenv:javascript]
commands =
npm install -g npm@'>=6.5.0'
{toxinidir}/superset-frontend/js_build.sh
deps =
[testenv:license-check]
commands =
{toxinidir}/scripts/check_license.sh
passenv = *
whitelist_externals =
{toxinidir}/scripts/check_license.sh
deps =
[testenv:pre-commit]
commands =
pre-commit run --all-files
deps =
-rrequirements/development.txt
skip_install = true
[testenv:pylint]
commands =
pylint superset
deps =
-rrequirements/development.txt
[testenv:thumbnails]
setenv =
SUPERSET_CONFIG = tests.integration_tests.superset_test_config_thumbnails
deps =
-rrequirements/development.txt
[tox]
envlist =
cypress-dashboard
cypress-explore
cypress-sqllab
cypress-sqllab-backend-persist
eslint
fossa
javascript
license-check
pre-commit
pylint
skipsdist = true
"""
[tool.ruff]
# Exclude a variety of commonly ignored directories.
exclude = [
"**/*.ipynb",
".bzr",
".direnv",
".eggs",

View File

@@ -20,12 +20,3 @@
urllib3>=1.26.18
werkzeug>=3.0.1
numexpr>=2.9.0
# 5.0.0 has a sensitive deprecation used in other libs
# -> https://github.com/aio-libs/async-timeout/blob/master/CHANGES.rst#500-2024-10-31
async_timeout>=4.0.0,<5.0.0
# playwright requires greenlet==3.0.3
# submitted a PR to relax deps in 11/2024
# https://github.com/microsoft/playwright-python/pull/2669
greenlet==3.0.3

View File

@@ -1,4 +1,4 @@
# SHA1:04f7e0860829f18926ea238354e6d4a6ab823d50
# SHA1:85649679306ea016e401f37adfbad832028d2e5f
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -7,18 +7,16 @@
#
-e file:.
# via -r requirements/base.in
alembic==1.14.0
alembic==1.13.1
# via flask-migrate
amqp==5.3.1
amqp==5.2.0
# via kombu
apispec[yaml]==6.3.0
# via flask-appbuilder
apsw==3.46.0.0
# via shillelagh
async-timeout==4.0.3
# via
# -r requirements/base.in
# redis
# via redis
attrs==24.2.0
# via
# cattrs
@@ -28,13 +26,13 @@ babel==2.16.0
# via flask-babel
backoff==2.2.1
# via apache-superset
bcrypt==4.2.1
bcrypt==4.1.3
# via paramiko
billiard==4.2.1
billiard==4.2.0
# via celery
blinker==1.9.0
# via flask
bottleneck==1.4.2
bottleneck==1.3.8
# via pandas
brotli==1.1.0
# via flask-compress
@@ -42,19 +40,19 @@ cachelib==0.9.0
# via
# flask-caching
# flask-session
cachetools==5.5.0
cachetools==5.3.3
# via google-auth
cattrs==24.1.2
# via requests-cache
celery==5.4.0
# via apache-superset
certifi==2024.8.30
certifi==2024.2.2
# via requests
cffi==1.17.1
# via
# cryptography
# pynacl
charset-normalizer==3.4.0
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via
@@ -78,27 +76,23 @@ colorama==0.4.6
# via
# apache-superset
# flask-appbuilder
cron-descriptor==1.4.5
cron-descriptor==1.4.3
# via apache-superset
croniter==5.0.1
croniter==2.0.5
# via apache-superset
cryptography==43.0.3
cryptography==42.0.8
# via
# apache-superset
# paramiko
# pyopenssl
defusedxml==0.7.1
# via odfpy
deprecated==1.2.15
deprecated==1.2.14
# via limits
deprecation==2.1.0
# via apache-superset
dnspython==2.7.0
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
email-validator==2.1.1
# via flask-appbuilder
et-xmlfile==2.0.0
# via openpyxl
exceptiongroup==1.2.2
# via cattrs
flask==2.3.3
@@ -121,11 +115,11 @@ flask-babel==2.0.0
# via flask-appbuilder
flask-caching==2.3.0
# via apache-superset
flask-compress==1.17
flask-compress==1.15
# via apache-superset
flask-jwt-extended==4.7.1
flask-jwt-extended==4.6.0
# via flask-appbuilder
flask-limiter==3.8.0
flask-limiter==3.7.0
# via flask-appbuilder
flask-login==0.6.3
# via
@@ -141,7 +135,7 @@ flask-sqlalchemy==2.5.1
# flask-migrate
flask-talisman==1.1.0
# via apache-superset
flask-wtf==1.2.2
flask-wtf==1.2.1
# via
# apache-superset
# flask-appbuilder
@@ -151,30 +145,29 @@ geographiclib==2.0
# via geopy
geopy==2.4.1
# via apache-superset
google-auth==2.36.0
google-auth==2.29.0
# via shillelagh
greenlet==3.0.3
# via
# -r requirements/base.in
# apache-superset
# shillelagh
gunicorn==23.0.0
# sqlalchemy
gunicorn==22.0.0
# via apache-superset
hashids==1.3.1
# via apache-superset
holidays==0.25
# via apache-superset
humanize==4.11.0
humanize==4.9.0
# via apache-superset
idna==3.10
idna==3.7
# via
# email-validator
# requests
importlib-metadata==8.5.0
importlib-metadata==7.1.0
# via apache-superset
importlib-resources==6.4.5
importlib-resources==6.4.0
# via limits
isodate==0.7.2
isodate==0.6.1
# via apache-superset
itsdangerous==2.2.0
# via
@@ -184,23 +177,23 @@ jinja2==3.1.4
# via
# flask
# flask-babel
jsonpath-ng==1.7.0
jsonpath-ng==1.6.1
# via apache-superset
jsonschema==4.17.3
# via flask-appbuilder
kombu==5.4.2
kombu==5.3.7
# via celery
korean-lunar-calendar==0.3.1
# via holidays
limits==3.13.0
limits==3.12.0
# via flask-limiter
llvmlite==0.43.0
llvmlite==0.42.0
# via numba
mako==1.3.6
mako==1.3.5
# via
# alembic
# apache-superset
markdown==3.7
markdown==3.6
# via apache-superset
markdown-it-py==3.0.0
# via rich
@@ -210,7 +203,7 @@ markupsafe==3.0.2
# mako
# werkzeug
# wtforms
marshmallow==3.23.1
marshmallow==3.21.2
# via
# flask-appbuilder
# marshmallow-sqlalchemy
@@ -222,11 +215,11 @@ msgpack==1.0.8
# via apache-superset
msgspec==0.18.6
# via flask-session
nh3==0.2.19
nh3==0.2.17
# via apache-superset
numba==0.60.0
numba==0.59.1
# via pandas
numexpr==2.10.2
numexpr==2.10.1
# via
# -r requirements/base.in
# pandas
@@ -238,13 +231,9 @@ numpy==1.23.5
# numexpr
# pandas
# pyarrow
odfpy==1.4.1
# via pandas
openpyxl==3.1.5
# via pandas
ordered-set==4.1.0
# via flask-limiter
packaging==24.2
packaging==23.2
# via
# apache-superset
# apispec
@@ -254,9 +243,9 @@ packaging==24.2
# marshmallow
# marshmallow-sqlalchemy
# shillelagh
pandas[excel,performance]==2.0.3
pandas[performance]==2.0.3
# via apache-superset
paramiko==3.5.0
paramiko==3.4.0
# via
# apache-superset
# sshtunnel
@@ -272,30 +261,30 @@ polyline==2.0.2
# via apache-superset
prison==0.2.1
# via flask-appbuilder
prompt-toolkit==3.0.48
prompt-toolkit==3.0.44
# via click-repl
pyarrow==14.0.2
# via apache-superset
pyasn1==0.6.1
pyasn1==0.6.0
# via
# pyasn1-modules
# rsa
pyasn1-modules==0.4.1
pyasn1-modules==0.4.0
# via google-auth
pycparser==2.22
# via cffi
pygments==2.18.0
# via rich
pyjwt==2.10.1
pyjwt==2.8.0
# via
# apache-superset
# flask-appbuilder
# flask-jwt-extended
pynacl==1.5.0
# via paramiko
pyopenssl==24.2.1
pyopenssl==24.1.0
# via shillelagh
pyparsing==3.2.0
pyparsing==3.1.2
# via apache-superset
pyrsistent==0.20.0
# via jsonschema
@@ -317,9 +306,7 @@ pytz==2024.2
# croniter
# flask-babel
# pandas
pyxlsb==1.0.10
# via pandas
pyyaml==6.0.2
pyyaml==6.0.1
# via
# apache-superset
# apispec
@@ -331,7 +318,7 @@ requests==2.32.2
# shillelagh
requests-cache==1.2.0
# via shillelagh
rich==13.9.4
rich==13.7.1
# via flask-limiter
rsa==4.9
# via google-auth
@@ -341,17 +328,18 @@ shillelagh[gsheetsapi]==1.2.18
# via apache-superset
shortid==0.1.2
# via apache-superset
simplejson==3.19.3
simplejson==3.19.2
# via apache-superset
six==1.16.0
# via
# isodate
# prison
# python-dateutil
# url-normalize
# wtforms-json
slack-sdk==3.33.4
slack-sdk==3.27.2
# via apache-superset
sqlalchemy==1.4.54
sqlalchemy==1.4.52
# via
# alembic
# apache-superset
@@ -364,9 +352,9 @@ sqlalchemy-utils==0.38.3
# via
# apache-superset
# flask-appbuilder
sqlglot==25.24.5
sqlglot==25.24.0
# via apache-superset
sqlparse==0.5.2
sqlparse==0.5.0
# via apache-superset
sshtunnel==0.4.0
# via apache-superset
@@ -379,12 +367,10 @@ typing-extensions==4.12.2
# cattrs
# flask-limiter
# limits
# rich
# shillelagh
tzdata==2024.2
tzdata==2024.1
# via
# celery
# kombu
# pandas
url-normalize==1.4.3
# via requests-cache
@@ -408,7 +394,7 @@ werkzeug==3.1.3
# flask-appbuilder
# flask-jwt-extended
# flask-login
wrapt==1.17.0
wrapt==1.16.0
# via deprecated
wtforms==3.2.1
# via
@@ -418,13 +404,9 @@ wtforms==3.2.1
# wtforms-json
wtforms-json==0.3.5
# via apache-superset
xlrd==2.0.1
# via pandas
xlsxwriter==3.0.9
# via
# apache-superset
# pandas
zipp==3.21.0
# via apache-superset
zipp==3.19.0
# via importlib-metadata
zstandard==0.23.0
zstandard==0.22.0
# via flask-compress

View File

@@ -17,4 +17,4 @@
# under the License.
#
-r base.in
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,playwright,postgres,presto,prophet,trino,thumbnails]
-e .[development,bigquery,cors,druid,gevent,gsheets,hive,mysql,playwright,postgres,presto,prophet,trino,thumbnails]

View File

@@ -1,4 +1,4 @@
# SHA1:dc767a7288b56c785b0cd3c38e95e7b5e66be1ac
# SHA1:c186006a3f82c8775e1039f37c52309f6c858197
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -12,69 +12,89 @@
# -r requirements/development.in
astroid==3.1.0
# via pylint
boto3==1.34.112
# via
# apache-superset
# dataflows-tabulator
botocore==1.34.112
# via
# boto3
# s3transfer
build==1.2.1
# via pip-tools
cfgv==3.4.0
cached-property==1.5.2
# via tableschema
cfgv==3.3.1
# via pre-commit
chardet==5.1.0
# via
# dataflows-tabulator
# tox
cmdstanpy==1.1.0
# via prophet
contourpy==1.0.7
# via matplotlib
coverage[toml]==7.6.8
coverage[toml]==7.2.5
# via pytest-cov
cycler==0.12.1
cycler==0.11.0
# via matplotlib
db-dtypes==1.3.1
dataflows-tabulator==1.54.3
# via tableschema
db-dtypes==1.2.0
# via pandas-gbq
dill==0.3.9
dill==0.3.8
# via pylint
distlib==0.3.8
# via virtualenv
docker==7.0.0
# via apache-superset
et-xmlfile==1.1.0
# via openpyxl
filelock==3.12.2
# via virtualenv
# via
# tox
# virtualenv
flask-cors==4.0.0
# via apache-superset
flask-testing==0.8.1
# via apache-superset
fonttools==4.55.0
fonttools==4.51.0
# via matplotlib
freezegun==1.5.1
# via apache-superset
future==1.0.0
future==0.18.3
# via pyhive
gevent==24.2.1
# via apache-superset
google-api-core[grpc]==2.23.0
google-api-core[grpc]==2.11.0
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
# google-cloud-core
# pandas-gbq
# sqlalchemy-bigquery
google-auth-oauthlib==1.2.1
google-auth-oauthlib==1.0.0
# via
# pandas-gbq
# pydata-google-auth
google-cloud-bigquery==3.27.0
google-cloud-bigquery==3.20.1
# via
# apache-superset
# pandas-gbq
# sqlalchemy-bigquery
google-cloud-bigquery-storage==2.19.1
# via pandas-gbq
google-cloud-core==2.4.1
google-cloud-core==2.3.2
# via google-cloud-bigquery
google-crc32c==1.6.0
google-crc32c==1.5.0
# via google-resumable-media
google-resumable-media==2.7.2
google-resumable-media==2.7.0
# via google-cloud-bigquery
googleapis-common-protos==1.66.0
googleapis-common-protos==1.63.0
# via
# google-api-core
# grpcio-status
grpcio==1.68.0
grpcio==1.62.1
# via
# apache-superset
# google-api-core
@@ -83,21 +103,31 @@ grpcio-status==1.60.1
# via google-api-core
identify==2.5.36
# via pre-commit
ijson==3.2.3
# via dataflows-tabulator
iniconfig==2.0.0
# via pytest
isort==5.12.0
# via pylint
jmespath==1.0.1
# via
# boto3
# botocore
jsonlines==4.0.0
# via dataflows-tabulator
jsonschema-spec==0.1.6
# via openapi-spec-validator
kiwisolver==1.4.7
kiwisolver==1.4.5
# via matplotlib
lazy-object-proxy==1.10.0
# via openapi-spec-validator
linear-tsv==1.1.0
# via dataflows-tabulator
matplotlib==3.9.0
# via prophet
mccabe==0.7.0
# via pylint
mysqlclient==2.2.6
mysqlclient==2.2.4
# via apache-superset
nodeenv==1.8.0
# via pre-commit
@@ -107,6 +137,8 @@ openapi-schema-validator==0.4.4
# via openapi-spec-validator
openapi-spec-validator==0.5.6
# via apache-superset
openpyxl==3.1.2
# via dataflows-tabulator
pandas-gbq==0.19.1
# via apache-superset
parameterized==0.9.0
@@ -123,30 +155,32 @@ pip-tools==7.4.1
# via pip-compile-multi
playwright==1.42.0
# via apache-superset
pluggy==1.5.0
# via pytest
pre-commit==4.0.1
pluggy==1.4.0
# via
# pytest
# tox
pre-commit==3.7.1
# via apache-superset
progress==1.6
# via apache-superset
prophet==1.1.5
# via apache-superset
proto-plus==1.25.0
# via
# google-api-core
# google-cloud-bigquery-storage
protobuf==4.25.5
proto-plus==1.22.2
# via google-cloud-bigquery-storage
protobuf==4.23.0
# via
# google-api-core
# google-cloud-bigquery-storage
# googleapis-common-protos
# grpcio-status
# proto-plus
psutil==6.1.0
psutil==6.0.0
# via apache-superset
psycopg2-binary==2.9.6
# via apache-superset
pydata-google-auth==1.9.0
pure-sasl==0.6.2
# via thrift-sasl
pydata-google-auth==1.7.0
# via pandas-gbq
pydruid==0.6.9
# via apache-superset
@@ -160,7 +194,9 @@ pyinstrument==4.4.0
# via apache-superset
pylint==3.1.0
# via apache-superset
pyproject-hooks==1.2.0
pyproject-api==1.6.1
# via tox
pyproject-hooks==1.0.0
# via
# build
# pip-tools
@@ -169,7 +205,7 @@ pytest==7.4.4
# apache-superset
# pytest-cov
# pytest-mock
pytest-cov==6.0.0
pytest-cov==5.0.0
# via apache-superset
pytest-mock==3.10.0
# via apache-superset
@@ -179,37 +215,62 @@ requests-oauthlib==2.0.0
# via google-auth-oauthlib
rfc3339-validator==0.1.4
# via openapi-schema-validator
ruff==0.8.0
rfc3986==2.0.0
# via tableschema
ruff==0.4.5
# via apache-superset
sqlalchemy-bigquery==1.12.0
s3transfer==0.10.1
# via boto3
sqlalchemy-bigquery==1.11.0
# via apache-superset
sqloxide==0.1.51
sqloxide==0.1.43
# via apache-superset
statsd==4.0.1
# via apache-superset
tomli==2.1.0
tableschema==1.20.10
# via apache-superset
thrift==0.16.0
# via
# apache-superset
# thrift-sasl
thrift-sasl==0.4.3
# via apache-superset
tomli==2.0.1
# via
# build
# coverage
# pip-tools
# pylint
# pyproject-api
# pyproject-hooks
# pytest
tomlkit==0.13.2
# tox
tomlkit==0.12.5
# via pylint
toposort==1.10
# via pip-compile-multi
tqdm==4.67.1
tox==4.6.4
# via apache-superset
tqdm==4.66.4
# via
# cmdstanpy
# prophet
trino==0.330.0
trino==0.328.0
# via apache-superset
tzlocal==5.2
# via trino
unicodecsv==0.14.1
# via
# dataflows-tabulator
# tableschema
virtualenv==20.23.1
# via pre-commit
wheel==0.45.1
# via
# pre-commit
# tox
wheel==0.43.0
# via pip-tools
xlrd==2.0.1
# via dataflows-tabulator
zope-event==5.0
# via gevent
zope-interface==5.4.0

294
scripts/build_docker.py Executable file
View File

@@ -0,0 +1,294 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
import subprocess
from textwrap import dedent
import click
# Docker Hub repository that release/CI images are pushed to.
REPO = "apache/superset"
# Companion repository used only as a buildx registry cache (--cache-from/--cache-to).
CACHE_REPO = f"{REPO}-cache"
# Default Python base image tag, passed to the Dockerfile via the PY_VER build arg.
BASE_PY_IMAGE = "3.10-slim-bookworm"
def run_cmd(command: str, raise_on_failure: bool = True) -> str:
    """Run a shell command, echoing its output line-by-line as it arrives.

    Returns the combined stdout/stderr text. When the command exits with a
    non-zero status and ``raise_on_failure`` is true, raises
    ``subprocess.CalledProcessError`` carrying the captured output.
    """
    proc = subprocess.Popen(
        command,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
    )
    captured: list[str] = []
    if proc.stdout is not None:
        # Stream each line to stdout in real time so CI logs stay live.
        for line in iter(proc.stdout.readline, ""):
            print(line.strip())
            captured.append(line)
    proc.wait()  # Wait for the subprocess to finish
    output = "".join(captured)
    if proc.returncode != 0 and raise_on_failure:
        raise subprocess.CalledProcessError(proc.returncode, command, output)
    return output
def get_git_sha() -> str:
    """Return the full SHA of the current git HEAD."""
    sha = run_cmd("git rev-parse HEAD")
    return sha.strip()
def get_build_context_ref(build_context: str) -> str:
    """
    Given a context, return a ref:
    - if context is pull_request, return the PR's branch name (sanitized)
    - if context is push, return the branch
    - if context is release, return the release tag
    Returns "" for any other GitHub event.
    """
    event = os.getenv("GITHUB_EVENT_NAME")
    github_ref = os.getenv("GITHUB_REF", "")

    if event == "pull_request":
        head_ref = os.getenv("GITHUB_HEAD_REF", "")
        # Replace anything non-alphanumeric so the ref is docker-tag safe,
        # and cap the length at 40 characters.
        return re.sub("[^a-zA-Z0-9]", "-", head_ref)[:40]
    if event == "release":
        return re.sub("refs/tags/", "", github_ref)[:40]
    if event == "push":
        branch = re.sub("refs/heads/", "", github_ref)
        return re.sub("[^a-zA-Z0-9]", "-", branch)[:40]
    return ""
def is_latest_release(release: str) -> bool:
    """Ask the tagging script (in dry-run mode) whether ``release`` would be
    tagged as the latest release; never raises on script failure."""
    cmd = f"./scripts/tag_latest_release.sh {release} --dry-run"
    output = run_cmd(cmd, raise_on_failure=False) or ""
    return "SKIP_TAG::false" in output
def make_docker_tag(l: list[str]) -> str:  # noqa: E741
    """Join the non-empty chunks of ``l`` with dashes into a ``REPO:tag`` string."""
    suffix = "-".join(chunk for chunk in l if chunk)
    return f"{REPO}:{suffix}"
def get_docker_tags(
    build_preset: str,
    build_platforms: list[str],
    sha: str,
    build_context: str,
    build_context_ref: str,
    force_latest: bool = False,
) -> set[str]:
    """
    Return a set of tags for a given build context.

    Every build gets a full-SHA and short-SHA tag; release builds also get a
    version tag (and possibly "latest"), pushes to master get "master", and
    pull requests get "pr-<branch>".
    """
    tags: set[str] = set()
    tag_chunks: list[str] = []
    if build_preset != "lean":
        # Always add the preset_build name if different from default (lean)
        tag_chunks += [build_preset]
    if len(build_platforms) == 1:
        build_platform = build_platforms[0]
        short_build_platform = build_platform.replace("linux/", "").replace("64", "")
        if short_build_platform != "amd":
            # Always a platform indicator if different from default (amd)
            tag_chunks += [short_build_platform]
    # Always craft a tag for the SHA
    tags.add(make_docker_tag([sha] + tag_chunks))
    # also a short SHA, cause it's nice
    tags.add(make_docker_tag([sha[:7]] + tag_chunks))
    if build_context == "release":
        # add a release tag
        tags.add(make_docker_tag([build_context_ref] + tag_chunks))
        # Only shell out to the tagging script for release builds; the
        # original called is_latest_release() unconditionally, running
        # ./scripts/tag_latest_release.sh even for PR/push builds where
        # the result was never used.
        if force_latest or is_latest_release(build_context_ref):
            # add a latest tag
            tags.add(make_docker_tag(["latest"] + tag_chunks))
    elif build_context == "push" and build_context_ref == "master":
        tags.add(make_docker_tag(["master"] + tag_chunks))
    elif build_context == "pull_request":
        tags.add(make_docker_tag([f"pr-{build_context_ref}"] + tag_chunks))
    return tags
def get_docker_command(
    build_preset: str,
    build_platforms: list[str],
    is_authenticated: bool,
    sha: str,
    build_context: str,
    build_context_ref: str,
    force_latest: bool = False,
) -> str:
    """
    Assemble the full ``docker buildx build`` command for the given preset,
    platforms, and build context.

    The returned string includes all -t tags, registry cache arguments
    (write-back only when authenticated), labels, and the build context
    directory. Removed an unused ``tag`` local carried over from an earlier
    revision.
    """
    build_target = ""
    py_ver = BASE_PY_IMAGE
    docker_context = "."
    if build_preset == "dev":
        build_target = "dev"
    elif build_preset == "lean":
        build_target = "lean"
    elif build_preset == "py311":
        build_target = "lean"
        py_ver = "3.11-slim-bookworm"
    elif build_preset == "websocket":
        build_target = ""
        docker_context = "superset-websocket"
    elif build_preset == "ci":
        build_target = "ci"
    elif build_preset == "dockerize":
        build_target = ""
        # NOTE: this isn't a plain context dir; it injects the -f flag too.
        docker_context = "-f dockerize.Dockerfile ."
    else:
        print(f"Invalid build preset: {build_preset}")
        exit(1)

    # Try to get context reference if missing
    if not build_context_ref:
        build_context_ref = get_build_context_ref(build_context)

    tags = get_docker_tags(
        build_preset,
        build_platforms,
        sha,
        build_context,
        build_context_ref,
        force_latest,
    )
    docker_tags = ("\\\n" + 8 * " ").join([f"-t {s} " for s in tags])

    # Without registry credentials we can only --load the image locally.
    docker_args = "--load" if not is_authenticated else "--push"
    target_argument = f"--target {build_target}" if build_target else ""

    cache_ref = f"{CACHE_REPO}:{py_ver}"
    if len(build_platforms) == 1:
        build_platform = build_platforms[0]
        short_build_platform = build_platform.replace("linux/", "").replace("64", "")
        cache_ref = f"{CACHE_REPO}:{py_ver}-{short_build_platform}"
    platform_arg = "--platform " + ",".join(build_platforms)

    cache_from_arg = f"--cache-from=type=registry,ref={cache_ref}"
    # Only write back to the cache repo when we can actually push.
    cache_to_arg = (
        f"--cache-to=type=registry,mode=max,ref={cache_ref}" if is_authenticated else ""
    )
    build_arg = f"--build-arg PY_VER={py_ver}" if py_ver else ""
    actor = os.getenv("GITHUB_ACTOR")

    return dedent(
        f"""\
        docker buildx build \\
        {docker_args} \\
        {docker_tags} \\
        {cache_from_arg} \\
        {cache_to_arg} \\
        {build_arg} \\
        {platform_arg} \\
        {target_argument} \\
        --label sha={sha} \\
        --label target={build_target} \\
        --label build_trigger={build_context} \\
        --label base={py_ver} \\
        --label build_actor={actor} \\
        {docker_context}"""
    )
@click.command()
@click.argument(
    "build_preset",
    type=click.Choice(["lean", "dev", "dockerize", "websocket", "py311", "ci"]),
)
@click.argument("build_context", type=click.Choice(["push", "pull_request", "release"]))
@click.option(
    "--platform",
    type=click.Choice(["linux/arm64", "linux/amd64"]),
    default=["linux/amd64"],
    multiple=True,
)
@click.option("--build_context_ref", help="a reference to the pr, release or branch")
@click.option("--dry-run", is_flag=True, help="Run the command in dry-run mode.")
@click.option("--verbose", is_flag=True, help="Print more info")
@click.option(
    "--force-latest", is_flag=True, help="Force the 'latest' tag on the release"
)
def main(
    build_preset: str,
    build_context: str,
    build_context_ref: str,
    platform: list[str],
    dry_run: bool,
    force_latest: bool,
    verbose: bool,
) -> None:
    """
    This script executes docker build and push commands based on given arguments.
    """
    # Both credentials must be present before we attempt to push or
    # write the registry cache (was a `True if ... else False` anti-idiom).
    is_authenticated = bool(
        os.getenv("DOCKERHUB_TOKEN") and os.getenv("DOCKERHUB_USER")
    )

    if force_latest and build_context != "release":
        print(
            "--force-latest can only be applied if the build context is set to 'release'"
        )
        exit(1)

    if build_context == "release" and not build_context_ref.strip():
        print("Release number has to be provided")
        exit(1)

    docker_build_command = get_docker_command(
        build_preset,
        platform,
        is_authenticated,
        get_git_sha(),
        build_context,
        build_context_ref,
        force_latest,
    )

    if not dry_run:
        print("Executing Docker Build Command:")
        print(docker_build_command)
        script = ""
        if os.getenv("DOCKERHUB_USER"):
            # Log in first so the subsequent buildx --push can upload layers.
            script = dedent(
                f"""\
                docker logout
                docker login --username "{os.getenv("DOCKERHUB_USER")}" --password "{os.getenv("DOCKERHUB_TOKEN")}"
                DOCKER_ARGS="--push"
                """
            )
        script = script + docker_build_command
        if verbose:
            run_cmd("cat Dockerfile")
        # run_cmd already streams output line-by-line; its return value is
        # not needed here (the original bound it to an unused variable).
        run_cmd(script)
    else:
        print("Dry Run - Docker Build Command:")
        print(docker_build_command)


if __name__ == "__main__":
    main()

View File

@@ -95,21 +95,15 @@ def print_files(files: List[str]) -> None:
print("\n".join([f"- {s}" for s in files]))
def is_int(s: str) -> bool:
return bool(re.match(r"^-?\d+$", s))
def main(event_type: str, sha: str, repo: str) -> None:
"""Main function to check for file changes based on event context."""
print("SHA:", sha)
print("EVENT_TYPE", event_type)
files = None
if event_type == "pull_request":
pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
if is_int(pr_number):
files = fetch_changed_files_pr(repo, pr_number)
print("PR files:")
print_files(files)
files = fetch_changed_files_pr(repo, pr_number)
print("PR files:")
print_files(files)
elif event_type == "push":
files = fetch_changed_files_push(repo, sha)
@@ -125,7 +119,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
changes_detected = {}
for group, regex_patterns in PATTERNS.items():
patterns_compiled = [re.compile(p) for p in regex_patterns]
changes_detected[group] = files is None or detect_changes(
changes_detected[group] = event_type == "workflow_dispatch" or detect_changes(
files, patterns_compiled
)

View File

@@ -87,7 +87,7 @@ describe('Charts list', () => {
visitChartList();
cy.getBySel('count-crosslinks').should('be.visible');
cy.getBySel('crosslinks').first().trigger('mouseover');
cy.get('.antd5-tooltip')
cy.get('.ant-tooltip')
.contains('3 - Sample dashboard')
.invoke('removeAttr', 'target')
.click();

View File

@@ -88,9 +88,6 @@ describe('Horizontal FilterBar', () => {
cy.getBySel('horizontal-filterbar-empty')
.contains('No filters are currently added to this dashboard.')
.should('exist');
cy.get(nativeFilters.filtersPanel.filterGear).click({
force: true,
});
cy.getBySel('filter-bar__create-filter').should('exist');
cy.getBySel('filterbar-action-buttons').should('exist');
});
@@ -123,7 +120,7 @@ describe('Horizontal FilterBar', () => {
cy.getBySel('form-item-value').should('have.length', 3);
cy.viewport(768, 1024);
cy.getBySel('form-item-value').should('have.length', 1);
cy.getBySel('form-item-value').should('have.length', 0);
openMoreFilters(false);
cy.getBySel('form-item-value').should('have.length', 3);

View File

@@ -263,10 +263,8 @@ describe('Native filters', () => {
});
it('User can expand / retract native filter sidebar on a dashboard', () => {
cy.get(nativeFilters.addFilterButton.button).should('not.exist');
expandFilterOnLeftPanel();
cy.get(nativeFilters.filtersPanel.filterGear).click({
force: true,
});
cy.get(nativeFilters.filterFromDashboardView.createFilterButton).should(
'be.visible',
);

View File

@@ -228,9 +228,6 @@ export function collapseFilterOnLeftPanel() {
************************************************************************* */
export function enterNativeFilterEditModal(waitForDataset = true) {
interceptDataset();
cy.get(nativeFilters.filtersPanel.filterGear).click({
force: true,
});
cy.get(nativeFilters.filterFromDashboardView.createFilterButton).click({
force: true,
});
@@ -246,7 +243,11 @@ export function enterNativeFilterEditModal(waitForDataset = true) {
* @summary helper for adding new filter
************************************************************************* */
export function clickOnAddFilterInModal() {
return cy.get(nativeFilters.modal.addNewFilterButton).click({ force: true });
cy.get(nativeFilters.addFilterButton.button).first().click();
return cy
.get(nativeFilters.addFilterButton.dropdownItem)
.contains('Filter')
.click({ force: true });
}
/** ************************************************************************

View File

@@ -99,13 +99,16 @@ describe('Color scheme control', () => {
cy.get('.ant-select-selection-item .color-scheme-label').trigger(
'mouseover',
);
cy.get('.color-scheme-tooltip').should('be.visible');
cy.get('.color-scheme-tooltip').contains('Superset Colors');
cy.get('.Control[data-test="color_scheme"]').scrollIntoView();
cy.get('.Control[data-test="color_scheme"] input[type="search"]').focus();
cy.focused().type('lyftColors');
cy.getBySel('lyftColors').should('exist');
cy.getBySel('lyftColors').trigger('mouseover');
cy.focused().type('lyftColors{enter}');
cy.get(
'.Control[data-test="color_scheme"] .ant-select-selection-item [data-test="lyftColors"]',
).should('exist');
cy.get('.ant-select-selection-item .color-scheme-label').trigger(
'mouseover',
);
cy.get('.color-scheme-tooltip').should('not.exist');
});
});

View File

@@ -62,7 +62,7 @@ describe('Visualization > Line', () => {
'not.exist',
);
cy.get('.antd5-alert-warning').should('not.exist');
cy.get('.ant-alert-warning').should('not.exist');
});
it('should allow negative values in Y bounds', () => {
@@ -71,7 +71,7 @@ describe('Visualization > Line', () => {
cy.get('#controlSections-tab-display').click();
cy.get('span').contains('Y Axis Bounds').scrollIntoView();
cy.get('input[placeholder="Min"]').type('-0.1', { delay: 100 });
cy.get('.antd5-alert-warning').should('not.exist');
cy.get('.ant-alert-warning').should('not.exist');
});
it('should allow type to search color schemes and apply the scheme', () => {

View File

@@ -94,7 +94,7 @@ export const databasesPage = {
dbDropdown: '[class="ant-select-selection-search-input"]',
dbDropdownMenu: '.rc-virtual-list-holder-inner',
dbDropdownMenuItem: '[class="ant-select-item-option-content"]',
infoAlert: '.antd5-alert',
infoAlert: '.ant-alert',
serviceAccountInput: '[name="credentials_info"]',
connectionStep: {
modal: '.ant-modal-content',
@@ -103,7 +103,7 @@ export const databasesPage = {
helperBottom: '.helper-bottom',
postgresDatabase: '[name="database"]',
dbInput: '[name="database_name"]',
alertMessage: '.antd5-alert-message',
alertMessage: '.ant-alert-message',
errorField: '[role="alert"]',
uploadJson: '[title="Upload JSON file"]',
chooseFile: '[class="ant-btn input-upload-btn"]',
@@ -140,7 +140,7 @@ export const sqlLabView = {
tabsNavList: "[class='ant-tabs-nav-list']",
tab: "[class='ant-tabs-tab-btn']",
addTabButton: dataTestLocator('add-tab-icon'),
tooltip: '.antd5-tooltip-content',
tooltip: '.ant-tooltip-content',
tabName: '.css-1suejie',
schemaInput: '[data-test=DatabaseSelector] > :nth-child(2)',
loadingIndicator: '.Select__loading-indicator',
@@ -166,7 +166,7 @@ export const sqlLabView = {
renderedTableHeader: '.ReactVirtualized__Table__headerRow',
renderedTableRow: '.ReactVirtualized__Table__row',
errorBody: '.error-body',
alertMessage: '.antd5-alert-message',
alertMessage: '.ant-alert-message',
historyTable: {
header: '[role=columnheader]',
table: '.QueryTable',
@@ -325,7 +325,7 @@ export const nativeFilters = {
confirmCancelButton: dataTestLocator(
'native-filter-modal-confirm-cancel-button',
),
alertXUnsavedFilters: '.antd5-alert-message',
alertXUnsavedFilters: '.ant-alert-message',
tabsList: {
filterItemsContainer: dataTestLocator('filter-title-container'),
tabsContainer: '[class="ant-tabs-nav-list"]',
@@ -334,8 +334,10 @@ export const nativeFilters = {
},
addFilter: dataTestLocator('add-filter-button'),
defaultValueCheck: '.ant-checkbox-checked',
addNewFilterButton: dataTestLocator('add-new-filter-button'),
addNewDividerButton: dataTestLocator('add-new-divider-button'),
},
addFilterButton: {
button: `.ant-modal-content [data-test="new-dropdown-icon"]`,
dropdownItem: '.ant-dropdown-menu-item',
},
filtersPanel: {
filterName: dataTestLocator('filters-config-modal__name-input'),
@@ -346,7 +348,6 @@ export const nativeFilters = {
filterTypeInput: dataTestLocator('filters-config-modal__filter-type'),
fieldInput: dataTestLocator('field-input'),
filterTypeItem: '.ant-select-selection-item',
filterGear: dataTestLocator('filterbar-orientation-icon'),
},
filterFromDashboardView: {
filterValueInput: '[class="ant-select-selection-search-input"]',

View File

@@ -24,7 +24,7 @@
"@rjsf/core": "^5.21.1",
"@rjsf/utils": "^5.19.3",
"@rjsf/validator-ajv8": "^5.22.3",
"@scarf/scarf": "^1.4.0",
"@scarf/scarf": "^1.3.0",
"@superset-ui/chart-controls": "file:./packages/superset-ui-chart-controls",
"@superset-ui/core": "file:./packages/superset-ui-core",
"@superset-ui/legacy-plugin-chart-calendar": "file:./plugins/legacy-plugin-chart-calendar",
@@ -101,7 +101,7 @@
"prop-types": "^15.8.1",
"query-string": "^6.13.7",
"rc-trigger": "^5.3.4",
"re-resizable": "^6.10.1",
"re-resizable": "^6.10.0",
"react": "^16.13.1",
"react-ace": "^10.1.0",
"react-checkbox-tree": "^1.8.0",
@@ -202,7 +202,7 @@
"@types/react-redux": "^7.1.10",
"@types/react-router-dom": "^5.3.3",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/react-table": "^7.7.20",
"@types/react-table": "^7.7.19",
"@types/react-transition-group": "^4.4.10",
"@types/react-ultimate-pagination": "^1.2.4",
"@types/react-virtualized-auto-sizer": "^1.0.4",
@@ -9680,10 +9680,9 @@
"license": "MIT"
},
"node_modules/@scarf/scarf": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
"integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==",
"hasInstallScript": true
"version": "1.3.0",
"hasInstallScript": true,
"license": "Apache-2.0"
},
"node_modules/@sigstore/bundle": {
"version": "2.3.2",
@@ -13981,9 +13980,9 @@
}
},
"node_modules/@types/react-table": {
"version": "7.7.20",
"resolved": "https://registry.npmjs.org/@types/react-table/-/react-table-7.7.20.tgz",
"integrity": "sha512-ahMp4pmjVlnExxNwxyaDrFgmKxSbPwU23sGQw2gJK4EhCvnvmib2s/O/+y1dfV57dXOwpr2plfyBol+vEHbi2w==",
"version": "7.7.19",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/react": "*"
}
@@ -28030,14 +28029,6 @@
"uglify-js": "^3.1.4"
}
},
"node_modules/handlebars-group-by": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/handlebars-group-by/-/handlebars-group-by-1.0.1.tgz",
"integrity": "sha512-qwVVDVAJMBKdmnQU8jcEXGOu+4/2YJX1RP3pUw6Ee9t6gdkxt+dJEWDudSFTgqb35KXrktw/Nn/Dp3Rx5muHpg==",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/handlebars/node_modules/source-map": {
"version": "0.6.1",
"license": "BSD-3-Clause",
@@ -45816,9 +45807,9 @@
}
},
"node_modules/re-resizable": {
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.1.tgz",
"integrity": "sha512-m33nSWRH57UZLmep5M/LatkZ2NRqimVD/bOOpvymw5Zf33+eTSEixsUugscOZzAtK0/nx+OSuOf8VbKJx/4ptw==",
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.0.tgz",
"integrity": "sha512-hysSK0xmA5nz24HBVztlk4yCqCLCvS32E6ZpWxVKop9x3tqCa4yAj1++facrmkOf62JsJHjmjABdKxXofYioCw==",
"peerDependencies": {
"react": "^16.13.1 || ^17.0.0 || ^18.0.0",
"react-dom": "^16.13.1 || ^17.0.0 || ^18.0.0"
@@ -53076,6 +53067,14 @@
"node": ">=4"
}
},
"node_modules/viewport-mercator-project": {
"version": "6.2.3",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.0.0",
"gl-matrix": "^3.0.0"
}
},
"node_modules/vlq": {
"version": "0.2.3",
"license": "MIT"
@@ -57952,10 +57951,10 @@
"version": "0.18.25",
"license": "Apache-2.0",
"dependencies": {
"@math.gl/web-mercator": "^4.1.0",
"prop-types": "^15.8.1",
"react-map-gl": "^6.1.19",
"supercluster": "^8.0.1"
"supercluster": "^8.0.1",
"viewport-mercator-project": "^6.1.1"
},
"peerDependencies": {
"@superset-ui/chart-controls": "*",
@@ -57964,30 +57963,6 @@
"react": "^15 || ^16"
}
},
"plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/core": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/core/-/core-4.1.0.tgz",
"integrity": "sha512-FrdHBCVG3QdrworwrUSzXIaK+/9OCRLscxI2OUy6sLOHyHgBMyfnEGs99/m3KNvs+95BsnQLWklVfpKfQzfwKA==",
"license": "MIT",
"dependencies": {
"@math.gl/types": "4.1.0"
}
},
"plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/types": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/types/-/types-4.1.0.tgz",
"integrity": "sha512-clYZdHcmRvMzVK5fjeDkQlHUzXQSNdZ7s4xOqC3nJPgz4C/TZkUecTo9YS4PruZqtDda/ag4erndP0MIn40dGA==",
"license": "MIT"
},
"plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/web-mercator": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-4.1.0.tgz",
"integrity": "sha512-HZo3vO5GCMkXJThxRJ5/QYUYRr3XumfT8CzNNCwoJfinxy5NtKUd7dusNTXn7yJ40UoB8FMIwkVwNlqaiRZZAw==",
"license": "MIT",
"dependencies": {
"@math.gl/core": "4.1.0"
}
},
"plugins/legacy-plugin-chart-map-box/node_modules/kdbush": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz",
@@ -58577,7 +58552,6 @@
"license": "Apache-2.0",
"dependencies": {
"handlebars": "^4.7.8",
"handlebars-group-by": "^1.0.1",
"just-handlebars-helpers": "^1.0.19"
},
"devDependencies": {
@@ -58681,6 +58655,13 @@
"react-dom": "^16.13.1"
}
},
"plugins/plugin-chart-table/node_modules/@types/react-table": {
"version": "7.7.20",
"license": "MIT",
"dependencies": {
"@types/react": "*"
}
},
"plugins/plugin-chart-table/node_modules/d3-array": {
"version": "2.12.1",
"license": "BSD-3-Clause",
@@ -65117,9 +65098,7 @@
}
},
"@scarf/scarf": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
"integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ=="
"version": "1.3.0"
},
"@sigstore/bundle": {
"version": "2.3.2",
@@ -68687,33 +68666,12 @@
"@superset-ui/legacy-plugin-chart-map-box": {
"version": "file:plugins/legacy-plugin-chart-map-box",
"requires": {
"@math.gl/web-mercator": "^4.1.0",
"prop-types": "^15.8.1",
"react-map-gl": "^6.1.19",
"supercluster": "^8.0.1"
"supercluster": "^8.0.1",
"viewport-mercator-project": "^6.1.1"
},
"dependencies": {
"@math.gl/core": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/core/-/core-4.1.0.tgz",
"integrity": "sha512-FrdHBCVG3QdrworwrUSzXIaK+/9OCRLscxI2OUy6sLOHyHgBMyfnEGs99/m3KNvs+95BsnQLWklVfpKfQzfwKA==",
"requires": {
"@math.gl/types": "4.1.0"
}
},
"@math.gl/types": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/types/-/types-4.1.0.tgz",
"integrity": "sha512-clYZdHcmRvMzVK5fjeDkQlHUzXQSNdZ7s4xOqC3nJPgz4C/TZkUecTo9YS4PruZqtDda/ag4erndP0MIn40dGA=="
},
"@math.gl/web-mercator": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-4.1.0.tgz",
"integrity": "sha512-HZo3vO5GCMkXJThxRJ5/QYUYRr3XumfT8CzNNCwoJfinxy5NtKUd7dusNTXn7yJ40UoB8FMIwkVwNlqaiRZZAw==",
"requires": {
"@math.gl/core": "4.1.0"
}
},
"kdbush": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz",
@@ -69182,7 +69140,6 @@
"@types/jest": "^29.5.12",
"@types/lodash": "^4.17.7",
"handlebars": "^4.7.8",
"handlebars-group-by": "*",
"jest": "^29.7.0",
"just-handlebars-helpers": "^1.0.19"
},
@@ -69237,6 +69194,12 @@
"xss": "^1.0.15"
},
"dependencies": {
"@types/react-table": {
"version": "7.7.20",
"requires": {
"@types/react": "*"
}
},
"d3-array": {
"version": "2.12.1",
"requires": {
@@ -70370,9 +70333,8 @@
}
},
"@types/react-table": {
"version": "7.7.20",
"resolved": "https://registry.npmjs.org/@types/react-table/-/react-table-7.7.20.tgz",
"integrity": "sha512-ahMp4pmjVlnExxNwxyaDrFgmKxSbPwU23sGQw2gJK4EhCvnvmib2s/O/+y1dfV57dXOwpr2plfyBol+vEHbi2w==",
"version": "7.7.19",
"dev": true,
"requires": {
"@types/react": "^16.9.53"
}
@@ -79830,11 +79792,6 @@
}
}
},
"handlebars-group-by": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/handlebars-group-by/-/handlebars-group-by-1.0.1.tgz",
"integrity": "sha512-qwVVDVAJMBKdmnQU8jcEXGOu+4/2YJX1RP3pUw6Ee9t6gdkxt+dJEWDudSFTgqb35KXrktw/Nn/Dp3Rx5muHpg=="
},
"har-schema": {
"version": "2.0.0",
"dev": true
@@ -90727,9 +90684,9 @@
}
},
"re-resizable": {
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.1.tgz",
"integrity": "sha512-m33nSWRH57UZLmep5M/LatkZ2NRqimVD/bOOpvymw5Zf33+eTSEixsUugscOZzAtK0/nx+OSuOf8VbKJx/4ptw==",
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.0.tgz",
"integrity": "sha512-hysSK0xmA5nz24HBVztlk4yCqCLCvS32E6ZpWxVKop9x3tqCa4yAj1++facrmkOf62JsJHjmjABdKxXofYioCw==",
"requires": {}
},
"react": {
@@ -95430,6 +95387,13 @@
"unist-util-stringify-position": "^3.0.0"
}
},
"viewport-mercator-project": {
"version": "6.2.3",
"requires": {
"@babel/runtime": "^7.0.0",
"gl-matrix": "^3.0.0"
}
},
"vlq": {
"version": "0.2.3"
},

View File

@@ -90,7 +90,7 @@
"@rjsf/core": "^5.21.1",
"@rjsf/utils": "^5.19.3",
"@rjsf/validator-ajv8": "^5.22.3",
"@scarf/scarf": "^1.4.0",
"@scarf/scarf": "^1.3.0",
"@superset-ui/chart-controls": "file:./packages/superset-ui-chart-controls",
"@superset-ui/core": "file:./packages/superset-ui-core",
"@superset-ui/legacy-plugin-chart-calendar": "file:./plugins/legacy-plugin-chart-calendar",
@@ -167,7 +167,7 @@
"prop-types": "^15.8.1",
"query-string": "^6.13.7",
"rc-trigger": "^5.3.4",
"re-resizable": "^6.10.1",
"re-resizable": "^6.10.0",
"react": "^16.13.1",
"react-ace": "^10.1.0",
"react-checkbox-tree": "^1.8.0",
@@ -268,7 +268,7 @@
"@types/react-redux": "^7.1.10",
"@types/react-router-dom": "^5.3.3",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/react-table": "^7.7.20",
"@types/react-table": "^7.7.19",
"@types/react-transition-group": "^4.4.10",
"@types/react-ultimate-pagination": "^1.2.4",
"@types/react-virtualized-auto-sizer": "^1.0.4",

View File

@@ -18,8 +18,9 @@
*/
import { CSSProperties } from 'react';
import { kebabCase } from 'lodash';
import { TooltipPlacement } from 'antd/lib/tooltip';
import { t } from '@superset-ui/core';
import { Tooltip, TooltipProps, TooltipPlacement } from './Tooltip';
import { Tooltip, TooltipProps } from './Tooltip';
export interface InfoTooltipWithTriggerProps {
label?: string;

View File

@@ -17,41 +17,48 @@
* under the License.
*/
import { useTheme } from '@superset-ui/core';
import { Tooltip as BaseTooltip } from 'antd-v5';
import {
TooltipProps as BaseTooltipProps,
TooltipPlacement as BaseTooltipPlacement,
} from 'antd-v5/lib/tooltip';
import { useTheme, css } from '@superset-ui/core';
import { Tooltip as BaseTooltip } from 'antd';
import type { TooltipProps } from 'antd/lib/tooltip';
import { Global } from '@emotion/react';
export type TooltipProps = BaseTooltipProps;
export type TooltipPlacement = BaseTooltipPlacement;
export type { TooltipProps } from 'antd/lib/tooltip';
export const Tooltip = ({
overlayStyle,
color,
...props
}: BaseTooltipProps) => {
export const Tooltip = ({ overlayStyle, color, ...props }: TooltipProps) => {
const theme = useTheme();
const defaultColor = `${theme.colors.grayscale.dark2}e6`;
return (
<BaseTooltip
overlayStyle={{
fontSize: theme.typography.sizes.s,
lineHeight: '1.6',
maxWidth: theme.gridUnit * 62,
minWidth: theme.gridUnit * 30,
...overlayStyle,
}}
// make the tooltip display closer to the label
align={{ offset: [0, 1] }}
color={defaultColor || color}
trigger="hover"
placement="bottom"
// don't allow hovering over the tooltip
mouseLeaveDelay={0}
{...props}
/>
<>
{/* Safari hack to hide browser default tooltips */}
<Global
styles={css`
.ant-tooltip-open {
display: inline-block;
&::after {
content: '';
display: block;
}
}
`}
/>
<BaseTooltip
overlayStyle={{
fontSize: theme.typography.sizes.s,
lineHeight: '1.6',
maxWidth: theme.gridUnit * 62,
minWidth: theme.gridUnit * 30,
...overlayStyle,
}}
// make the tooltip display closer to the label
align={{ offset: [0, 1] }}
color={defaultColor || color}
trigger="hover"
placement="bottom"
// don't allow hovering over the tooltip
mouseLeaveDelay={0}
{...props}
/>
</>
);
};

View File

@@ -262,7 +262,6 @@ export interface BaseControlConfig<
props: ControlPanelsContainerProps,
controlData: AnyDict,
) => boolean;
disableStash?: boolean;
hidden?:
| boolean
| ((props: ControlPanelsContainerProps, controlData: AnyDict) => boolean);

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { boxplotOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
time_grain_sqla: 'P1Y',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { contributionOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { flattenOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -17,7 +17,7 @@
* under the License.
*/
import { histogramOperator } from '@superset-ui/chart-controls';
import { SqlaFormData, VizType } from '@superset-ui/core';
import { SqlaFormData } from '@superset-ui/core';
import { omit } from 'lodash';
const formData: SqlaFormData = {
@@ -26,7 +26,7 @@ const formData: SqlaFormData = {
cumulative: true,
normalize: true,
groupby: ['country', 'region'],
viz_type: VizType.LegacyHistogram,
viz_type: 'histogram',
datasource: 'foo',
};

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { pivotOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
show_empty_columns: true,
};
const queryObject: QueryObject = {

View File

@@ -16,12 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import {
DTTM_ALIAS,
QueryObject,
SqlaFormData,
VizType,
} from '@superset-ui/core';
import { DTTM_ALIAS, QueryObject, SqlaFormData } from '@superset-ui/core';
import { prophetOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -32,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
time_grain_sqla: 'P1Y',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { rankOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -26,7 +26,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
truncate_metric: true,
};
const queryObject: QueryObject = {

View File

@@ -16,12 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import {
ComparisonType,
QueryObject,
SqlaFormData,
VizType,
} from '@superset-ui/core';
import { ComparisonType, QueryObject, SqlaFormData } from '@superset-ui/core';
import { renameOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -31,7 +26,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
truncate_metric: true,
};
const queryObject: QueryObject = {

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { resampleOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { rollingWindowOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { sortOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import { timeCompareOperator } from '@superset-ui/chart-controls';
const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const queryObject: QueryObject = {
metrics: [

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
import { QueryObject, SqlaFormData } from '@superset-ui/core';
import {
timeCompareOperator,
timeComparePivotOperator,
@@ -30,7 +30,7 @@ const formData: SqlaFormData = {
time_range: '2015 : 2016',
granularity: 'month',
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
show_empty_columns: true,
};
const queryObject: QueryObject = {

View File

@@ -16,12 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryFormData, QueryFormMetric, VizType } from '@superset-ui/core';
import { QueryFormData, QueryFormMetric } from '@superset-ui/core';
import { extractExtraMetrics } from '@superset-ui/chart-controls';
const baseFormData: QueryFormData = {
datasource: 'dummy',
viz_type: VizType.Table,
viz_type: 'table',
metrics: ['a', 'b'],
columns: ['foo', 'bar'],
limit: 100,

View File

@@ -17,11 +17,11 @@
* under the License.
*/
import { isDerivedSeries } from '@superset-ui/chart-controls';
import { SqlaFormData, ComparisonType, VizType } from '@superset-ui/core';
import { SqlaFormData, ComparisonType } from '@superset-ui/core';
const formData: SqlaFormData = {
datasource: 'foo',
viz_type: VizType.Table,
viz_type: 'table',
};
const series = {
id: 'metric__1 month ago',

View File

@@ -16,7 +16,6 @@
* specific language governing permissions and limitations
* under the License.
*/
import { VizType } from '@superset-ui/core';
import { displayTimeRelatedControls } from '../../src';
const mockData = {
@@ -36,7 +35,7 @@ const mockData = {
exportState: {},
form_data: {
datasource: '22__table',
viz_type: VizType.Table,
viz_type: 'table',
},
};

View File

@@ -16,12 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
import { QueryFormData, VizType } from '@superset-ui/core';
import { QueryFormData } from '@superset-ui/core';
import { getStandardizedControls } from '../../src';
const formData: QueryFormData = {
datasource: '30__table',
viz_type: VizType.Table,
viz_type: 'table',
standardizedFormData: {
controls: {
metrics: ['count(*)', 'sum(sales)'],
@@ -34,7 +34,7 @@ const formData: QueryFormData = {
test('without standardizedFormData', () => {
getStandardizedControls().setStandardizedControls({
datasource: '30__table',
viz_type: VizType.Table,
viz_type: 'table',
});
expect(getStandardizedControls().controls).toEqual({
metrics: [],

View File

@@ -41,7 +41,6 @@ export { default as ChartDataProvider } from './components/ChartDataProvider';
export * from './types/Base';
export * from './types/TransformFunction';
export * from './types/QueryResponse';
export * from './types/VizType';
export { default as __hack_reexport_chart_Base } from './types/Base';
export { default as __hack_reexport_chart_TransformFunction } from './types/TransformFunction';

View File

@@ -1,72 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export enum VizType {
Area = 'echarts_area',
Bar = 'echarts_timeseries_bar',
BigNumber = 'big_number',
BigNumberTotal = 'big_number_total',
BigNumberPeriodOverPeriod = 'pop_kpi',
BoxPlot = 'box_plot',
Bubble = 'bubble_v2',
Bullet = 'bullet',
Calendar = 'cal_heatmap',
Chord = 'chord',
Compare = 'compare',
CountryMap = 'country_map',
DistBar = 'dist_bar',
EventFlow = 'event_flow',
Funnel = 'funnel',
Gauge = 'gauge_chart',
Graph = 'graph_chart',
Handlebars = 'handlebars',
Heatmap = 'heatmap_v2',
Histogram = 'histogram_v2',
Horizon = 'horizon',
LegacyArea = 'area',
LegacyBar = 'bar',
LegacyBubble = 'bubble',
LegacyHeatmap = 'heatmap',
LegacyHistogram = 'histogram',
LegacyLine = 'line',
LegacySankey = 'sankey',
Line = 'echarts_timeseries_line',
MapBox = 'mapbox',
MixedTimeseries = 'mixed_timeseries',
PairedTTest = 'paired_ttest',
ParallelCoordinates = 'para',
Partition = 'partition',
Pie = 'pie',
PivotTable = 'pivot_table_v2',
Radar = 'radar',
Rose = 'rose',
Sankey = 'sankey_v2',
Scatter = 'echarts_timeseries_scatter',
SmoothLine = 'echarts_timeseries_smooth',
Step = 'echarts_timeseries_step',
Sunburst = 'sunburst_v2',
Table = 'table',
TimePivot = 'time_pivot',
TimeTable = 'time_table',
Timeseries = 'echarts_timeseries',
Tree = 'tree_chart',
Treemap = 'treemap_v2',
Waterfall = 'waterfall',
WordCloud = 'word_cloud',
WorldMap = 'world_map',
}

View File

@@ -28,7 +28,6 @@ import {
getChartBuildQueryRegistry,
getChartMetadataRegistry,
ChartMetadata,
VizType,
} from '@superset-ui/core';
import { LOGIN_GLOB } from '../fixtures/constants';
@@ -87,13 +86,13 @@ describe('ChartClient', () => {
sliceId,
formData: {
granularity: 'second',
viz_type: VizType.LegacyBar,
viz_type: 'bar',
},
}),
).resolves.toEqual({
...sankeyFormData,
granularity: 'second',
viz_type: VizType.LegacyBar,
viz_type: 'bar',
});
});
it('returns promise of formData if only formData was given', () =>
@@ -102,13 +101,13 @@ describe('ChartClient', () => {
formData: {
datasource: '1__table',
granularity: 'minute',
viz_type: VizType.LegacyLine,
viz_type: 'line',
},
}),
).resolves.toEqual({
datasource: '1__table',
granularity: 'minute',
viz_type: VizType.LegacyLine,
viz_type: 'line',
}));
it('rejects if none of sliceId or formData is specified', () =>
expect(
@@ -121,12 +120,12 @@ describe('ChartClient', () => {
describe('.loadQueryData(formData, options)', () => {
it('returns a promise of query data for known chart type', () => {
getChartMetadataRegistry().registerValue(
VizType.WordCloud,
'word_cloud',
new ChartMetadata({ name: 'Word Cloud', thumbnail: '' }),
);
getChartBuildQueryRegistry().registerValue(
VizType.WordCloud,
'word_cloud',
(formData: QueryFormData) => buildQueryContext(formData),
);
fetchMock.post('glob:*/api/v1/chart/data', [
@@ -139,7 +138,7 @@ describe('ChartClient', () => {
return expect(
chartClient.loadQueryData({
granularity: 'minute',
viz_type: VizType.WordCloud,
viz_type: 'word_cloud',
datasource: '1__table',
}),
).resolves.toEqual([
@@ -256,7 +255,7 @@ describe('ChartClient', () => {
it('loadAllDataNecessaryForAChart', () => {
fetchMock.get(`glob:*/api/v1/form_data/?slice_id=${sliceId}`, {
granularity: 'minute',
viz_type: VizType.LegacyLine,
viz_type: 'line',
datasource: '1__table',
color: 'living-coral',
});
@@ -276,12 +275,12 @@ describe('ChartClient', () => {
});
getChartMetadataRegistry().registerValue(
VizType.LegacyLine,
'line',
new ChartMetadata({ name: 'Line', thumbnail: '.gif' }),
);
getChartBuildQueryRegistry().registerValue(
VizType.LegacyLine,
'line',
(formData: QueryFormData) => buildQueryContext(formData),
);
@@ -297,7 +296,7 @@ describe('ChartClient', () => {
},
formData: {
granularity: 'minute',
viz_type: VizType.LegacyLine,
viz_type: 'line',
datasource: '1__table',
color: 'living-coral',
},

View File

@@ -19,11 +19,11 @@
/* eslint sort-keys: 'off' */
/** The form data defined here is based on default visualizations packaged with Apache Superset */
import { TimeGranularity, VizType } from '@superset-ui/core';
import { TimeGranularity } from '@superset-ui/core';
export const bigNumberFormData = {
datasource: '3__table',
viz_type: VizType.BigNumber,
viz_type: 'big_number',
slice_id: 54,
granularity_sqla: 'ds',
time_grain_sqla: TimeGranularity.DAY,
@@ -39,7 +39,7 @@ export const bigNumberFormData = {
export const wordCloudFormData = {
datasource: '3__table',
viz_type: VizType.WordCloud,
viz_type: 'word_cloud',
slice_id: 60,
url_params: {},
granularity_sqla: 'ds',
@@ -56,7 +56,7 @@ export const wordCloudFormData = {
export const sunburstFormData = {
datasource: '2__table',
viz_type: VizType.Sunburst,
viz_type: 'sunburst_v2',
slice_id: 47,
url_params: {},
granularity_sqla: 'year',
@@ -71,7 +71,7 @@ export const sunburstFormData = {
export const sankeyFormData = {
datasource: '1__table',
viz_type: VizType.LegacySankey,
viz_type: 'sankey',
slice_id: 1,
url_params: {},
granularity_sqla: null,

View File

@@ -31,7 +31,6 @@ import {
QueryFormData,
DatasourceType,
supersetTheme,
VizType,
} from '@superset-ui/core';
describe('ChartPlugin', () => {
@@ -60,7 +59,7 @@ describe('ChartPlugin', () => {
const FORM_DATA = {
datasource: '1__table',
granularity: 'day',
viz_type: VizType.Table,
viz_type: 'table',
};
it('creates a new plugin', () => {

View File

@@ -17,7 +17,6 @@
* under the License.
*/
import fetchMock from 'fetch-mock';
import { VizType } from '@superset-ui/core';
import { getFormData } from '../../../../src/query/api/legacy';
import setupClientForTest from '../setupClientForTest';
@@ -29,7 +28,7 @@ describe('getFormData()', () => {
const mockData = {
datasource: '1__table',
viz_type: VizType.LegacySankey,
viz_type: 'sankey',
slice_id: 1,
url_params: {},
granularity_sqla: null,

View File

@@ -17,7 +17,7 @@
* under the License.
*/
import fetchMock from 'fetch-mock';
import { buildQueryContext, ApiV1, VizType } from '@superset-ui/core';
import { buildQueryContext, ApiV1 } from '@superset-ui/core';
import setupClientForTest from '../setupClientForTest';
describe('API v1 > getChartData()', () => {
@@ -39,7 +39,7 @@ describe('API v1 > getChartData()', () => {
const result = await ApiV1.getChartData(
buildQueryContext({
granularity: 'minute',
viz_type: VizType.WordCloud,
viz_type: 'word_cloud',
datasource: '1__table',
}),
);

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { buildQueryContext, VizType } from '@superset-ui/core';
import { buildQueryContext } from '@superset-ui/core';
import * as queryModule from '../../src/query/normalizeTimeColumn';
describe('buildQueryContext', () => {
@@ -24,7 +24,7 @@ describe('buildQueryContext', () => {
const queryContext = buildQueryContext({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
});
expect(queryContext.datasource.id).toBe(5);
expect(queryContext.datasource.type).toBe('table');
@@ -37,7 +37,7 @@ describe('buildQueryContext', () => {
{
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
source: 'source_column',
source_category: 'source_category_column',
target: 'target_column',
@@ -75,7 +75,7 @@ describe('buildQueryContext', () => {
{
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
source: 'source_column',
source_category: 'source_category_column',
target: 'target_column',
@@ -103,7 +103,7 @@ describe('buildQueryContext', () => {
const queryContext = buildQueryContext(
{
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
},
() => [
{
@@ -133,7 +133,7 @@ describe('buildQueryContext', () => {
buildQueryContext(
{
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
x_axis: 'axis',
},
() => [{}],

View File

@@ -25,7 +25,6 @@ import {
AnnotationType,
buildQueryObject,
QueryObject,
VizType,
} from '@superset-ui/core';
describe('buildQueryObject', () => {
@@ -35,7 +34,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
});
expect(query.granularity).toEqual('ds');
});
@@ -44,7 +43,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
metric: 'sum__num',
secondary_metric: 'avg__num',
});
@@ -55,7 +54,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
extra_filters: [{ col: 'abc', op: '==', val: 'qwerty' }],
adhoc_filters: [
{
@@ -89,7 +88,7 @@ describe('buildQueryObject', () => {
{
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
my_custom_metric_control: 'sum__num',
},
{ my_custom_metric_control: 'metrics' },
@@ -102,7 +101,7 @@ describe('buildQueryObject', () => {
{
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
metrics: ['sum__num'],
my_custom_metric_control: 'avg__num',
},
@@ -116,7 +115,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
limit: series_limit,
});
expect(query.series_limit).toEqual(series_limit);
@@ -127,7 +126,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
series_limit,
});
expect(query.series_limit).toEqual(series_limit);
@@ -138,7 +137,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
order_desc: orderDesc,
});
expect(query.order_desc).toEqual(orderDesc);
@@ -149,7 +148,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
timeseries_limit_metric: metric,
});
expect(query.series_limit_metric).toEqual(metric);
@@ -160,7 +159,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.PivotTable,
viz_type: 'pivot_table_v2',
series_limit_metric: metric,
});
expect(query.series_limit_metric).toEqual(metric);
@@ -171,7 +170,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.PivotTable,
viz_type: 'pivot_table_v2',
series_limit_metric: metric,
});
expect(query.series_limit_metric).toEqual(undefined);
@@ -181,7 +180,7 @@ describe('buildQueryObject', () => {
const baseQuery = {
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
row_limit: null,
};
@@ -268,7 +267,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
annotation_layers: annotationLayers,
});
expect(query.annotation_layers).toEqual(annotationLayers);
@@ -279,7 +278,7 @@ describe('buildQueryObject', () => {
buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
url_params: { abc: '123' },
}).url_params,
).toEqual({ abc: '123' });
@@ -287,7 +286,7 @@ describe('buildQueryObject', () => {
buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
// @ts-expect-error
url_params: null,
}).url_params,
@@ -299,7 +298,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity,
viz_type: VizType.Table,
viz_type: 'table',
});
expect(query.granularity).toEqual(granularity);
});
@@ -309,7 +308,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: granularity,
viz_type: VizType.Table,
viz_type: 'table',
});
expect(query.granularity).toEqual(granularity);
});
@@ -321,7 +320,7 @@ describe('buildQueryObject', () => {
query = buildQueryObject({
datasource: '5__table',
granularity_sqla: 'ds',
viz_type: VizType.Table,
viz_type: 'table',
custom_params: customParams,
});
expect(query.custom_params).toEqual(customParams);

View File

@@ -16,13 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
import { isXAxisSet, VizType } from '@superset-ui/core';
import { isXAxisSet } from '@superset-ui/core';
test('isXAxisSet', () => {
expect(isXAxisSet({ datasource: '123', viz_type: 'table' })).not.toBeTruthy();
expect(
isXAxisSet({ datasource: '123', viz_type: VizType.Table }),
).not.toBeTruthy();
expect(
isXAxisSet({ datasource: '123', viz_type: VizType.Table, x_axis: 'axis' }),
isXAxisSet({ datasource: '123', viz_type: 'table', x_axis: 'axis' }),
).toBeTruthy();
});

View File

@@ -16,13 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
import { normalizeOrderBy, QueryObject, VizType } from '@superset-ui/core';
import { normalizeOrderBy, QueryObject } from '@superset-ui/core';
describe('normalizeOrderBy', () => {
it('should not change original queryObject when orderby populated', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
};
@@ -32,7 +32,7 @@ describe('normalizeOrderBy', () => {
it('has series_limit_metric in queryObject', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
metrics: ['count(*)'],
series_limit_metric: {
@@ -50,7 +50,7 @@ describe('normalizeOrderBy', () => {
expect(expectedQueryObject).not.toHaveProperty('order_desc');
expect(expectedQueryObject).toEqual({
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
metrics: ['count(*)'],
orderby: [
@@ -72,7 +72,7 @@ describe('normalizeOrderBy', () => {
it('should transform legacy_order_by in queryObject', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
metrics: ['count(*)'],
legacy_order_by: {
@@ -90,7 +90,7 @@ describe('normalizeOrderBy', () => {
expect(expectedQueryObject).not.toHaveProperty('order_desc');
expect(expectedQueryObject).toEqual({
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
metrics: ['count(*)'],
orderby: [
@@ -112,7 +112,7 @@ describe('normalizeOrderBy', () => {
it('has metrics in queryObject', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
metrics: ['count(*)'],
order_desc: true,
@@ -122,7 +122,7 @@ describe('normalizeOrderBy', () => {
expect(expectedQueryObject).not.toHaveProperty('order_desc');
expect(expectedQueryObject).toEqual({
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
metrics: ['count(*)'],
orderby: [['count(*)', false]],
@@ -132,7 +132,7 @@ describe('normalizeOrderBy', () => {
it('should not change', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
};
expect(normalizeOrderBy(query)).toEqual(query);
@@ -141,7 +141,7 @@ describe('normalizeOrderBy', () => {
it('remove empty orderby', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
orderby: [],
};
@@ -151,7 +151,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an empty array', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
orderby: [[]],
};
@@ -161,7 +161,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an empty metric', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
orderby: [['', true]],
};
@@ -171,7 +171,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an empty adhoc metric', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
orderby: [[{}, true]],
};
@@ -181,7 +181,7 @@ describe('normalizeOrderBy', () => {
it('remove orderby with an non-boolean type', () => {
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
time_range: '1 year ago : 2013',
// @ts-ignore
orderby: [['count(*)', 'true']],

View File

@@ -20,13 +20,12 @@ import {
normalizeTimeColumn,
QueryObject,
SqlaFormData,
VizType,
} from '@superset-ui/core';
test('should return original QueryObject if x_axis is empty', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
@@ -35,7 +34,7 @@ test('should return original QueryObject if x_axis is empty', () => {
};
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
@@ -52,7 +51,7 @@ test('should return original QueryObject if x_axis is empty', () => {
test('should support different columns for x-axis and granularity', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
@@ -62,7 +61,7 @@ test('should support different columns for x-axis and granularity', () => {
};
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
@@ -77,7 +76,7 @@ test('should support different columns for x-axis and granularity', () => {
};
expect(normalizeTimeColumn(formData, query)).toEqual({
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
extras: { where: '', having: '', time_grain_sqla: 'P1Y' },
time_range: '1 year ago : 2013',
@@ -99,7 +98,7 @@ test('should support different columns for x-axis and granularity', () => {
test('should support custom SQL in x-axis', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
@@ -113,7 +112,7 @@ test('should support custom SQL in x-axis', () => {
};
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
@@ -135,7 +134,7 @@ test('should support custom SQL in x-axis', () => {
};
expect(normalizeTimeColumn(formData, query)).toEqual({
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
extras: { where: '', having: '', time_grain_sqla: 'P1Y' },
time_range: '1 year ago : 2013',
@@ -157,7 +156,7 @@ test('should support custom SQL in x-axis', () => {
test('fallback and invalid columns value', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
@@ -171,7 +170,7 @@ test('fallback and invalid columns value', () => {
};
const query: QueryObject = {
datasource: '5__table',
viz_type: VizType.Table,
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',

View File

@@ -17,11 +17,11 @@
* under the License.
*/
import { getComparisonFilters, VizType } from '@superset-ui/core';
import { getComparisonFilters } from '@superset-ui/core';
const form_data = {
datasource: '22__table',
viz_type: VizType.BigNumberPeriodOverPeriod,
viz_type: 'pop_kpi',
slice_id: 97,
url_params: {
form_data_key:

View File

@@ -17,15 +17,11 @@
* under the License.
*/
import {
getComparisonInfo,
ComparisonTimeRangeType,
VizType,
} from '@superset-ui/core';
import { getComparisonInfo, ComparisonTimeRangeType } from '@superset-ui/core';
const form_data = {
datasource: '22__table',
viz_type: VizType.BigNumberPeriodOverPeriod,
viz_type: 'pop_kpi',
slice_id: 97,
url_params: {
form_data_key:

View File

@@ -17,12 +17,12 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import ChordChartPlugin from '@superset-ui/legacy-plugin-chart-chord';
import data from './data';
import { withResizableChartDemo } from '../../../shared/components/ResizableChartDemo';
new ChordChartPlugin().configure({ key: VizType.Chord }).register();
new ChordChartPlugin().configure({ key: 'chord' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-chord',
@@ -31,7 +31,7 @@ export default {
export const basic = ({ width, height }) => (
<SuperChart
chartType={VizType.Chord}
chartType="chord"
width={width}
height={height}
queriesData={[{ data }]}

View File

@@ -17,12 +17,12 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import HeatmapChartPlugin from '@superset-ui/legacy-plugin-chart-heatmap';
import ResizableChartDemo from '../../../shared/components/ResizableChartDemo';
import data from './data';
new HeatmapChartPlugin().configure({ key: VizType.LegacyHeatmap }).register();
new HeatmapChartPlugin().configure({ key: 'heatmap' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-heatmap',
@@ -30,7 +30,7 @@ export default {
export const basic = () => (
<SuperChart
chartType={VizType.LegacyHeatmap}
chartType="heatmap"
width={500}
height={500}
formData={{
@@ -67,7 +67,7 @@ export const resizable = () => (
<ResizableChartDemo>
{({ width, height }) => (
<SuperChart
chartType={VizType.LegacyHeatmap}
chartType="heatmap"
width={width}
height={height}
formData={{
@@ -104,7 +104,7 @@ export const resizable = () => (
export const withNullData = () => (
<SuperChart
chartType={VizType.LegacyHeatmap}
chartType="heatmap"
width={500}
height={500}
formData={{

View File

@@ -18,13 +18,11 @@
*/
/* eslint-disable no-magic-numbers */
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import HistogramChartPlugin from '@superset-ui/legacy-plugin-chart-histogram';
import data from './data';
new HistogramChartPlugin()
.configure({ key: VizType.LegacyHistogram })
.register();
new HistogramChartPlugin().configure({ key: 'histogram' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-histogram',
@@ -32,7 +30,7 @@ export default {
export const basic = () => (
<SuperChart
chartType={VizType.LegacyHistogram}
chartType="histogram"
width={400}
height={400}
queriesData={[{ data }]}

View File

@@ -17,11 +17,11 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import HorizonChartPlugin from '@superset-ui/legacy-plugin-chart-horizon';
import data from './data';
new HorizonChartPlugin().configure({ key: VizType.Horizon }).register();
new HorizonChartPlugin().configure({ key: 'horizon' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-horizon',
@@ -29,7 +29,7 @@ export default {
export const basic = () => (
<SuperChart
chartType={VizType.Horizon}
chartType="horizon"
width={400}
height={400}
queriesData={[{ data }]}

View File

@@ -17,12 +17,12 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import PartitionChartPlugin from '@superset-ui/legacy-plugin-chart-partition';
import data from './data';
import dummyDatasource from '../../../shared/dummyDatasource';
new PartitionChartPlugin().configure({ key: VizType.Partition }).register();
new PartitionChartPlugin().configure({ key: 'partition' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-partition',
@@ -30,7 +30,7 @@ export default {
export const basic = () => (
<SuperChart
chartType={VizType.Partition}
chartType="partition"
width={400}
height={400}
datasource={dummyDatasource}

View File

@@ -18,11 +18,11 @@
*/
/* eslint-disable no-magic-numbers, sort-keys */
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import RoseChartPlugin from '@superset-ui/legacy-plugin-chart-rose';
import data from './data';
new RoseChartPlugin().configure({ key: VizType.Rose }).register();
new RoseChartPlugin().configure({ key: 'rose' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-rose',
@@ -30,7 +30,7 @@ export default {
export const basic = () => (
<SuperChart
chartType={VizType.Rose}
chartType="rose"
width={400}
height={400}
queriesData={[{ data }]}

View File

@@ -18,12 +18,12 @@
*/
/* eslint-disable no-magic-numbers */
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import SankeyChartPlugin from '@superset-ui/legacy-plugin-chart-sankey';
import ResizableChartDemo from '../../../shared/components/ResizableChartDemo';
import data from './data';
new SankeyChartPlugin().configure({ key: VizType.LegacySankey }).register();
new SankeyChartPlugin().configure({ key: 'sankey' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-plugin-chart-sankey',
@@ -31,7 +31,7 @@ export default {
export const basic = () => (
<SuperChart
chartType={VizType.LegacySankey}
chartType="sankey"
width={400}
height={400}
queriesData={[{ data }]}
@@ -45,7 +45,7 @@ export const resizable = () => (
<ResizableChartDemo>
{({ width, height }) => (
<SuperChart
chartType={VizType.LegacySankey}
chartType="sankey"
width={width}
height={height}
queriesData={[{ data }]}

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import { BigNumberChartPlugin } from '@superset-ui/plugin-chart-echarts';
import testData from './data';
@@ -37,7 +37,7 @@ const formData = {
showTrendLine: true,
startYAxisAtZero: true,
timeGrainSqla: 'P1Y',
vizType: VizType.BigNumber,
vizType: 'big_number',
yAxisFormat: '.3s',
};

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import { BigNumberTotalChartPlugin } from '@superset-ui/plugin-chart-echarts';
import data from './data';
@@ -37,7 +37,7 @@ export const totalBasic = () => (
formData={{
metric: 'sum__num',
subheader: 'total female participants',
vizType: VizType.BigNumberTotal,
vizType: 'big_number_total',
yAxisFormat: '.3s',
}}
/>
@@ -52,7 +52,7 @@ export const totalNoData = () => (
formData={{
metric: 'sum__num',
subheader: 'total female participants',
vizType: VizType.BigNumberTotal,
vizType: 'big_number_total',
yAxisFormat: '.3s',
}}
/>

View File

@@ -17,10 +17,9 @@
* under the License.
*/
import { VizType } from '@superset-ui/core';
import { AreaChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new AreaChartPlugin().configure({ key: VizType.LegacyArea }).register();
new AreaChartPlugin().configure({ key: 'area' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Area',

View File

@@ -17,13 +17,13 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data';
export const controlsShown = () => (
<SuperChart
chartType={VizType.LegacyArea}
chartType="area"
datasource={dummyDatasource}
width={400}
height={400}
@@ -40,7 +40,7 @@ export const controlsShown = () => (
showControls: true,
showLegend: true,
stackedStyle: 'stack',
vizType: VizType.LegacyArea,
vizType: 'area',
xAxisFormat: '%Y',
xAxisLabel: '',
xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data';
export const expanded = () => (
<SuperChart
chartType={VizType.LegacyArea}
chartType="area"
datasource={dummyDatasource}
width={400}
height={400}
@@ -40,7 +40,7 @@ export const expanded = () => (
showControls: false,
showLegend: true,
stackedStyle: 'expand',
vizType: VizType.LegacyArea,
vizType: 'area',
xAxisFormat: '%Y',
xAxisLabel: '',
xAxisShowminmax: false,

View File

@@ -17,14 +17,14 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data';
export const stacked = () => (
<SuperChart
id="stacked-area-chart"
chartType={VizType.LegacyArea}
chartType="area"
datasource={dummyDatasource}
width={400}
height={400}
@@ -41,7 +41,7 @@ export const stacked = () => (
showControls: false,
showLegend: true,
stackedStyle: 'stack',
vizType: VizType.LegacyArea,
vizType: 'area',
xAxisFormat: '%Y',
xAxisLabel: '',
xAxisShowminmax: false,

View File

@@ -17,13 +17,13 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data';
export const stackedWithYAxisBounds = () => (
<SuperChart
chartType={VizType.LegacyArea}
chartType="area"
datasource={dummyDatasource}
width={400}
height={400}
@@ -40,7 +40,7 @@ export const stackedWithYAxisBounds = () => (
showControls: false,
showLegend: true,
stackedStyle: 'stack',
vizType: VizType.LegacyArea,
vizType: 'area',
xAxisFormat: '%Y',
xAxisLabel: '',
xAxisShowminmax: false,
@@ -56,7 +56,7 @@ stackedWithYAxisBounds.storyName = 'Stacked with yAxisBounds';
export const stackedWithYAxisBoundsMinOnly = () => (
<SuperChart
chartType={VizType.LegacyArea}
chartType="area"
datasource={dummyDatasource}
width={400}
height={400}
@@ -73,7 +73,7 @@ export const stackedWithYAxisBoundsMinOnly = () => (
showControls: true,
showLegend: true,
stackedStyle: 'stack',
vizType: VizType.LegacyArea,
vizType: 'area',
xAxisFormat: '%Y',
xAxisLabel: '',
xAxisShowminmax: false,

View File

@@ -17,10 +17,9 @@
* under the License.
*/
import { VizType } from '@superset-ui/core';
import { BarChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
new BarChartPlugin().configure({ key: VizType.LegacyBar }).register();
new BarChartPlugin().configure({ key: 'bar' }).register();
export default {
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bar',

View File

@@ -17,13 +17,13 @@
* under the License.
*/
import { SuperChart, VizType } from '@superset-ui/core';
import { SuperChart } from '@superset-ui/core';
import dummyDatasource from '../../../../../shared/dummyDatasource';
import data from '../data';
export const barWithPositiveAndNegativeValues = () => (
<SuperChart
chartType={VizType.LegacyBar}
chartType="bar"
width={400}
height={400}
datasource={dummyDatasource}
@@ -51,7 +51,7 @@ export const barWithPositiveAndNegativeValues = () => (
showControls: false,
showLegend: true,
stackedStyle: 'stack',
vizType: VizType.LegacyBar,
vizType: 'bar',
xAxisFormat: '%Y',
xAxisLabel: '',
xAxisShowminmax: false,

Some files were not shown because too many files have changed in this diff Show More