Mirror of https://github.com/apache/superset.git, synced 2026-05-03 06:54:19 +00:00

Compare commits: remove-mor...nix-button (146 commits)
Commit SHA1s:

79aff6827c
079e7327a2
48864ce8c7
2816a70af3
6af22a9cdd
827fe06903
45815d8642
cf5c770adc
638f82b46d
e0e1eea9ce
27c7240185
5ca2a8f670
2d60a2d48c
b70c8ee7a8
a3fd7423b0
f679a18e82
77f3764fea
1e0c04fc15
56b973f3cc
3479574bd4
aa55751b1d
6c2aade375
f51f19bcba
1d44662b1d
25f4226dbb
dd1ba96adf
d4888fa4af
b3559f644c
fe80fb1090
43efa05113
e5e3f9e210
468dfed416
3564740255
8020729ced
deec63bb5b
339d491dfc
d66ac9f3f4
06fb330569
ce0e06a935
5006f97f70
24d001e498
eab888c63a
3d3c09d299
97dde8c485
14682b9054
93ba8e16c3
dbcb473040
f0811c8863
0166db9663
c26f073134
45668e31fc
529aed5da1
09802acf0d
9224051b80
fd9d3301f6
68499a1199
f077323e6f
7f2e752796
97683ec052
73164c61ad
564c168420
95f4fe0cb8
bbc6d374ea
316da5e5f5
e2b9b8e9fd
7154b8d40f
fcb3ff3a41
342cfc41ec
aa7d3b0f96
3e28bd2cfa
cc1eec69df
3fa0de4293
2ad8af71b5
b648cc1168
f24bf873bf
e0a5033596
ef14d58c64
547a4adef5
5256a2f194
0560c2615d
ff282492a1
312dc1c749
1e26c34758
decaba72c3
7e8c77e636
ba99980cf4
c62f722f99
3fd23508bc
9ff9e0299b
6488ced3d3
9a2be95159
ef4e03c9fe
ca2f0288e5
ca63760a4b
83924f7e10
c4a56c3f6e
cf134ab3aa
043c585008
0d346d4414
9067371234
40fe05c5e2
e3bdfb5def
55f0713a2f
5aee59cc3a
94d3774d9e
b665254f39
4dc8cce8e8
d206a20ce7
6fcc282a4e
93c35a7ba5
9dfa8d5f8f
87504056fe
429c18f9e8
5bddc81f60
9837b4a61e
454f143661
7376dfc6e9
838d47d578
14e81d0a9a
f68c2b2454
814c3dfecc
b8aade776b
e092e6002d
673754d16e
27deeb2f51
9a7a84c7a0
a3d2588313
5c87fee282
b24323d500
824aca85d0
1e4098a29e
3aa8f32ca9
bf42ea70ba
d69da5f0f5
078257dd1b
8c1c2570b3
a80803566d
f551f5b7b6
1978cde4f1
c5f6cc6382
e9e2c0bee8
33a9817388
91301bcd5b
67ad7da5cc
e0deb704f9
abf3790ea6
.asf.yaml (10 changes)

@@ -53,6 +53,9 @@ github:
    merge: false
    rebase: false

+  ghp_branch: gh-pages
+  ghp_path: /

  protected_branches:
    master:
      required_status_checks:
@@ -88,3 +91,10 @@ github:
        required_approving_review_count: 1

      required_signatures: false
+    gh-pages:
+      required_pull_request_reviews:
+        dismiss_stale_reviews: false
+        require_code_owner_reviews: true
+        required_approving_review_count: 1
+
+      required_signatures: false
.gitattributes (1 change, vendored)

@@ -1,2 +1,3 @@
 docker/**/*.sh text eol=lf
 *.svg binary
+*.ipynb binary
.github/ISSUE_TEMPLATE/bug-report.yml (4 changes, vendored)

@@ -41,8 +41,8 @@ body:
      label: Superset version
      options:
        - master / latest-dev
-       - "4.1.0"
-       - "3.1.3"
+       - "4.1.1"
+       - "4.0.2"
    validations:
      required: true
  - type: dropdown
.github/actions/setup-backend/action.yml (6 changes, vendored)

@@ -43,11 +43,11 @@ runs:
    run: |
      if [ "${{ inputs.install-superset }}" = "true" ]; then
        sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev
-       pip install --upgrade pip setuptools wheel
+       pip install --upgrade pip setuptools wheel uv
        if [ "${{ inputs.requirements-type }}" = "dev" ]; then
-         pip install -r requirements/development.txt
+         uv pip install --system -r requirements/development.txt
        elif [ "${{ inputs.requirements-type }}" = "base" ]; then
-         pip install -r requirements/base.txt
+         uv pip install --system -r requirements/base.txt
        fi
      fi
    shell: bash
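The step above swaps plain `pip` installs for `uv`'s pip-compatible interface. A minimal local sketch of the same sequence, assuming a working Python toolchain, for reproducing the CI behavior outside of GitHub Actions:

```bash
# Bootstrap uv alongside pip, then install requirements through uv (as CI now does)
pip install --upgrade pip setuptools wheel uv
uv pip install --system -r requirements/development.txt  # or requirements/base.txt
```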
.github/workflows/bump-python-package.yml (11 changes, vendored)

@@ -14,6 +14,12 @@ on:
      required: true
      description: Max number of PRs to open (0 for no limit)
      default: 5
+   extra-flags:
+     required: false
+     default: --only-base
+     description: Additional flags to pass to the bump-python command
  #schedule:
  #  - cron: '0 0 * * *' # Runs daily at midnight UTC

jobs:
  bump-python-package:
@@ -59,10 +65,13 @@ jobs:
          GROUP_OPT="-g ${{ github.event.inputs.group }}"
        fi

+       EXTRA_FLAGS="${{ github.event.inputs.extra-flags }}"
+
        supersetbot bump-python \
          --verbose \
          --use-current-repo \
          --include-subpackages \
          --limit ${{ github.event.inputs.limit }} \
          $PACKAGE_OPT \
-         $GROUP_OPT
+         $GROUP_OPT \
+         $EXTRA_FLAGS
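With the `extra-flags` input added above, the workflow can be dispatched by hand. A sketch using the GitHub CLI (values illustrative; input names are the ones defined above):

```bash
gh workflow run bump-python-package.yml \
  --field limit=5 \
  --field extra-flags="--only-base"
```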
.github/workflows/ephemeral-env.yml (135 changes, vendored)

@@ -1,30 +1,25 @@
name: Ephemeral env workflow

# Example manual trigger: gh workflow run ephemeral-env.yml --ref fix_ephemerals --field comment_body="/testenv up" --field issue_number=666

on:
  issue_comment:
    types: [created]
  workflow_dispatch:
    inputs:
      comment_body:
        description: 'Comment body to simulate /testenv command'
        required: true
        default: '/testenv up'
      issue_number:
        description: 'Issue or PR number'
        required: true

jobs:
  config:
    runs-on: "ubuntu-22.04"
    if: github.event.issue.pull_request
    outputs:
      has-secrets: ${{ steps.check.outputs.has-secrets }}
    steps:
      - name: "Check for secrets"
        id: check
        shell: bash
        run: |
          if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
          fi

  ephemeral-env-comment:
    concurrency:
-     group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment
+     group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-comment
      cancel-in-progress: true
    needs: config
    if: needs.config.outputs.has-secrets
    name: Evaluate ephemeral env comment trigger (/testenv)
    runs-on: ubuntu-22.04
    permissions:
@@ -44,18 +39,18 @@ jobs:
        with:
          result-encoding: string
          script: |
-           const pattern = /^\/testenv (up|down)/
-           const result = pattern.exec(context.payload.comment.body)
-           return result === null ? 'noop' : result[1]
+           const pattern = /^\/testenv (up|down)/;
+           const result = pattern.exec('${{ github.event.inputs.comment_body || github.event.comment.body }}');
+           return result === null ? 'noop' : result[1];

-     - name: Eval comment body for feature flags
+     - name: Looking for feature flags
        uses: actions/github-script@v7
        id: eval-feature-flags
        with:
          script: |
            const pattern = /FEATURE_(\w+)=(\w+)/g;
            let results = [];
-           [...context.payload.comment.body.matchAll(pattern)].forEach(match => {
+           [...'${{ github.event.inputs.comment_body || github.event.comment.body }}'.matchAll(pattern)].forEach(match => {
              const config = {
                name: `SUPERSET_FEATURE_${match[1]}`,
                value: match[2],
@@ -67,24 +62,48 @@ jobs:
      - name: Limit to committers
        if: >
          steps.eval-body.outputs.result != 'noop' &&
          github.event_name == 'issue_comment' &&
          github.event.comment.author_association != 'MEMBER' &&
          github.event.comment.author_association != 'OWNER'
        uses: actions/github-script@v7
        with:
-         github-token: ${{github.token}}
+         github-token: ${{ github.token }}
          script: |
-           const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.'
+           const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.';
            github.rest.issues.createComment({
              issue_number: ${{ github.event.issue.number }},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: errMsg
-           })
-           core.setFailed(errMsg)
+           });
+           core.setFailed(errMsg);

+     - name: Reply with confirmation comment
+       uses: actions/github-script@v7
+       with:
+         github-token: ${{ secrets.GITHUB_TOKEN }}
+         script: |
+           const issueNumber = ${{ github.event.inputs.issue_number || github.event.issue.number }};
+           const user = '${{ github.event.comment.user.login || github.actor }}';
+           const action = '${{ steps.eval-body.outputs.result }}';
+           const runId = context.runId;
+           const workflowUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${runId}`;
+           const body = action === 'noop'
+             ? `@${user} No ephemeral environment action detected. Please use '/testenv up' or '/testenv down'. [View workflow run](${workflowUrl}).`
+             : `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;
+           if (action !== 'noop') {
+             await github.rest.issues.createComment({
+               owner: context.repo.owner,
+               repo: context.repo.repo,
+               issue_number: issueNumber,
+               body,
+             });
+           }

  ephemeral-docker-build:
    concurrency:
-     group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build
+     group: ${{ github.workflow }}-${{ github.event.inputs.issue_number || github.event.issue.number || github.run_id }}-build
      cancel-in-progress: true
    needs: ephemeral-env-comment
    name: ephemeral-docker-build
@@ -98,9 +117,9 @@ jobs:
          const request = {
            owner: context.repo.owner,
            repo: context.repo.repo,
-           pull_number: ${{ github.event.issue.number }},
-         }
-         core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`)
+           pull_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
+         };
+         core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`);
          const pr = await github.rest.pulls.get(request);
          return pr.data;

@@ -121,12 +140,17 @@ jobs:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

+     - name: Setup supersetbot
+       uses: ./.github/actions/setup-supersetbot/
+
      - name: Build ephemeral env image
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
-         ./scripts/build_docker.py \
-           "ci" \
-           "pull_request" \
-           --build_context_ref ${{ github.event.issue.number }}
+         supersetbot docker \
+           --preset ci \
+           --platform linux/amd64 \
+           --context-ref "$RELEASE"

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
@@ -146,7 +170,7 @@ jobs:
          ECR_REPOSITORY: superset-ci
          IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci
        run: |
-         docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci
+         docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci
          docker push -a $ECR_REGISTRY/$ECR_REPOSITORY

  ephemeral-env-up:
@@ -181,22 +205,22 @@ jobs:
          aws ecr describe-images \
            --registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \
            --repository-name superset-ci \
-           --image-ids imageTag=pr-${{ github.event.issue.number }}-ci
+           --image-ids imageTag=pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci

      - name: Fail on missing container image
        if: steps.check-image.outcome == 'failure'
        uses: actions/github-script@v7
        with:
-         github-token: ${{github.token}}
+         github-token: ${{ github.token }}
          script: |
-           const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.'
+           const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.';
            github.rest.issues.createComment({
-             issue_number: ${{ github.event.issue.number }},
+             issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: errMsg
-           })
-           core.setFailed(errMsg)
+           });
+           core.setFailed(errMsg);

      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
@@ -204,7 +228,7 @@ jobs:
        with:
          task-definition: .github/workflows/ecs-task-definition.json
          container-name: superset-ci
-         image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci
+         image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-ci

      - name: Update env vars in the Amazon ECS task definition
        run: |
@@ -213,30 +237,29 @@ jobs:
      - name: Describe ECS service
        id: describe-services
        run: |
-         echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
+         echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT
      - name: Create ECS service
-       if: steps.describe-services.outputs.active != 'true'
+       id: create-service
+       if: steps.describe-services.outputs.active != 'true'
        env:
          ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974
          ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91
        run: |
          aws ecs create-service \
            --cluster superset-ci \
-           --service-name pr-${{ github.event.issue.number }}-service \
+           --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service \
            --task-definition superset-ci \
            --launch-type FARGATE \
            --desired-count 1 \
            --platform-version LATEST \
            --network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \
-           --tags key=pr,value=${{ github.event.issue.number }} key=github_user,value=${{ github.actor }}
+           --tags key=pr,value=${{ github.event.inputs.issue_number || github.event.issue.number }} key=github_user,value=${{ github.actor }}
      - name: Deploy Amazon ECS task definition
        id: deploy-task
        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
-         service: pr-${{ github.event.issue.number }}-service
+         service: pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service
          cluster: superset-ci
          wait-for-service-stability: true
          wait-for-minutes: 10
@@ -244,18 +267,15 @@ jobs:
      - name: List tasks
        id: list-tasks
        run: |
-         echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
-
+         echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.inputs.issue_number || github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT
      - name: Get network interface
        id: get-eni
        run: |
-         echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT
-
+         echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name==\"networkInterfaceId\")) | .[0] | .value')" >> $GITHUB_OUTPUT
      - name: Get public IP
        id: get-ip
        run: |
          echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT

      - name: Comment (success)
        if: ${{ success() }}
        uses: actions/github-script@v7
@@ -263,12 +283,11 @@ jobs:
          github-token: ${{github.token}}
          script: |
            github.rest.issues.createComment({
-             issue_number: ${{ github.event.issue.number }},
+             issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
              owner: context.repo.owner,
              repo: context.repo.repo,
-             body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
+             body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.'
            })

      - name: Comment (failure)
        if: ${{ failure() }}
        uses: actions/github-script@v7
@@ -276,8 +295,8 @@ jobs:
          github-token: ${{github.token}}
          script: |
            github.rest.issues.createComment({
-             issue_number: ${{ github.event.issue.number }},
+             issue_number: ${{ github.event.inputs.issue_number || github.event.issue.number }},
              owner: context.repo.owner,
              repo: context.repo.repo,
-             body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
+             body: '@${{ github.event.inputs.user_login || github.event.comment.user.login }} Ephemeral environment creation failed. Please check the Actions logs for details.'
            })
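The changes thread the `workflow_dispatch` inputs through the comment-driven logic, so the flow can be exercised without a real PR comment. A sketch based on the example trigger in the workflow's own header comment (the feature-flag name is hypothetical, matching the `FEATURE_(\w+)=(\w+)` pattern parsed above):

```bash
gh workflow run ephemeral-env.yml --ref master \
  --field comment_body="/testenv up FEATURE_DASHBOARD_RBAC=true" \
  --field issue_number=666
```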
.github/workflows/superset-frontend.yml (2 changes, vendored)

@@ -73,7 +73,7 @@ jobs:
        working-directory: ./superset-frontend/packages/generator-superset
        run: npm run test
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: javascript
          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/superset-helm-lint.yml (2 changes, vendored)

@@ -1,4 +1,4 @@
-name: Lint and Test Charts
+name: "Helm: lint and test charts"

on:
  pull_request:
.github/workflows/superset-helm-release.yml (86 changes, vendored)

@@ -1,4 +1,8 @@
-name: Release Charts
+# This workflow automates the release process for Helm charts.
+# The workflow creates a new branch for the release and opens a pull request against the 'gh-pages' branch,
+# allowing the changes to be reviewed and merged manually.
+
+name: "Helm: release charts"

on:
  push:
@@ -7,18 +11,28 @@ on:
      - "[0-9].[0-9]*"
    paths:
      - "helm/**"
+ workflow_dispatch:
+   inputs:
+     ref:
+       description: "The branch, tag, or commit SHA to check out"
+       required: false
+       default: "master"

jobs:
  release:
    runs-on: ubuntu-22.04
+   permissions:
+     contents: write
+     pull-requests: write
+   env:
+     GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

    steps:
-     - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+     - name: Checkout code
        uses: actions/checkout@v4
        with:
-         persist-credentials: false
+         ref: ${{ inputs.ref || github.ref_name }}
+         persist-credentials: true
+         submodules: recursive
          fetch-depth: 0

@@ -35,11 +49,77 @@ jobs:
      - name: Add bitnami repo dependency
        run: helm repo add bitnami https://charts.bitnami.com/bitnami

+     - name: Fetch/list all tags
+       run: |
+         # Debugging tags
+         git fetch --tags --force
+         git tag -d superset-helm-chart-0.13.4 || true
+         echo "DEBUG TAGS"
+         git show-ref --tags
+
+     - name: Create unique pages branch name
+       id: vars
+       run: echo "branch_name=helm-publish-${GITHUB_SHA:0:7}" >> $GITHUB_ENV
+
+     - name: Force recreate branch from gh-pages
+       run: |
+         # Ensure a clean working directory
+         git reset --hard
+         git clean -fdx
+         git checkout -b local_gha_temp
+         git submodule update
+
+         # Fetch the latest gh-pages branch
+         git fetch origin gh-pages
+
+         # Check out and reset the target branch based on gh-pages
+         git checkout -B ${{ env.branch_name }} origin/gh-pages
+
+         # Remove submodules from the branch
+         git submodule deinit -f --all
+
+         # Force push to the remote branch
+         git push origin ${{ env.branch_name }} --force
+
+         # Return to the original branch
+         git checkout local_gha_temp
+
+     - name: Fetch/list all tags
+       run: |
+         git submodule update
+         cat .github/actions/chart-releaser-action/action.yml
+
      - name: Run chart-releaser
        uses: ./.github/actions/chart-releaser-action
        with:
          version: v1.6.0
          charts_dir: helm
          mark_as_latest: false
+         pages_branch: ${{ env.branch_name }}
        env:
          CR_TOKEN: "${{ github.token }}"
+         CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
+
+     - name: Open Pull Request
+       uses: actions/github-script@v7
+       with:
+         script: |
+           const branchName = '${{ env.branch_name }}';
+           const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+
+           if (!branchName) {
+             throw new Error("Branch name is not defined.");
+           }
+
+           const pr = await github.rest.pulls.create({
+             owner,
+             repo,
+             title: `Helm chart release for ${branchName}`,
+             head: branchName,
+             base: "gh-pages", // Adjust if the target branch is different
+             body: `This PR releases Helm charts to the gh-pages branch.`,
+           });
+
+           core.info(`Pull request created: ${pr.data.html_url}`);
+       env:
+         BRANCH_NAME: ${{ env.branch_name }}
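Given the `workflow_dispatch` trigger and `ref` input added above, a manual release run might look like this (a sketch using the GitHub CLI):

```bash
gh workflow run superset-helm-release.yml --field ref=master
```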
@@ -68,7 +68,7 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: python,mysql
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -129,7 +129,7 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: python,postgres
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -181,7 +181,7 @@ jobs:
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: python,sqlite
          token: ${{ secrets.CODECOV_TOKEN }}

@@ -77,7 +77,7 @@ jobs:
        run: |
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: python,presto
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -142,9 +142,10 @@ jobs:
      - name: Python unit tests (PostgreSQL)
        if: steps.check.outputs.python
        run: |
+         pip install -e .[hive]
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: python,hive
          token: ${{ secrets.CODECOV_TOKEN }}

@@ -46,7 +46,7 @@ jobs:
        run: |
          pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
      - name: Upload code coverage
-       uses: codecov/codecov-action@v4
+       uses: codecov/codecov-action@v5
        with:
          flags: python,unit
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -16,11 +16,11 @@
#
repos:
  - repo: https://github.com/MarcoGorelli/auto-walrus
-   rev: v0.2.2
+   rev: 0.3.4
    hooks:
      - id: auto-walrus
  - repo: https://github.com/pre-commit/mirrors-mypy
-   rev: v1.3.0
+   rev: v1.13.0
    hooks:
      - id: mypy
        args: [--check-untyped-defs]
@@ -39,11 +39,11 @@ repos:
          types-Markdown,
        ]
  - repo: https://github.com/peterdemin/pip-compile-multi
-   rev: v2.6.2
+   rev: v2.6.4
    hooks:
      - id: pip-compile-multi-verify
  - repo: https://github.com/pre-commit/pre-commit-hooks
-   rev: v4.4.0
+   rev: v5.0.0
    hooks:
      - id: check-docstring-first
      - id: check-added-large-files
@@ -56,7 +56,7 @@ repos:
        exclude: ^.*\.(snap)
        args: ["--markdown-linebreak-ext=md"]
  - repo: https://github.com/pre-commit/mirrors-prettier
-   rev: v3.1.0 # Use the sha or tag you want to point at
+   rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
    hooks:
      - id: prettier
        additional_dependencies:
@@ -70,12 +70,12 @@ repos:
      - id: blacklist
        args: ["--blacklisted-names=make_url", "--ignore=tests/"]
  - repo: https://github.com/norwoodj/helm-docs
-   rev: v1.11.0
+   rev: v1.14.2
    hooks:
      - id: helm-docs
        files: helm
  - repo: https://github.com/astral-sh/ruff-pre-commit
-   rev: v0.4.0
+   rev: v0.8.0
    hooks:
      - id: ruff
        args: [ --fix ]
@@ -70,6 +70,7 @@ google-sheets.svg
ibm-db2.svg
postgresql.svg
snowflake.svg
+ydb.svg

# docs-related
erd.puml
Dockerfile (183 changes)

@@ -20,44 +20,38 @@
######################################################################
ARG PY_VER=3.10-slim-bookworm

-# if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
+# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node

# Arguments for build configuration
ARG NPM_BUILD_CMD="build"
-ARG BUILD_TRANSLATIONS="false" # Include translations in the final build
-ARG DEV_MODE="false" # Skip frontend build in dev mode
-ARG INCLUDE_CHROMIUM="true" # Include headless Chromium for alerts & reports
-ARG INCLUDE_FIREFOX="false" # Include headless Firefox if enabled
+
+# Include translations in the final build. The default supports en only to
+# reduce complexity and weight for those only using en
+ARG BUILD_TRANSLATIONS="false"
+
+# Used by docker-compose to skip the frontend build,
+# in dev we mount the repo and build the frontend inside docker
+ARG DEV_MODE="false"
+
+# Include headless browsers? Allows for alerts, reports & thumbnails, but bloats the images
+ARG INCLUDE_CHROMIUM="true"
+ARG INCLUDE_FIREFOX="false"

-# Somehow we need python3 + build-essential on this side of the house to install node-gyp
-RUN apt-get update -qq \
-    && apt-get install \
-    -yqq --no-install-recommends \
-    build-essential \
-    python3 \
-    zstd
+# Install system dependencies required for node-gyp
+RUN --mount=type=bind,source=./docker,target=/docker \
+    /docker/apt-install.sh build-essential python3 zstd

+# Define environment variables for frontend build
ENV BUILD_CMD=${NPM_BUILD_CMD} \
    PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
-# NPM ci first, as to NOT invalidate previous steps except for when package.json changes

-RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \
-    /frontend-mem-nag.sh
+# Run the frontend memory monitoring script
+RUN --mount=type=bind,source=./docker,target=/docker \
+    /docker/frontend-mem-nag.sh

WORKDIR /app/superset-frontend
-# Creating empty folders to avoid errors when running COPY later on
-RUN mkdir -p /app/superset/static/assets
-RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \
-    --mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \
+
+# Create necessary folders to avoid errors in subsequent steps
+RUN mkdir -p /app/superset/static/assets \
+             /app/superset/translations
+
+# Mount package files and install dependencies if not in dev mode
+RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \
+    --mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \
    if [ "$DEV_MODE" = "false" ]; then \
        npm ci; \
    else \
@@ -66,33 +60,39 @@ RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json

# Runs the webpack build process
COPY superset-frontend /app/superset-frontend
-# This copies the .po files needed for translation
-RUN mkdir -p /app/superset/translations

+# Copy translation files
COPY superset/translations /app/superset/translations

+# Build the frontend if not in dev mode
RUN if [ "$DEV_MODE" = "false" ]; then \
        BUILD_TRANSLATIONS=$BUILD_TRANSLATIONS npm run ${BUILD_CMD}; \
    else \
        echo "Skipping 'npm run ${BUILD_CMD}' in dev mode"; \
    fi

-# Compiles .json files from the .po files, then deletes the .po files
+# Compile .json files from .po translations (if required) and clean up .po files
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
        npm run build-translation; \
    else \
        echo "Skipping translations as requested by build flag"; \
-   fi
-RUN rm /app/superset/translations/*/LC_MESSAGES/*.po
-RUN rm /app/superset/translations/messages.pot
+   fi \
+   # removing translations files regardless
+   && rm -rf /app/superset/translations/*/LC_MESSAGES/*.po \
+             /app/superset/translations/messages.pot

# Transition to Python base image
FROM python:${PY_VER} AS python-base
RUN pip install --no-cache-dir --upgrade setuptools pip uv

######################################################################
# Final lean image...
######################################################################
FROM python-base AS lean

-# Include translations in the final build. The default supports en only to
-# reduce complexity and weight for those only using en
+# Build argument for including translations
ARG BUILD_TRANSLATIONS="false"

WORKDIR /app
@@ -104,9 +104,16 @@ ENV LANG=C.UTF-8 \
    SUPERSET_HOME="/app/superset_home" \
    SUPERSET_PORT=8088

-RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \
+# Set up necessary directories and user
+RUN --mount=type=bind,source=./docker,target=/docker \
+    mkdir -p ${PYTHONPATH} \
+             superset/static \
+             requirements \
+             superset-frontend \
+             apache_superset.egg-info \
+             requirements \
    && useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \
-   && apt-get update -qq && apt-get install -yqq --no-install-recommends \
+   && /docker/apt-install.sh \
        curl \
        libsasl2-dev \
        libsasl2-modules-gssapi-mit \
@@ -115,60 +122,64 @@ RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache
        libldap2-dev \
    && touch superset/static/version_info.json \
    && chown -R superset:superset ./* \
-   && rm -rf /var/lib/apt/lists/*
+   && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*

# Copy required files for Python build
COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./
# setup.py uses the version information in package.json
COPY --chown=superset:superset superset-frontend/package.json superset-frontend/
COPY --chown=superset:superset requirements/base.txt requirements/
+COPY --chown=superset:superset scripts/check-env.py scripts/
-RUN --mount=type=cache,target=/root/.cache/pip \
-    apt-get update -qq && apt-get install -yqq --no-install-recommends \
-    build-essential \
-    && pip install --no-cache-dir --upgrade setuptools pip \
-    && pip install --no-cache-dir -r requirements/base.txt \
-    && apt-get autoremove -yqq --purge build-essential \
-    && rm -rf /var/lib/apt/lists/*

-# Copy the compiled frontend assets
+# Install Python dependencies using docker/pip-install.sh
+RUN --mount=type=bind,source=./docker,target=/docker \
+    --mount=type=cache,target=/root/.cache/pip \
+    /docker/pip-install.sh --requires-build-essential -r requirements/base.txt

+# Copy the compiled frontend assets from the node image
COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets

-## Lastly, let's install superset itself
+# Copy the main Superset source code
COPY --chown=superset:superset superset superset
-RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install --no-cache-dir -e .

-# Copy the .json translations from the frontend layer
+# Install Superset itself using docker/pip-install.sh
+RUN --mount=type=bind,source=./docker,target=/docker \
+    --mount=type=cache,target=/root/.cache/pip \
+    /docker/pip-install.sh -e .

+# Copy .json translations from the node image
COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations

-# Compile translations for the backend - this generates .mo files, then deletes the .po files
+# Compile backend translations and clean up
COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/
RUN if [ "$BUILD_TRANSLATIONS" = "true" ]; then \
        ./scripts/translations/generate_mo_files.sh \
-       && chown -R superset:superset superset/translations \
-       && rm superset/translations/messages.pot \
-       && rm superset/translations/*/LC_MESSAGES/*.po; \
+       && chown -R superset:superset superset/translations; \
    else \
        echo "Skipping translations as requested by build flag"; \
-   fi
+   fi \
+   && rm -rf superset/translations/messages.pot \
+            superset/translations/*/LC_MESSAGES/*.po

# Add server run script
COPY --chmod=755 ./docker/run-server.sh /usr/bin/
-USER superset

+# Set user and healthcheck
+USER superset
HEALTHCHECK CMD curl -f "http://localhost:${SUPERSET_PORT}/health"

+# Expose port and set CMD
EXPOSE ${SUPERSET_PORT}

CMD ["/usr/bin/run-server.sh"]

######################################################################
# Dev image...
######################################################################
FROM lean AS dev

USER root
-RUN apt-get update -qq \
-    && apt-get install -yqq --no-install-recommends \
+
+# Install dev dependencies
+RUN --mount=type=bind,source=./docker,target=/docker \
+    /docker/apt-install.sh \
        libnss3 \
        libdbus-glib-1-2 \
        libgtk-3-0 \
@@ -176,46 +187,46 @@ RUN apt-get update -qq \
        libasound2 \
        libxtst6 \
        git \
-       pkg-config \
-    && rm -rf /var/lib/apt/lists/*
+       pkg-config

# Install Playwright and its dependencies
RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install --no-cache-dir playwright
-RUN playwright install-deps
+    uv pip install --system playwright \
+    && playwright install-deps

# Optionally install Chromium
RUN if [ "$INCLUDE_CHROMIUM" = "true" ]; then \
        playwright install chromium; \
    else \
-       echo "Skipping translations in dev mode"; \
+       echo "Skipping Chromium installation in dev mode"; \
    fi

-# Install GeckoDriver WebDriver
-ARG GECKODRIVER_VERSION=v0.34.0 \
-    FIREFOX_VERSION=125.0.3
-
-RUN if [ "$INCLUDE_FIREFOX" = "true" ]; then \
-       apt-get update -qq \
-       && apt-get install -yqq --no-install-recommends wget bzip2 \
+# Install GeckoDriver WebDriver and Firefox (if required)
+ARG GECKODRIVER_VERSION=v0.34.0
+ARG FIREFOX_VERSION=125.0.3
+RUN --mount=type=bind,source=./docker,target=/docker \
+    if [ "$INCLUDE_FIREFOX" = "true" ]; then \
+       /docker/apt-install.sh wget bzip2 \
        && wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \
        && wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \
        && ln -s /opt/firefox/firefox /usr/local/bin/firefox \
-       && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/*; \
+       && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* /var/cache/apt/archives/*; \
    else \
        echo "Skipping Firefox installation in dev mode"; \
    fi

-# Installing mysql client os-level dependencies in dev image only because GPL
-RUN apt-get install -yqq --no-install-recommends \
-    default-libmysqlclient-dev \
-    && rm -rf /var/lib/apt/lists/*
+# Install MySQL client dependencies
+RUN --mount=type=bind,source=./docker,target=/docker \
+    /docker/apt-install.sh default-libmysqlclient-dev

+# Copy development requirements and install them
COPY --chown=superset:superset requirements/development.txt requirements/
-RUN --mount=type=cache,target=/root/.cache/pip \
-    apt-get update -qq && apt-get install -yqq --no-install-recommends \
-    build-essential \
-    && pip install --no-cache-dir -r requirements/development.txt \
-    && apt-get autoremove -yqq --purge build-essential \
-    && rm -rf /var/lib/apt/lists/*
+RUN --mount=type=bind,source=./docker,target=/docker \
+    --mount=type=cache,target=/root/.cache/pip \
+    /docker/pip-install.sh --requires-build-essential -r requirements/development.txt

USER superset

######################################################################
# CI image...
######################################################################
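The refactored Dockerfile keeps its multi-stage targets (`lean`, `dev`) and build args. A sketch of local builds exercising the args touched above (image tags are illustrative):

```bash
# Lean image with translations compiled in (BUILD_TRANSLATIONS defaults to "false")
docker build --target lean --build-arg BUILD_TRANSLATIONS=true -t superset:lean .

# Dev image without headless Firefox (the INCLUDE_FIREFOX default shown above)
docker build --target dev --build-arg INCLUDE_FIREFOX=false -t superset:dev .
```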
@@ -136,6 +136,7 @@ Here are some of the major database solutions that are supported:
<img src="https://superset.apache.org/img/databases/oceanbase.svg" alt="oceanbase" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/sap-hana.png" alt="sap-hana" border="0" width="220" />
<img src="https://superset.apache.org/img/databases/denodo.png" alt="denodo" border="0" width="200" />
+<img src="https://superset.apache.org/img/databases/ydb.svg" alt="ydb" border="0" width="200" />
</p>

**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases).
@@ -24,6 +24,7 @@ assists people when migrating to a new version.

## Next

+- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName".
- [29798](https://github.com/apache/superset/pull/29798) Since 3.1.0, the initial schedule for an alert or report was mistakenly offset by the specified timezone's relation to UTC. The initial schedule should now begin at the correct time.
- [30021](https://github.com/apache/superset/pull/30021) The `dev` layer in our Dockerfile no longer includes Firefox binaries, only Chromium, to reduce bloat and docker build time.
- [30099](https://github.com/apache/superset/pull/30099) Translations are no longer included in the default docker image builds. If your environment requires translations, you'll want to set the docker build arg `BUILD_TRANSLATIONS=true`.
@@ -25,7 +25,6 @@ x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
  - db
  - redis
- - superset-checks
x-superset-volumes: &superset-volumes
  # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
  - ./docker:/app/docker
@@ -131,23 +130,6 @@ services:
      - REDIS_PORT=6379
      - REDIS_SSL=false

- superset-checks:
-   build:
-     context: .
-     target: python-base
-     cache_from:
-       - apache/superset-cache:3.10-slim-bookworm
-   container_name: superset_checks
-   command: ["/app/scripts/check-env.py"]
-   env_file:
-     - path: docker/.env # default
-       required: true
-     - path: docker/.env-local # optional override
-       required: false
-   user: *superset-user
-   healthcheck:
-     disable: true

  superset-init:
    build:
      <<: *common-build
@@ -179,6 +161,7 @@ services:
      # set this to false if you have perf issues running the npm i; npm run dev in-docker
      # if you do so, you have to run this manually on the host, which should perform better!
      BUILD_SUPERSET_FRONTEND_IN_DOCKER: true
      NPM_RUN_PRUNE: false
+     SCARF_ANALYTICS: "${SCARF_ANALYTICS:-}"
    container_name: superset_node
    command: ["/app/docker/docker-frontend.sh"]
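The `SCARF_ANALYTICS` line added above passes the variable from the host environment into the node container, so telemetry can be toggled at compose time; a sketch:

```bash
# Opt out of Scarf analytics for the frontend container via the passthrough above
SCARF_ANALYTICS=false docker compose up
```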
docker/apt-install.sh (new executable file, 51 lines)

#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail

# Ensure this script is run as root
if [[ $EUID -ne 0 ]]; then
    echo "This script must be run as root" >&2
    exit 1
fi

# Check for required arguments
if [[ $# -lt 1 ]]; then
    echo "Usage: $0 <package1> [<package2> ...]" >&2
    exit 1
fi

# Colors for better logging (optional)
GREEN='\033[0;32m'
RED='\033[0;31m'
RESET='\033[0m'

# Install packages with clean-up
echo -e "${GREEN}Updating package lists...${RESET}"
apt-get update -qq

echo -e "${GREEN}Installing packages: $@${RESET}"
apt-get install -yqq --no-install-recommends "$@"

echo -e "${GREEN}Autoremoving unnecessary packages...${RESET}"
apt-get autoremove -y

echo -e "${GREEN}Cleaning up package cache and metadata...${RESET}"
apt-get clean
rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/* /tmp/* /var/tmp/*

echo -e "${GREEN}Installation and cleanup complete.${RESET}"
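This helper is what the Dockerfile stages above bind-mount and invoke. Typical usage, mirroring those stages (package list illustrative):

```bash
# Inside a Dockerfile stage, with ./docker bind-mounted at /docker:
#   RUN --mount=type=bind,source=./docker,target=/docker \
#       /docker/apt-install.sh build-essential python3 zstd
/docker/apt-install.sh curl libsasl2-dev libldap2-dev
```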
@@ -27,6 +27,11 @@ if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then
  echo "Building Superset frontend in dev mode inside docker container"
  cd /app/superset-frontend

+ if [ "$NPM_RUN_PRUNE" = "true" ]; then
+   echo "Running 'npm run prune'"
+   npm run prune
+ fi
+
  echo "Running 'npm install'"
  npm install
docker/pip-install.sh (new executable file, 64 lines)

#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -euo pipefail

# Default flags
REQUIRES_BUILD_ESSENTIAL=false
USE_CACHE=true

# Filter arguments
ARGS=()
for arg in "$@"; do
  case "$arg" in
    --requires-build-essential)
      REQUIRES_BUILD_ESSENTIAL=true
      ;;
    --no-cache)
      USE_CACHE=false
      ;;
    *)
      ARGS+=("$arg")
      ;;
  esac
done

# Install build-essential if required
if $REQUIRES_BUILD_ESSENTIAL; then
  echo "Installing build-essential for package builds..."
  apt-get update -qq \
    && apt-get install -yqq --no-install-recommends build-essential
fi

# Choose whether to use pip cache
if $USE_CACHE; then
  echo "Using pip cache..."
  uv pip install --system "${ARGS[@]}"
else
  echo "Disabling pip cache..."
  uv pip install --system --no-cache-dir "${ARGS[@]}"
fi

# Remove build-essential if it was installed
if $REQUIRES_BUILD_ESSENTIAL; then
  echo "Removing build-essential to keep the image lean..."
  apt-get autoremove -yqq --purge build-essential \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* /var/cache/apt/archives/*
fi

echo "Python packages installed successfully."
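Usage mirrors the Dockerfile stages above; the flags are the ones parsed by the script itself:

```bash
# Heavy builds: temporarily install build-essential, then purge it
/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Editable install of Superset itself
/docker/pip-install.sh -e .
# Skip the package cache entirely
/docker/pip-install.sh --no-cache -r requirements/development.txt
```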
@@ -53,11 +53,14 @@ To send alerts and reports to Slack channels, you need to create a new Slack App
   - `incoming-webhook`
   - `files:write`
   - `chat:write`
+  - `channels:read`
+  - `groups:read`
4. At the top of the "OAuth and Permissions" section, click "install to workspace".
5. Select a default channel for your app and continue.
   (You can post to any channel by inviting your Superset app into that channel).
6. The app should now be installed in your workspace, and a "Bot User OAuth Access Token" should have been created. Copy that token into the `SLACK_API_TOKEN` variable of your `superset_config.py`.
-7. Restart the service (or run `superset init`) to pull in the new configuration.
+7. Ensure the feature flag `ALERT_REPORT_SLACK_V2` is set to True in `superset_config.py`.
+8. Restart the service (or run `superset init`) to pull in the new configuration.

Note: when you configure an alert or a report, the Slack channel list takes channel names without the leading '#', e.g. use `alerts` instead of `#alerts`.
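Steps 6 and 7 boil down to two configuration values. A sketch of appending them to `superset_config.py` from a shell (the token is a placeholder, and if you already define `FEATURE_FLAGS`, merge the flag into the existing dict instead):

```bash
cat >> superset_config.py <<'EOF'
SLACK_API_TOKEN = "xoxb-..."  # Bot User OAuth Access Token from step 6
FEATURE_FLAGS = {"ALERT_REPORT_SLACK_V2": True}  # feature flag from step 7
EOF
```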
@@ -81,6 +81,7 @@ are compatible with Superset.
| [TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>:<Port>/<Database Name>` |
| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` |
| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |
+| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` |
| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` |

---
@@ -1537,6 +1538,78 @@ Other parameters:
- Load Balancer - Backup Host

#### YDB

The recommended connector library for [YDB](https://ydb.tech/) is
[ydb-sqlalchemy](https://pypi.org/project/ydb-sqlalchemy/).

##### Connection String

The connection string for YDB looks like this:

```
ydb://{host}:{port}/{database_name}
```

##### Protocol

You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "protocol": "grpcs"
}
```

Default is `grpc`.

##### Authentication Methods

###### Static Credentials

To use `Static Credentials` you should provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "credentials": {
        "username": "...",
        "password": "..."
    }
}
```

###### Access Token Credentials

To use `Access Token Credentials` you should provide `token` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "credentials": {
        "token": "..."
    }
}
```

###### Service Account Credentials

To use Service Account Credentials, you should provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`:

```
{
    "credentials": {
        "service_account_json": {
            "id": "...",
            "service_account_id": "...",
            "created_at": "...",
            "key_algorithm": "...",
            "public_key": "...",
            "private_key": "..."
        }
    }
}
```

#### YugabyteDB

[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL.
@@ -32,7 +32,9 @@ cd superset

Setting things up to squeeze a "hello world" into any part of Superset should be as simple as

```bash
-docker compose up
+# getting docker compose to fire up services, and rebuilding if some docker layers have changed
+# the `--build` flag is optional and slower; it's only needed when layers like py dependencies have changed
+docker compose up --build
```

Note that:
@@ -70,6 +72,24 @@ documentation.
   configured to be secure.
:::

### Nuking the postgres database

At times, it's possible to end up with your development database in a bad state; it's
common while switching branches that contain migrations, for instance, where the database
version stamp that `alembic` manages is no longer available after switching branches.

In that case, the easy solution is to nuke the postgres db and start fresh. Note that the full
state of the database will be gone after doing this, so be cautious.

```bash
# first stop docker-compose if it's running
docker-compose down
# delete the volume containing the database
docker volume rm superset_db_home
# restart docker-compose, which will init a fresh database and load examples
docker-compose up
```

## Installing Development Tools

:::note
@@ -455,17 +475,6 @@ pre-commit install

A series of checks will now run when you make a git commit.

-Alternatively, it is possible to run pre-commit via tox:
-
-```bash
-tox -e pre-commit
-```
-
-Or by running pre-commit manually:
-
-```bash
-pre-commit run --all-files
-```

## Linting
@@ -474,8 +483,7 @@ pre-commit run --all-files

We use [Pylint](https://pylint.org/) for linting which can be invoked via:

```bash
-# for python
-tox -e pylint
+pylint
```

In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
@@ -502,39 +510,108 @@ If using the eslint extension with vscode, put the following in your workspace `
|
||||
]
|
||||
```
|
||||
|
||||
|
||||
## GitHub Actions and `act`

:::tip
`act` compatibility of Superset's GHAs is not fully tested. Running `act` locally may or may not
work for different actions, and may require fine-tuning and local secret handling.
For those more intricate GHAs that are tricky to run locally, we recommend iterating
directly on GHA's infrastructure, by pushing to a branch and monitoring the GHA logs.
For more targeted iteration, see the `gh workflow run --ref {BRANCH}` subcommand of the GitHub CLI.
:::

For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
can find all of the workflows and other assets under the `.github/` folder. This includes:
- running the backend unit test suites (`tests/`)
- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
- linting the codebase, including Python, TypeScript, JavaScript, YAML and beyond
- checking for all sorts of other rules and conventions

When you open a pull request (PR), the appropriate GitHub Actions (GHA) workflows will
automatically run depending on the changes in your branch. It's perfectly reasonable
(and required!) to rely on this automation. However, the downside is that it's mostly an
all-or-nothing approach and doesn't provide much control to target specific tests or
iterate quickly.

At times, it may be more convenient to run GHA workflows locally. For that purpose
we use [act](https://github.com/nektos/act), a tool that allows you to run GitHub Actions (GHA)
workflows locally. It simulates the GitHub Actions environment, enabling developers to
test and debug workflows on their local machines before pushing changes to the repository. More
on how to use it in the next section.

:::note
In both GHA and `act`, we can run a more complex matrix for our tests, executing against different
database engines (PostgreSQL, MySQL, SQLite) and different versions of Python.
This enables us to ensure compatibility and stability across various environments.
:::

### Using `act`

First, install `act` -> https://nektosact.com/

To list the workflows, simply:

```bash
act --list
```

To run a specific workflow:

```bash
act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
```

In the example above, notice that:
- we target a specific workflow, using `--job`
- we pass a secret using `--secret`, as many jobs require read access (public) to the repo
  (see the sketch after this list for one way to supply it)
- we simulate a `pull_request` event by specifying it as the first arg;
  similarly, we could simulate a `push` event or something else
- we specify `--container-architecture`, which tends to emulate GHA more reliably
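
One convenient way to supply that token, assuming you are logged in to the GitHub CLI
(`gh`) and your version supports `gh auth token`, is to let `gh` provide it:

```bash
# reuse the GitHub CLI's stored credentials rather than pasting a token
act pull_request --job {workflow_name} \
  --secret GITHUB_TOKEN="$(gh auth token)" \
  --container-architecture linux/amd64
```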

:::note
`act` is a rich tool that offers all sorts of features, allowing you to simulate different
events (pull_request, push, ...), semantics around passing secrets where required and much
more. For more information, refer to [act's documentation](https://nektosact.com/).
:::

:::note
Some jobs require secrets to interact with external systems and accounts that you may
not have access to. In those cases, you may have to rely on remote CI, or parameterize the
job further so that it targets a different environment or sandbox of your own, along with
the related secrets.
:::
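
If you find yourself repeating the same flags on every invocation, `act` can also read
them from an `.actrc` file (one flag per line, per act's documentation) in your home or
project directory; a minimal sketch:

```
--container-architecture linux/amd64
--secret GITHUB_TOKEN=<your token>
```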

---

## Testing

### Python Testing

All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html),
a standardized testing framework.
All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
#### Unit Tests

For unit tests located in `tests/unit_tests/`, it's usually easiest to run them locally using:

```bash
tox -e <environment>
pytest tests/unit_tests/*
```
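
pytest also lets you narrow the run down further; for instance (the file and test names
below are just placeholders):

```bash
# run a single test file
pytest tests/unit_tests/some_module_test.py

# run only the tests whose names match an expression
pytest tests/unit_tests/ -k "some_test_name"
```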

For example,
#### Integration Tests

For more complex pytest-defined integration tests (not to be confused with our end-to-end Cypress tests), many tests will require having a working test environment. Some tests require a database, Celery, and potentially other services or libraries installed.

### Running Tests with `act`

To run integration tests locally using `act`, ensure you have followed the setup instructions from the [GitHub Actions and `act`](#github-actions-and-act) section. You can run specific workflows or jobs that include integration tests. For example:

```bash
tox -e py38
act --job test-python-38 --secret GITHUB_TOKEN=$GITHUB_TOKEN --event pull_request --container-architecture linux/amd64
```

Alternatively, you can run all tests in a single file via,
#### Running locally using a test script

```bash
tox -e <environment> -- tests/test_file.py
```

or for a specific test via,

```bash
tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
```

Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
commands are invoked.
There is also a utility script included in the Superset codebase to run Python integration tests. The [readme can be found here](https://github.com/apache/superset/tree/master/scripts/tests).

There is also a utility script included in the Superset codebase to run python integration tests. The [readme can be
found here](https://github.com/apache/superset/tree/master/scripts/tests)
@@ -545,7 +622,7 @@ To run all integration tests, for example, run this script from the root directo
scripts/tests/run.sh
```

You can run unit tests found in './tests/unit_tests' for example with pytest. It is a simple way to run an isolated test that doesn't need any database setup
You can run unit tests found in `./tests/unit_tests` with pytest. It is a simple way to run an isolated test that doesn't need any database setup:

```bash
pytest ./link_to_test.py
@@ -568,7 +645,7 @@ npm run test -- path/to/file.js

### Integration Testing

We use [Cypress](https://www.cypress.io/) for integration tests. Tests can be run by `tox -e cypress`. To open Cypress and explore tests, first set up and run the test server:
We use [Cypress](https://www.cypress.io/) for integration tests. To open Cypress and explore tests, first set up and run the test server:

```bash
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
@@ -619,7 +696,7 @@ If you already have launched Docker environment please use the following command

Launch environment:

`CYPRESS_CONFIG=true docker compose up`
`CYPRESS_CONFIG=true docker compose up --build`

It will serve the backend and frontend on port 8088.

@@ -687,7 +764,7 @@ superset:
Start Superset as usual

```bash
docker compose up
docker compose up --build
```

Install the required libraries and packages to the docker container, for example:
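
A minimal sketch of one way to do that, assuming the compose service is named
`superset` (adjust to your setup):

```bash
# open a shell inside the running superset container...
docker compose exec superset bash

# ...then install whatever you need
pip install <some-package>
```
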
@@ -170,31 +170,10 @@ npm run dev-server

### Python Testing

All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html),
a standardized testing framework.
All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
`pytest`, backed by docker-compose, is how we recommend running tests locally.

```bash
tox -e <environment>
```

For example,

```bash
tox -e py38
```

Alternatively, you can run all tests in a single file via,

```bash
tox -e <environment> -- tests/test_file.py
```

or for a specific test via,

```bash
tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
```
For a more complex test matrix (against different database backends, python versions, ...) you
can rely on our GitHub Actions by simply opening a draft pull request.

Note that the test environment uses a temporary directory for defining the
SQLite databases which will be cleared each time before the group of test
@@ -246,13 +225,7 @@ npm run test -- path/to/file.js

### e2e Integration Testing

We use [Cypress](https://www.cypress.io/) for end-to-end integration
tests. One easy option to get started quickly is to leverage `tox` to
run the whole suite in an isolated environment.

```bash
tox -e cypress
```
For e2e testing, we recommend that you use a `docker-compose`-backed setup

Alternatively, you can go lower level and set things up in your
development environment by following these steps:
@@ -598,17 +571,31 @@ pybabel compile -d superset/translations

### Python

We use [Pylint](https://pylint.org/) for linting which can be invoked via:
We use [Pylint](https://pylint.org/) and [ruff](https://github.com/astral-sh/ruff)
for linting, which can be invoked via:

```bash
# for python
tox -e pylint
```
# Run pylint
pylint superset/

# auto-reformat using ruff
ruff format

# lint check with ruff
ruff check

# lint fix with ruff
ruff check --fix
```

In terms of best practices, please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, although there are a few exceptions. Disabling should occur inline, as it prevents masking issues and provides context as to why said message is disabled.

Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which
is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html)
In terms of best practices, please avoid blanket disabling of Pylint messages globally
(via `.pylintrc`) or top-level within the file header, although there are a few exceptions.
Disabling should occur inline, as it prevents masking issues and provides context as to why
said message is disabled.
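
For instance, an inline disable with a short justification might look like this (the
function below is purely illustrative):

```python
def load_fixture(path: str) -> dict:
    # pylint: disable=import-outside-toplevel  # heavy import, only needed here
    import yaml

    with open(path, encoding="utf-8") as fd:
        # parse the YAML fixture into a plain dict
        return yaml.safe_load(fd)
```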

All this is configured to run in pre-commit hooks, which we encourage you to set up
with `pre-commit install`.

### TypeScript

@@ -29,7 +29,7 @@ We have a set of build "presets" that each represent a combination of
|
||||
parameters for the build, mostly pointing to either different target layer
|
||||
for the build, and/or base image.
|
||||
|
||||
Here are the build presets that are exposed through the `build_docker.py` script:
|
||||
Here are the build presets that are exposed through the `supersetbot docker` utility:
|
||||
|
||||
- `lean`: The default Docker image, including both frontend and backend. Tags
|
||||
without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean`
|
||||
@@ -62,8 +62,8 @@ Here are the build presets that are exposed through the `build_docker.py` script
|
||||
|
||||
|
||||
For insights or modifications to the build matrix and tagging conventions,
|
||||
check the [build_docker.py](https://github.com/apache/superset/blob/master/scripts/build_docker.py)
|
||||
script and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
|
||||
check the [supersetbot docker](https://github.com/apache-superset/supersetbot)
|
||||
subcommand and the [docker.yml](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml)
|
||||
GitHub action.
|
||||
|
||||
## Key ARGs in Dockerfile
|
||||
|
||||
@@ -76,7 +76,8 @@ on latest base images using `docker compose build --pull`. In most cases though,
### Option #1 - for an interactive development environment

```bash
docker compose up
# The --build argument ensures all the layers are up-to-date
docker compose up --build
```

:::tip
@@ -95,6 +96,14 @@ perform those operations. In this case, we recommend you set the env var
Simply run `npm i && npm run dev`; this should be MUCH faster.
:::

:::tip
Sometimes, your npm-related state can get out of whack; running `npm run prune` from
the `superset-frontend/` folder will nuke the various packages' `node_modules/` folders
and help you start fresh. In the context of `docker compose`, setting
`export NPM_RUN_PRUNE=true` prior to running `docker compose up` will trigger that
from within docker. This will slow down the startup, but will fix various npm-related issues.
:::

### Option #2 - build a set of immutable images from the local branch

```bash
@@ -227,3 +236,11 @@ may want to find the exact hostname you want to use, for that you can do `ifconf
Docker for you. Alternatively, if you don't even see the `docker0` interface, try (if needed with sudo)
`docker network inspect bridge` and see if there is an entry for `"Gateway"` and note the IP
address.

## 4. To build or not to build

When running `docker compose up`, docker will build what is required behind the scenes, but
may use the docker cache if assets already exist. Running `docker compose build` prior to
`docker compose up`, or the equivalent shortcut `docker compose up --build`, ensures that your
docker images match the definition in the repository. This should only apply to the main
docker-compose.yml file (default) and not to the alternative methods defined above.
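
In other words, the two invocations below are equivalent; use whichever fits your workflow:

```bash
# explicit two-step: rebuild images, then start
docker compose build
docker compose up

# one-step shortcut
docker compose up --build
```
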
@@ -77,10 +77,6 @@ versions officially supported by Superset. We'd recommend using a Python version
like [pyenv](https://github.com/pyenv/pyenv)
(and also [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)).

:::tip
To identify the Python version used by the official docker image, see the [Dockerfile](https://github.com/apache/superset/blob/master/Dockerfile). Additional docker images published for newer versions of Python can be found in [this file](https://github.com/apache/superset/blob/master/scripts/build_docker.py).
:::

Let's also make sure we have the latest version of `pip` and `setuptools`:

```bash
@@ -134,21 +130,22 @@ First, start by installing `apache-superset`:
pip install apache-superset
```

Then, define the mandatory configuration values, `SECRET_KEY` and `FLASK_APP`:
```bash
export SUPERSET_SECRET_KEY=YOUR-SECRET-KEY
export FLASK_APP=superset
```
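
To generate a sufficiently strong random value for `SUPERSET_SECRET_KEY`, one common
approach is:

```bash
# 42 random bytes, base64-encoded
openssl rand -base64 42
```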

Then, you need to initialize the database:

```bash
superset db upgrade
```

:::tip
Note that some configuration is mandatory for production instances of Superset. In particular, Superset will not start without a user-specified value of SECRET_KEY. Please see [Configuring Superset](/docs/configuration/configuring-superset).
:::

Finish installing by running through the following commands:

```bash
# Create an admin user in your metadata database (use `admin` as username to be able to load the examples)
export FLASK_APP=superset
superset fab create-admin

# Load some data to play with
@@ -17,8 +17,8 @@
    "typecheck": "tsc"
  },
  "dependencies": {
    "@algolia/client-search": "^5.12.0",
    "@ant-design/icons": "^5.4.0",
    "@algolia/client-search": "^5.15.0",
    "@ant-design/icons": "^5.5.2",
    "@docsearch/react": "^3.6.3",
    "@docusaurus/core": "^3.5.2",
    "@docusaurus/plugin-client-redirects": "^3.5.2",
@@ -34,7 +34,7 @@
    "clsx": "^2.1.1",
    "docusaurus-plugin-less": "^2.0.2",
    "file-loader": "^6.2.0",
    "less": "^4.2.0",
    "less": "^4.2.1",
    "less-loader": "^11.0.0",
    "prism-react-renderer": "^2.4.0",
    "react": "^18.3.1",
@@ -42,14 +42,14 @@
    "react-github-btn": "^1.4.0",
    "react-svg-pan-zoom": "^3.13.1",
    "stream": "^0.0.3",
    "swagger-ui-react": "^5.17.14",
    "swagger-ui-react": "^5.18.2",
    "url-loader": "^4.1.1"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "^3.5.2",
    "@docusaurus/tsconfig": "^3.5.2",
    "@docusaurus/module-type-aliases": "^3.6.3",
    "@docusaurus/tsconfig": "^3.6.3",
    "@types/react": "^18.3.12",
    "typescript": "^5.6.3",
    "typescript": "^5.7.2",
    "webpack": "^5.96.1"
  },
  "browserslist": {

20 docs/static/img/databases/ydb.svg (vendored, new file)
@@ -0,0 +1,20 @@
<svg width="753" height="274" viewBox="0 0 753 274" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_28_1297)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M5 53.8669C5 37.6466 29.6243 29 60 29C90.3757 29 115 37.6466 115 53.8669V138.133C115 154.353 90.3757 163 60 163C29.6243 163 5 154.353 5 138.133V53.8669Z" fill="#2399FF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M175 53.8669C175 37.6466 199.624 29 230 29C260.376 29 285 37.6466 285 53.8669V138.133C285 154.353 260.376 163 230 163C199.624 163 175 154.353 175 138.133V53.8669Z" fill="#2399FF"/>
<path d="M177 85H113V103H177V85Z" fill="#2399FF"/>
<path d="M173 157H115L81 111H59L105 173H183L229 111H207L173 157Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M89 145.867C89 129.647 113.624 121 144 121C174.376 121 199 129.647 199 145.867V230.133C199 246.353 174.376 255 144 255C113.624 255 89 246.353 89 230.133V145.867Z" fill="#2399FF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M108.783 136.779C100.111 140.552 99 144.237 99 146C99 147.763 100.111 151.448 108.783 155.221C117.076 158.829 129.435 161 144 161C158.565 161 170.924 158.829 179.217 155.221C187.889 151.448 189 147.763 189 146C189 144.237 187.889 140.552 179.218 136.779C170.924 133.171 158.565 131 144 131C129.435 131 117.076 133.171 108.783 136.779Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M24.7825 44.7792C16.1105 48.5515 15 52.2365 15 54C15 55.7635 16.1105 59.4485 24.7825 63.2208C33.0763 66.8287 45.4354 69 60 69C74.5646 69 86.9237 66.8287 95.2175 63.2208C103.889 59.4485 105 55.7635 105 54C105 52.2365 103.889 48.5515 95.2175 44.7792C86.9237 41.1713 74.5646 39 60 39C45.4354 39 33.0763 41.1713 24.7825 44.7792Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M194.783 44.7792C186.111 48.5515 185 52.2365 185 54C185 55.7635 186.111 59.4485 194.783 63.2208C203.076 66.8287 215.435 69 230 69C244.565 69 256.924 66.8287 265.217 63.2208C273.889 59.4485 275 55.7635 275 54C275 52.2365 273.889 48.5515 265.218 44.7792C256.924 41.1713 244.565 39 230 39C215.435 39 203.076 41.1713 194.783 44.7792Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M694.131 64H634.75V210H705.026C730.974 210 750.243 191.821 750.243 166.963C750.243 150.15 740.93 137.39 726.201 130.891C733.027 124.143 737.168 115.224 737.168 104.858C737.168 81.2033 718.875 64 694.131 64ZM660.899 85.791V123.925H691.951C702.482 123.925 711.019 115.389 711.019 104.858C711.019 94.3277 702.482 85.791 691.951 85.791H660.899ZM660.899 188.209V145.716H702.847C714.581 145.716 724.093 155.229 724.093 166.963C724.093 178.697 714.581 188.209 702.847 188.209H660.899Z" fill="black"/>
<path d="M352.716 64.0039H382.134L419.179 128.287L456.223 64.0039H485.641L432.308 155.472V210.004H406.049V155.472L352.716 64.0039Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M496.008 64.0039H546.127C589.713 64.0039 619.127 92.3289 619.127 137.004C619.127 181.679 589.713 210.004 546.127 210.004H496.008V64.0039ZM522.157 188.213V85.7949H543.948C573.32 85.7949 592.978 104.364 592.978 137.004C592.978 169.644 573.32 188.213 543.948 188.213H522.157Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_28_1297">
<rect width="753" height="274" fill="white"/>
</clipPath>
</defs>
</svg>
867 docs/yarn.lock (file diff suppressed because it is too large)
@@ -29,7 +29,7 @@ maintainers:
  - name: craig-rueda
    email: craig@craigrueda.com
    url: https://github.com/craig-rueda
version: 0.13.3
version: 0.13.4
dependencies:
  - name: postgresql
    version: 12.1.6

@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs

# superset




Apache Superset is a modern, enterprise-ready business intelligence web application


@@ -19,7 +19,8 @@
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

# A README is automatically generated from this file to document it, using helm-docs (see https://github.com/norwoodj/helm-docs)
# A README is automatically generated from this file to document it,
# using helm-docs (see https://github.com/norwoodj/helm-docs)
# To update it, install helm-docs and run helm-docs from the root of this chart

# -- Provide a name to override the name of the chart

177 pyproject.toml
@@ -55,6 +55,7 @@ dependencies = [
    "flask-wtf>=1.1.0, <2.0",
    "func_timeout",
    "geopy",
    "greenlet>=3.0.3, <=3.1.1",
    "gunicorn>=22.0.0; sys_platform != 'win32'",
    "hashids>=1.3.1, <2",
    # known issue with holidays 0.26.0 and above related to prophet lib #25017
@@ -69,7 +70,7 @@ dependencies = [
    "nh3>=0.2.11, <0.3",
    "numpy==1.23.5",
    "packaging",
    "pandas[performance]>=2.0.3, <2.1",
    "pandas[excel,performance]>=2.0.3, <2.1",
    "parsedatetime",
    "paramiko>=3.4.0",
    "pgsanity",
@@ -90,7 +91,9 @@ dependencies = [
    "slack_sdk>=3.19.0, <4",
    "sqlalchemy>=1.4, <2",
    "sqlalchemy-utils>=0.38.3, <0.39",
    "sqlglot>=25.24.0,<26",
    # known breaking changes in sqlglot 25.25.0
    # https://github.com/tobymao/sqlglot/blob/main/CHANGELOG.md#v25250---2024-10-14
    "sqlglot>=25.24.0,<25.25.0",
    "sqlparse>=0.5.0",
    "tabulate>=0.8.9, <0.9",
    "typing-extensions>=4, <5",
@@ -135,7 +138,6 @@ gevent = ["gevent>=23.9.1"]
gsheets = ["shillelagh[gsheetsapi]>=1.2.18, <2"]
hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"]
hive = [
    "boto3",
    "pyhive[hive]>=0.6.5;python_version<'3.11'",
    "pyhive[hive_pure_sasl]>=0.7.0",
    "tableschema",
@@ -158,7 +160,7 @@ pinot = ["pinotdb>=5.0.0, <6.0.0"]
playwright = ["playwright>=1.37.0, <2"]
postgres = ["psycopg2-binary==2.9.6"]
presto = ["pyhive[presto]>=0.6.5"]
trino = ["boto3", "trino>=0.328.0"]
trino = ["trino>=0.328.0"]
prophet = ["prophet>=1.1.5, <2"]
redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"]
rockset = ["rockset-sqlalchemy>=0.0.1, <1"]
@@ -177,11 +179,11 @@ netezza = ["nzalchemy>=11.0.2"]
starrocks = ["starrocks>=1.0.0"]
doris = ["pydoris>=1.0.0, <2.0.0"]
oceanbase = ["oceanbase_py>=0.0.1"]
ydb = ["ydb-sqlalchemy>=0.1.2"]
development = [
    "docker",
    "flask-testing",
    "freezegun",
    "greenlet>=2.0.2",
    "grpcio>=1.55.3",
    "openapi-spec-validator",
    "parameterized",
@@ -199,7 +201,6 @@ development = [
    "ruff",
    "sqloxide",
    "statsd",
    "tox",
]

[project.urls]
@@ -236,172 +237,10 @@ disallow_untyped_calls = false
disallow_untyped_defs = false
disable_error_code = "annotation-unchecked"

[tool.tox]
legacy_tox_ini = """
# Remember to start celery workers to run celery tests, e.g.
# celery --app=superset.tasks.celery_app:app worker -Ofair -c 2
[testenv]
basepython = python3.10
ignore_basepython_conflict = true
commands =
    superset db upgrade
    superset init
    superset load-test-users
    # use -s to be able to use break pointers.
    # no args or tests/* can be passed as an argument to run all tests
    pytest -s {posargs}
deps =
    -rrequirements/development.txt
setenv =
    PYTHONPATH = {toxinidir}
    SUPERSET_TESTENV = true
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
    SUPERSET_HOME = {envtmpdir}
    mysql: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
    postgres: SUPERSET__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://superset:superset@localhost/test
    sqlite: SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db
    sqlite: SUPERSET__SQLALCHEMY_EXAMPLES_URI = sqlite:////{envtmpdir}/examples.db
    mysql-presto: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
    # docker run -p 8080:8080 --name presto starburstdata/presto
    mysql-presto: SUPERSET__SQLALCHEMY_EXAMPLES_URI = presto://localhost:8080/memory/default
    # based on https://github.com/big-data-europe/docker-hadoop
    # clone the repo & run docker compose up -d to test locally
    mysql-hive: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
    mysql-hive: SUPERSET__SQLALCHEMY_EXAMPLES_URI = hive://localhost:10000/default
    # make sure that directory is accessible by docker
    hive: UPLOAD_FOLDER = /tmp/.superset/app/static/uploads/
usedevelop = true
allowlist_externals =
    npm
    pkill

[testenv:cypress]
setenv =
    PYTHONPATH = {toxinidir}
    SUPERSET_TESTENV = true
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
    SUPERSET_HOME = {envtmpdir}
commands =
    npm install -g npm@'>=6.5.0'
    pip install -e {toxinidir}/
    {toxinidir}/superset-frontend/cypress_build.sh
commands_post =
    pkill -if "python {envbindir}/flask"

[testenv:cypress-dashboard]
setenv =
    PYTHONPATH = {toxinidir}
    SUPERSET_TESTENV = true
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
    SUPERSET_HOME = {envtmpdir}
commands =
    npm install -g npm@'>=6.5.0'
    pip install -e {toxinidir}/
    {toxinidir}/superset-frontend/cypress_build.sh dashboard
commands_post =
    pkill -if "python {envbindir}/flask"

[testenv:cypress-explore]
setenv =
    PYTHONPATH = {toxinidir}
    SUPERSET_TESTENV = true
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
    SUPERSET_HOME = {envtmpdir}
commands =
    npm install -g npm@'>=6.5.0'
    pip install -e {toxinidir}/
    {toxinidir}/superset-frontend/cypress_build.sh explore
commands_post =
    pkill -if "python {envbindir}/flask"

[testenv:cypress-sqllab]
setenv =
    PYTHONPATH = {toxinidir}
    SUPERSET_TESTENV = true
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
    SUPERSET_HOME = {envtmpdir}
commands =
    npm install -g npm@'>=6.5.0'
    pip install -e {toxinidir}/
    {toxinidir}/superset-frontend/cypress_build.sh sqllab
commands_post =
    pkill -if "python {envbindir}/flask"

[testenv:cypress-sqllab-backend-persist]
setenv =
    PYTHONPATH = {toxinidir}
    SUPERSET_TESTENV = true
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
    SUPERSET_HOME = {envtmpdir}
commands =
    npm install -g npm@'>=6.5.0'
    pip install -e {toxinidir}/
    {toxinidir}/superset-frontend/cypress_build.sh sqllab
commands_post =
    pkill -if "python {envbindir}/flask"

[testenv:eslint]
changedir = {toxinidir}/superset-frontend
commands =
    npm run lint
deps =

[testenv:fossa]
commands =
    {toxinidir}/scripts/fossa.sh
deps =
passenv = *

[testenv:javascript]
commands =
    npm install -g npm@'>=6.5.0'
    {toxinidir}/superset-frontend/js_build.sh
deps =

[testenv:license-check]
commands =
    {toxinidir}/scripts/check_license.sh
passenv = *
whitelist_externals =
    {toxinidir}/scripts/check_license.sh
deps =

[testenv:pre-commit]
commands =
    pre-commit run --all-files
deps =
    -rrequirements/development.txt
skip_install = true

[testenv:pylint]
commands =
    pylint superset
deps =
    -rrequirements/development.txt

[testenv:thumbnails]
setenv =
    SUPERSET_CONFIG = tests.integration_tests.superset_test_config_thumbnails
deps =
    -rrequirements/development.txt

[tox]
envlist =
    cypress-dashboard
    cypress-explore
    cypress-sqllab
    cypress-sqllab-backend-persist
    eslint
    fossa
    javascript
    license-check
    pre-commit
    pylint
skipsdist = true
"""
[tool.ruff]
# Exclude a variety of commonly ignored directories.
exclude = [
    "**/*.ipynb",
    ".bzr",
    ".direnv",
    ".eggs",

@@ -20,3 +20,12 @@
urllib3>=1.26.18
werkzeug>=3.0.1
numexpr>=2.9.0

# 5.0.0 has a sensitive deprecation used in other libs
# -> https://github.com/aio-libs/async-timeout/blob/master/CHANGES.rst#500-2024-10-31
async_timeout>=4.0.0,<5.0.0

# playwright requires greenlet==3.0.3
# submitted a PR to relax deps in 11/2024
# https://github.com/microsoft/playwright-python/pull/2669
greenlet==3.0.3

@@ -1,4 +1,4 @@
# SHA1:85649679306ea016e401f37adfbad832028d2e5f
# SHA1:04f7e0860829f18926ea238354e6d4a6ab823d50
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -7,16 +7,18 @@
#
-e file:.
    # via -r requirements/base.in
alembic==1.13.1
alembic==1.14.0
    # via flask-migrate
amqp==5.2.0
amqp==5.3.1
    # via kombu
apispec[yaml]==6.3.0
    # via flask-appbuilder
apsw==3.46.0.0
    # via shillelagh
async-timeout==4.0.3
    # via redis
    # via
    #   -r requirements/base.in
    #   redis
attrs==24.2.0
    # via
    #   cattrs
@@ -26,13 +28,13 @@ babel==2.16.0
    # via flask-babel
backoff==2.2.1
    # via apache-superset
bcrypt==4.1.3
bcrypt==4.2.1
    # via paramiko
billiard==4.2.0
billiard==4.2.1
    # via celery
blinker==1.9.0
    # via flask
bottleneck==1.3.8
bottleneck==1.4.2
    # via pandas
brotli==1.1.0
    # via flask-compress
@@ -40,19 +42,19 @@ cachelib==0.9.0
    # via
    #   flask-caching
    #   flask-session
cachetools==5.3.3
cachetools==5.5.0
    # via google-auth
cattrs==24.1.2
    # via requests-cache
celery==5.4.0
    # via apache-superset
certifi==2024.2.2
certifi==2024.8.30
    # via requests
cffi==1.17.1
    # via
    #   cryptography
    #   pynacl
charset-normalizer==3.3.2
charset-normalizer==3.4.0
    # via requests
click==8.1.7
    # via
@@ -76,23 +78,27 @@ colorama==0.4.6
    # via
    #   apache-superset
    #   flask-appbuilder
cron-descriptor==1.4.3
cron-descriptor==1.4.5
    # via apache-superset
croniter==2.0.5
croniter==5.0.1
    # via apache-superset
cryptography==42.0.8
cryptography==43.0.3
    # via
    #   apache-superset
    #   paramiko
    #   pyopenssl
deprecated==1.2.14
defusedxml==0.7.1
    # via odfpy
deprecated==1.2.15
    # via limits
deprecation==2.1.0
    # via apache-superset
dnspython==2.6.1
dnspython==2.7.0
    # via email-validator
email-validator==2.1.1
email-validator==2.2.0
    # via flask-appbuilder
et-xmlfile==2.0.0
    # via openpyxl
exceptiongroup==1.2.2
    # via cattrs
flask==2.3.3
@@ -115,11 +121,11 @@ flask-babel==2.0.0
    # via flask-appbuilder
flask-caching==2.3.0
    # via apache-superset
flask-compress==1.15
flask-compress==1.17
    # via apache-superset
flask-jwt-extended==4.6.0
flask-jwt-extended==4.7.1
    # via flask-appbuilder
flask-limiter==3.7.0
flask-limiter==3.8.0
    # via flask-appbuilder
flask-login==0.6.3
    # via
@@ -135,7 +141,7 @@ flask-sqlalchemy==2.5.1
    #   flask-migrate
flask-talisman==1.1.0
    # via apache-superset
flask-wtf==1.2.1
flask-wtf==1.2.2
    # via
    #   apache-superset
    #   flask-appbuilder
@@ -145,29 +151,30 @@ geographiclib==2.0
    # via geopy
geopy==2.4.1
    # via apache-superset
google-auth==2.29.0
google-auth==2.36.0
    # via shillelagh
greenlet==3.0.3
    # via
    #   -r requirements/base.in
    #   apache-superset
    #   shillelagh
    #   sqlalchemy
gunicorn==22.0.0
gunicorn==23.0.0
    # via apache-superset
hashids==1.3.1
    # via apache-superset
holidays==0.25
    # via apache-superset
humanize==4.9.0
humanize==4.11.0
    # via apache-superset
idna==3.7
idna==3.10
    # via
    #   email-validator
    #   requests
importlib-metadata==7.1.0
importlib-metadata==8.5.0
    # via apache-superset
importlib-resources==6.4.0
importlib-resources==6.4.5
    # via limits
isodate==0.6.1
isodate==0.7.2
    # via apache-superset
itsdangerous==2.2.0
    # via
@@ -177,23 +184,23 @@ jinja2==3.1.4
    # via
    #   flask
    #   flask-babel
jsonpath-ng==1.6.1
jsonpath-ng==1.7.0
    # via apache-superset
jsonschema==4.17.3
    # via flask-appbuilder
kombu==5.3.7
kombu==5.4.2
    # via celery
korean-lunar-calendar==0.3.1
    # via holidays
limits==3.12.0
limits==3.13.0
    # via flask-limiter
llvmlite==0.42.0
llvmlite==0.43.0
    # via numba
mako==1.3.5
mako==1.3.6
    # via
    #   alembic
    #   apache-superset
markdown==3.6
markdown==3.7
    # via apache-superset
markdown-it-py==3.0.0
    # via rich
@@ -203,7 +210,7 @@ markupsafe==3.0.2
    #   mako
    #   werkzeug
    #   wtforms
marshmallow==3.21.2
marshmallow==3.23.1
    # via
    #   flask-appbuilder
    #   marshmallow-sqlalchemy
@@ -215,11 +222,11 @@ msgpack==1.0.8
    # via apache-superset
msgspec==0.18.6
    # via flask-session
nh3==0.2.17
nh3==0.2.19
    # via apache-superset
numba==0.59.1
numba==0.60.0
    # via pandas
numexpr==2.10.1
numexpr==2.10.2
    # via
    #   -r requirements/base.in
    #   pandas
@@ -231,9 +238,13 @@ numpy==1.23.5
    #   numexpr
    #   pandas
    #   pyarrow
odfpy==1.4.1
    # via pandas
openpyxl==3.1.5
    # via pandas
ordered-set==4.1.0
    # via flask-limiter
packaging==23.2
packaging==24.2
    # via
    #   apache-superset
    #   apispec
@@ -243,9 +254,9 @@ packaging==23.2
    #   marshmallow
    #   marshmallow-sqlalchemy
    #   shillelagh
pandas[performance]==2.0.3
pandas[excel,performance]==2.0.3
    # via apache-superset
paramiko==3.4.0
paramiko==3.5.0
    # via
    #   apache-superset
    #   sshtunnel
@@ -261,30 +272,30 @@ polyline==2.0.2
    # via apache-superset
prison==0.2.1
    # via flask-appbuilder
prompt-toolkit==3.0.44
prompt-toolkit==3.0.48
    # via click-repl
pyarrow==14.0.2
    # via apache-superset
pyasn1==0.6.0
pyasn1==0.6.1
    # via
    #   pyasn1-modules
    #   rsa
pyasn1-modules==0.4.0
pyasn1-modules==0.4.1
    # via google-auth
pycparser==2.22
    # via cffi
pygments==2.18.0
    # via rich
pyjwt==2.8.0
pyjwt==2.10.1
    # via
    #   apache-superset
    #   flask-appbuilder
    #   flask-jwt-extended
pynacl==1.5.0
    # via paramiko
pyopenssl==24.1.0
pyopenssl==24.2.1
    # via shillelagh
pyparsing==3.1.2
pyparsing==3.2.0
    # via apache-superset
pyrsistent==0.20.0
    # via jsonschema
@@ -306,7 +317,9 @@ pytz==2024.2
    #   croniter
    #   flask-babel
    #   pandas
pyyaml==6.0.1
pyxlsb==1.0.10
    # via pandas
pyyaml==6.0.2
    # via
    #   apache-superset
    #   apispec
@@ -318,7 +331,7 @@ requests==2.32.2
    #   shillelagh
requests-cache==1.2.0
    # via shillelagh
rich==13.7.1
rich==13.9.4
    # via flask-limiter
rsa==4.9
    # via google-auth
@@ -328,18 +341,17 @@ shillelagh[gsheetsapi]==1.2.18
    # via apache-superset
shortid==0.1.2
    # via apache-superset
simplejson==3.19.2
simplejson==3.19.3
    # via apache-superset
six==1.16.0
    # via
    #   isodate
    #   prison
    #   python-dateutil
    #   url-normalize
    #   wtforms-json
slack-sdk==3.27.2
slack-sdk==3.33.4
    # via apache-superset
sqlalchemy==1.4.52
sqlalchemy==1.4.54
    # via
    #   alembic
    #   apache-superset
@@ -352,9 +364,9 @@ sqlalchemy-utils==0.38.3
    # via
    #   apache-superset
    #   flask-appbuilder
sqlglot==25.24.0
sqlglot==25.24.5
    # via apache-superset
sqlparse==0.5.0
sqlparse==0.5.2
    # via apache-superset
sshtunnel==0.4.0
    # via apache-superset
@@ -367,10 +379,12 @@ typing-extensions==4.12.2
    #   cattrs
    #   flask-limiter
    #   limits
    #   rich
    #   shillelagh
tzdata==2024.1
tzdata==2024.2
    # via
    #   celery
    #   kombu
    #   pandas
url-normalize==1.4.3
    # via requests-cache
@@ -394,7 +408,7 @@ werkzeug==3.1.3
    #   flask-appbuilder
    #   flask-jwt-extended
    #   flask-login
wrapt==1.16.0
wrapt==1.17.0
    # via deprecated
wtforms==3.2.1
    # via
@@ -404,9 +418,13 @@ wtforms==3.2.1
    #   wtforms-json
wtforms-json==0.3.5
    # via apache-superset
xlrd==2.0.1
    # via pandas
xlsxwriter==3.0.9
    # via apache-superset
zipp==3.19.0
    # via
    #   apache-superset
    #   pandas
zipp==3.21.0
    # via importlib-metadata
zstandard==0.22.0
zstandard==0.23.0
    # via flask-compress

@@ -17,4 +17,4 @@
# under the License.
#
-r base.in
-e .[development,bigquery,cors,druid,gevent,gsheets,hive,mysql,playwright,postgres,presto,prophet,trino,thumbnails]
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,playwright,postgres,presto,prophet,trino,thumbnails]

@@ -1,4 +1,4 @@
# SHA1:c186006a3f82c8775e1039f37c52309f6c858197
# SHA1:dc767a7288b56c785b0cd3c38e95e7b5e66be1ac
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -12,89 +12,69 @@
    # via -r requirements/development.in
astroid==3.1.0
    # via pylint
boto3==1.34.112
    # via
    #   apache-superset
    #   dataflows-tabulator
botocore==1.34.112
    # via
    #   boto3
    #   s3transfer
build==1.2.1
    # via pip-tools
cached-property==1.5.2
    # via tableschema
cfgv==3.3.1
cfgv==3.4.0
    # via pre-commit
chardet==5.1.0
    # via
    #   dataflows-tabulator
    #   tox
cmdstanpy==1.1.0
    # via prophet
contourpy==1.0.7
    # via matplotlib
coverage[toml]==7.2.5
coverage[toml]==7.6.8
    # via pytest-cov
cycler==0.11.0
cycler==0.12.1
    # via matplotlib
dataflows-tabulator==1.54.3
    # via tableschema
db-dtypes==1.2.0
db-dtypes==1.3.1
    # via pandas-gbq
dill==0.3.8
dill==0.3.9
    # via pylint
distlib==0.3.8
    # via virtualenv
docker==7.0.0
    # via apache-superset
et-xmlfile==1.1.0
    # via openpyxl
filelock==3.12.2
    # via
    #   tox
    #   virtualenv
    # via virtualenv
flask-cors==4.0.0
    # via apache-superset
flask-testing==0.8.1
    # via apache-superset
fonttools==4.51.0
fonttools==4.55.0
    # via matplotlib
freezegun==1.5.1
    # via apache-superset
future==0.18.3
future==1.0.0
    # via pyhive
gevent==24.2.1
    # via apache-superset
google-api-core[grpc]==2.11.0
google-api-core[grpc]==2.23.0
    # via
    #   google-cloud-bigquery
    #   google-cloud-bigquery-storage
    #   google-cloud-core
    #   pandas-gbq
    #   sqlalchemy-bigquery
google-auth-oauthlib==1.0.0
google-auth-oauthlib==1.2.1
    # via
    #   pandas-gbq
    #   pydata-google-auth
google-cloud-bigquery==3.20.1
google-cloud-bigquery==3.27.0
    # via
    #   apache-superset
    #   pandas-gbq
    #   sqlalchemy-bigquery
google-cloud-bigquery-storage==2.19.1
    # via pandas-gbq
google-cloud-core==2.3.2
google-cloud-core==2.4.1
    # via google-cloud-bigquery
google-crc32c==1.5.0
google-crc32c==1.6.0
    # via google-resumable-media
google-resumable-media==2.7.0
google-resumable-media==2.7.2
    # via google-cloud-bigquery
googleapis-common-protos==1.63.0
googleapis-common-protos==1.66.0
    # via
    #   google-api-core
    #   grpcio-status
grpcio==1.62.1
grpcio==1.68.0
    # via
    #   apache-superset
    #   google-api-core
@@ -103,31 +83,21 @@ grpcio-status==1.60.1
    # via google-api-core
identify==2.5.36
    # via pre-commit
ijson==3.2.3
    # via dataflows-tabulator
iniconfig==2.0.0
    # via pytest
isort==5.12.0
    # via pylint
jmespath==1.0.1
    # via
    #   boto3
    #   botocore
jsonlines==4.0.0
    # via dataflows-tabulator
jsonschema-spec==0.1.6
    # via openapi-spec-validator
kiwisolver==1.4.5
kiwisolver==1.4.7
    # via matplotlib
lazy-object-proxy==1.10.0
    # via openapi-spec-validator
linear-tsv==1.1.0
    # via dataflows-tabulator
matplotlib==3.9.0
    # via prophet
mccabe==0.7.0
    # via pylint
mysqlclient==2.2.4
mysqlclient==2.2.6
    # via apache-superset
nodeenv==1.8.0
    # via pre-commit
@@ -137,8 +107,6 @@ openapi-schema-validator==0.4.4
    # via openapi-spec-validator
openapi-spec-validator==0.5.6
    # via apache-superset
openpyxl==3.1.2
    # via dataflows-tabulator
pandas-gbq==0.19.1
    # via apache-superset
parameterized==0.9.0
@@ -155,32 +123,30 @@ pip-tools==7.4.1
    # via pip-compile-multi
playwright==1.42.0
    # via apache-superset
pluggy==1.4.0
    # via
    #   pytest
    #   tox
pre-commit==3.7.1
pluggy==1.5.0
    # via pytest
pre-commit==4.0.1
    # via apache-superset
progress==1.6
    # via apache-superset
prophet==1.1.5
    # via apache-superset
proto-plus==1.22.2
    # via google-cloud-bigquery-storage
protobuf==4.23.0
proto-plus==1.25.0
    # via
    #   google-api-core
    #   google-cloud-bigquery-storage
protobuf==4.25.5
    # via
    #   google-api-core
    #   google-cloud-bigquery-storage
    #   googleapis-common-protos
    #   grpcio-status
    #   proto-plus
psutil==6.0.0
psutil==6.1.0
    # via apache-superset
psycopg2-binary==2.9.6
    # via apache-superset
pure-sasl==0.6.2
    # via thrift-sasl
pydata-google-auth==1.7.0
pydata-google-auth==1.9.0
    # via pandas-gbq
pydruid==0.6.9
    # via apache-superset
@@ -194,9 +160,7 @@ pyinstrument==4.4.0
    # via apache-superset
pylint==3.1.0
    # via apache-superset
pyproject-api==1.6.1
    # via tox
pyproject-hooks==1.0.0
pyproject-hooks==1.2.0
    # via
    #   build
    #   pip-tools
@@ -205,7 +169,7 @@ pytest==7.4.4
    #   apache-superset
    #   pytest-cov
    #   pytest-mock
pytest-cov==5.0.0
pytest-cov==6.0.0
    # via apache-superset
pytest-mock==3.10.0
    # via apache-superset
@@ -215,62 +179,37 @@ requests-oauthlib==2.0.0
    # via google-auth-oauthlib
rfc3339-validator==0.1.4
    # via openapi-schema-validator
rfc3986==2.0.0
    # via tableschema
ruff==0.4.5
ruff==0.8.0
    # via apache-superset
s3transfer==0.10.1
    # via boto3
sqlalchemy-bigquery==1.11.0
sqlalchemy-bigquery==1.12.0
    # via apache-superset
sqloxide==0.1.43
sqloxide==0.1.51
    # via apache-superset
statsd==4.0.1
    # via apache-superset
tableschema==1.20.10
    # via apache-superset
thrift==0.16.0
    # via
    #   apache-superset
    #   thrift-sasl
thrift-sasl==0.4.3
    # via apache-superset
tomli==2.0.1
tomli==2.1.0
    # via
    #   build
    #   coverage
    #   pip-tools
    #   pylint
    #   pyproject-api
    #   pyproject-hooks
    #   pytest
    #   tox
tomlkit==0.12.5
tomlkit==0.13.2
    # via pylint
toposort==1.10
    # via pip-compile-multi
tox==4.6.4
    # via apache-superset
tqdm==4.66.4
tqdm==4.67.1
    # via
    #   cmdstanpy
    #   prophet
trino==0.328.0
trino==0.330.0
    # via apache-superset
tzlocal==5.2
    # via trino
unicodecsv==0.14.1
    # via
    #   dataflows-tabulator
    #   tableschema
virtualenv==20.23.1
    # via
    #   pre-commit
    #   tox
wheel==0.43.0
    # via pre-commit
wheel==0.45.1
    # via pip-tools
xlrd==2.0.1
    # via dataflows-tabulator
zope-event==5.0
    # via gevent
zope-interface==5.4.0

@@ -1,294 +0,0 @@
#!/usr/bin/env python3

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
import subprocess
from textwrap import dedent

import click

REPO = "apache/superset"
CACHE_REPO = f"{REPO}-cache"
BASE_PY_IMAGE = "3.10-slim-bookworm"


def run_cmd(command: str, raise_on_failure: bool = True) -> str:
    process = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
    )

    output = ""
    if process.stdout is not None:
        for line in iter(process.stdout.readline, ""):
            print(line.strip())  # Print the line to stdout in real-time
            output += line

    process.wait()  # Wait for the subprocess to finish

    if process.returncode != 0 and raise_on_failure:
        raise subprocess.CalledProcessError(process.returncode, command, output)
    return output


def get_git_sha() -> str:
    return run_cmd("git rev-parse HEAD").strip()


def get_build_context_ref(build_context: str) -> str:
    """
    Given a context, return a ref:
    - if context is pull_request, return the PR's id
    - if context is push, return the branch
    - if context is release, return the release ref
    """

    event = os.getenv("GITHUB_EVENT_NAME")
    github_ref = os.getenv("GITHUB_REF", "")

    if event == "pull_request":
        github_head_ref = os.getenv("GITHUB_HEAD_REF", "")
        return re.sub("[^a-zA-Z0-9]", "-", github_head_ref)[:40]
    elif event == "release":
        return re.sub("refs/tags/", "", github_ref)[:40]
    elif event == "push":
        return re.sub("[^a-zA-Z0-9]", "-", re.sub("refs/heads/", "", github_ref))[:40]
    return ""


def is_latest_release(release: str) -> bool:
    output = (
        run_cmd(
            f"./scripts/tag_latest_release.sh {release} --dry-run",
            raise_on_failure=False,
        )
        or ""
    )
    return "SKIP_TAG::false" in output


def make_docker_tag(l: list[str]) -> str:  # noqa: E741
    return f"{REPO}:" + "-".join([o for o in l if o])


def get_docker_tags(
    build_preset: str,
    build_platforms: list[str],
    sha: str,
    build_context: str,
    build_context_ref: str,
    force_latest: bool = False,
) -> set[str]:
    """
    Return a set of tags given a given build context
    """
    tags: set[str] = set()
    tag_chunks: list[str] = []

    is_latest = is_latest_release(build_context_ref)

    if build_preset != "lean":
        # Always add the preset_build name if different from default (lean)
        tag_chunks += [build_preset]

    if len(build_platforms) == 1:
        build_platform = build_platforms[0]
        short_build_platform = build_platform.replace("linux/", "").replace("64", "")
        if short_build_platform != "amd":
            # Always a platform indicator if different from default (amd)
            tag_chunks += [short_build_platform]

    # Always craft a tag for the SHA
    tags.add(make_docker_tag([sha] + tag_chunks))
    # also a short SHA, cause it's nice
    tags.add(make_docker_tag([sha[:7]] + tag_chunks))

    if build_context == "release":
        # add a release tag
        tags.add(make_docker_tag([build_context_ref] + tag_chunks))
        if is_latest or force_latest:
            # add a latest tag
            tags.add(make_docker_tag(["latest"] + tag_chunks))
    elif build_context == "push" and build_context_ref == "master":
        tags.add(make_docker_tag(["master"] + tag_chunks))
    elif build_context == "pull_request":
        tags.add(make_docker_tag([f"pr-{build_context_ref}"] + tag_chunks))
    return tags


def get_docker_command(
    build_preset: str,
    build_platforms: list[str],
    is_authenticated: bool,
    sha: str,
    build_context: str,
    build_context_ref: str,
    force_latest: bool = False,
) -> str:
    tag = ""  # noqa: F841
    build_target = ""
    py_ver = BASE_PY_IMAGE
    docker_context = "."

    if build_preset == "dev":
        build_target = "dev"
    elif build_preset == "lean":
        build_target = "lean"
    elif build_preset == "py311":
        build_target = "lean"
        py_ver = "3.11-slim-bookworm"
    elif build_preset == "websocket":
        build_target = ""
        docker_context = "superset-websocket"
    elif build_preset == "ci":
        build_target = "ci"
    elif build_preset == "dockerize":
        build_target = ""
        docker_context = "-f dockerize.Dockerfile ."
    else:
        print(f"Invalid build preset: {build_preset}")
        exit(1)

    # Try to get context reference if missing
    if not build_context_ref:
        build_context_ref = get_build_context_ref(build_context)

    tags = get_docker_tags(
        build_preset,
        build_platforms,
        sha,
        build_context,
        build_context_ref,
        force_latest,
    )
    docker_tags = ("\\\n" + 8 * " ").join([f"-t {s} " for s in tags])

    docker_args = "--load" if not is_authenticated else "--push"
    target_argument = f"--target {build_target}" if build_target else ""

    cache_ref = f"{CACHE_REPO}:{py_ver}"
    if len(build_platforms) == 1:
        build_platform = build_platforms[0]
        short_build_platform = build_platform.replace("linux/", "").replace("64", "")
        cache_ref = f"{CACHE_REPO}:{py_ver}-{short_build_platform}"
    platform_arg = "--platform " + ",".join(build_platforms)

    cache_from_arg = f"--cache-from=type=registry,ref={cache_ref}"
    cache_to_arg = (
        f"--cache-to=type=registry,mode=max,ref={cache_ref}" if is_authenticated else ""
    )
    build_arg = f"--build-arg PY_VER={py_ver}" if py_ver else ""
    actor = os.getenv("GITHUB_ACTOR")

    return dedent(
        f"""\
        docker buildx build \\
        {docker_args} \\
        {docker_tags} \\
        {cache_from_arg} \\
        {cache_to_arg} \\
        {build_arg} \\
        {platform_arg} \\
        {target_argument} \\
        --label sha={sha} \\
        --label target={build_target} \\
        --label build_trigger={build_context} \\
        --label base={py_ver} \\
        --label build_actor={actor} \\
        {docker_context}"""
    )


@click.command()
@click.argument(
    "build_preset",
    type=click.Choice(["lean", "dev", "dockerize", "websocket", "py311", "ci"]),
)
@click.argument("build_context", type=click.Choice(["push", "pull_request", "release"]))
@click.option(
    "--platform",
    type=click.Choice(["linux/arm64", "linux/amd64"]),
    default=["linux/amd64"],
    multiple=True,
)
@click.option("--build_context_ref", help="a reference to the pr, release or branch")
@click.option("--dry-run", is_flag=True, help="Run the command in dry-run mode.")
@click.option("--verbose", is_flag=True, help="Print more info")
@click.option(
    "--force-latest", is_flag=True, help="Force the 'latest' tag on the release"
)
def main(
    build_preset: str,
    build_context: str,
    build_context_ref: str,
    platform: list[str],
    dry_run: bool,
    force_latest: bool,
    verbose: bool,
) -> None:
    """
    This script executes docker build and push commands based on given arguments.
    """

    is_authenticated = (
        True if os.getenv("DOCKERHUB_TOKEN") and os.getenv("DOCKERHUB_USER") else False
    )

    if force_latest and build_context != "release":
        print(
            "--force-latest can only be applied if the build context is set to 'release'"
        )
        exit(1)

    if build_context == "release" and not build_context_ref.strip():
        print("Release number has to be provided")
        exit(1)

    docker_build_command = get_docker_command(
        build_preset,
        platform,
        is_authenticated,
        get_git_sha(),
        build_context,
        build_context_ref,
        force_latest,
    )

    if not dry_run:
        print("Executing Docker Build Command:")
        print(docker_build_command)
        script = ""
        if os.getenv("DOCKERHUB_USER"):
            script = dedent(
                f"""\
                docker logout
                docker login --username "{os.getenv("DOCKERHUB_USER")}" --password "{os.getenv("DOCKERHUB_TOKEN")}"
                DOCKER_ARGS="--push"
                """
            )
        script = script + docker_build_command
        if verbose:
            run_cmd("cat Dockerfile")
        stdout = run_cmd(script)  # noqa: F841
    else:
        print("Dry Run - Docker Build Command:")
        print(docker_build_command)


if __name__ == "__main__":
    main()
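
For context, going by the `click` arguments and options declared above, the now-removed
script was invoked along these lines (a hedged reconstruction, since the file is deleted
in this diff and superseded by `supersetbot docker`):

```bash
# dry-run a lean build as if triggered by a pull_request event
python scripts/build_docker.py lean pull_request --platform linux/amd64 --dry-run
```
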
@@ -95,15 +95,21 @@ def print_files(files: List[str]) -> None:
    print("\n".join([f"- {s}" for s in files]))


def is_int(s: str) -> bool:
    return bool(re.match(r"^-?\d+$", s))


def main(event_type: str, sha: str, repo: str) -> None:
    """Main function to check for file changes based on event context."""
    print("SHA:", sha)
    print("EVENT_TYPE", event_type)
    files = None
    if event_type == "pull_request":
        pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
        files = fetch_changed_files_pr(repo, pr_number)
        print("PR files:")
        print_files(files)
        if is_int(pr_number):
            files = fetch_changed_files_pr(repo, pr_number)
            print("PR files:")
            print_files(files)

    elif event_type == "push":
        files = fetch_changed_files_push(repo, sha)
@@ -119,7 +125,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
    changes_detected = {}
    for group, regex_patterns in PATTERNS.items():
        patterns_compiled = [re.compile(p) for p in regex_patterns]
        changes_detected[group] = event_type == "workflow_dispatch" or detect_changes(
        changes_detected[group] = files is None or detect_changes(
            files, patterns_compiled
        )

@@ -87,7 +87,7 @@ describe('Charts list', () => {
     visitChartList();
     cy.getBySel('count-crosslinks').should('be.visible');
     cy.getBySel('crosslinks').first().trigger('mouseover');
-    cy.get('.ant-tooltip')
+    cy.get('.antd5-tooltip')
       .contains('3 - Sample dashboard')
       .invoke('removeAttr', 'target')
       .click();
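These specs lean on `getBySel`, a project-level Cypress command. As a point of reference, here is a minimal sketch of how such a command is typically registered (illustrative only; the repo's actual definition lives in its Cypress support files):

// Sketch, assuming elements are tagged with data-test attributes.
Cypress.Commands.add('getBySel', (selector: string) =>
  cy.get(`[data-test="${selector}"]`),
);

// Usage, as in the spec above:
// cy.getBySel('count-crosslinks').should('be.visible');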
@@ -88,6 +88,9 @@ describe('Horizontal FilterBar', () => {
     cy.getBySel('horizontal-filterbar-empty')
       .contains('No filters are currently added to this dashboard.')
       .should('exist');
+    cy.get(nativeFilters.filtersPanel.filterGear).click({
+      force: true,
+    });
     cy.getBySel('filter-bar__create-filter').should('exist');
     cy.getBySel('filterbar-action-buttons').should('exist');
   });
@@ -120,7 +123,7 @@ describe('Horizontal FilterBar', () => {
 
     cy.getBySel('form-item-value').should('have.length', 3);
     cy.viewport(768, 1024);
-    cy.getBySel('form-item-value').should('have.length', 0);
+    cy.getBySel('form-item-value').should('have.length', 1);
     openMoreFilters(false);
     cy.getBySel('form-item-value').should('have.length', 3);
@@ -263,8 +263,10 @@ describe('Native filters', () => {
   });
 
   it('User can expand / retract native filter sidebar on a dashboard', () => {
-    cy.get(nativeFilters.addFilterButton.button).should('not.exist');
     expandFilterOnLeftPanel();
+    cy.get(nativeFilters.filtersPanel.filterGear).click({
+      force: true,
+    });
     cy.get(nativeFilters.filterFromDashboardView.createFilterButton).should(
       'be.visible',
     );
@@ -228,6 +228,9 @@ export function collapseFilterOnLeftPanel() {
  ************************************************************************* */
 export function enterNativeFilterEditModal(waitForDataset = true) {
   interceptDataset();
+  cy.get(nativeFilters.filtersPanel.filterGear).click({
+    force: true,
+  });
   cy.get(nativeFilters.filterFromDashboardView.createFilterButton).click({
     force: true,
   });
@@ -243,11 +246,7 @@ export function enterNativeFilterEditModal(waitForDataset = true) {
  * @summary helper for adding new filter
  ************************************************************************* */
 export function clickOnAddFilterInModal() {
-  cy.get(nativeFilters.addFilterButton.button).first().click();
-  return cy
-    .get(nativeFilters.addFilterButton.dropdownItem)
-    .contains('Filter')
-    .click({ force: true });
+  return cy.get(nativeFilters.modal.addNewFilterButton).click({ force: true });
 }
 
 /** ************************************************************************
@@ -99,16 +99,13 @@ describe('Color scheme control', () => {
     cy.get('.ant-select-selection-item .color-scheme-label').trigger(
       'mouseover',
     );
     cy.get('.color-scheme-tooltip').should('be.visible');
     cy.get('.color-scheme-tooltip').contains('Superset Colors');
     cy.get('.Control[data-test="color_scheme"]').scrollIntoView();
     cy.get('.Control[data-test="color_scheme"] input[type="search"]').focus();
-    cy.focused().type('lyftColors{enter}');
-    cy.get(
-      '.Control[data-test="color_scheme"] .ant-select-selection-item [data-test="lyftColors"]',
-    ).should('exist');
-    cy.get('.ant-select-selection-item .color-scheme-label').trigger(
-      'mouseover',
-    );
+    cy.focused().type('lyftColors');
+    cy.getBySel('lyftColors').should('exist');
+    cy.getBySel('lyftColors').trigger('mouseover');
     cy.get('.color-scheme-tooltip').should('not.exist');
   });
 });
@@ -62,7 +62,7 @@ describe('Visualization > Line', () => {
       'not.exist',
     );
 
-    cy.get('.ant-alert-warning').should('not.exist');
+    cy.get('.antd5-alert-warning').should('not.exist');
   });
 
   it('should allow negative values in Y bounds', () => {
@@ -71,7 +71,7 @@ describe('Visualization > Line', () => {
     cy.get('#controlSections-tab-display').click();
     cy.get('span').contains('Y Axis Bounds').scrollIntoView();
     cy.get('input[placeholder="Min"]').type('-0.1', { delay: 100 });
-    cy.get('.ant-alert-warning').should('not.exist');
+    cy.get('.antd5-alert-warning').should('not.exist');
   });
 
   it('should allow type to search color schemes and apply the scheme', () => {
@@ -94,7 +94,7 @@ export const databasesPage = {
   dbDropdown: '[class="ant-select-selection-search-input"]',
   dbDropdownMenu: '.rc-virtual-list-holder-inner',
   dbDropdownMenuItem: '[class="ant-select-item-option-content"]',
-  infoAlert: '.ant-alert',
+  infoAlert: '.antd5-alert',
   serviceAccountInput: '[name="credentials_info"]',
   connectionStep: {
     modal: '.ant-modal-content',
@@ -103,7 +103,7 @@ export const databasesPage = {
     helperBottom: '.helper-bottom',
     postgresDatabase: '[name="database"]',
     dbInput: '[name="database_name"]',
-    alertMessage: '.ant-alert-message',
+    alertMessage: '.antd5-alert-message',
     errorField: '[role="alert"]',
     uploadJson: '[title="Upload JSON file"]',
     chooseFile: '[class="ant-btn input-upload-btn"]',
@@ -140,7 +140,7 @@ export const sqlLabView = {
   tabsNavList: "[class='ant-tabs-nav-list']",
   tab: "[class='ant-tabs-tab-btn']",
   addTabButton: dataTestLocator('add-tab-icon'),
-  tooltip: '.ant-tooltip-content',
+  tooltip: '.antd5-tooltip-content',
   tabName: '.css-1suejie',
   schemaInput: '[data-test=DatabaseSelector] > :nth-child(2)',
   loadingIndicator: '.Select__loading-indicator',
@@ -166,7 +166,7 @@ export const sqlLabView = {
   renderedTableHeader: '.ReactVirtualized__Table__headerRow',
   renderedTableRow: '.ReactVirtualized__Table__row',
   errorBody: '.error-body',
-  alertMessage: '.ant-alert-message',
+  alertMessage: '.antd5-alert-message',
   historyTable: {
     header: '[role=columnheader]',
     table: '.QueryTable',
@@ -325,7 +325,7 @@ export const nativeFilters = {
     confirmCancelButton: dataTestLocator(
       'native-filter-modal-confirm-cancel-button',
     ),
-    alertXUnsavedFilters: '.ant-alert-message',
+    alertXUnsavedFilters: '.antd5-alert-message',
     tabsList: {
       filterItemsContainer: dataTestLocator('filter-title-container'),
       tabsContainer: '[class="ant-tabs-nav-list"]',
@@ -334,10 +334,8 @@ export const nativeFilters = {
     },
     addFilter: dataTestLocator('add-filter-button'),
     defaultValueCheck: '.ant-checkbox-checked',
-  },
-  addFilterButton: {
-    button: `.ant-modal-content [data-test="new-dropdown-icon"]`,
-    dropdownItem: '.ant-dropdown-menu-item',
+    addNewFilterButton: dataTestLocator('add-new-filter-button'),
+    addNewDividerButton: dataTestLocator('add-new-divider-button'),
   },
   filtersPanel: {
     filterName: dataTestLocator('filters-config-modal__name-input'),
@@ -348,6 +346,7 @@ export const nativeFilters = {
     filterTypeInput: dataTestLocator('filters-config-modal__filter-type'),
     fieldInput: dataTestLocator('field-input'),
     filterTypeItem: '.ant-select-selection-item',
+    filterGear: dataTestLocator('filterbar-orientation-icon'),
   },
   filterFromDashboardView: {
     filterValueInput: '[class="ant-select-selection-search-input"]',
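The selector constants above are being flipped from `.ant-*` to `.antd5-*` class prefixes as part of the incremental Ant Design v5 migration. While both component versions coexist in the tree, a small helper can keep specs resilient to whichever version renders; this is a hypothetical utility, not something in the diff:

// Hypothetical helper: match either prefix while the antd v4 -> v5
// migration is in flight.
export const antdSelector = (suffix: string): string =>
  `.ant-${suffix}, .antd5-${suffix}`;

// e.g. cy.get(antdSelector('alert-message')) matches '.ant-alert-message'
// under antd v4 and '.antd5-alert-message' under antd v5.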
136  superset-frontend/package-lock.json  (generated)
@@ -24,7 +24,7 @@
         "@rjsf/core": "^5.21.1",
         "@rjsf/utils": "^5.19.3",
         "@rjsf/validator-ajv8": "^5.22.3",
-        "@scarf/scarf": "^1.3.0",
+        "@scarf/scarf": "^1.4.0",
         "@superset-ui/chart-controls": "file:./packages/superset-ui-chart-controls",
         "@superset-ui/core": "file:./packages/superset-ui-core",
         "@superset-ui/legacy-plugin-chart-calendar": "file:./plugins/legacy-plugin-chart-calendar",
@@ -101,7 +101,7 @@
         "prop-types": "^15.8.1",
         "query-string": "^6.13.7",
         "rc-trigger": "^5.3.4",
-        "re-resizable": "^6.10.0",
+        "re-resizable": "^6.10.1",
         "react": "^16.13.1",
         "react-ace": "^10.1.0",
         "react-checkbox-tree": "^1.8.0",
@@ -202,7 +202,7 @@
         "@types/react-redux": "^7.1.10",
         "@types/react-router-dom": "^5.3.3",
         "@types/react-syntax-highlighter": "^15.5.13",
-        "@types/react-table": "^7.7.19",
+        "@types/react-table": "^7.7.20",
         "@types/react-transition-group": "^4.4.10",
         "@types/react-ultimate-pagination": "^1.2.4",
         "@types/react-virtualized-auto-sizer": "^1.0.4",
@@ -9680,9 +9680,10 @@
       "license": "MIT"
     },
     "node_modules/@scarf/scarf": {
-      "version": "1.3.0",
-      "hasInstallScript": true,
-      "license": "Apache-2.0"
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
+      "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==",
+      "hasInstallScript": true
     },
     "node_modules/@sigstore/bundle": {
       "version": "2.3.2",
@@ -13980,9 +13981,9 @@
       }
     },
     "node_modules/@types/react-table": {
-      "version": "7.7.19",
-      "dev": true,
-      "license": "MIT",
+      "version": "7.7.20",
+      "resolved": "https://registry.npmjs.org/@types/react-table/-/react-table-7.7.20.tgz",
+      "integrity": "sha512-ahMp4pmjVlnExxNwxyaDrFgmKxSbPwU23sGQw2gJK4EhCvnvmib2s/O/+y1dfV57dXOwpr2plfyBol+vEHbi2w==",
       "dependencies": {
         "@types/react": "*"
       }
@@ -28029,6 +28030,14 @@
         "uglify-js": "^3.1.4"
       }
     },
+    "node_modules/handlebars-group-by": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/handlebars-group-by/-/handlebars-group-by-1.0.1.tgz",
+      "integrity": "sha512-qwVVDVAJMBKdmnQU8jcEXGOu+4/2YJX1RP3pUw6Ee9t6gdkxt+dJEWDudSFTgqb35KXrktw/Nn/Dp3Rx5muHpg==",
+      "engines": {
+        "node": ">= 0.10"
+      }
+    },
     "node_modules/handlebars/node_modules/source-map": {
       "version": "0.6.1",
       "license": "BSD-3-Clause",
@@ -45807,9 +45816,9 @@
       }
     },
     "node_modules/re-resizable": {
-      "version": "6.10.0",
-      "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.0.tgz",
-      "integrity": "sha512-hysSK0xmA5nz24HBVztlk4yCqCLCvS32E6ZpWxVKop9x3tqCa4yAj1++facrmkOf62JsJHjmjABdKxXofYioCw==",
+      "version": "6.10.1",
+      "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.1.tgz",
+      "integrity": "sha512-m33nSWRH57UZLmep5M/LatkZ2NRqimVD/bOOpvymw5Zf33+eTSEixsUugscOZzAtK0/nx+OSuOf8VbKJx/4ptw==",
       "peerDependencies": {
         "react": "^16.13.1 || ^17.0.0 || ^18.0.0",
         "react-dom": "^16.13.1 || ^17.0.0 || ^18.0.0"
@@ -53067,14 +53076,6 @@
         "node": ">=4"
       }
     },
-    "node_modules/viewport-mercator-project": {
-      "version": "6.2.3",
-      "license": "MIT",
-      "dependencies": {
-        "@babel/runtime": "^7.0.0",
-        "gl-matrix": "^3.0.0"
-      }
-    },
     "node_modules/vlq": {
       "version": "0.2.3",
       "license": "MIT"
@@ -57951,10 +57952,10 @@
       "version": "0.18.25",
       "license": "Apache-2.0",
       "dependencies": {
+        "@math.gl/web-mercator": "^4.1.0",
         "prop-types": "^15.8.1",
-        "react-map-gl": "^6.1.19",
-        "supercluster": "^8.0.1",
-        "viewport-mercator-project": "^6.1.1"
+        "supercluster": "^8.0.1"
       },
       "peerDependencies": {
         "@superset-ui/chart-controls": "*",
@@ -57963,6 +57964,30 @@
         "react": "^15 || ^16"
       }
     },
+    "plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/core": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/@math.gl/core/-/core-4.1.0.tgz",
+      "integrity": "sha512-FrdHBCVG3QdrworwrUSzXIaK+/9OCRLscxI2OUy6sLOHyHgBMyfnEGs99/m3KNvs+95BsnQLWklVfpKfQzfwKA==",
+      "license": "MIT",
+      "dependencies": {
+        "@math.gl/types": "4.1.0"
+      }
+    },
+    "plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/types": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/@math.gl/types/-/types-4.1.0.tgz",
+      "integrity": "sha512-clYZdHcmRvMzVK5fjeDkQlHUzXQSNdZ7s4xOqC3nJPgz4C/TZkUecTo9YS4PruZqtDda/ag4erndP0MIn40dGA==",
+      "license": "MIT"
+    },
+    "plugins/legacy-plugin-chart-map-box/node_modules/@math.gl/web-mercator": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-4.1.0.tgz",
+      "integrity": "sha512-HZo3vO5GCMkXJThxRJ5/QYUYRr3XumfT8CzNNCwoJfinxy5NtKUd7dusNTXn7yJ40UoB8FMIwkVwNlqaiRZZAw==",
+      "license": "MIT",
+      "dependencies": {
+        "@math.gl/core": "4.1.0"
+      }
+    },
     "plugins/legacy-plugin-chart-map-box/node_modules/kdbush": {
       "version": "4.0.2",
       "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz",
@@ -58552,6 +58577,7 @@
       "license": "Apache-2.0",
       "dependencies": {
         "handlebars": "^4.7.8",
+        "handlebars-group-by": "^1.0.1",
         "just-handlebars-helpers": "^1.0.19"
       },
       "devDependencies": {
@@ -58655,13 +58681,6 @@
         "react-dom": "^16.13.1"
       }
     },
-    "plugins/plugin-chart-table/node_modules/@types/react-table": {
-      "version": "7.7.20",
-      "license": "MIT",
-      "dependencies": {
-        "@types/react": "*"
-      }
-    },
     "plugins/plugin-chart-table/node_modules/d3-array": {
       "version": "2.12.1",
       "license": "BSD-3-Clause",
@@ -65098,7 +65117,9 @@
       }
     },
     "@scarf/scarf": {
-      "version": "1.3.0"
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
+      "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ=="
     },
     "@sigstore/bundle": {
       "version": "2.3.2",
@@ -68666,12 +68687,33 @@
     "@superset-ui/legacy-plugin-chart-map-box": {
       "version": "file:plugins/legacy-plugin-chart-map-box",
       "requires": {
+        "@math.gl/web-mercator": "^4.1.0",
         "prop-types": "^15.8.1",
-        "react-map-gl": "^6.1.19",
-        "supercluster": "^8.0.1",
-        "viewport-mercator-project": "^6.1.1"
+        "supercluster": "^8.0.1"
       },
       "dependencies": {
+        "@math.gl/core": {
+          "version": "4.1.0",
+          "resolved": "https://registry.npmjs.org/@math.gl/core/-/core-4.1.0.tgz",
+          "integrity": "sha512-FrdHBCVG3QdrworwrUSzXIaK+/9OCRLscxI2OUy6sLOHyHgBMyfnEGs99/m3KNvs+95BsnQLWklVfpKfQzfwKA==",
+          "requires": {
+            "@math.gl/types": "4.1.0"
+          }
+        },
+        "@math.gl/types": {
+          "version": "4.1.0",
+          "resolved": "https://registry.npmjs.org/@math.gl/types/-/types-4.1.0.tgz",
+          "integrity": "sha512-clYZdHcmRvMzVK5fjeDkQlHUzXQSNdZ7s4xOqC3nJPgz4C/TZkUecTo9YS4PruZqtDda/ag4erndP0MIn40dGA=="
+        },
+        "@math.gl/web-mercator": {
+          "version": "4.1.0",
+          "resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-4.1.0.tgz",
+          "integrity": "sha512-HZo3vO5GCMkXJThxRJ5/QYUYRr3XumfT8CzNNCwoJfinxy5NtKUd7dusNTXn7yJ40UoB8FMIwkVwNlqaiRZZAw==",
+          "requires": {
+            "@math.gl/core": "4.1.0"
+          }
+        },
         "kdbush": {
           "version": "4.0.2",
           "resolved": "https://registry.npmjs.org/kdbush/-/kdbush-4.0.2.tgz",
@@ -69140,6 +69182,7 @@
         "@types/jest": "^29.5.12",
         "@types/lodash": "^4.17.7",
         "handlebars": "^4.7.8",
+        "handlebars-group-by": "*",
         "jest": "^29.7.0",
         "just-handlebars-helpers": "^1.0.19"
       },
@@ -69194,12 +69237,6 @@
         "xss": "^1.0.15"
       },
       "dependencies": {
-        "@types/react-table": {
-          "version": "7.7.20",
-          "requires": {
-            "@types/react": "*"
-          }
-        },
         "d3-array": {
           "version": "2.12.1",
           "requires": {
@@ -70333,8 +70370,9 @@
       }
     },
     "@types/react-table": {
-      "version": "7.7.19",
-      "dev": true,
+      "version": "7.7.20",
+      "resolved": "https://registry.npmjs.org/@types/react-table/-/react-table-7.7.20.tgz",
+      "integrity": "sha512-ahMp4pmjVlnExxNwxyaDrFgmKxSbPwU23sGQw2gJK4EhCvnvmib2s/O/+y1dfV57dXOwpr2plfyBol+vEHbi2w==",
       "requires": {
        "@types/react": "^16.9.53"
      }
@@ -79792,6 +79830,11 @@
       }
     }
   },
+    "handlebars-group-by": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/handlebars-group-by/-/handlebars-group-by-1.0.1.tgz",
+      "integrity": "sha512-qwVVDVAJMBKdmnQU8jcEXGOu+4/2YJX1RP3pUw6Ee9t6gdkxt+dJEWDudSFTgqb35KXrktw/Nn/Dp3Rx5muHpg=="
+    },
     "har-schema": {
       "version": "2.0.0",
       "dev": true
@@ -90684,9 +90727,9 @@
       }
     },
     "re-resizable": {
-      "version": "6.10.0",
-      "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.0.tgz",
-      "integrity": "sha512-hysSK0xmA5nz24HBVztlk4yCqCLCvS32E6ZpWxVKop9x3tqCa4yAj1++facrmkOf62JsJHjmjABdKxXofYioCw==",
+      "version": "6.10.1",
+      "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-6.10.1.tgz",
+      "integrity": "sha512-m33nSWRH57UZLmep5M/LatkZ2NRqimVD/bOOpvymw5Zf33+eTSEixsUugscOZzAtK0/nx+OSuOf8VbKJx/4ptw==",
       "requires": {}
     },
     "react": {
@@ -95387,13 +95430,6 @@
         "unist-util-stringify-position": "^3.0.0"
       }
     },
-    "viewport-mercator-project": {
-      "version": "6.2.3",
-      "requires": {
-        "@babel/runtime": "^7.0.0",
-        "gl-matrix": "^3.0.0"
-      }
-    },
     "vlq": {
       "version": "0.2.3"
     },
@@ -90,7 +90,7 @@
     "@rjsf/core": "^5.21.1",
     "@rjsf/utils": "^5.19.3",
     "@rjsf/validator-ajv8": "^5.22.3",
-    "@scarf/scarf": "^1.3.0",
+    "@scarf/scarf": "^1.4.0",
     "@superset-ui/chart-controls": "file:./packages/superset-ui-chart-controls",
     "@superset-ui/core": "file:./packages/superset-ui-core",
     "@superset-ui/legacy-plugin-chart-calendar": "file:./plugins/legacy-plugin-chart-calendar",
@@ -167,7 +167,7 @@
     "prop-types": "^15.8.1",
     "query-string": "^6.13.7",
     "rc-trigger": "^5.3.4",
-    "re-resizable": "^6.10.0",
+    "re-resizable": "^6.10.1",
     "react": "^16.13.1",
     "react-ace": "^10.1.0",
     "react-checkbox-tree": "^1.8.0",
@@ -268,7 +268,7 @@
     "@types/react-redux": "^7.1.10",
     "@types/react-router-dom": "^5.3.3",
     "@types/react-syntax-highlighter": "^15.5.13",
-    "@types/react-table": "^7.7.19",
+    "@types/react-table": "^7.7.20",
     "@types/react-transition-group": "^4.4.10",
     "@types/react-ultimate-pagination": "^1.2.4",
     "@types/react-virtualized-auto-sizer": "^1.0.4",
@@ -18,9 +18,8 @@
  */
 import { CSSProperties } from 'react';
 import { kebabCase } from 'lodash';
-import { TooltipPlacement } from 'antd/lib/tooltip';
 import { t } from '@superset-ui/core';
-import { Tooltip, TooltipProps } from './Tooltip';
+import { Tooltip, TooltipProps, TooltipPlacement } from './Tooltip';
 
 export interface InfoTooltipWithTriggerProps {
   label?: string;
@@ -17,48 +17,41 @@
  * under the License.
  */
 
-import { useTheme, css } from '@superset-ui/core';
-import { Tooltip as BaseTooltip } from 'antd';
-import type { TooltipProps } from 'antd/lib/tooltip';
-import { Global } from '@emotion/react';
+import { useTheme } from '@superset-ui/core';
+import { Tooltip as BaseTooltip } from 'antd-v5';
+import {
+  TooltipProps as BaseTooltipProps,
+  TooltipPlacement as BaseTooltipPlacement,
+} from 'antd-v5/lib/tooltip';
 
-export type { TooltipProps } from 'antd/lib/tooltip';
+export type TooltipProps = BaseTooltipProps;
+export type TooltipPlacement = BaseTooltipPlacement;
 
-export const Tooltip = ({ overlayStyle, color, ...props }: TooltipProps) => {
+export const Tooltip = ({
+  overlayStyle,
+  color,
+  ...props
+}: BaseTooltipProps) => {
   const theme = useTheme();
   const defaultColor = `${theme.colors.grayscale.dark2}e6`;
   return (
-    <>
-      {/* Safari hack to hide browser default tooltips */}
-      <Global
-        styles={css`
-          .ant-tooltip-open {
-            display: inline-block;
-            &::after {
-              content: '';
-              display: block;
-            }
-          }
-        `}
-      />
-      <BaseTooltip
-        overlayStyle={{
-          fontSize: theme.typography.sizes.s,
-          lineHeight: '1.6',
-          maxWidth: theme.gridUnit * 62,
-          minWidth: theme.gridUnit * 30,
-          ...overlayStyle,
-        }}
-        // make the tooltip display closer to the label
-        align={{ offset: [0, 1] }}
-        color={defaultColor || color}
-        trigger="hover"
-        placement="bottom"
-        // don't allow hovering over the tooltip
-        mouseLeaveDelay={0}
-        {...props}
-      />
-    </>
+    <BaseTooltip
+      overlayStyle={{
+        fontSize: theme.typography.sizes.s,
+        lineHeight: '1.6',
+        maxWidth: theme.gridUnit * 62,
+        minWidth: theme.gridUnit * 30,
+        ...overlayStyle,
+      }}
+      // make the tooltip display closer to the label
+      align={{ offset: [0, 1] }}
+      color={defaultColor || color}
+      trigger="hover"
+      placement="bottom"
+      // don't allow hovering over the tooltip
+      mouseLeaveDelay={0}
+      {...props}
+    />
   );
 };
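With `TooltipPlacement` now re-exported from the wrapper, downstream components can stay fully decoupled from the antd package. A minimal consumer sketch (the `Hint` component and its props are invented for illustration, not part of this diff):

import { Tooltip, TooltipPlacement } from './Tooltip';

interface HintProps {
  text: string;
  placement?: TooltipPlacement;
}

// Call sites import placement types from the wrapper, so a future antd
// upgrade only needs to touch Tooltip.tsx, not every consumer.
export const Hint = ({ text, placement = 'top' }: HintProps) => (
  <Tooltip title={text} placement={placement}>
    <span>?</span>
  </Tooltip>
);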
@@ -262,6 +262,7 @@ export interface BaseControlConfig<
     props: ControlPanelsContainerProps,
     controlData: AnyDict,
   ) => boolean;
+  disableStash?: boolean;
   hidden?:
     | boolean
     | ((props: ControlPanelsContainerProps, controlData: AnyDict) => boolean);
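For context, here is a hypothetical control definition using the two optional fields visible in this hunk; the control name, the predicate body, and the assumed meaning of `disableStash` are my own illustration, not taken from the source:

const showTotals = {
  type: 'CheckboxControl',
  label: 'Show totals',
  // Assumption: disableStash opts this control out of form-data stashing;
  // the flag's actual semantics live elsewhere in the codebase.
  disableStash: true,
  // hidden accepts either a static boolean or a predicate over panel props.
  hidden: (props: ControlPanelsContainerProps, controlData: AnyDict) =>
    Boolean(controlData?.is_embedded),
};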
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { boxplotOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   time_grain_sqla: 'P1Y',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { contributionOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { flattenOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -17,7 +17,7 @@
  * under the License.
  */
 import { histogramOperator } from '@superset-ui/chart-controls';
-import { SqlaFormData } from '@superset-ui/core';
+import { SqlaFormData, VizType } from '@superset-ui/core';
 import { omit } from 'lodash';
 
 const formData: SqlaFormData = {
@@ -26,7 +26,7 @@ const formData: SqlaFormData = {
   cumulative: true,
   normalize: true,
   groupby: ['country', 'region'],
-  viz_type: 'histogram',
+  viz_type: VizType.LegacyHistogram,
   datasource: 'foo',
 };
 

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { pivotOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
   show_empty_columns: true,
 };
 const queryObject: QueryObject = {

@@ -16,7 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { DTTM_ALIAS, QueryObject, SqlaFormData } from '@superset-ui/core';
+import {
+  DTTM_ALIAS,
+  QueryObject,
+  SqlaFormData,
+  VizType,
+} from '@superset-ui/core';
 import { prophetOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +32,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   time_grain_sqla: 'P1Y',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { rankOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -26,7 +26,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
   truncate_metric: true,
 };
 const queryObject: QueryObject = {

@@ -16,7 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { ComparisonType, QueryObject, SqlaFormData } from '@superset-ui/core';
+import {
+  ComparisonType,
+  QueryObject,
+  SqlaFormData,
+  VizType,
+} from '@superset-ui/core';
 import { renameOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -26,7 +31,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
   truncate_metric: true,
 };
 const queryObject: QueryObject = {

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { resampleOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { rollingWindowOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { sortOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import { timeCompareOperator } from '@superset-ui/chart-controls';
 
 const formData: SqlaFormData = {
@@ -27,7 +27,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const queryObject: QueryObject = {
   metrics: [

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryObject, SqlaFormData } from '@superset-ui/core';
+import { QueryObject, SqlaFormData, VizType } from '@superset-ui/core';
 import {
   timeCompareOperator,
   timeComparePivotOperator,
@@ -30,7 +30,7 @@ const formData: SqlaFormData = {
   time_range: '2015 : 2016',
   granularity: 'month',
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
   show_empty_columns: true,
 };
 const queryObject: QueryObject = {

@@ -16,12 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryFormData, QueryFormMetric } from '@superset-ui/core';
+import { QueryFormData, QueryFormMetric, VizType } from '@superset-ui/core';
 import { extractExtraMetrics } from '@superset-ui/chart-controls';
 
 const baseFormData: QueryFormData = {
   datasource: 'dummy',
-  viz_type: 'table',
+  viz_type: VizType.Table,
   metrics: ['a', 'b'],
   columns: ['foo', 'bar'],
   limit: 100,

@@ -17,11 +17,11 @@
  * under the License.
  */
 import { isDerivedSeries } from '@superset-ui/chart-controls';
-import { SqlaFormData, ComparisonType } from '@superset-ui/core';
+import { SqlaFormData, ComparisonType, VizType } from '@superset-ui/core';
 
 const formData: SqlaFormData = {
   datasource: 'foo',
-  viz_type: 'table',
+  viz_type: VizType.Table,
 };
 const series = {
   id: 'metric__1 month ago',

@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+import { VizType } from '@superset-ui/core';
 import { displayTimeRelatedControls } from '../../src';
 
 const mockData = {
@@ -35,7 +36,7 @@ const mockData = {
   exportState: {},
   form_data: {
     datasource: '22__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
   },
 };
 

@@ -16,12 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { QueryFormData } from '@superset-ui/core';
+import { QueryFormData, VizType } from '@superset-ui/core';
 import { getStandardizedControls } from '../../src';
 
 const formData: QueryFormData = {
   datasource: '30__table',
-  viz_type: 'table',
+  viz_type: VizType.Table,
   standardizedFormData: {
     controls: {
       metrics: ['count(*)', 'sum(sales)'],
@@ -34,7 +34,7 @@ const formData: QueryFormData = {
 test('without standardizedFormData', () => {
   getStandardizedControls().setStandardizedControls({
     datasource: '30__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
   });
   expect(getStandardizedControls().controls).toEqual({
     metrics: [],

@@ -41,6 +41,7 @@ export { default as ChartDataProvider } from './components/ChartDataProvider';
 export * from './types/Base';
 export * from './types/TransformFunction';
 export * from './types/QueryResponse';
+export * from './types/VizType';
 
 export { default as __hack_reexport_chart_Base } from './types/Base';
 export { default as __hack_reexport_chart_TransformFunction } from './types/TransformFunction';
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+export enum VizType {
+  Area = 'echarts_area',
+  Bar = 'echarts_timeseries_bar',
+  BigNumber = 'big_number',
+  BigNumberTotal = 'big_number_total',
+  BigNumberPeriodOverPeriod = 'pop_kpi',
+  BoxPlot = 'box_plot',
+  Bubble = 'bubble_v2',
+  Bullet = 'bullet',
+  Calendar = 'cal_heatmap',
+  Chord = 'chord',
+  Compare = 'compare',
+  CountryMap = 'country_map',
+  DistBar = 'dist_bar',
+  EventFlow = 'event_flow',
+  Funnel = 'funnel',
+  Gauge = 'gauge_chart',
+  Graph = 'graph_chart',
+  Handlebars = 'handlebars',
+  Heatmap = 'heatmap_v2',
+  Histogram = 'histogram_v2',
+  Horizon = 'horizon',
+  LegacyArea = 'area',
+  LegacyBar = 'bar',
+  LegacyBubble = 'bubble',
+  LegacyHeatmap = 'heatmap',
+  LegacyHistogram = 'histogram',
+  LegacyLine = 'line',
+  LegacySankey = 'sankey',
+  Line = 'echarts_timeseries_line',
+  MapBox = 'mapbox',
+  MixedTimeseries = 'mixed_timeseries',
+  PairedTTest = 'paired_ttest',
+  ParallelCoordinates = 'para',
+  Partition = 'partition',
+  Pie = 'pie',
+  PivotTable = 'pivot_table_v2',
+  Radar = 'radar',
+  Rose = 'rose',
+  Sankey = 'sankey_v2',
+  Scatter = 'echarts_timeseries_scatter',
+  SmoothLine = 'echarts_timeseries_smooth',
+  Step = 'echarts_timeseries_step',
+  Sunburst = 'sunburst_v2',
+  Table = 'table',
+  TimePivot = 'time_pivot',
+  TimeTable = 'time_table',
+  Timeseries = 'echarts_timeseries',
+  Tree = 'tree_chart',
+  Treemap = 'treemap_v2',
+  Waterfall = 'waterfall',
+  WordCloud = 'word_cloud',
+  WorldMap = 'world_map',
+}
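Because `VizType` is a string enum, each member compiles to exactly the literal it replaces, so serialized form data stays wire-compatible; the gain is autocomplete and compile-time checking. A small sketch of the effect:

import { VizType } from '@superset-ui/core';

// Serializes to the same JSON as the old string literal:
const formData = { datasource: '3__table', viz_type: VizType.Table };
console.log(JSON.stringify(formData));
// -> {"datasource":"3__table","viz_type":"table"}

// A typo such as viz_type: 'tabel' used to slip through as a plain string;
// a misspelled member like VizType.Tabel is now a compile-time error.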
@@ -28,6 +28,7 @@ import {
|
||||
getChartBuildQueryRegistry,
|
||||
getChartMetadataRegistry,
|
||||
ChartMetadata,
|
||||
VizType,
|
||||
} from '@superset-ui/core';
|
||||
|
||||
import { LOGIN_GLOB } from '../fixtures/constants';
|
||||
@@ -86,13 +87,13 @@ describe('ChartClient', () => {
|
||||
sliceId,
|
||||
formData: {
|
||||
granularity: 'second',
|
||||
viz_type: 'bar',
|
||||
viz_type: VizType.LegacyBar,
|
||||
},
|
||||
}),
|
||||
).resolves.toEqual({
|
||||
...sankeyFormData,
|
||||
granularity: 'second',
|
||||
viz_type: 'bar',
|
||||
viz_type: VizType.LegacyBar,
|
||||
});
|
||||
});
|
||||
it('returns promise of formData if only formData was given', () =>
|
||||
@@ -101,13 +102,13 @@ describe('ChartClient', () => {
|
||||
formData: {
|
||||
datasource: '1__table',
|
||||
granularity: 'minute',
|
||||
viz_type: 'line',
|
||||
viz_type: VizType.LegacyLine,
|
||||
},
|
||||
}),
|
||||
).resolves.toEqual({
|
||||
datasource: '1__table',
|
||||
granularity: 'minute',
|
||||
viz_type: 'line',
|
||||
viz_type: VizType.LegacyLine,
|
||||
}));
|
||||
it('rejects if none of sliceId or formData is specified', () =>
|
||||
expect(
|
||||
@@ -120,12 +121,12 @@ describe('ChartClient', () => {
|
||||
describe('.loadQueryData(formData, options)', () => {
|
||||
it('returns a promise of query data for known chart type', () => {
|
||||
getChartMetadataRegistry().registerValue(
|
||||
'word_cloud',
|
||||
VizType.WordCloud,
|
||||
new ChartMetadata({ name: 'Word Cloud', thumbnail: '' }),
|
||||
);
|
||||
|
||||
getChartBuildQueryRegistry().registerValue(
|
||||
'word_cloud',
|
||||
VizType.WordCloud,
|
||||
(formData: QueryFormData) => buildQueryContext(formData),
|
||||
);
|
||||
fetchMock.post('glob:*/api/v1/chart/data', [
|
||||
@@ -138,7 +139,7 @@ describe('ChartClient', () => {
|
||||
return expect(
|
||||
chartClient.loadQueryData({
|
||||
granularity: 'minute',
|
||||
viz_type: 'word_cloud',
|
||||
viz_type: VizType.WordCloud,
|
||||
datasource: '1__table',
|
||||
}),
|
||||
).resolves.toEqual([
|
||||
@@ -255,7 +256,7 @@ describe('ChartClient', () => {
|
||||
it('loadAllDataNecessaryForAChart', () => {
|
||||
fetchMock.get(`glob:*/api/v1/form_data/?slice_id=${sliceId}`, {
|
||||
granularity: 'minute',
|
||||
viz_type: 'line',
|
||||
viz_type: VizType.LegacyLine,
|
||||
datasource: '1__table',
|
||||
color: 'living-coral',
|
||||
});
|
||||
@@ -275,12 +276,12 @@ describe('ChartClient', () => {
|
||||
});
|
||||
|
||||
getChartMetadataRegistry().registerValue(
|
||||
'line',
|
||||
VizType.LegacyLine,
|
||||
new ChartMetadata({ name: 'Line', thumbnail: '.gif' }),
|
||||
);
|
||||
|
||||
getChartBuildQueryRegistry().registerValue(
|
||||
'line',
|
||||
VizType.LegacyLine,
|
||||
(formData: QueryFormData) => buildQueryContext(formData),
|
||||
);
|
||||
|
||||
@@ -296,7 +297,7 @@ describe('ChartClient', () => {
|
||||
},
|
||||
formData: {
|
||||
granularity: 'minute',
|
||||
viz_type: 'line',
|
||||
viz_type: VizType.LegacyLine,
|
||||
datasource: '1__table',
|
||||
color: 'living-coral',
|
||||
},
|
||||
|
||||
@@ -19,11 +19,11 @@
|
||||
|
||||
/* eslint sort-keys: 'off' */
|
||||
/** The form data defined here is based on default visualizations packaged with Apache Superset */
|
||||
import { TimeGranularity } from '@superset-ui/core';
|
||||
import { TimeGranularity, VizType } from '@superset-ui/core';
|
||||
|
||||
export const bigNumberFormData = {
|
||||
datasource: '3__table',
|
||||
viz_type: 'big_number',
|
||||
viz_type: VizType.BigNumber,
|
||||
slice_id: 54,
|
||||
granularity_sqla: 'ds',
|
||||
time_grain_sqla: TimeGranularity.DAY,
|
||||
@@ -39,7 +39,7 @@ export const bigNumberFormData = {
|
||||
|
||||
export const wordCloudFormData = {
|
||||
datasource: '3__table',
|
||||
viz_type: 'word_cloud',
|
||||
viz_type: VizType.WordCloud,
|
||||
slice_id: 60,
|
||||
url_params: {},
|
||||
granularity_sqla: 'ds',
|
||||
@@ -56,7 +56,7 @@ export const wordCloudFormData = {
|
||||
|
||||
export const sunburstFormData = {
|
||||
datasource: '2__table',
|
||||
viz_type: 'sunburst_v2',
|
||||
viz_type: VizType.Sunburst,
|
||||
slice_id: 47,
|
||||
url_params: {},
|
||||
granularity_sqla: 'year',
|
||||
@@ -71,7 +71,7 @@ export const sunburstFormData = {
|
||||
|
||||
export const sankeyFormData = {
|
||||
datasource: '1__table',
|
||||
viz_type: 'sankey',
|
||||
viz_type: VizType.LegacySankey,
|
||||
slice_id: 1,
|
||||
url_params: {},
|
||||
granularity_sqla: null,
|
||||
|
||||
@@ -31,6 +31,7 @@ import {
|
||||
QueryFormData,
|
||||
DatasourceType,
|
||||
supersetTheme,
|
||||
VizType,
|
||||
} from '@superset-ui/core';
|
||||
|
||||
describe('ChartPlugin', () => {
|
||||
@@ -59,7 +60,7 @@ describe('ChartPlugin', () => {
|
||||
const FORM_DATA = {
|
||||
datasource: '1__table',
|
||||
granularity: 'day',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
};
|
||||
|
||||
it('creates a new plugin', () => {
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
* under the License.
|
||||
*/
|
||||
import fetchMock from 'fetch-mock';
|
||||
import { VizType } from '@superset-ui/core';
|
||||
import { getFormData } from '../../../../src/query/api/legacy';
|
||||
|
||||
import setupClientForTest from '../setupClientForTest';
|
||||
@@ -28,7 +29,7 @@ describe('getFormData()', () => {
|
||||
|
||||
const mockData = {
|
||||
datasource: '1__table',
|
||||
viz_type: 'sankey',
|
||||
viz_type: VizType.LegacySankey,
|
||||
slice_id: 1,
|
||||
url_params: {},
|
||||
granularity_sqla: null,
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
* under the License.
|
||||
*/
|
||||
import fetchMock from 'fetch-mock';
|
||||
import { buildQueryContext, ApiV1 } from '@superset-ui/core';
|
||||
import { buildQueryContext, ApiV1, VizType } from '@superset-ui/core';
|
||||
import setupClientForTest from '../setupClientForTest';
|
||||
|
||||
describe('API v1 > getChartData()', () => {
|
||||
@@ -39,7 +39,7 @@ describe('API v1 > getChartData()', () => {
|
||||
const result = await ApiV1.getChartData(
|
||||
buildQueryContext({
|
||||
granularity: 'minute',
|
||||
viz_type: 'word_cloud',
|
||||
viz_type: VizType.WordCloud,
|
||||
datasource: '1__table',
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { buildQueryContext } from '@superset-ui/core';
|
||||
import { buildQueryContext, VizType } from '@superset-ui/core';
|
||||
import * as queryModule from '../../src/query/normalizeTimeColumn';
|
||||
|
||||
describe('buildQueryContext', () => {
|
||||
@@ -24,7 +24,7 @@ describe('buildQueryContext', () => {
|
||||
const queryContext = buildQueryContext({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
});
|
||||
expect(queryContext.datasource.id).toBe(5);
|
||||
expect(queryContext.datasource.type).toBe('table');
|
||||
@@ -37,7 +37,7 @@ describe('buildQueryContext', () => {
|
||||
{
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
source: 'source_column',
|
||||
source_category: 'source_category_column',
|
||||
target: 'target_column',
|
||||
@@ -75,7 +75,7 @@ describe('buildQueryContext', () => {
|
||||
{
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
source: 'source_column',
|
||||
source_category: 'source_category_column',
|
||||
target: 'target_column',
|
||||
@@ -103,7 +103,7 @@ describe('buildQueryContext', () => {
|
||||
const queryContext = buildQueryContext(
|
||||
{
|
||||
datasource: '5__table',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
},
|
||||
() => [
|
||||
{
|
||||
@@ -133,7 +133,7 @@ describe('buildQueryContext', () => {
|
||||
buildQueryContext(
|
||||
{
|
||||
datasource: '5__table',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
x_axis: 'axis',
|
||||
},
|
||||
() => [{}],
|
||||
|
||||
@@ -25,6 +25,7 @@ import {
|
||||
AnnotationType,
|
||||
buildQueryObject,
|
||||
QueryObject,
|
||||
VizType,
|
||||
} from '@superset-ui/core';
|
||||
|
||||
describe('buildQueryObject', () => {
|
||||
@@ -34,7 +35,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
});
|
||||
expect(query.granularity).toEqual('ds');
|
||||
});
|
||||
@@ -43,7 +44,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
metric: 'sum__num',
|
||||
secondary_metric: 'avg__num',
|
||||
});
|
||||
@@ -54,7 +55,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
extra_filters: [{ col: 'abc', op: '==', val: 'qwerty' }],
|
||||
adhoc_filters: [
|
||||
{
|
||||
@@ -88,7 +89,7 @@ describe('buildQueryObject', () => {
|
||||
{
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
my_custom_metric_control: 'sum__num',
|
||||
},
|
||||
{ my_custom_metric_control: 'metrics' },
|
||||
@@ -101,7 +102,7 @@ describe('buildQueryObject', () => {
|
||||
{
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
metrics: ['sum__num'],
|
||||
my_custom_metric_control: 'avg__num',
|
||||
},
|
||||
@@ -115,7 +116,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
limit: series_limit,
|
||||
});
|
||||
expect(query.series_limit).toEqual(series_limit);
|
||||
@@ -126,7 +127,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
series_limit,
|
||||
});
|
||||
expect(query.series_limit).toEqual(series_limit);
|
||||
@@ -137,7 +138,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
order_desc: orderDesc,
|
||||
});
|
||||
expect(query.order_desc).toEqual(orderDesc);
|
||||
@@ -148,7 +149,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
timeseries_limit_metric: metric,
|
||||
});
|
||||
expect(query.series_limit_metric).toEqual(metric);
|
||||
@@ -159,7 +160,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'pivot_table_v2',
|
||||
viz_type: VizType.PivotTable,
|
||||
series_limit_metric: metric,
|
||||
});
|
||||
expect(query.series_limit_metric).toEqual(metric);
|
||||
@@ -170,7 +171,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'pivot_table_v2',
|
||||
viz_type: VizType.PivotTable,
|
||||
series_limit_metric: metric,
|
||||
});
|
||||
expect(query.series_limit_metric).toEqual(undefined);
|
||||
@@ -180,7 +181,7 @@ describe('buildQueryObject', () => {
|
||||
const baseQuery = {
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
row_limit: null,
|
||||
};
|
||||
|
||||
@@ -267,7 +268,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
annotation_layers: annotationLayers,
|
||||
});
|
||||
expect(query.annotation_layers).toEqual(annotationLayers);
|
||||
@@ -278,7 +279,7 @@ describe('buildQueryObject', () => {
|
||||
buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
url_params: { abc: '123' },
|
||||
}).url_params,
|
||||
).toEqual({ abc: '123' });
|
||||
@@ -286,7 +287,7 @@ describe('buildQueryObject', () => {
|
||||
buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
// @ts-expect-error
|
||||
url_params: null,
|
||||
}).url_params,
|
||||
@@ -298,7 +299,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity,
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
});
|
||||
expect(query.granularity).toEqual(granularity);
|
||||
});
|
||||
@@ -308,7 +309,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: granularity,
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
});
|
||||
expect(query.granularity).toEqual(granularity);
|
||||
});
|
||||
@@ -320,7 +321,7 @@ describe('buildQueryObject', () => {
|
||||
query = buildQueryObject({
|
||||
datasource: '5__table',
|
||||
granularity_sqla: 'ds',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
custom_params: customParams,
|
||||
});
|
||||
expect(query.custom_params).toEqual(customParams);
|
||||
|
||||
@@ -16,11 +16,13 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { isXAxisSet } from '@superset-ui/core';
|
||||
import { isXAxisSet, VizType } from '@superset-ui/core';
|
||||
|
||||
test('isXAxisSet', () => {
|
||||
expect(isXAxisSet({ datasource: '123', viz_type: 'table' })).not.toBeTruthy();
|
||||
expect(
|
||||
isXAxisSet({ datasource: '123', viz_type: 'table', x_axis: 'axis' }),
|
||||
isXAxisSet({ datasource: '123', viz_type: VizType.Table }),
|
||||
).not.toBeTruthy();
|
||||
expect(
|
||||
isXAxisSet({ datasource: '123', viz_type: VizType.Table, x_axis: 'axis' }),
|
||||
).toBeTruthy();
|
||||
});
|
||||
|
||||
@@ -16,13 +16,13 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { normalizeOrderBy, QueryObject } from '@superset-ui/core';
|
||||
import { normalizeOrderBy, QueryObject, VizType } from '@superset-ui/core';
|
||||
|
||||
describe('normalizeOrderBy', () => {
|
||||
it('should not change original queryObject when orderby populated', () => {
|
||||
const query: QueryObject = {
|
||||
datasource: '5__table',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
time_range: '1 year ago : 2013',
|
||||
orderby: [['count(*)', true]],
|
||||
};
|
||||
@@ -32,7 +32,7 @@ describe('normalizeOrderBy', () => {
|
||||
it('has series_limit_metric in queryObject', () => {
|
||||
const query: QueryObject = {
|
||||
datasource: '5__table',
|
||||
viz_type: 'table',
|
||||
viz_type: VizType.Table,
|
||||
time_range: '1 year ago : 2013',
|
       metrics: ['count(*)'],
       series_limit_metric: {
@@ -50,7 +50,7 @@ describe('normalizeOrderBy', () => {
     expect(expectedQueryObject).not.toHaveProperty('order_desc');
     expect(expectedQueryObject).toEqual({
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       metrics: ['count(*)'],
       orderby: [
@@ -72,7 +72,7 @@ describe('normalizeOrderBy', () => {
   it('should transform legacy_order_by in queryObject', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       metrics: ['count(*)'],
       legacy_order_by: {
@@ -90,7 +90,7 @@ describe('normalizeOrderBy', () => {
     expect(expectedQueryObject).not.toHaveProperty('order_desc');
     expect(expectedQueryObject).toEqual({
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       metrics: ['count(*)'],
       orderby: [
@@ -112,7 +112,7 @@ describe('normalizeOrderBy', () => {
   it('has metrics in queryObject', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       metrics: ['count(*)'],
       order_desc: true,
@@ -122,7 +122,7 @@ describe('normalizeOrderBy', () => {
     expect(expectedQueryObject).not.toHaveProperty('order_desc');
     expect(expectedQueryObject).toEqual({
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       metrics: ['count(*)'],
       orderby: [['count(*)', false]],
@@ -132,7 +132,7 @@ describe('normalizeOrderBy', () => {
   it('should not change', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
     };
     expect(normalizeOrderBy(query)).toEqual(query);
@@ -141,7 +141,7 @@ describe('normalizeOrderBy', () => {
   it('remove empty orderby', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       orderby: [],
     };
@@ -151,7 +51,7 @@ describe('normalizeOrderBy', () => {
   it('remove orderby with an empty array', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       orderby: [[]],
     };
@@ -161,7 +161,7 @@ describe('normalizeOrderBy', () => {
   it('remove orderby with an empty metric', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       orderby: [['', true]],
     };
@@ -171,7 +171,7 @@ describe('normalizeOrderBy', () => {
   it('remove orderby with an empty adhoc metric', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
      time_range: '1 year ago : 2013',
       orderby: [[{}, true]],
     };
@@ -181,7 +181,7 @@ describe('normalizeOrderBy', () => {
   it('remove orderby with an non-boolean type', () => {
     const query: QueryObject = {
       datasource: '5__table',
-      viz_type: 'table',
+      viz_type: VizType.Table,
       time_range: '1 year ago : 2013',
       // @ts-ignore
       orderby: [['count(*)', 'true']],
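Every hunk above makes the same one-line substitution: the viz_type: 'table' literal becomes the VizType.Table member imported from '@superset-ui/core'. A minimal TypeScript sketch of why the surrounding toEqual assertions are unaffected, assuming VizType is a string enum whose members carry the old literals as their values (only the Table = 'table' pairing is directly evidenced by these hunks; the enum body below is illustrative, not the full enum):

// Illustrative sketch only: a string enum member is its literal at runtime,
// so swapping the literal for the member changes no Jest expectation.
enum VizType {
  Table = 'table',
}

const key: string = VizType.Table; // enum members are assignable to string
console.log(key === 'table'); // true
// expect({ viz_type: VizType.Table }).toEqual({ viz_type: 'table' }) still passes.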
@@ -20,12 +20,13 @@ import {
   normalizeTimeColumn,
   QueryObject,
   SqlaFormData,
+  VizType,
 } from '@superset-ui/core';
 
 test('should return original QueryObject if x_axis is empty', () => {
   const formData: SqlaFormData = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     time_grain_sqla: 'P1Y',
     time_range: '1 year ago : 2013',
@@ -34,7 +35,7 @@ test('should return original QueryObject if x_axis is empty', () => {
   };
   const query: QueryObject = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     extras: {
       time_grain_sqla: 'P1Y',
@@ -51,7 +52,7 @@ test('should return original QueryObject if x_axis is empty', () => {
 test('should support different columns for x-axis and granularity', () => {
   const formData: SqlaFormData = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     time_grain_sqla: 'P1Y',
     time_range: '1 year ago : 2013',
@@ -61,7 +62,7 @@ test('should support different columns for x-axis and granularity', () => {
   };
   const query: QueryObject = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     extras: {
       time_grain_sqla: 'P1Y',
@@ -76,7 +77,7 @@ test('should support different columns for x-axis and granularity', () => {
   };
   expect(normalizeTimeColumn(formData, query)).toEqual({
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     extras: { where: '', having: '', time_grain_sqla: 'P1Y' },
     time_range: '1 year ago : 2013',
@@ -98,7 +99,7 @@ test('should support different columns for x-axis and granularity', () => {
 test('should support custom SQL in x-axis', () => {
   const formData: SqlaFormData = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     time_grain_sqla: 'P1Y',
     time_range: '1 year ago : 2013',
@@ -112,7 +113,7 @@ test('should support custom SQL in x-axis', () => {
   };
   const query: QueryObject = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     extras: {
       time_grain_sqla: 'P1Y',
@@ -134,7 +135,7 @@ test('should support custom SQL in x-axis', () => {
   };
   expect(normalizeTimeColumn(formData, query)).toEqual({
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     extras: { where: '', having: '', time_grain_sqla: 'P1Y' },
     time_range: '1 year ago : 2013',
@@ -156,7 +157,7 @@ test('should support custom SQL in x-axis', () => {
 test('fallback and invalid columns value', () => {
   const formData: SqlaFormData = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     time_grain_sqla: 'P1Y',
     time_range: '1 year ago : 2013',
@@ -170,7 +171,7 @@ test('fallback and invalid columns value', () => {
   };
   const query: QueryObject = {
     datasource: '5__table',
-    viz_type: 'table',
+    viz_type: VizType.Table,
     granularity: 'time_column',
     extras: {
       time_grain_sqla: 'P1Y',
@@ -17,11 +17,11 @@
  * under the License.
  */
 
-import { getComparisonFilters } from '@superset-ui/core';
+import { getComparisonFilters, VizType } from '@superset-ui/core';
 
 const form_data = {
   datasource: '22__table',
-  viz_type: 'pop_kpi',
+  viz_type: VizType.BigNumberPeriodOverPeriod,
   slice_id: 97,
   url_params: {
     form_data_key:
@@ -17,11 +17,15 @@
  * under the License.
  */
 
-import { getComparisonInfo, ComparisonTimeRangeType } from '@superset-ui/core';
+import {
+  getComparisonInfo,
+  ComparisonTimeRangeType,
+  VizType,
+} from '@superset-ui/core';
 
 const form_data = {
   datasource: '22__table',
-  viz_type: 'pop_kpi',
+  viz_type: VizType.BigNumberPeriodOverPeriod,
   slice_id: 97,
   url_params: {
     form_data_key:
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import ChordChartPlugin from '@superset-ui/legacy-plugin-chart-chord';
 import data from './data';
 import { withResizableChartDemo } from '../../../shared/components/ResizableChartDemo';
 
-new ChordChartPlugin().configure({ key: 'chord' }).register();
+new ChordChartPlugin().configure({ key: VizType.Chord }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-chord',
@@ -31,7 +31,7 @@
 
 export const basic = ({ width, height }) => (
   <SuperChart
-    chartType="chord"
+    chartType={VizType.Chord}
     width={width}
     height={height}
     queriesData={[{ data }]}
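The Storybook files repeat a second pattern: both the registration key passed to configure() and the chartType prop move from the string literal to the enum member. A hedged sketch of that call shape using a stand-in plugin class (the real ChartPlugin API in @superset-ui/core is richer; only the key migration is illustrated here):

// Stand-in class, not the real ChartPlugin from @superset-ui/core.
class DemoChartPlugin {
  private key?: string;

  configure(config: { key: string }): this {
    // A VizType member is accepted wherever a string key was expected,
    // because string enum members are assignable to string.
    this.key = config.key;
    return this;
  }

  register(): void {
    console.log(`registered chart plugin under key "${this.key}"`);
  }
}

enum VizType {
  Chord = 'chord',
}

// Before this diff: new ChordChartPlugin().configure({ key: 'chord' }).register();
// After: the enum member supplies the identical string key.
new DemoChartPlugin().configure({ key: VizType.Chord }).register();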
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import HeatmapChartPlugin from '@superset-ui/legacy-plugin-chart-heatmap';
 import ResizableChartDemo from '../../../shared/components/ResizableChartDemo';
 import data from './data';
 
-new HeatmapChartPlugin().configure({ key: 'heatmap' }).register();
+new HeatmapChartPlugin().configure({ key: VizType.LegacyHeatmap }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-heatmap',
@@ -30,7 +30,7 @@ export default {
 
 export const basic = () => (
   <SuperChart
-    chartType="heatmap"
+    chartType={VizType.LegacyHeatmap}
     width={500}
     height={500}
     formData={{
@@ -67,7 +67,7 @@ export const resizable = () => (
   <ResizableChartDemo>
     {({ width, height }) => (
       <SuperChart
-        chartType="heatmap"
+        chartType={VizType.LegacyHeatmap}
        width={width}
        height={height}
        formData={{
@@ -104,7 +104,7 @@ export const resizable = () => (
 
 export const withNullData = () => (
   <SuperChart
-    chartType="heatmap"
+    chartType={VizType.LegacyHeatmap}
     width={500}
     height={500}
     formData={{
@@ -18,11 +18,13 @@
  */
 
 /* eslint-disable no-magic-numbers */
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import HistogramChartPlugin from '@superset-ui/legacy-plugin-chart-histogram';
 import data from './data';
 
-new HistogramChartPlugin().configure({ key: 'histogram' }).register();
+new HistogramChartPlugin()
+  .configure({ key: VizType.LegacyHistogram })
+  .register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-histogram',
@@ -30,7 +32,7 @@ export default {
 
 export const basic = () => (
   <SuperChart
-    chartType="histogram"
+    chartType={VizType.LegacyHistogram}
     width={400}
     height={400}
     queriesData={[{ data }]}
@@ -17,11 +17,11 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import HorizonChartPlugin from '@superset-ui/legacy-plugin-chart-horizon';
 import data from './data';
 
-new HorizonChartPlugin().configure({ key: 'horizon' }).register();
+new HorizonChartPlugin().configure({ key: VizType.Horizon }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-horizon',
@@ -29,7 +29,7 @@ export default {
 
 export const basic = () => (
   <SuperChart
-    chartType="horizon"
+    chartType={VizType.Horizon}
     width={400}
     height={400}
     queriesData={[{ data }]}
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import PartitionChartPlugin from '@superset-ui/legacy-plugin-chart-partition';
 import data from './data';
 import dummyDatasource from '../../../shared/dummyDatasource';
 
-new PartitionChartPlugin().configure({ key: 'partition' }).register();
+new PartitionChartPlugin().configure({ key: VizType.Partition }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-partition',
@@ -30,7 +30,7 @@ export default {
 
 export const basic = () => (
   <SuperChart
-    chartType="partition"
+    chartType={VizType.Partition}
     width={400}
     height={400}
     datasource={dummyDatasource}
@@ -18,11 +18,11 @@
  */
 
 /* eslint-disable no-magic-numbers, sort-keys */
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import RoseChartPlugin from '@superset-ui/legacy-plugin-chart-rose';
 import data from './data';
 
-new RoseChartPlugin().configure({ key: 'rose' }).register();
+new RoseChartPlugin().configure({ key: VizType.Rose }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-rose',
@@ -30,7 +30,7 @@ export default {
 
 export const basic = () => (
   <SuperChart
-    chartType="rose"
+    chartType={VizType.Rose}
     width={400}
     height={400}
     queriesData={[{ data }]}
@@ -18,12 +18,12 @@
  */
 
 /* eslint-disable no-magic-numbers */
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import SankeyChartPlugin from '@superset-ui/legacy-plugin-chart-sankey';
 import ResizableChartDemo from '../../../shared/components/ResizableChartDemo';
 import data from './data';
 
-new SankeyChartPlugin().configure({ key: 'sankey' }).register();
+new SankeyChartPlugin().configure({ key: VizType.LegacySankey }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-plugin-chart-sankey',
@@ -31,7 +31,7 @@ export default {
 
 export const basic = () => (
   <SuperChart
-    chartType="sankey"
+    chartType={VizType.LegacySankey}
     width={400}
     height={400}
     queriesData={[{ data }]}
@@ -45,7 +45,7 @@ export const resizable = () => (
   <ResizableChartDemo>
     {({ width, height }) => (
       <SuperChart
-        chartType="sankey"
+        chartType={VizType.LegacySankey}
         width={width}
         height={height}
         queriesData={[{ data }]}
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import { BigNumberChartPlugin } from '@superset-ui/plugin-chart-echarts';
 import testData from './data';
 
@@ -37,7 +37,7 @@ const formData = {
   showTrendLine: true,
   startYAxisAtZero: true,
   timeGrainSqla: 'P1Y',
-  vizType: 'big_number',
+  vizType: VizType.BigNumber,
   yAxisFormat: '.3s',
 };
 
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import { BigNumberTotalChartPlugin } from '@superset-ui/plugin-chart-echarts';
 import data from './data';
 
@@ -37,7 +37,7 @@ export const totalBasic = () => (
     formData={{
       metric: 'sum__num',
       subheader: 'total female participants',
-      vizType: 'big_number_total',
+      vizType: VizType.BigNumberTotal,
       yAxisFormat: '.3s',
     }}
   />
@@ -52,7 +52,7 @@ export const totalNoData = () => (
     formData={{
       metric: 'sum__num',
       subheader: 'total female participants',
-      vizType: 'big_number_total',
+      vizType: VizType.BigNumberTotal,
       yAxisFormat: '.3s',
     }}
   />
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { VizType } from '@superset-ui/core';
|
||||
import { AreaChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
|
||||
|
||||
new AreaChartPlugin().configure({ key: 'area' }).register();
|
||||
new AreaChartPlugin().configure({ key: VizType.LegacyArea }).register();
|
||||
|
||||
export default {
|
||||
title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Area',
|
||||
|
||||
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import dummyDatasource from '../../../../../shared/dummyDatasource';
 import data from '../data';
 
 export const controlsShown = () => (
   <SuperChart
-    chartType="area"
+    chartType={VizType.LegacyArea}
     datasource={dummyDatasource}
     width={400}
     height={400}
@@ -40,7 +40,7 @@ export const controlsShown = () => (
       showControls: true,
       showLegend: true,
       stackedStyle: 'stack',
-      vizType: 'area',
+      vizType: VizType.LegacyArea,
       xAxisFormat: '%Y',
       xAxisLabel: '',
       xAxisShowminmax: false,
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import dummyDatasource from '../../../../../shared/dummyDatasource';
 import data from '../data';
 
 export const expanded = () => (
   <SuperChart
-    chartType="area"
+    chartType={VizType.LegacyArea}
     datasource={dummyDatasource}
     width={400}
     height={400}
@@ -40,7 +40,7 @@ export const expanded = () => (
       showControls: false,
       showLegend: true,
       stackedStyle: 'expand',
-      vizType: 'area',
+      vizType: VizType.LegacyArea,
       xAxisFormat: '%Y',
       xAxisLabel: '',
       xAxisShowminmax: false,
@@ -17,14 +17,14 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import dummyDatasource from '../../../../../shared/dummyDatasource';
 import data from '../data';
 
 export const stacked = () => (
   <SuperChart
     id="stacked-area-chart"
-    chartType="area"
+    chartType={VizType.LegacyArea}
     datasource={dummyDatasource}
     width={400}
     height={400}
@@ -41,7 +41,7 @@ export const stacked = () => (
       showControls: false,
       showLegend: true,
       stackedStyle: 'stack',
-      vizType: 'area',
+      vizType: VizType.LegacyArea,
       xAxisFormat: '%Y',
       xAxisLabel: '',
       xAxisShowminmax: false,
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import dummyDatasource from '../../../../../shared/dummyDatasource';
 import data from '../data';
 
 export const stackedWithYAxisBounds = () => (
   <SuperChart
-    chartType="area"
+    chartType={VizType.LegacyArea}
     datasource={dummyDatasource}
     width={400}
     height={400}
@@ -40,7 +40,7 @@ export const stackedWithYAxisBounds = () => (
       showControls: false,
       showLegend: true,
       stackedStyle: 'stack',
-      vizType: 'area',
+      vizType: VizType.LegacyArea,
       xAxisFormat: '%Y',
       xAxisLabel: '',
       xAxisShowminmax: false,
@@ -56,7 +56,7 @@ stackedWithYAxisBounds.storyName = 'Stacked with yAxisBounds';
 
 export const stackedWithYAxisBoundsMinOnly = () => (
   <SuperChart
-    chartType="area"
+    chartType={VizType.LegacyArea}
     datasource={dummyDatasource}
     width={400}
     height={400}
@@ -73,7 +73,7 @@ export const stackedWithYAxisBoundsMinOnly = () => (
       showControls: true,
       showLegend: true,
       stackedStyle: 'stack',
-      vizType: 'area',
+      vizType: VizType.LegacyArea,
       xAxisFormat: '%Y',
       xAxisLabel: '',
       xAxisShowminmax: false,
@@ -17,9 +17,10 @@
  * under the License.
  */
 
+import { VizType } from '@superset-ui/core';
 import { BarChartPlugin } from '@superset-ui/legacy-preset-chart-nvd3';
 
-new BarChartPlugin().configure({ key: 'bar' }).register();
+new BarChartPlugin().configure({ key: VizType.LegacyBar }).register();
 
 export default {
   title: 'Legacy Chart Plugins/legacy-preset-chart-nvd3/Bar',
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-import { SuperChart } from '@superset-ui/core';
+import { SuperChart, VizType } from '@superset-ui/core';
 import dummyDatasource from '../../../../../shared/dummyDatasource';
 import data from '../data';
 
 export const barWithPositiveAndNegativeValues = () => (
   <SuperChart
-    chartType="bar"
+    chartType={VizType.LegacyBar}
     width={400}
     height={400}
     datasource={dummyDatasource}
@@ -51,7 +51,7 @@ export const barWithPositiveAndNegativeValues = () => (
       showControls: false,
       showLegend: true,
       stackedStyle: 'stack',
-      vizType: 'bar',
+      vizType: VizType.LegacyBar,
       xAxisFormat: '%Y',
       xAxisLabel: '',
       xAxisShowminmax: false,
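For reference, the hunks in this diff pair the following string keys with VizType members. This TypeScript sketch collects only the pairings evidenced by the changed lines above; it is not the complete enum shipped in @superset-ui/core:

// Every string key replaced in this diff, with the VizType member that
// takes its place. Values are inferred from the hunks above; reference
// sketch only.
enum VizType {
  Table = 'table',
  BigNumberPeriodOverPeriod = 'pop_kpi',
  Chord = 'chord',
  LegacyHeatmap = 'heatmap',
  LegacyHistogram = 'histogram',
  Horizon = 'horizon',
  Partition = 'partition',
  Rose = 'rose',
  LegacySankey = 'sankey',
  BigNumber = 'big_number',
  BigNumberTotal = 'big_number_total',
  LegacyArea = 'area',
  LegacyBar = 'bar',
}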
Some files were not shown because too many files have changed in this diff.