mirror of
https://github.com/apache/superset.git
synced 2026-05-07 08:54:23 +00:00
Compare commits
1 Commits
5.0-pulse
...
chart-assi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
037be49185 |
10
.github/CODEOWNERS
vendored
10
.github/CODEOWNERS
vendored
@@ -30,13 +30,3 @@
|
||||
|
||||
**/*.geojson @villebro @rusackas
|
||||
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas
|
||||
|
||||
# Notify PMC members of changes to extension-related files
|
||||
|
||||
/superset-core/ @michael-s-molina @villebro
|
||||
/superset-extensions-cli/ @michael-s-molina @villebro
|
||||
/superset/core/ @michael-s-molina @villebro
|
||||
/superset/extensions/ @michael-s-molina @villebro
|
||||
/superset-frontend/src/packages/superset-core/ @michael-s-molina @villebro
|
||||
/superset-frontend/src/core/ @michael-s-molina @villebro
|
||||
/superset-frontend/src/extensions/ @michael-s-molina @villebro
|
||||
|
||||
19
.github/actions/change-detector/action.yml
vendored
19
.github/actions/change-detector/action.yml
vendored
@@ -1,27 +1,24 @@
|
||||
name: Change Detector
|
||||
description: Detects file changes for pull request and push events
|
||||
name: 'Change Detector'
|
||||
description: 'Detects file changes for pull request and push events'
|
||||
inputs:
|
||||
token:
|
||||
description: GitHub token for authentication
|
||||
description: 'GitHub token for authentication'
|
||||
required: true
|
||||
outputs:
|
||||
python:
|
||||
description: Whether Python-related files were changed
|
||||
description: 'Whether Python-related files were changed'
|
||||
value: ${{ steps.change-detector.outputs.python }}
|
||||
frontend:
|
||||
description: Whether frontend-related files were changed
|
||||
description: 'Whether frontend-related files were changed'
|
||||
value: ${{ steps.change-detector.outputs.frontend }}
|
||||
docker:
|
||||
description: Whether docker-related files were changed
|
||||
description: 'Whether docker-related files were changed'
|
||||
value: ${{ steps.change-detector.outputs.docker }}
|
||||
docs:
|
||||
description: Whether docs-related files were changed
|
||||
description: 'Whether docs-related files were changed'
|
||||
value: ${{ steps.change-detector.outputs.docs }}
|
||||
superset-extensions-cli:
|
||||
description: Whether superset-extensions-cli package-related files were changed
|
||||
value: ${{ steps.change-detector.outputs.superset-extensions-cli }}
|
||||
runs:
|
||||
using: composite
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Detect file changes
|
||||
id: change-detector
|
||||
|
||||
6
.github/actions/setup-docker/action.yml
vendored
6
.github/actions/setup-docker/action.yml
vendored
@@ -26,16 +26,16 @@ runs:
|
||||
|
||||
- name: Set up QEMU
|
||||
if: ${{ inputs.build == 'true' }}
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: ${{ inputs.build == 'true' }}
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Try to login to DockerHub
|
||||
if: ${{ inputs.login-to-dockerhub == 'true' }}
|
||||
continue-on-error: true
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ inputs.dockerhub-user }}
|
||||
password: ${{ inputs.dockerhub-token }}
|
||||
|
||||
5
.github/workflows/bump-python-package.yml
vendored
5
.github/workflows/bump-python-package.yml
vendored
@@ -30,8 +30,9 @@ jobs:
|
||||
pull-requests: write
|
||||
checks: write
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: master
|
||||
@@ -40,7 +41,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-supersetbot/
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }}
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
|
||||
2
.github/workflows/cancel_duplicates.yml
vendored
2
.github/workflows/cancel_duplicates.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
if: steps.check_queued.outputs.count >= 20
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cancel duplicate workflow runs
|
||||
if: steps.check_queued.outputs.count >= 20
|
||||
|
||||
3
.github/workflows/check-python-deps.yml
vendored
3
.github/workflows/check-python-deps.yml
vendored
@@ -17,8 +17,9 @@ jobs:
|
||||
check-python-deps:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
@@ -25,9 +25,9 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
- name: Check and notify
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
script: |
|
||||
|
||||
2
.github/workflows/codeql-analysis.yml
vendored
2
.github/workflows/codeql-analysis.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Check for file changes
|
||||
id: check
|
||||
|
||||
6
.github/workflows/dependency-review.yml
vendored
6
.github/workflows/dependency-review.yml
vendored
@@ -27,9 +27,9 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout Repository"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
- name: "Dependency Review"
|
||||
uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
|
||||
uses: actions/dependency-review-action@v4
|
||||
continue-on-error: true
|
||||
with:
|
||||
fail-on-severity: critical
|
||||
@@ -51,7 +51,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: "Checkout Repository"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
|
||||
8
.github/workflows/docker.yml
vendored
8
.github/workflows/docker.yml
vendored
@@ -14,6 +14,7 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
|
||||
setup_matrix:
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
@@ -39,8 +40,9 @@ jobs:
|
||||
IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}
|
||||
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -89,7 +91,7 @@ jobs:
|
||||
# in the context of push (using multi-platform build), we need to pull the image locally
|
||||
- name: Docker pull
|
||||
if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
|
||||
run: docker pull $IMAGE_TAG
|
||||
run: docker pull $IMAGE_TAG
|
||||
|
||||
- name: Print docker stats
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
|
||||
@@ -112,7 +114,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Check for file changes
|
||||
|
||||
6
.github/workflows/embedded-sdk-release.yml
vendored
6
.github/workflows/embedded-sdk-release.yml
vendored
@@ -28,11 +28,11 @@ jobs:
|
||||
run:
|
||||
working-directory: superset-embedded-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
- run: npm ci
|
||||
- run: npm run ci:release
|
||||
env:
|
||||
|
||||
6
.github/workflows/embedded-sdk-test.yml
vendored
6
.github/workflows/embedded-sdk-test.yml
vendored
@@ -18,11 +18,11 @@ jobs:
|
||||
run:
|
||||
working-directory: superset-embedded-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
- run: npm ci
|
||||
- run: npm test
|
||||
- run: npm run build
|
||||
|
||||
2
.github/workflows/ephemeral-env-pr-close.yml
vendored
2
.github/workflows/ephemeral-env-pr-close.yml
vendored
@@ -63,7 +63,7 @@ jobs:
|
||||
|
||||
- name: Comment (success)
|
||||
if: steps.describe-services.outputs.active == 'true'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{github.token}}
|
||||
script: |
|
||||
|
||||
34
.github/workflows/ephemeral-env.yml
vendored
34
.github/workflows/ephemeral-env.yml
vendored
@@ -10,11 +10,11 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
label_name:
|
||||
description: "Label name to simulate label-based /testenv trigger"
|
||||
description: 'Label name to simulate label-based /testenv trigger'
|
||||
required: true
|
||||
default: "testenv-up"
|
||||
default: 'testenv-up'
|
||||
issue_number:
|
||||
description: "Issue or PR number"
|
||||
description: 'Issue or PR number'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
- name: Get event SHA
|
||||
id: get-sha
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
@@ -86,7 +86,7 @@ jobs:
|
||||
core.setOutput("sha", prSha);
|
||||
|
||||
- name: Looking for feature flags in PR description
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
id: eval-feature-flags
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
with:
|
||||
@@ -108,7 +108,7 @@ jobs:
|
||||
return results;
|
||||
|
||||
- name: Reply with confirmation comment
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
if: steps.eval-label.outputs.result == 'up'
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -145,7 +145,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
|
||||
persist-credentials: false
|
||||
@@ -174,7 +174,7 @@ jobs:
|
||||
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
@@ -182,7 +182,7 @@ jobs:
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@c962da2960ed15f492addc26fffa274485265950 # v2
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Load, tag and push image to ECR
|
||||
id: push-image
|
||||
@@ -205,12 +205,12 @@ jobs:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
@@ -218,7 +218,7 @@ jobs:
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
id: login-ecr
|
||||
uses: aws-actions/amazon-ecr-login@c962da2960ed15f492addc26fffa274485265950 # v2
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
|
||||
- name: Check target image exists in ECR
|
||||
id: check-image
|
||||
@@ -233,7 +233,7 @@ jobs:
|
||||
|
||||
- name: Fail on missing container image
|
||||
if: steps.check-image.outcome == 'failure'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
script: |
|
||||
@@ -248,7 +248,7 @@ jobs:
|
||||
|
||||
- name: Fill in the new image ID in the Amazon ECS task definition
|
||||
id: task-def
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@77954e213ba1f9f9cb016b86a1d4f6fcdea0d57e # v1
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: .github/workflows/ecs-task-definition.json
|
||||
container-name: superset-ci
|
||||
@@ -281,7 +281,7 @@ jobs:
|
||||
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
|
||||
- name: Deploy Amazon ECS task definition
|
||||
id: deploy-task
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@cbf54ec46642b86ff78c2f5793da6746954cf8ff # v2
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
|
||||
with:
|
||||
task-definition: ${{ steps.task-def.outputs.task-definition }}
|
||||
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
|
||||
@@ -303,7 +303,7 @@ jobs:
|
||||
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
|
||||
- name: Comment (success)
|
||||
if: ${{ success() }}
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{github.token}}
|
||||
script: |
|
||||
@@ -316,7 +316,7 @@ jobs:
|
||||
});
|
||||
- name: Comment (failure)
|
||||
if: ${{ failure() }}
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{github.token}}
|
||||
script: |
|
||||
|
||||
4
.github/workflows/generate-FOSSA-report.yml
vendored
4
.github/workflows/generate-FOSSA-report.yml
vendored
@@ -27,12 +27,12 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: "temurin"
|
||||
java-version: "11"
|
||||
|
||||
@@ -9,16 +9,17 @@ on:
|
||||
types: [synchronize, opened, reopened, ready_for_review]
|
||||
|
||||
jobs:
|
||||
|
||||
validate-all-ghas:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version: '20'
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm install -g @action-validator/core @action-validator/cli --save-dev
|
||||
|
||||
3
.github/workflows/issue_creation.yml
vendored
3
.github/workflows/issue_creation.yml
vendored
@@ -15,8 +15,9 @@ jobs:
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
42
.github/workflows/latest-release-tag.yml
vendored
42
.github/workflows/latest-release-tag.yml
vendored
@@ -11,27 +11,27 @@ jobs:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
- name: Check for latest tag
|
||||
id: latest-tag
|
||||
run: |
|
||||
source ./scripts/tag_latest_release.sh $(echo ${{ github.event.release.tag_name }}) --dry-run
|
||||
- name: Check for latest tag
|
||||
id: latest-tag
|
||||
run: |
|
||||
source ./scripts/tag_latest_release.sh $(echo ${{ github.event.release.tag_name }}) --dry-run
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config user.name "$GITHUB_ACTOR"
|
||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config user.name "$GITHUB_ACTOR"
|
||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
||||
|
||||
- name: Run latest-tag
|
||||
uses: ./.github/actions/latest-tag
|
||||
if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
|
||||
with:
|
||||
description: Superset latest release
|
||||
tag-name: latest
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
- name: Run latest-tag
|
||||
uses: ./.github/actions/latest-tag
|
||||
if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
|
||||
with:
|
||||
description: Superset latest release
|
||||
tag-name: latest
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
|
||||
8
.github/workflows/license-check.yml
vendored
8
.github/workflows/license-check.yml
vendored
@@ -15,14 +15,14 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: "temurin"
|
||||
java-version: "11"
|
||||
distribution: 'temurin'
|
||||
java-version: '11'
|
||||
- name: Run license check
|
||||
run: ./scripts/check_license.sh
|
||||
|
||||
20
.github/workflows/no-hold-label.yml
vendored
20
.github/workflows/no-hold-label.yml
vendored
@@ -13,13 +13,13 @@ jobs:
|
||||
check-hold-label:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Check for 'hold' label
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const payload = context.payload.pull_request
|
||||
const holdLabelPresent = !!payload.labels.find(label => label.name.includes('hold'))
|
||||
if (holdLabelPresent) {
|
||||
core.setFailed('Hold label is present, merge is blocked.')
|
||||
}
|
||||
- name: Check for 'hold' label
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const payload = context.payload.pull_request
|
||||
const holdLabelPresent = !!payload.labels.find(label => label.name.includes('hold'))
|
||||
if (holdLabelPresent) {
|
||||
core.setFailed('Hold label is present, merge is blocked.')
|
||||
}
|
||||
|
||||
5
.github/workflows/pr-lint.yml
vendored
5
.github/workflows/pr-lint.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -26,5 +26,6 @@ jobs:
|
||||
on-failed-regex-fail-action: true
|
||||
on-failed-regex-request-changes: false
|
||||
on-failed-regex-create-review: false
|
||||
on-failed-regex-comment: "Please format your PR title to match: `%regex%`!"
|
||||
on-failed-regex-comment:
|
||||
"Please format your PR title to match: `%regex%`!"
|
||||
repo-token: "${{ github.token }}"
|
||||
|
||||
3
.github/workflows/pre-commit.yml
vendored
3
.github/workflows/pre-commit.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
python-version: ["current", "previous"]
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -38,7 +38,6 @@ jobs:
|
||||
echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}"
|
||||
echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}"
|
||||
brew install norwoodj/tap/helm-docs
|
||||
|
||||
- name: pre-commit
|
||||
run: |
|
||||
set +e # Don't exit immediately on failure
|
||||
|
||||
8
.github/workflows/release.yml
vendored
8
.github/workflows/release.yml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
node-version: [20]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
# pulls all commits (needed for lerna / semantic release to correctly version)
|
||||
fetch-depth: 0
|
||||
@@ -48,13 +48,13 @@ jobs:
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Cache npm
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
|
||||
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
|
||||
- name: Cache npm
|
||||
if: env.HAS_TAGS
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
|
||||
uses: actions/cache@v4
|
||||
id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
|
||||
with:
|
||||
path: ${{ steps.npm-cache-dir-path.outputs.dir }}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
name: Superset App CLI tests
|
||||
name: Superset CLI tests
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
image: postgres:15-alpine
|
||||
env:
|
||||
POSTGRES_USER: superset
|
||||
POSTGRES_PASSWORD: superset
|
||||
@@ -37,7 +37,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
16
.github/workflows/superset-docs-deploy.yml
vendored
16
.github/workflows/superset-docs-deploy.yml
vendored
@@ -30,21 +30,21 @@ jobs:
|
||||
name: Build & Deploy
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
- name: Set up Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version: '20'
|
||||
- name: Setup Python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
- uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
|
||||
- uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: "zulu"
|
||||
java-version: "21"
|
||||
distribution: 'zulu'
|
||||
java-version: '21'
|
||||
- name: Install Graphviz
|
||||
run: sudo apt-get install -y graphviz
|
||||
- name: Compute Entity Relationship diagram (ERD)
|
||||
|
||||
10
.github/workflows/superset-docs-verify.yml
vendored
10
.github/workflows/superset-docs-verify.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
name: Link Checking
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
- uses: actions/checkout@v4
|
||||
# Do not bump this linkinator-action version without opening
|
||||
# an ASF Infra ticket to allow the new verison first!
|
||||
- uses: JustinBeckwith/linkinator-action@v1.11.0
|
||||
@@ -56,14 +56,14 @@ jobs:
|
||||
working-directory: docs
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
- name: Set up Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version: '20'
|
||||
- name: yarn install
|
||||
run: |
|
||||
yarn install --check-cache
|
||||
|
||||
24
.github/workflows/superset-e2e.yml
vendored
24
.github/workflows/superset-e2e.yml
vendored
@@ -10,17 +10,17 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
use_dashboard:
|
||||
description: "Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]. You MUST provide a branch and/or PR number below for this to work."
|
||||
description: 'Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]. You MUST provide a branch and/or PR number below for this to work.'
|
||||
required: false
|
||||
default: "false"
|
||||
default: 'false'
|
||||
ref:
|
||||
description: "The branch or tag to checkout"
|
||||
description: 'The branch or tag to checkout'
|
||||
required: false
|
||||
default: ""
|
||||
default: ''
|
||||
pr_id:
|
||||
description: "The pull request ID to checkout"
|
||||
description: 'The pull request ID to checkout'
|
||||
required: false
|
||||
default: ""
|
||||
default: ''
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
@@ -68,20 +68,20 @@ jobs:
|
||||
# Conditional checkout based on context
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
@@ -107,7 +107,7 @@ jobs:
|
||||
run: testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Install npm dependencies
|
||||
@@ -137,8 +137,8 @@ jobs:
|
||||
with:
|
||||
run: cypress-run-all ${{ env.USE_DASHBOARD }}
|
||||
- name: Upload Artifacts
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
with:
|
||||
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
|
||||
name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}
|
||||
name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}
|
||||
|
||||
64
.github/workflows/superset-extensions-cli.yml
vendored
64
.github/workflows/superset-extensions-cli.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: Superset Extensions CLI Package Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "[0-9].[0-9]*"
|
||||
pull_request:
|
||||
types: [synchronize, opened, reopened, ready_for_review]
|
||||
|
||||
# cancel previous workflow jobs for PRs
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
test-superset-extensions-cli-package:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["previous", "current"]
|
||||
defaults:
|
||||
run:
|
||||
working-directory: superset-extensions-cli
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
- name: Check for file changes
|
||||
id: check
|
||||
uses: ./.github/actions/change-detector/
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Setup Python
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
uses: ./.github/actions/setup-backend/
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
requirements-type: dev
|
||||
|
||||
- name: Run pytest with coverage
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
run: |
|
||||
pytest --cov=superset_extensions_cli --cov-report=xml --cov-report=term-missing --cov-report=html -v --tb=short
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
with:
|
||||
file: ./coverage.xml
|
||||
flags: superset-extensions-cli
|
||||
name: superset-extensions-cli-coverage
|
||||
fail_ci_if_error: false
|
||||
|
||||
- name: Upload HTML coverage report
|
||||
if: steps.check.outputs.superset-extensions-cli
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: superset-extensions-cli-coverage-html
|
||||
path: htmlcov/
|
||||
17
.github/workflows/superset-frontend.yml
vendored
17
.github/workflows/superset-frontend.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
should-run: ${{ steps.check.outputs.frontend }}
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -52,7 +52,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker Image Artifact
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: docker-image
|
||||
path: docker-image.tar.gz
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -84,7 +84,7 @@ jobs:
|
||||
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
|
||||
|
||||
- name: Upload Coverage Artifact
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: coverage-artifacts-${{ matrix.shard }}
|
||||
path: superset-frontend/coverage
|
||||
@@ -95,7 +95,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Coverage Artifacts
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: coverage-artifacts-*
|
||||
path: coverage/
|
||||
@@ -107,7 +107,7 @@ jobs:
|
||||
run: npx nyc merge coverage/ merged-output/coverage-summary.json
|
||||
|
||||
- name: Upload Code Coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: javascript
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -139,7 +139,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -162,7 +162,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Download Docker Image Artifact
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -173,6 +173,7 @@ jobs:
|
||||
run: |
|
||||
docker run --rm $TAG bash -c \
|
||||
"npm run plugins:build"
|
||||
|
||||
- name: Build Plugins Storybook
|
||||
run: |
|
||||
docker run --rm $TAG bash -c \
|
||||
|
||||
6
.github/workflows/superset-helm-lint.yml
vendored
6
.github/workflows/superset-helm-lint.yml
vendored
@@ -16,21 +16,21 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4
|
||||
uses: azure/setup-helm@v4
|
||||
with:
|
||||
version: v3.16.4
|
||||
|
||||
- name: Setup Python
|
||||
uses: ./.github/actions/setup-backend/
|
||||
with:
|
||||
install-superset: "false"
|
||||
install-superset: 'false'
|
||||
|
||||
- name: Set up chart-testing
|
||||
uses: ./.github/actions/chart-testing-action
|
||||
|
||||
6
.github/workflows/superset-helm-release.yml
vendored
6
.github/workflows/superset-helm-release.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.ref_name }}
|
||||
persist-credentials: true
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
||||
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4
|
||||
uses: azure/setup-helm@v4
|
||||
with:
|
||||
version: v3.5.4
|
||||
|
||||
@@ -101,7 +101,7 @@ jobs:
|
||||
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
|
||||
|
||||
- name: Open Pull Request
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const branchName = '${{ env.branch_name }}';
|
||||
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: python,mysql
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -129,7 +129,7 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: python,postgres
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -181,7 +181,7 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: python,sqlite
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
@@ -48,7 +48,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -77,7 +77,7 @@ jobs:
|
||||
run: |
|
||||
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: python,presto
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -108,7 +108,7 @@ jobs:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -145,7 +145,7 @@ jobs:
|
||||
pip install -e .[hive]
|
||||
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: python,hive
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
run: |
|
||||
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: python,unit
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
8
.github/workflows/superset-translations.yml
vendored
8
.github/workflows/superset-translations.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
@@ -31,9 +31,9 @@ jobs:
|
||||
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.frontend
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "18"
|
||||
node-version: '18'
|
||||
- name: Install dependencies
|
||||
if: steps.check.outputs.frontend
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
|
||||
2
.github/workflows/superset-websocket.yml
vendored
2
.github/workflows/superset-websocket.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Install dependencies
|
||||
|
||||
6
.github/workflows/supersetbot.yml
vendored
6
.github/workflows/supersetbot.yml
vendored
@@ -9,7 +9,7 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
comment_body:
|
||||
description: "Comment Body"
|
||||
description: 'Comment Body'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
@@ -26,7 +26,7 @@ jobs:
|
||||
steps:
|
||||
- name: Quickly add thumbs up!
|
||||
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
});
|
||||
|
||||
- name: "Checkout ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
19
.github/workflows/tag-release.yml
vendored
19
.github/workflows/tag-release.yml
vendored
@@ -16,11 +16,11 @@ on:
|
||||
force-latest:
|
||||
required: true
|
||||
type: choice
|
||||
default: "false"
|
||||
default: 'false'
|
||||
description: Whether to force a latest tag on the release
|
||||
options:
|
||||
- "true"
|
||||
- "false"
|
||||
- 'true'
|
||||
- 'false'
|
||||
jobs:
|
||||
config:
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -42,12 +42,12 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
matrix:
|
||||
build_preset:
|
||||
["dev", "lean", "py310", "websocket", "dockerize", "py311"]
|
||||
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
|
||||
fail-fast: false
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -60,7 +60,7 @@ jobs:
|
||||
build: "true"
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
@@ -105,13 +105,14 @@ jobs:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
|
||||
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
|
||||
6
.github/workflows/tech-debt.yml
vendored
6
.github/workflows/tech-debt.yml
vendored
@@ -27,12 +27,12 @@ jobs:
|
||||
name: Generate Reports
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version: '20'
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm install
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -42,7 +42,7 @@ _modules
|
||||
_static
|
||||
build
|
||||
app.db
|
||||
*.egg-info/
|
||||
apache_superset.egg-info/
|
||||
changelog.sh
|
||||
dist
|
||||
dump.rdb
|
||||
|
||||
@@ -23,9 +23,7 @@ repos:
|
||||
rev: v1.13.0
|
||||
hooks:
|
||||
- id: mypy
|
||||
name: mypy (main)
|
||||
args: [--check-untyped-defs]
|
||||
exclude: ^superset-extensions-cli/
|
||||
additional_dependencies: [
|
||||
types-simplejson,
|
||||
types-python-dateutil,
|
||||
@@ -40,10 +38,6 @@ repos:
|
||||
types-paramiko,
|
||||
types-Markdown,
|
||||
]
|
||||
- id: mypy
|
||||
name: mypy (superset-extensions-cli)
|
||||
args: [--check-untyped-defs]
|
||||
files: ^superset-extensions-cli/
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
@@ -89,5 +83,5 @@ repos:
|
||||
rev: v0.8.0
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix]
|
||||
args: [ --fix ]
|
||||
- id: ruff-format
|
||||
|
||||
@@ -32,8 +32,6 @@ apache_superset.egg-info
|
||||
# json and csv in general cannot have comments
|
||||
.*json
|
||||
.*csv
|
||||
# jinja templates often need to be as-is
|
||||
.*j2
|
||||
# Generated doc files
|
||||
env/*
|
||||
docs/.htaccess*
|
||||
|
||||
@@ -19,10 +19,12 @@ under the License.
|
||||
|
||||
## Change Log
|
||||
|
||||
### 5.0.0 (Wed Jun 18 13:54:10 2025 -0300)
|
||||
### 5.0 (Tue Feb 4 10:43:25 2025 -0300)
|
||||
|
||||
**Database Migrations**
|
||||
|
||||
- [#32759](https://github.com/apache/superset/pull/32759) fix(migrations): fix foreign keys to match FAB 4.6.0 tables (@Antonio-RiveroMartnez)
|
||||
- [#32680](https://github.com/apache/superset/pull/32680) feat: DB migration for dataset folders (@betodealmeida)
|
||||
- [#31959](https://github.com/apache/superset/pull/31959) refactor: upload data unification, less permissions and less endpoints (@dpgaspar)
|
||||
- [#31582](https://github.com/apache/superset/pull/31582) refactor: Removes 5.0 approved legacy charts (@michael-s-molina)
|
||||
- [#31490](https://github.com/apache/superset/pull/31490) feat: use docker in frontend GHA to parallelize work (@mistercrunch)
|
||||
@@ -30,6 +32,7 @@ under the License.
|
||||
- [#29649](https://github.com/apache/superset/pull/29649) fix: remove old database constraint on the Dataset model (@betodealmeida)
|
||||
- [#31447](https://github.com/apache/superset/pull/31447) chore: enforce more ruff rules (@mistercrunch)
|
||||
- [#31303](https://github.com/apache/superset/pull/31303) feat: Adds helper functions for migrations (@luizotavio32)
|
||||
- [#31185](https://github.com/apache/superset/pull/31185) fix: check for column before adding in migrations (@sadpandajoe)
|
||||
|
||||
**Features**
|
||||
|
||||
@@ -96,6 +99,7 @@ under the License.
|
||||
- [#24308](https://github.com/apache/superset/pull/24308) feat(docker): add GUNICORN_LOGLEVEL env var (@drummerwolli)
|
||||
- [#29333](https://github.com/apache/superset/pull/29333) feat(alert/reports): adding logic to handle downstream reports when tab is deleted from dashboard (@fisjac)
|
||||
- [#30002](https://github.com/apache/superset/pull/30002) feat(time_comparison): Support all date formats when computing custom and inherit offsets (@Antonio-RiveroMartnez)
|
||||
- [#29974](https://github.com/apache/superset/pull/29974) feat(sqllab): Adds refresh button to table metadata in SQL Lab (@Usiel)
|
||||
- [#25775](https://github.com/apache/superset/pull/25775) feat: Adding Elestio as deployment option (@kaiwalyakoparkar)
|
||||
- [#29941](https://github.com/apache/superset/pull/29941) feat(docs): fix bug google chrome < 114 not found (@hoalongnatsu)
|
||||
- [#29917](https://github.com/apache/superset/pull/29917) feat: Enable injecting custom html into head (@kgabryje)
|
||||
@@ -108,72 +112,6 @@ under the License.
|
||||
|
||||
**Fixes**
|
||||
|
||||
- [#33817](https://github.com/apache/superset/pull/33817) fix: SQL Lab warning message sizes (@michael-s-molina)
|
||||
- [#33779](https://github.com/apache/superset/pull/33779) fix(Echarts): Echarts Legend Scroll fix (@amaannawab923)
|
||||
- [#33765](https://github.com/apache/superset/pull/33765) fix(tooltip): Sanitize tooltip html (@msyavuz)
|
||||
- [#33759](https://github.com/apache/superset/pull/33759) fix: apply d3 format to BigNumber(s) (@betodealmeida)
|
||||
- [#33752](https://github.com/apache/superset/pull/33752) fix(create chart page): add missing space between words (@Quatters)
|
||||
- [#33748](https://github.com/apache/superset/pull/33748) fix: sync dot color between dashboard chart and edit chart (@anantaoutlook)
|
||||
- [#33743](https://github.com/apache/superset/pull/33743) fix(dataset): Fix plural toast messages (@rad-pat)
|
||||
- [#33717](https://github.com/apache/superset/pull/33717) fix(explore): add gap to the "Cached" button (@Quatters)
|
||||
- [#33719](https://github.com/apache/superset/pull/33719) fix(Alerts & reports): invalid "Last updated" time formatting (@Quatters)
|
||||
- [#33726](https://github.com/apache/superset/pull/33726) fix(dashboard): show dashboard thumbnail images when retrieved (@rad-pat)
|
||||
- [#33296](https://github.com/apache/superset/pull/33296) fix(template_processing): get_filters now works for IS_NULL and IS_NOT_NULL operators (@Prokos)
|
||||
- [#32414](https://github.com/apache/superset/pull/32414) fix(api): Added uuid to list api calls (@withnale)
|
||||
- [#33710](https://github.com/apache/superset/pull/33710) fix: Migrate charts with empty query_context (@luizotavio32)
|
||||
- [#33592](https://github.com/apache/superset/pull/33592) fix: Makes time compare migration more resilient (@michael-s-molina)
|
||||
- [#33596](https://github.com/apache/superset/pull/33596) fix: Missing processor context when rendering Jinja (@michael-s-molina)
|
||||
- [#33285](https://github.com/apache/superset/pull/33285) fix: Adjust viz migrations to also migrate the queries object (@luizotavio32)
|
||||
- [#33431](https://github.com/apache/superset/pull/33431) fix(sankey): incorrect nodeValues (@richardfogaca)
|
||||
- [#33553](https://github.com/apache/superset/pull/33553) fix(AllEntities): Display action buttons according to the user permissions (@Vitor-Avila)
|
||||
- [#30577](https://github.com/apache/superset/pull/30577) fix(user settings): Update forked cosmo theme to resolve down chevron in caret style (#30514) (@mklumpen)
|
||||
- [#33540](https://github.com/apache/superset/pull/33540) fix(table): table sort by fix (@amaannawab923)
|
||||
- [#33522](https://github.com/apache/superset/pull/33522) fix(Sqllab): Autocomplete got stuck in UI when open it too fast (@rebenitez1802)
|
||||
- [#33444](https://github.com/apache/superset/pull/33444) fix: allow metadata to parse json (@eschutho)
|
||||
- [#33425](https://github.com/apache/superset/pull/33425) fix(table-chart): time shift is not working (@justinpark)
|
||||
- [#33364](https://github.com/apache/superset/pull/33364) fix(deckgl): fix deckgl multiple layers chart filter and viewport (@syedbarimanjan)
|
||||
- [#33422](https://github.com/apache/superset/pull/33422) fix(Row): don't unload charts while embedded to reduce rerenders (@msyavuz)
|
||||
- [#33354](https://github.com/apache/superset/pull/33354) fix: loading examples from raw.githubusercontent.com fails with 429 errors (@mistercrunch)
|
||||
- [#31917](https://github.com/apache/superset/pull/31917) fix(be/utils): sync cache timeout for memoized function (@hainenber)
|
||||
- [#33345](https://github.com/apache/superset/pull/33345) fix(i18n): zh_TW pybabel compile error: placeholders are incompatible (@bestlong)
|
||||
- [#33337](https://github.com/apache/superset/pull/33337) fix: Edge case with metric not getting quoted in sort by when normalize_columns is enabled (@Vitor-Avila)
|
||||
- [#33224](https://github.com/apache/superset/pull/33224) fix: Temporal filter conversion in viz migrations (@michael-s-molina)
|
||||
- [#33306](https://github.com/apache/superset/pull/33306) fix: improve function detection (@betodealmeida)
|
||||
- [#33269](https://github.com/apache/superset/pull/33269) fix(echarts): rename time series shifted colnames (@justinpark)
|
||||
- [#33267](https://github.com/apache/superset/pull/33267) fix: mask password on DB import (@betodealmeida)
|
||||
- [#33025](https://github.com/apache/superset/pull/33025) fix: LocalProxy is not mapped warning (@dpgaspar)
|
||||
- [#33248](https://github.com/apache/superset/pull/33248) fix(histogram): remove extra single quotes (@rusackas)
|
||||
- [#33250](https://github.com/apache/superset/pull/33250) fix(DB update): Gracefully handle querry error during DB update (@Vitor-Avila)
|
||||
- [#33238](https://github.com/apache/superset/pull/33238) fix(heatmap): correctly render int and boolean falsy values on axes (@sfirke)
|
||||
- [#33237](https://github.com/apache/superset/pull/33237) fix(sqllab permalink): Commit SQL Lab permalinks (@Vitor-Avila)
|
||||
- [#33234](https://github.com/apache/superset/pull/33234) fix(standalone): Ensure correct URL param value for standalone mode (@Vitor-Avila)
|
||||
- [#33291](https://github.com/apache/superset/pull/33291) fix(antd): Invalid dashed border in tertiary button (@justinpark)
|
||||
- [#33214](https://github.com/apache/superset/pull/33214) fix(export): Full CSV/Excel exports respecting SQL_MAX_ROW config (@Vitor-Avila)
|
||||
- [#33164](https://github.com/apache/superset/pull/33164) fix(sqllab): Invalid SQL Error breaks SQL Lab (@justinpark)
|
||||
- [#33154](https://github.com/apache/superset/pull/33154) fix(deckgl): Update Arc to properly adjust line width (@rusackas)
|
||||
- [#33161](https://github.com/apache/superset/pull/33161) fix: os.makedirs race condition (@jamra)
|
||||
- [#33143](https://github.com/apache/superset/pull/33143) fix(echart): Thrown errors shown after resized (@justinpark)
|
||||
- [#33138](https://github.com/apache/superset/pull/33138) fix(echart): Tooltip date format doesn't follow time grain (@justinpark)
|
||||
- [#31692](https://github.com/apache/superset/pull/31692) fix(lang): patch FAB's LocaleView to redirect to previous page (@pomegranited)
|
||||
- [#33106](https://github.com/apache/superset/pull/33106) fix(dashboard): invalid active tab state (@justinpark)
|
||||
- [#33037](https://github.com/apache/superset/pull/33037) fix: Viz migration error handling (@michael-s-molina)
|
||||
- [#33107](https://github.com/apache/superset/pull/33107) fix(playwright): allow screenshotting empty dashboards (@hxtmdev)
|
||||
- [#33110](https://github.com/apache/superset/pull/33110) fix: resolve recent merge collisio (@mistercrunch)
|
||||
- [#33103](https://github.com/apache/superset/pull/33103) fix: Allows configuration of Selenium Webdriver binary (@michael-s-molina)
|
||||
- [#33109](https://github.com/apache/superset/pull/33109) fix(thumbnails): ensure consistent cache_key (@hxtmdev)
|
||||
- [#32193](https://github.com/apache/superset/pull/32193) fix(dashboard): Generate screenshot via celery (@tahvane1)
|
||||
- [#33087](https://github.com/apache/superset/pull/33087) fix(docker): fallback to pip if uv is not available (@hossein-khalilian)
|
||||
- [#33059](https://github.com/apache/superset/pull/33059) fix: Adds missing **init** file to commands/logs (@michael-s-molina)
|
||||
- [#33048](https://github.com/apache/superset/pull/33048) fix: improve error type on parse error (@justinpark)
|
||||
- [#31720](https://github.com/apache/superset/pull/31720) fix(export): charts csv export in dashboards (@EmmanuelCbd)
|
||||
- [#33024](https://github.com/apache/superset/pull/33024) fix(log): Missing failed query log on async queries (@justinpark)
|
||||
- [#32839](https://github.com/apache/superset/pull/32839) fix: fix bug where dashboard did not enter fullscreen mode. (@LevisNgigi)
|
||||
- [#28428](https://github.com/apache/superset/pull/28428) fix(dashboard): chart fullscreen issue when filter pane is collapsed (@hlvhe)
|
||||
- [#29422](https://github.com/apache/superset/pull/29422) fix: `show_filters` URL parameter is not working (@hexcafe)
|
||||
- [#32965](https://github.com/apache/superset/pull/32965) fix: Bar Chart (legacy) migration to keep labels layout (@michael-s-molina)
|
||||
- [#30679](https://github.com/apache/superset/pull/30679) fix: fixed Add Metrics to Tree Chart (#29158) (@SBIN2010)
|
||||
- [#32968](https://github.com/apache/superset/pull/32968) fix(pivot-table): Revert "fix(Pivot Table): Fix column width to respect currency config (#31414)" (@justinpark)
|
||||
- [#32384](https://github.com/apache/superset/pull/32384) fix: Clicking in the body of a Markdown component does not put it into edit mode (@notHuman9504)
|
||||
- [#32763](https://github.com/apache/superset/pull/32763) fix(sqllab): Invalid display of table column keys (@justinpark)
|
||||
- [#32871](https://github.com/apache/superset/pull/32871) fix(Jinja): Emit time grain to table charts even if they don't have a temporal column (@Vitor-Avila)
|
||||
- [#32372](https://github.com/apache/superset/pull/32372) fix(backend/async_events): allow user to configure username for Redis authentication in GLOBAL_ASYNC_QUERIES_CACHE_BACKEND (@hainenber)
|
||||
@@ -183,6 +121,7 @@ under the License.
|
||||
- [#31869](https://github.com/apache/superset/pull/31869) fix(translation): Dutch translations for Current datetime filter (@christiaan)
|
||||
- [#32829](https://github.com/apache/superset/pull/32829) fix: update dataset/query catalog on DB changes (@betodealmeida)
|
||||
- [#32850](https://github.com/apache/superset/pull/32850) fix(echarts): Sort series by name using natural comparison (@Vitor-Avila)
|
||||
- [#32848](https://github.com/apache/superset/pull/32848) fix: Bump FAB to 4.6.1 (@michael-s-molina)
|
||||
- [#32795](https://github.com/apache/superset/pull/32795) fix(log): store navigation path to get correct logging path (@justinpark)
|
||||
- [#32665](https://github.com/apache/superset/pull/32665) fix: Time Comparison Feature Reverts Metric Labels to Metric Keys in Table Charts (@fardin-developer)
|
||||
- [#32792](https://github.com/apache/superset/pull/32792) fix: key error in frontend on disallowed GSheets (@chrisvnimbus)
|
||||
@@ -217,11 +156,13 @@ under the License.
|
||||
- [#32599](https://github.com/apache/superset/pull/32599) fix(Slack V2): Specify the filename for the Slack upload method (@Vitor-Avila)
|
||||
- [#32572](https://github.com/apache/superset/pull/32572) fix: Log table retention policy (@michael-s-molina)
|
||||
- [#32532](https://github.com/apache/superset/pull/32532) fix: add DateOffset to json serializer (@eschutho)
|
||||
- [#32515](https://github.com/apache/superset/pull/32515) fix(sqllab): Allow clear on schema and catalog (@justinpark)
|
||||
- [#32523](https://github.com/apache/superset/pull/32523) fix: keep calculated columns when datasource is updated (@eschutho)
|
||||
- [#32507](https://github.com/apache/superset/pull/32507) fix: Show response message as default error (@eschutho)
|
||||
- [#32336](https://github.com/apache/superset/pull/32336) fix(Slack): Fix Slack recipients migration to V2 (@Vitor-Avila)
|
||||
- [#32511](https://github.com/apache/superset/pull/32511) fix(beat): prune_query celery task args fix (@Usiel)
|
||||
- [#32499](https://github.com/apache/superset/pull/32499) fix(explore): Glitch in a tooltip with metric's name (@kgabryje)
|
||||
- [#32500](https://github.com/apache/superset/pull/32500) fix: dashboard, chart and dataset import validation (@dpgaspar)
|
||||
- [#32486](https://github.com/apache/superset/pull/32486) fix: skip DB filter when doing OAuth2 (@betodealmeida)
|
||||
- [#32488](https://github.com/apache/superset/pull/32488) fix(tooltip): displaying <a> tags correctly (@rusackas)
|
||||
- [#32473](https://github.com/apache/superset/pull/32473) fix(plugin-chart-echarts): remove erroneous upper bound value (@villebro)
|
||||
@@ -262,6 +203,8 @@ under the License.
|
||||
- [#32151](https://github.com/apache/superset/pull/32151) fix(releasing): fix borked SVN-based image building process (@hainenber)
|
||||
- [#32137](https://github.com/apache/superset/pull/32137) fix: copy oauth2 capture to `get_sqla_engine` (@betodealmeida)
|
||||
- [#32135](https://github.com/apache/superset/pull/32135) fix: Local tarball Docker container is missing zstd dependency (@michael-s-molina)
|
||||
- [#32538](https://github.com/apache/superset/pull/32538) fix(migrations): Handle comparator None in old time comparison migration (@Antonio-RiveroMartnez)
|
||||
- [#32155](https://github.com/apache/superset/pull/32155) fix(migrations): Handle no params in time comparison migration (@Antonio-RiveroMartnez)
|
||||
- [#32133](https://github.com/apache/superset/pull/32133) fix: No virtual environment when running Docker translation compiler (@michael-s-molina)
|
||||
- [#32040](https://github.com/apache/superset/pull/32040) fix(ci): ephemeral env, handle different label, create comment (@dpgaspar)
|
||||
- [#32064](https://github.com/apache/superset/pull/32064) fix(datepicker): Full width datepicker on filter value select (@msyavuz)
|
||||
@@ -280,6 +223,7 @@ under the License.
|
||||
- [#32005](https://github.com/apache/superset/pull/32005) fix(sqllab): tab layout truncated (@justinpark)
|
||||
- [#29417](https://github.com/apache/superset/pull/29417) fix(verbose map): Correct raw metrics handling in verbose map (@mcdogg17)
|
||||
- [#31962](https://github.com/apache/superset/pull/31962) fix: proper URL building (@betodealmeida)
|
||||
- [#31960](https://github.com/apache/superset/pull/31960) fix(sqllab): Missing allowHTML props in ResultTableExtension (@justinpark)
|
||||
- [#31941](https://github.com/apache/superset/pull/31941) fix(timezoneselector): Correct the order to match names first (@msyavuz)
|
||||
- [#25166](https://github.com/apache/superset/pull/25166) fix: correct value for config variable `UPLOAD_FOLDER` (@sebastianliebscher)
|
||||
- [#31948](https://github.com/apache/superset/pull/31948) fix: Load cached DB metadata as DatasourceName and add catalog to schema_list cache key (@Vitor-Avila)
|
||||
@@ -299,39 +243,75 @@ under the License.
|
||||
- [#31777](https://github.com/apache/superset/pull/31777) fix(oauth): Handle updates to the OAuth config (@Vitor-Avila)
|
||||
- [#31789](https://github.com/apache/superset/pull/31789) fix(button): change back button styles for dropdown buttons (@msyavuz)
|
||||
- [#31752](https://github.com/apache/superset/pull/31752) fix: Heatmap sorting (@michael-s-molina)
|
||||
- [#31639](https://github.com/apache/superset/pull/31639) fix(sqllab): unable to update saved queries (@DamianPendrak)
|
||||
- [#31742](https://github.com/apache/superset/pull/31742) fix: GHA frontend builds fail when frontends hasn't changed (@mistercrunch)
|
||||
- [#31732](https://github.com/apache/superset/pull/31732) fix: docker builds in forks (@mistercrunch)
|
||||
- [#31606](https://github.com/apache/superset/pull/31606) fix: docker-compose-image-tag fails to start (@mistercrunch)
|
||||
- [#31710](https://github.com/apache/superset/pull/31710) fix(inthewild): Update companies using superset (@gwthm-in)
|
||||
- [#31673](https://github.com/apache/superset/pull/31673) fix: typo in plugin-chart-echats controls (@vhf)
|
||||
- [#31688](https://github.com/apache/superset/pull/31688) fix(helm): change values.yaml comments (@sule26)
|
||||
- [#31407](https://github.com/apache/superset/pull/31407) fix: Big Number side cut fixed (@fardin-developer)
|
||||
- [#31588](https://github.com/apache/superset/pull/31588) fix: install uv in docker-bootstrap (@mistercrunch)
|
||||
- [#31583](https://github.com/apache/superset/pull/31583) fix(docs): get quickstart guide working again (@sfirke)
|
||||
- [#31561](https://github.com/apache/superset/pull/31561) fix: add various recent issues on master CI (@mistercrunch)
|
||||
- [#31493](https://github.com/apache/superset/pull/31493) fix: master docker builds fail because of multi-platform builds can't --load (@mistercrunch)
|
||||
- [#31483](https://github.com/apache/superset/pull/31483) fix: Card component background color (@kgabryje)
|
||||
- [#31480](https://github.com/apache/superset/pull/31480) fix(sunburst): Use metric label from verbose map (@gerbermichi)
|
||||
- [#31472](https://github.com/apache/superset/pull/31472) fix: Tooltip covers the date selector in native filters (@kgabryje)
|
||||
- [#31473](https://github.com/apache/superset/pull/31473) fix(explore): Styling issue in Search Metrics input field (@kgabryje)
|
||||
- [#31449](https://github.com/apache/superset/pull/31449) fix(filter options): full size list item targets (@rusackas)
|
||||
- [#31458](https://github.com/apache/superset/pull/31458) fix(api): typo api.py (@zero-stroke)
|
||||
- [#31385](https://github.com/apache/superset/pull/31385) fix: docker refactor (@mistercrunch)
|
||||
- [#31437](https://github.com/apache/superset/pull/31437) fix(database import): Gracefully handle error to get catalog schemas (@Vitor-Avila)
|
||||
- [#31374](https://github.com/apache/superset/pull/31374) fix(Dashboard): Sync color configuration via dedicated endpoint (@geido)
|
||||
- [#31427](https://github.com/apache/superset/pull/31427) fix(tags): clean up bulk create api and schema (@villebro)
|
||||
- [#31332](https://github.com/apache/superset/pull/31332) fix: prevent multiple pvm errors on migration (@eschutho)
|
||||
- [#31411](https://github.com/apache/superset/pull/31411) fix: pkg_resources is getting deprecated (@mistercrunch)
|
||||
- [#31414](https://github.com/apache/superset/pull/31414) fix(Pivot Table): Fix column width to respect currency config (@Vitor-Avila)
|
||||
- [#31391](https://github.com/apache/superset/pull/31391) fix: don't include chromium on ephemeral envs (@mistercrunch)
|
||||
- [#31387](https://github.com/apache/superset/pull/31387) fix: Revert "chore(deps-dev): bump esbuild from 0.20.0 to 0.24.0 in /super… (@sadpandajoe)
|
||||
- [#31236](https://github.com/apache/superset/pull/31236) fix: ephemeral envs fail on noop (@dpgaspar)
|
||||
- [#31335](https://github.com/apache/superset/pull/31335) fix(histogram): axis margin padding consistent with other graphs (@tatiana-cherne)
|
||||
- [#31334](https://github.com/apache/superset/pull/31334) fix(docs): add custom editUrl path for intro page (@dwgrossberg)
|
||||
- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida)
|
||||
- [#31350](https://github.com/apache/superset/pull/31350) fix(alerts&reports): tabs with userfriendly urls (@tahvane1)
|
||||
- [#30956](https://github.com/apache/superset/pull/30956) fix: added missing pod labels for init job (@glothriel)
|
||||
- [#31341](https://github.com/apache/superset/pull/31341) fix(pinot): remove query aliases from SELECT and ORDER BY clauses in Pinot (@yuribogomolov)
|
||||
- [#31301](https://github.com/apache/superset/pull/31301) fix(AllEntitiesTable): show Tags (@alexandrusoare)
|
||||
- [#31323](https://github.com/apache/superset/pull/31323) fix: Use clickhouse sqlglot dialect for YDB (@vgvoleg)
|
||||
- [#31329](https://github.com/apache/superset/pull/31329) fix: pass string to `process_template` (@betodealmeida)
|
||||
- [#31173](https://github.com/apache/superset/pull/31173) fix: cache-warmup fails (@nsivarajan)
|
||||
- [#31294](https://github.com/apache/superset/pull/31294) fix(sqllab): Remove update_saved_query_exec_info to reduce lag (@justinpark)
|
||||
- [#31308](https://github.com/apache/superset/pull/31308) fix: annotations on horizontal bar chart (@DamianPendrak)
|
||||
- [#31279](https://github.com/apache/superset/pull/31279) fix(filters): improving the add filter/divider UI. (@rusackas)
|
||||
- [#31265](https://github.com/apache/superset/pull/31265) fix(trino): db session error in handle cursor (@justinpark)
|
||||
- [#31198](https://github.com/apache/superset/pull/31198) fix: add more clickhouse disallowed functions on config (@dpgaspar)
|
||||
- [#31199](https://github.com/apache/superset/pull/31199) fix(Databricks): Escape catalog and schema names in pre-queries (@Vitor-Avila)
|
||||
- [#30821](https://github.com/apache/superset/pull/30821) fix: x axis title disappears when editing bar chart (@DamianPendrak)
|
||||
- [#31194](https://github.com/apache/superset/pull/31194) fix(embedded): Hide anchor links in embedded mode (@Vitor-Avila)
|
||||
- [#31181](https://github.com/apache/superset/pull/31181) fix: Time-series Line Chart Display unnecessary total (@michael-s-molina)
|
||||
- [#31163](https://github.com/apache/superset/pull/31163) fix(Dashboard): Backward compatible shared_label_colors field (@geido)
|
||||
- [#31156](https://github.com/apache/superset/pull/31156) fix: check orderby (@betodealmeida)
|
||||
- [#31155](https://github.com/apache/superset/pull/31155) fix: helm chart deploy to open PRs to now-protected gh-pages branch (@mistercrunch)
|
||||
- [#31154](https://github.com/apache/superset/pull/31154) fix: Remove unwanted commit on Trino's handle_cursor (@michael-s-molina)
|
||||
- [#31151](https://github.com/apache/superset/pull/31151) fix: Revert "feat(trino): Add functionality to upload data (#29164)" (@michael-s-molina)
|
||||
- [#31152](https://github.com/apache/superset/pull/31152) fix: try to re-enable gh-pages (@mistercrunch)
|
||||
- [#31148](https://github.com/apache/superset/pull/31148) fix: touch helm/ folder to trigger doc deploy in CI (@mistercrunch)
|
||||
- [#31031](https://github.com/apache/superset/pull/31031) fix(Dashboard): Ensure shared label colors are updated (@geido)
|
||||
- [#31035](https://github.com/apache/superset/pull/31035) fix: ephemeral environments missing env var (@mistercrunch)
|
||||
- [#31024](https://github.com/apache/superset/pull/31024) fix(dataset): use sqlglot for DML check (@betodealmeida)
|
||||
- [#30887](https://github.com/apache/superset/pull/30887) fix(imports): import query_context for imports with charts (@lindenh)
|
||||
- [#31008](https://github.com/apache/superset/pull/31008) fix(explore): verified props is not updated (@justinpark)
|
||||
- [#30646](https://github.com/apache/superset/pull/30646) fix(Dashboard): Retain colors when color scheme not set (@geido)
|
||||
- [#30966](https://github.com/apache/superset/pull/30966) fix(helm-chart): Fix broken PodDisruptionBudget due to introduction of extraLabels. (@theoriginalgri)
|
||||
- [#30967](https://github.com/apache/superset/pull/30967) fix(release validation): scripts now support RSA and EDDSA keys. (@rusackas)
|
||||
- [#30964](https://github.com/apache/superset/pull/30964) fix(Card): Use correct class names for Ant Design 5 Card component (@geido)
|
||||
- [#30962](https://github.com/apache/superset/pull/30962) fix(Dashboard): Exclude edit param in async screenshot (@geido)
|
||||
- [#30924](https://github.com/apache/superset/pull/30924) fix(helm): use submodule on helm release action (@villebro)
|
||||
- [#30767](https://github.com/apache/superset/pull/30767) fix(empty dashboards): Allow downloading a screenshot of an empty dashboard (@msyavuz)
|
||||
- [#30897](https://github.com/apache/superset/pull/30897) fix: Exception handling for SQL Lab views (@michael-s-molina)
|
||||
- [#30885](https://github.com/apache/superset/pull/30885) fix(docs): add missing bracket in openID config (@samarsrivastav)
|
||||
- [#30881](https://github.com/apache/superset/pull/30881) fix(Dashboard): Native & Cross-Filters Scoping Performance (@geido)
|
||||
- [#30858](https://github.com/apache/superset/pull/30858) fix(chart data): removing query from /chart/data payload when accessing as guest user (@fisjac)
|
||||
- [#30848](https://github.com/apache/superset/pull/30848) fix(time_comparison): Allow deleting dates when using custom shift (@Antonio-RiveroMartnez)
|
||||
- [#28524](https://github.com/apache/superset/pull/28524) fix: warning emits an error (@eschutho)
|
||||
@@ -351,6 +331,7 @@ under the License.
|
||||
- [#30569](https://github.com/apache/superset/pull/30569) fix(dev-server): Revert "chore(fe): bump webpack-related packages to v5" (@geido)
|
||||
- [#30069](https://github.com/apache/superset/pull/30069) fix(frontend/generator): fix failed Viz plugin build due to missing JSDOM config and dep (@hainenber)
|
||||
- [#30277](https://github.com/apache/superset/pull/30277) fix(examples): fix examples uri for sqlite (@villebro)
|
||||
- [#30442](https://github.com/apache/superset/pull/30442) fix(fe/src/dashboard): optional chaining for possibly nullable parent attribute in LayoutItem type (@hainenber)
|
||||
- [#30509](https://github.com/apache/superset/pull/30509) fix(plugin/echarts): correct enum values for LABEL_POSITION map (@hainenber)
|
||||
- [#30500](https://github.com/apache/superset/pull/30500) fix(sqllab): Remove redundant scrolling (@justinpark)
|
||||
- [#30349](https://github.com/apache/superset/pull/30349) fix(radar-chart): metric options not available & add `min` option (@goncaloacteixeira)
|
||||
@@ -359,6 +340,7 @@ under the License.
|
||||
- [#30441](https://github.com/apache/superset/pull/30441) fix: battling cypress' dashboard feature (@mistercrunch)
|
||||
- [#30430](https://github.com/apache/superset/pull/30430) fix: cypress on master doesn't work because of --parallel flag (@mistercrunch)
|
||||
- [#29444](https://github.com/apache/superset/pull/29444) fix(plugin/country/map): rectify naming for some Vietnamese provinces (@hainenber)
|
||||
- [#29898](https://github.com/apache/superset/pull/29898) fix: parse pandas pivot null values (@eschutho)
|
||||
- [#30388](https://github.com/apache/superset/pull/30388) fix(ECharts): Revert ECharts version bump (@geido)
|
||||
- [#30340](https://github.com/apache/superset/pull/30340) fix(CI): increase node JS heap size (@rusackas)
|
||||
- [#30325](https://github.com/apache/superset/pull/30325) fix(db_engine_specs): add a few missing time grains to Postgres spec (@sfirke)
|
||||
@@ -382,6 +364,7 @@ under the License.
|
||||
- [#29944](https://github.com/apache/superset/pull/29944) fix: only show dataset name in list (@eschutho)
|
||||
- [#29935](https://github.com/apache/superset/pull/29935) fix: Fix delete_fake_db (@stamplevskiyd)
|
||||
- [#29522](https://github.com/apache/superset/pull/29522) fix(cli): add impersonate_user to db import (@chessman)
|
||||
- [#29885](https://github.com/apache/superset/pull/29885) fix: add mutator to get_columns_description (@eschutho)
|
||||
- [#29895](https://github.com/apache/superset/pull/29895) fix(PivotTable): Pass string only to safeHtmlSpan (@geido)
|
||||
- [#29864](https://github.com/apache/superset/pull/29864) fix: mypy issue on py3.9 + prevent similar issues (@mistercrunch)
|
||||
- [#29861](https://github.com/apache/superset/pull/29861) fix: mypy fails related to simplejson.dumps (@mistercrunch)
|
||||
@@ -395,10 +378,6 @@ under the License.
|
||||
|
||||
**Others**
|
||||
|
||||
- [#33745](https://github.com/apache/superset/pull/33745) build: update Dockerfile to 3.11.13-slim-bookworm (@gpchandran)
|
||||
- [#33612](https://github.com/apache/superset/pull/33612) chore: update Dockerfile - Upgrade to 3.11.12 (@gpchandran)
|
||||
- [#33339](https://github.com/apache/superset/pull/33339) chore(🦾): bump python h11 0.14.0 -> 0.16.0 (@github-actions[bot])
|
||||
- [#32745](https://github.com/apache/superset/pull/32745) chore(🦾): bump python sqlglot 26.1.3 -> 26.11.1 (@github-actions[bot])
|
||||
- [#32239](https://github.com/apache/superset/pull/32239) docs: adding notes about using uv instead of raw pip (@mistercrunch)
|
||||
- [#32221](https://github.com/apache/superset/pull/32221) chore(ci): fix ephemeral env null issue number (v2) (@dpgaspar)
|
||||
- [#32220](https://github.com/apache/superset/pull/32220) chore(ci): fix ephemeral env null issue number (@dpgaspar)
|
||||
@@ -406,6 +385,7 @@ under the License.
|
||||
- [#32062](https://github.com/apache/superset/pull/32062) chore: Re-enable asnyc event API tests (@Vitor-Avila)
|
||||
- [#32004](https://github.com/apache/superset/pull/32004) refactor(Radio): Upgrade Radio Component to Ant Design 5 (@EnxDev)
|
||||
- [#32054](https://github.com/apache/superset/pull/32054) chore: Add more database-related tests (follow up to #31948) (@Vitor-Avila)
|
||||
- [#32043](https://github.com/apache/superset/pull/32043) chore: Skip the creation of secondary perms during catalog migrations (@Vitor-Avila)
|
||||
- [#31811](https://github.com/apache/superset/pull/31811) chore(Network Errors): Update network errors on filter bars and charts (@msyavuz)
|
||||
- [#31794](https://github.com/apache/superset/pull/31794) chore: Removing DASHBOARD_CROSS_FILTERS flag and all that comes with it. (@rusackas)
|
||||
- [#32013](https://github.com/apache/superset/pull/32013) chore: add UPDATING note for CSV_UPLOAD_MAX_SIZE removal (@dpgaspar)
|
||||
@@ -700,6 +680,7 @@ under the License.
|
||||
- [#30770](https://github.com/apache/superset/pull/30770) docs: make it more clear that GLOBAL_ASYNC_QUERIES is experimental/beta (@mistercrunch)
|
||||
- [#30883](https://github.com/apache/superset/pull/30883) perf: Prevent redundant calls to getRelevantDataMask (@kgabryje)
|
||||
- [#30847](https://github.com/apache/superset/pull/30847) chore(GHA): Making the Linkinator STEP non-blocking, rather than the JOB. (@rusackas)
|
||||
- [#30865](https://github.com/apache/superset/pull/30865) docs: Updating 4.1 Release Notes (@yousoph)
|
||||
- [#30812](https://github.com/apache/superset/pull/30812) chore(FilterBar): Filter bar accessibility (@alexandrusoare)
|
||||
- [#30854](https://github.com/apache/superset/pull/30854) chore: Chart context menu permissions cleanup (@kgabryje)
|
||||
- [#30255](https://github.com/apache/superset/pull/30255) chore(scripts): purge node_modules folder on `npm prune` (@rusackas)
|
||||
|
||||
11
Dockerfile
11
Dockerfile
@@ -18,7 +18,7 @@
|
||||
######################################################################
|
||||
# Node stage to deal with static asset construction
|
||||
######################################################################
|
||||
ARG PY_VER=3.11.13-slim-bookworm
|
||||
ARG PY_VER=3.11.11-slim-bookworm
|
||||
|
||||
# If BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise).
|
||||
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
|
||||
@@ -219,10 +219,6 @@ FROM python-common AS lean
|
||||
|
||||
# Install Python dependencies using docker/pip-install.sh
|
||||
COPY requirements/base.txt requirements/
|
||||
|
||||
# Copy superset-core package needed for editable install in base.txt
|
||||
COPY superset-core superset-core
|
||||
|
||||
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
|
||||
/app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
|
||||
# Install the superset package
|
||||
@@ -245,11 +241,6 @@ RUN /app/docker/apt-install.sh \
|
||||
|
||||
# Copy development requirements and install them
|
||||
COPY requirements/*.txt requirements/
|
||||
|
||||
# Copy local packages needed for editable installs in development.txt
|
||||
COPY superset-core superset-core
|
||||
COPY superset-extensions-cli superset-extensions-cli
|
||||
|
||||
# Install Python dependencies using docker/pip-install.sh
|
||||
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
|
||||
/app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt
|
||||
|
||||
@@ -34,8 +34,6 @@ classifiers = [
|
||||
"Programming Language :: Python :: 3.11",
|
||||
]
|
||||
dependencies = [
|
||||
# no bounds for apache-superset-core until we have a stable version
|
||||
"apache-superset-core",
|
||||
"backoff>=1.8.0",
|
||||
"celery>=5.3.6, <6.0.0",
|
||||
"click>=8.0.3",
|
||||
@@ -66,8 +64,6 @@ dependencies = [
|
||||
"jsonpath-ng>=1.6.1, <2",
|
||||
"Mako>=1.2.2",
|
||||
"markdown>=3.0",
|
||||
# marshmallow>=4 has issues: https://github.com/apache/superset/issues/33162
|
||||
"marshmallow>=3.0, <4",
|
||||
"msgpack>=1.0.0, <1.1",
|
||||
"nh3>=0.2.11, <0.3",
|
||||
"numpy>1.23.5, <2",
|
||||
@@ -102,7 +98,6 @@ dependencies = [
|
||||
"tabulate>=0.8.9, <0.9",
|
||||
"typing-extensions>=4, <5",
|
||||
"waitress; sys_platform == 'win32'",
|
||||
"watchdog>=6.0.0",
|
||||
"wtforms>=2.3.3, <4",
|
||||
"wtforms-json",
|
||||
"xlsxwriter>=3.0.7, <3.1",
|
||||
@@ -187,8 +182,6 @@ doris = ["pydoris>=1.0.0, <2.0.0"]
|
||||
oceanbase = ["oceanbase_py>=0.0.1"]
|
||||
ydb = ["ydb-sqlalchemy>=0.1.2"]
|
||||
development = [
|
||||
# no bounds for apache-superset-extensions-cli until a stable version
|
||||
"apache-superset-extensions-cli",
|
||||
"docker",
|
||||
"flask-testing",
|
||||
"freezegun",
|
||||
@@ -218,7 +211,7 @@ documentation = "https://superset.apache.org/docs/intro"
|
||||
combine_as_imports = true
|
||||
include_trailing_comma = true
|
||||
line_length = 88
|
||||
known_first_party = "superset, apache-superset-core, apache-superset-extensions-cli"
|
||||
known_first_party = "superset"
|
||||
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, sqlparse, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
|
||||
multi_line_output = 3
|
||||
order_by_type = false
|
||||
@@ -397,7 +390,3 @@ python-geohash = "0"
|
||||
# TODO REMOVE THESE DEPS FROM CODEBASE
|
||||
paramiko = "3" # GPL
|
||||
pyxlsb = "1" # GPL
|
||||
|
||||
[tool.uv.sources]
|
||||
apache-superset-core = { path = "./superset-core", editable = true }
|
||||
apache-superset-extensions-cli = { path = "./superset-extensions-cli", editable = true }
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt
|
||||
-e ./superset-core
|
||||
# via apache-superset (pyproject.toml)
|
||||
alembic==1.14.0
|
||||
# via flask-migrate
|
||||
amqp==5.3.1
|
||||
@@ -11,9 +9,7 @@ apispec==6.3.0
|
||||
apsw==3.46.0.0
|
||||
# via shillelagh
|
||||
async-timeout==4.0.3
|
||||
# via
|
||||
# -r requirements/base.in
|
||||
# redis
|
||||
# via -r requirements/base.in
|
||||
attrs==24.2.0
|
||||
# via
|
||||
# cattrs
|
||||
@@ -98,11 +94,6 @@ email-validator==2.2.0
|
||||
# via flask-appbuilder
|
||||
et-xmlfile==2.0.0
|
||||
# via openpyxl
|
||||
exceptiongroup==1.3.0
|
||||
# via
|
||||
# cattrs
|
||||
# trio
|
||||
# trio-websocket
|
||||
flask==2.3.3
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
@@ -118,9 +109,7 @@ flask==2.3.3
|
||||
# flask-sqlalchemy
|
||||
# flask-wtf
|
||||
flask-appbuilder==4.5.5
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# apache-superset-core
|
||||
# via apache-superset (pyproject.toml)
|
||||
flask-babel==2.0.0
|
||||
# via flask-appbuilder
|
||||
flask-caching==2.3.0
|
||||
@@ -159,9 +148,10 @@ greenlet==3.0.3
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# shillelagh
|
||||
# sqlalchemy
|
||||
gunicorn==23.0.0
|
||||
# via apache-superset (pyproject.toml)
|
||||
h11==0.16.0
|
||||
h11==0.14.0
|
||||
# via wsproto
|
||||
hashids==1.3.1
|
||||
# via apache-superset (pyproject.toml)
|
||||
@@ -214,7 +204,6 @@ markupsafe==3.0.2
|
||||
# wtforms
|
||||
marshmallow==3.23.1
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# flask-appbuilder
|
||||
# marshmallow-sqlalchemy
|
||||
marshmallow-sqlalchemy==0.28.2
|
||||
@@ -345,9 +334,6 @@ shortid==0.1.2
|
||||
# via apache-superset (pyproject.toml)
|
||||
simplejson==3.19.3
|
||||
# via apache-superset (pyproject.toml)
|
||||
setuptools==80.9.0
|
||||
# via
|
||||
# liccheck
|
||||
six==1.16.0
|
||||
# via
|
||||
# prison
|
||||
@@ -391,11 +377,8 @@ typing-extensions==4.12.2
|
||||
# via
|
||||
# apache-superset (pyproject.toml)
|
||||
# alembic
|
||||
# cattrs
|
||||
# exceptiongroup
|
||||
# flask-limiter
|
||||
# limits
|
||||
# rich
|
||||
# selenium
|
||||
# shillelagh
|
||||
tzdata==2024.2
|
||||
@@ -416,8 +399,6 @@ vine==5.1.0
|
||||
# amqp
|
||||
# celery
|
||||
# kombu
|
||||
watchdog==6.0.0
|
||||
# via apache-superset (pyproject.toml)
|
||||
wcwidth==0.2.13
|
||||
# via prompt-toolkit
|
||||
websocket-client==1.8.0
|
||||
|
||||
@@ -17,4 +17,3 @@
|
||||
# under the License.
|
||||
#
|
||||
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,postgres,presto,prophet,trino,thumbnails]
|
||||
-e ./superset-extensions-cli[test]
|
||||
|
||||
@@ -1,38 +1,26 @@
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile requirements/development.in -c requirements/base-constraint.txt -o requirements/development.txt
|
||||
# uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt
|
||||
-e .
|
||||
# via -r requirements/development.in
|
||||
-e ./superset-core
|
||||
# via
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
-e ./superset-extensions-cli
|
||||
# via
|
||||
# -r requirements/development.in
|
||||
# apache-superset
|
||||
alembic==1.14.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-migrate
|
||||
amqp==5.3.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# kombu
|
||||
apispec==6.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
apsw==3.46.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# shillelagh
|
||||
async-timeout==4.0.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# redis
|
||||
attrs==24.2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# cattrs
|
||||
# jsonschema
|
||||
# outcome
|
||||
@@ -40,70 +28,69 @@ attrs==24.2.0
|
||||
# trio
|
||||
babel==2.16.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-babel
|
||||
backoff==2.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
bcrypt==4.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# paramiko
|
||||
billiard==4.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# celery
|
||||
blinker==1.9.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask
|
||||
bottleneck==1.4.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
brotli==1.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-compress
|
||||
cachelib==0.9.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-caching
|
||||
# flask-session
|
||||
cachetools==5.5.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# google-auth
|
||||
cattrs==24.1.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# requests-cache
|
||||
celery==5.4.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
certifi==2024.8.30
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# requests
|
||||
# selenium
|
||||
cffi==1.17.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# cryptography
|
||||
# pynacl
|
||||
cfgv==3.4.0
|
||||
# via pre-commit
|
||||
charset-normalizer==3.4.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# requests
|
||||
click==8.1.7
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
# celery
|
||||
# click-didyoumean
|
||||
# click-option-group
|
||||
@@ -113,25 +100,25 @@ click==8.1.7
|
||||
# flask-appbuilder
|
||||
click-didyoumean==0.3.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# celery
|
||||
click-option-group==0.5.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
click-plugins==1.1.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# celery
|
||||
click-repl==0.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# celery
|
||||
cmdstanpy==1.1.0
|
||||
# via prophet
|
||||
colorama==0.4.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
contourpy==1.0.7
|
||||
@@ -140,15 +127,15 @@ coverage==7.6.8
|
||||
# via pytest-cov
|
||||
cron-descriptor==1.4.5
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
croniter==5.0.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
cryptography==43.0.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# paramiko
|
||||
# pyopenssl
|
||||
@@ -158,44 +145,37 @@ db-dtypes==1.3.1
|
||||
# via pandas-gbq
|
||||
defusedxml==0.7.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# odfpy
|
||||
deprecated==1.2.15
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# limits
|
||||
deprecation==2.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
distlib==0.3.8
|
||||
# via virtualenv
|
||||
dnspython==2.7.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# email-validator
|
||||
docker==7.0.0
|
||||
# via apache-superset
|
||||
email-validator==2.2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
et-xmlfile==2.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# openpyxl
|
||||
exceptiongroup==1.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# cattrs
|
||||
# pytest
|
||||
# trio
|
||||
# trio-websocket
|
||||
filelock==3.12.2
|
||||
# via virtualenv
|
||||
flask==2.3.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
# flask-babel
|
||||
@@ -212,58 +192,57 @@ flask==2.3.3
|
||||
# flask-wtf
|
||||
flask-appbuilder==4.5.5
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# apache-superset-core
|
||||
flask-babel==2.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
flask-caching==2.3.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
flask-compress==1.17
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
flask-cors==4.0.0
|
||||
# via apache-superset
|
||||
flask-jwt-extended==4.7.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
flask-limiter==3.8.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
flask-login==0.6.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
flask-migrate==3.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
flask-session==0.8.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
flask-sqlalchemy==2.5.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
# flask-migrate
|
||||
flask-talisman==1.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
flask-testing==0.8.1
|
||||
# via apache-superset
|
||||
flask-wtf==1.2.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
fonttools==4.55.0
|
||||
@@ -274,11 +253,11 @@ future==1.0.0
|
||||
# via pyhive
|
||||
geographiclib==2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# geopy
|
||||
geopy==2.4.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
gevent==24.2.1
|
||||
# via apache-superset
|
||||
@@ -291,7 +270,7 @@ google-api-core==2.23.0
|
||||
# sqlalchemy-bigquery
|
||||
google-auth==2.36.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# google-api-core
|
||||
# google-auth-oauthlib
|
||||
# google-cloud-bigquery
|
||||
@@ -323,10 +302,11 @@ googleapis-common-protos==1.66.0
|
||||
# grpcio-status
|
||||
greenlet==3.0.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# gevent
|
||||
# shillelagh
|
||||
# sqlalchemy
|
||||
grpcio==1.68.0
|
||||
# via
|
||||
# apache-superset
|
||||
@@ -336,66 +316,65 @@ grpcio-status==1.60.1
|
||||
# via google-api-core
|
||||
gunicorn==23.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
h11==0.16.0
|
||||
h11==0.14.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# wsproto
|
||||
hashids==1.3.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
holidays==0.25
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# prophet
|
||||
humanize==4.11.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
identify==2.5.36
|
||||
# via pre-commit
|
||||
idna==3.10
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# email-validator
|
||||
# requests
|
||||
# trio
|
||||
importlib-metadata==8.5.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
importlib-resources==6.4.5
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# limits
|
||||
# prophet
|
||||
iniconfig==2.0.0
|
||||
# via pytest
|
||||
isodate==0.7.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
itsdangerous==2.2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask
|
||||
# flask-wtf
|
||||
jinja2==3.1.4
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset-extensions-cli
|
||||
# -c requirements/base.txt
|
||||
# flask
|
||||
# flask-babel
|
||||
jsonpath-ng==1.7.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
jsonschema==4.17.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
# jsonschema-spec
|
||||
# openapi-schema-validator
|
||||
@@ -406,73 +385,72 @@ kiwisolver==1.4.7
|
||||
# via matplotlib
|
||||
kombu==5.4.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# celery
|
||||
korean-lunar-calendar==0.3.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# holidays
|
||||
lazy-object-proxy==1.10.0
|
||||
# via openapi-spec-validator
|
||||
limits==3.13.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-limiter
|
||||
mako==1.3.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# alembic
|
||||
# apache-superset
|
||||
markdown==3.7
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
markdown-it-py==3.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# rich
|
||||
markupsafe==3.0.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# jinja2
|
||||
# mako
|
||||
# werkzeug
|
||||
# wtforms
|
||||
marshmallow==3.23.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
# marshmallow-sqlalchemy
|
||||
marshmallow-sqlalchemy==0.28.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
matplotlib==3.9.0
|
||||
# via prophet
|
||||
mdurl==0.1.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# markdown-it-py
|
||||
msgpack==1.0.8
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
msgspec==0.18.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-session
|
||||
mysqlclient==2.2.6
|
||||
# via apache-superset
|
||||
nh3==0.2.19
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
nodeenv==1.8.0
|
||||
# via pre-commit
|
||||
numpy==1.26.4
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# bottleneck
|
||||
# cmdstanpy
|
||||
@@ -487,7 +465,7 @@ oauthlib==3.2.2
|
||||
# via requests-oauthlib
|
||||
odfpy==1.4.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# pandas
|
||||
openapi-schema-validator==0.4.4
|
||||
# via openapi-spec-validator
|
||||
@@ -495,19 +473,19 @@ openapi-spec-validator==0.5.6
|
||||
# via apache-superset
|
||||
openpyxl==3.1.5
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# pandas
|
||||
ordered-set==4.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-limiter
|
||||
outcome==1.3.0.post0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# trio
|
||||
packaging==24.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# apispec
|
||||
# db-dtypes
|
||||
@@ -524,7 +502,7 @@ packaging==24.2
|
||||
# sqlalchemy-bigquery
|
||||
pandas==2.0.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# cmdstanpy
|
||||
# db-dtypes
|
||||
@@ -536,18 +514,18 @@ parameterized==0.9.0
|
||||
# via apache-superset
|
||||
paramiko==3.5.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# sshtunnel
|
||||
parsedatetime==2.6
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
pathable==0.4.3
|
||||
# via jsonschema-spec
|
||||
pgsanity==0.2.9
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
pillow==10.3.0
|
||||
# via
|
||||
@@ -555,30 +533,30 @@ pillow==10.3.0
|
||||
# matplotlib
|
||||
platformdirs==3.8.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# requests-cache
|
||||
# virtualenv
|
||||
pluggy==1.5.0
|
||||
# via pytest
|
||||
ply==3.11
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# jsonpath-ng
|
||||
polyline==2.0.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
pre-commit==4.0.1
|
||||
# via apache-superset
|
||||
prison==0.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-appbuilder
|
||||
progress==1.6
|
||||
# via apache-superset
|
||||
prompt-toolkit==3.0.48
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# click-repl
|
||||
prophet==1.1.5
|
||||
# via apache-superset
|
||||
@@ -599,24 +577,24 @@ psycopg2-binary==2.9.6
|
||||
# via apache-superset
|
||||
pyarrow==14.0.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# db-dtypes
|
||||
# pandas-gbq
|
||||
pyasn1==0.6.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# pyasn1-modules
|
||||
# python-ldap
|
||||
# rsa
|
||||
pyasn1-modules==0.4.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# google-auth
|
||||
# python-ldap
|
||||
pycparser==2.22
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# cffi
|
||||
pydata-google-auth==1.9.0
|
||||
# via pandas-gbq
|
||||
@@ -626,7 +604,7 @@ pyfakefs==5.3.5
|
||||
# via apache-superset
|
||||
pygments==2.18.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# rich
|
||||
pyhive==0.7.0
|
||||
# via apache-superset
|
||||
@@ -634,48 +612,43 @@ pyinstrument==4.4.0
|
||||
# via apache-superset
|
||||
pyjwt==2.10.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
# flask-jwt-extended
|
||||
pynacl==1.5.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# paramiko
|
||||
pyopenssl==24.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# shillelagh
|
||||
pyparsing==3.2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# matplotlib
|
||||
pyrsistent==0.20.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# jsonschema
|
||||
pysocks==1.7.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# urllib3
|
||||
pytest==7.4.4
|
||||
# via
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
# pytest-cov
|
||||
# pytest-mock
|
||||
pytest-cov==6.0.0
|
||||
# via
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
# via apache-superset
|
||||
pytest-mock==3.10.0
|
||||
# via
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
# via apache-superset
|
||||
python-dateutil==2.9.0.post0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# celery
|
||||
# croniter
|
||||
@@ -690,39 +663,39 @@ python-dateutil==2.9.0.post0
|
||||
# trino
|
||||
python-dotenv==1.0.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
python-geohash==0.8.5
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
python-ldap==3.4.4
|
||||
# via apache-superset
|
||||
pytz==2024.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# croniter
|
||||
# flask-babel
|
||||
# pandas
|
||||
# trino
|
||||
pyxlsb==1.0.10
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# pandas
|
||||
pyyaml==6.0.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# apispec
|
||||
# jsonschema-spec
|
||||
# pre-commit
|
||||
redis==4.6.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
requests==2.32.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# docker
|
||||
# google-api-core
|
||||
# google-cloud-bigquery
|
||||
@@ -735,7 +708,7 @@ requests==2.32.2
|
||||
# trino
|
||||
requests-cache==1.2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# shillelagh
|
||||
requests-oauthlib==2.0.0
|
||||
# via google-auth-oauthlib
|
||||
@@ -743,20 +716,18 @@ rfc3339-validator==0.1.4
|
||||
# via openapi-schema-validator
|
||||
rich==13.9.4
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-limiter
|
||||
rsa==4.9
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# google-auth
|
||||
ruff==0.8.0
|
||||
# via apache-superset
|
||||
selenium==4.27.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
semver==3.0.4
|
||||
# via apache-superset-extensions-cli
|
||||
setuptools==75.6.0
|
||||
# via
|
||||
# nodeenv
|
||||
@@ -766,19 +737,19 @@ setuptools==75.6.0
|
||||
# zope-interface
|
||||
shillelagh==1.2.18
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
shortid==0.1.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
simplejson==3.19.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
six==1.16.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# prison
|
||||
# python-dateutil
|
||||
# rfc3339-validator
|
||||
@@ -786,19 +757,19 @@ six==1.16.0
|
||||
# wtforms-json
|
||||
slack-sdk==3.33.4
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
sniffio==1.3.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# trio
|
||||
sortedcontainers==2.4.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# trio
|
||||
sqlalchemy==1.4.54
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# alembic
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
@@ -811,34 +782,29 @@ sqlalchemy-bigquery==1.12.0
|
||||
# via apache-superset
|
||||
sqlalchemy-utils==0.38.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
sqlglot==26.11.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
sqloxide==0.1.51
|
||||
# via apache-superset
|
||||
sqlparse==0.5.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
sshtunnel==0.4.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
statsd==4.0.1
|
||||
# via apache-superset
|
||||
tabulate==0.8.10
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
tomli==2.2.1
|
||||
# via
|
||||
# apache-superset-extensions-cli
|
||||
# coverage
|
||||
# pytest
|
||||
tqdm==4.67.1
|
||||
# via
|
||||
# cmdstanpy
|
||||
@@ -847,28 +813,25 @@ trino==0.330.0
|
||||
# via apache-superset
|
||||
trio==0.28.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# selenium
|
||||
# trio-websocket
|
||||
trio-websocket==0.11.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# selenium
|
||||
typing-extensions==4.12.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# alembic
|
||||
# apache-superset
|
||||
# cattrs
|
||||
# exceptiongroup
|
||||
# flask-limiter
|
||||
# limits
|
||||
# rich
|
||||
# selenium
|
||||
# shillelagh
|
||||
tzdata==2024.2
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# celery
|
||||
# kombu
|
||||
# pandas
|
||||
@@ -876,74 +839,69 @@ tzlocal==5.2
|
||||
# via trino
|
||||
url-normalize==1.4.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# requests-cache
|
||||
urllib3==1.26.18
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# docker
|
||||
# requests
|
||||
# requests-cache
|
||||
# selenium
|
||||
vine==5.1.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# amqp
|
||||
# celery
|
||||
# kombu
|
||||
virtualenv==20.23.1
|
||||
# via pre-commit
|
||||
watchdog==6.0.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# apache-superset
|
||||
# apache-superset-extensions-cli
|
||||
wcwidth==0.2.13
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# prompt-toolkit
|
||||
websocket-client==1.8.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# selenium
|
||||
werkzeug==3.1.3
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask
|
||||
# flask-appbuilder
|
||||
# flask-jwt-extended
|
||||
# flask-login
|
||||
wrapt==1.17.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# deprecated
|
||||
wsproto==1.2.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# trio-websocket
|
||||
wtforms==3.2.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# flask-appbuilder
|
||||
# flask-wtf
|
||||
# wtforms-json
|
||||
wtforms-json==0.3.5
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
xlrd==2.0.1
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# pandas
|
||||
xlsxwriter==3.0.9
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# apache-superset
|
||||
# pandas
|
||||
zipp==3.21.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# importlib-metadata
|
||||
zope-event==5.0
|
||||
# via gevent
|
||||
@@ -951,5 +909,5 @@ zope-interface==5.4.0
|
||||
# via gevent
|
||||
zstandard==0.23.0
|
||||
# via
|
||||
# -c requirements/base-constraint.txt
|
||||
# -c requirements/base.txt
|
||||
# flask-compress
|
||||
|
||||
@@ -45,11 +45,6 @@ PATTERNS = {
|
||||
"docs": [
|
||||
r"^docs/",
|
||||
],
|
||||
"superset-extensions-cli": [
|
||||
r"^\.github/workflows/superset-extensions-cli\.yml",
|
||||
r"^superset-extensions-cli/",
|
||||
r"^superset-core/",
|
||||
],
|
||||
}
|
||||
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
|
||||
|
||||
|
||||
@@ -24,13 +24,7 @@ ADDITIONAL_ARGS="$@"
|
||||
# Generate the requirements/base.txt file
|
||||
uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt $ADDITIONAL_ARGS
|
||||
|
||||
# Hack to remove "Unnamed requirements are not allowed as constraints" error from base requirements
|
||||
grep --invert-match "./superset-core" requirements/base.txt > requirements/base-constraint.txt
|
||||
|
||||
# Generate the requirements/development.txt file, making sure the base requirements are used as a constraint to keep the versions in sync. Note that `development.txt` is a Superset of `base.txt` where version for the shared libs should match their version.
|
||||
uv pip compile requirements/development.in -c requirements/base-constraint.txt -o requirements/development.txt $ADDITIONAL_ARGS
|
||||
|
||||
# Remove temporary base requirement file
|
||||
rm requirements/base-constraint.txt
|
||||
# Generate the requirements/development.txt file, making sure requirements/base.txt is a constraint to keep the versions in sync
|
||||
uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt $ADDITIONAL_ARGS
|
||||
|
||||
uv pip compile requirements/translations.in -o requirements/translations.txt $ADDITIONAL_ARGS
|
||||
|
||||
1
superset-core/.gitignore
vendored
1
superset-core/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
apache_superset_primitives.egg-info/
|
||||
@@ -1,22 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
## Change Log
|
||||
|
||||
Changelogs will be added once we have the first stable release.
|
||||
@@ -1,216 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
============================================================================
|
||||
APACHE SUPERSET SUBCOMPONENTS:
|
||||
|
||||
The Apache Superset project contains subcomponents with separate copyright
|
||||
notices and license terms. Your use of the source code for the these
|
||||
subcomponents is subject to the terms and conditions of the following
|
||||
licenses.
|
||||
|
||||
========================================================================
|
||||
Third party SIL Open Font License v1.1 (OFL-1.1)
|
||||
========================================================================
|
||||
|
||||
(SIL OPEN FONT LICENSE Version 1.1) The Inter font family (https://github.com/rsms/inter)
|
||||
(SIL OPEN FONT LICENSE Version 1.1) The Fira Code font family (https://github.com/tonsky/FiraCode)
|
||||
@@ -1,113 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# apache-superset-core
|
||||
|
||||
[](https://badge.fury.io/py/apache-superset-core)
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://www.python.org/downloads/)
|
||||
|
||||
The official core package for building Apache Superset backend extensions and integrations. This package provides essential building blocks including base classes, API utilities, type definitions, and decorators for both the host application and extensions.
|
||||
|
||||
## 📦 Installation
|
||||
|
||||
```bash
|
||||
pip install apache-superset-core
|
||||
```
|
||||
|
||||
## 🏗️ Architecture
|
||||
|
||||
The package is organized into logical modules, each providing specific functionality:
|
||||
|
||||
- **`api`** - REST API base classes, models access, query utilities, and registration
|
||||
- **`api.models`** - Access to Superset's database models (datasets, databases, etc.)
|
||||
- **`api.query`** - Database query utilities and SQL dialect handling
|
||||
- **`api.rest_api`** - Extension API registration and management
|
||||
- **`api.types.rest_api`** - REST API base classes and type definitions
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### Basic Extension Structure
|
||||
|
||||
```python
|
||||
from flask import request, Response
|
||||
from flask_appbuilder.api import expose, permission_name, protect, safe
|
||||
from superset_core.api import models, query, rest_api
|
||||
from superset_core.api.types.rest_api import RestApi
|
||||
|
||||
class DatasetReferencesAPI(RestApi):
|
||||
"""Example extension API demonstrating core functionality."""
|
||||
|
||||
resource_name = "dataset_references"
|
||||
openapi_spec_tag = "Dataset references"
|
||||
class_permission_name = "dataset_references"
|
||||
|
||||
@expose("/metadata", methods=("POST",))
|
||||
@protect()
|
||||
@safe
|
||||
@permission_name("read")
|
||||
def metadata(self) -> Response:
|
||||
"""Get dataset metadata for tables referenced in SQL."""
|
||||
sql: str = request.json.get("sql")
|
||||
database_id: int = request.json.get("databaseId")
|
||||
|
||||
# Access Superset's models using core APIs
|
||||
databases = models.get_databases(id=database_id)
|
||||
if not databases:
|
||||
return self.response_404()
|
||||
|
||||
database = databases[0]
|
||||
dialect = query.get_sqlglot_dialect(database)
|
||||
|
||||
# Access datasets to get owner information
|
||||
datasets = models.get_datasets()
|
||||
owners_map = {
|
||||
dataset.table_name: [
|
||||
f"{owner.first_name} {owner.last_name}"
|
||||
for owner in dataset.owners
|
||||
]
|
||||
for dataset in datasets
|
||||
}
|
||||
|
||||
# Process SQL and return dataset metadata
|
||||
return self.response(200, result=owners_map)
|
||||
|
||||
# Register the extension API
|
||||
rest_api.add_extension_api(DatasetReferencesAPI)
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
|
||||
|
||||
## 📄 License
|
||||
|
||||
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
|
||||
|
||||
## 🔗 Links
|
||||
|
||||
- [Apache Superset](https://superset.apache.org/)
|
||||
- [Documentation](https://superset.apache.org/docs/)
|
||||
- [Community](https://superset.apache.org/community/)
|
||||
- [GitHub Repository](https://github.com/apache/superset)
|
||||
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
|
||||
|
||||
---
|
||||
|
||||
**Note**: This package is currently in release candidate status. APIs may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.
|
||||
@@ -1,63 +0,0 @@
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[project]
|
||||
name = "apache-superset-core"
|
||||
version = "0.0.1rc2"
|
||||
description = "Core Python package for building Apache Superset backend extensions and integrations"
|
||||
readme = "README.md"
|
||||
authors = [
|
||||
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
|
||||
]
|
||||
license = { file="LICENSE.txt" }
|
||||
requires-python = ">=3.10"
|
||||
keywords = ["superset", "apache", "analytics", "business-intelligence", "extensions", "visualization"]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Web Environment",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Database",
|
||||
"Topic :: Scientific/Engineering :: Visualization",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
]
|
||||
dependencies = [
|
||||
"flask-appbuilder>=4.5.3, <5.0.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://superset.apache.org/"
|
||||
Documentation = "https://superset.apache.org/docs/"
|
||||
Repository = "https://github.com/apache/superset"
|
||||
"Bug Tracker" = "https://github.com/apache/superset/issues"
|
||||
Changelog = "https://github.com/apache/superset/blob/master/CHANGELOG.md"
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=76.0.0", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = { "" = "src" }
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,24 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from .types.models import CoreModelsApi
|
||||
from .types.query import CoreQueryApi
|
||||
from .types.rest_api import CoreRestApi
|
||||
|
||||
models: CoreModelsApi
|
||||
rest_api: CoreRestApi
|
||||
query: CoreQueryApi
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,90 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Type
|
||||
|
||||
from flask_sqlalchemy import BaseQuery
|
||||
from sqlalchemy.orm import scoped_session
|
||||
|
||||
|
||||
class CoreModelsApi(ABC):
|
||||
"""
|
||||
Abstract interface for accessing Superset data models.
|
||||
|
||||
This class defines the contract for retrieving SQLAlchemy sessions
|
||||
and model instances for datasets and databases within Superset.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def get_session() -> scoped_session:
|
||||
"""
|
||||
Retrieve the SQLAlchemy session to directly interface with the
|
||||
Superset models.
|
||||
|
||||
:returns: The SQLAlchemy scoped session instance.
|
||||
"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def get_dataset_model() -> Type[Any]:
|
||||
"""
|
||||
Retrieve the Dataset (SqlaTable) SQLAlchemy model.
|
||||
|
||||
:returns: The Dataset SQLAlchemy model class.
|
||||
"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def get_database_model() -> Type[Any]:
|
||||
"""
|
||||
Retrieve the Database SQLAlchemy model.
|
||||
|
||||
:returns: The Database SQLAlchemy model class.
|
||||
"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def get_datasets(query: BaseQuery | None = None, **kwargs: Any) -> list[Any]:
|
||||
"""
|
||||
Retrieve Dataset (SqlaTable) entities.
|
||||
|
||||
:param query: A query with the Dataset model as the primary entity for complex
|
||||
queries.
|
||||
:param kwargs: Optional keyword arguments to filter datasets using SQLAlchemy's
|
||||
`filter_by()`.
|
||||
:returns: SqlaTable entities.
|
||||
"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def get_databases(query: BaseQuery | None = None, **kwargs: Any) -> list[Any]:
|
||||
"""
|
||||
Retrieve Database entities.
|
||||
|
||||
:param query: A query with the Database model as the primary entity for complex
|
||||
queries.
|
||||
:param kwargs: Optional keyword arguments to filter databases using SQLAlchemy's
|
||||
`filter_by()`.
|
||||
:returns: Database entities.
|
||||
"""
|
||||
...
|
||||
@@ -1,41 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
from sqlglot import Dialects
|
||||
|
||||
|
||||
class CoreQueryApi(ABC):
|
||||
"""
|
||||
Abstract interface for query-related operations.
|
||||
|
||||
This class defines the contract for database query operations,
|
||||
including dialect handling and query processing.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def get_sqlglot_dialect(database: Any) -> Dialects:
|
||||
"""
|
||||
Get the SQLGlot dialect for the specified database.
|
||||
|
||||
:param database: The database instance to get the dialect for.
|
||||
:returns: The SQLGlot dialect enum corresponding to the database.
|
||||
"""
|
||||
...
|
||||
@@ -1,64 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Type
|
||||
|
||||
from flask_appbuilder.api import BaseApi
|
||||
|
||||
|
||||
class RestApi(BaseApi):
|
||||
"""
|
||||
Base REST API class for Superset with browser login support.
|
||||
|
||||
This class extends Flask-AppBuilder's BaseApi and enables browser-based
|
||||
authentication by default.
|
||||
"""
|
||||
|
||||
allow_browser_login = True
|
||||
|
||||
|
||||
class CoreRestApi(ABC):
|
||||
"""
|
||||
Abstract interface for managing REST APIs in Superset.
|
||||
|
||||
This class defines the contract for adding and managing REST APIs,
|
||||
including both core APIs and extension APIs.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def add_api(api: Type[RestApi]) -> None:
|
||||
"""
|
||||
Add a REST API to the Superset API.
|
||||
|
||||
:param api: A REST API instance.
|
||||
:returns: None.
|
||||
"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def add_extension_api(api: Type[RestApi]) -> None:
|
||||
"""
|
||||
Add an extension REST API to the Superset API.
|
||||
|
||||
:param api: An extension REST API instance. These are placed under
|
||||
the /extensions resource.
|
||||
:returns: None.
|
||||
"""
|
||||
...
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,63 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
|
||||
class ModuleFederationConfig(TypedDict):
|
||||
exposes: dict[str, str]
|
||||
filename: str
|
||||
shared: dict[str, str]
|
||||
remotes: dict[str, str]
|
||||
|
||||
|
||||
class FrontendContributionConfig(TypedDict):
|
||||
commands: dict[str, list[dict[str, str]]]
|
||||
views: dict[str, list[dict[str, str]]]
|
||||
menus: dict[str, list[dict[str, str]]]
|
||||
|
||||
|
||||
class FrontendManifest(TypedDict):
|
||||
contributions: FrontendContributionConfig
|
||||
moduleFederation: ModuleFederationConfig
|
||||
remoteEntry: str
|
||||
|
||||
|
||||
class BackendManifest(TypedDict):
|
||||
entryPoints: list[str]
|
||||
|
||||
|
||||
class SharedBase(TypedDict, total=False):
|
||||
id: str
|
||||
name: str
|
||||
dependencies: list[str]
|
||||
description: str
|
||||
version: str
|
||||
frontend: FrontendManifest
|
||||
permissions: list[str]
|
||||
|
||||
|
||||
class Manifest(SharedBase, total=False):
|
||||
backend: BackendManifest
|
||||
|
||||
|
||||
class BackendMetadata(BackendManifest):
|
||||
files: list[str]
|
||||
|
||||
|
||||
class Metadata(SharedBase):
|
||||
backend: BackendMetadata
|
||||
@@ -29,8 +29,8 @@ Embedding is done by inserting an iframe, containing a Superset page, into the h
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Activate the feature flag `EMBEDDED_SUPERSET`
|
||||
- Set a strong password in configuration variable `GUEST_TOKEN_JWT_SECRET` (see configuration file config.py). Be aware that its default value must be changed in production.
|
||||
* Activate the feature flag `EMBEDDED_SUPERSET`
|
||||
* Set a strong password in configuration variable `GUEST_TOKEN_JWT_SECRET` (see configuration file config.py). Be aware that its default value must be changed in production.
|
||||
|
||||
## Embedding a Dashboard
|
||||
|
||||
@@ -48,27 +48,19 @@ embedDashboard({
|
||||
supersetDomain: "https://superset.example.com",
|
||||
mountPoint: document.getElementById("my-superset-container"), // any html element that can contain an iframe
|
||||
fetchGuestToken: () => fetchGuestTokenFromBackend(),
|
||||
dashboardUiConfig: {
|
||||
// dashboard UI config: hideTitle, hideTab, hideChartControls, filters.visible, filters.expanded (optional), urlParams (optional)
|
||||
hideTitle: true,
|
||||
filters: {
|
||||
expanded: true,
|
||||
},
|
||||
urlParams: {
|
||||
foo: "value1",
|
||||
bar: "value2",
|
||||
// ...
|
||||
},
|
||||
dashboardUiConfig: { // dashboard UI config: hideTitle, hideTab, hideChartControls, filters.visible, filters.expanded (optional), urlParams (optional)
|
||||
hideTitle: true,
|
||||
filters: {
|
||||
expanded: true,
|
||||
},
|
||||
urlParams: {
|
||||
foo: 'value1',
|
||||
bar: 'value2',
|
||||
// ...
|
||||
}
|
||||
},
|
||||
// optional additional iframe sandbox attributes
|
||||
iframeSandboxExtras: [
|
||||
"allow-top-navigation",
|
||||
"allow-popups-to-escape-sandbox",
|
||||
],
|
||||
// optional Permissions Policy features
|
||||
iframeAllowExtras: ["clipboard-write", "fullscreen"],
|
||||
// optional callback to customize permalink URLs
|
||||
resolvePermalinkUrl: ({ key }) => `https://my-app.com/analytics/share/${key}`,
|
||||
// optional additional iframe sandbox attributes
|
||||
iframeSandboxExtras: ['allow-top-navigation', 'allow-popups-to-escape-sandbox']
|
||||
});
|
||||
```
|
||||
|
||||
@@ -99,7 +91,7 @@ Guest tokens can have Row Level Security rules which filter data for the user ca
|
||||
|
||||
The agent making the `POST` request must be authenticated with the `can_grant_guest_token` permission.
|
||||
|
||||
Within your app, using the Guest Token will then allow authentication to your Superset instance via creating an Anonymous user object. This guest anonymous user will default to the public role as per this setting `GUEST_ROLE_NAME = "Public"`.
|
||||
Within your app, using the Guest Token will then allow authentication to your Superset instance via creating an Anonymous user object. This guest anonymous user will default to the public role as per this setting `GUEST_ROLE_NAME = "Public"`.
|
||||
|
||||
The user parameters in the example below are optional and are provided as a means of passing user attributes that may be accessed in jinja templates inside your charts.
|
||||
|
||||
@@ -112,19 +104,18 @@ Example `POST /security/guest_token` payload:
|
||||
"first_name": "Stan",
|
||||
"last_name": "Lee"
|
||||
},
|
||||
"resources": [
|
||||
{
|
||||
"type": "dashboard",
|
||||
"id": "abc123"
|
||||
}
|
||||
],
|
||||
"rls": [{ "clause": "publisher = 'Nintendo'" }]
|
||||
"resources": [{
|
||||
"type": "dashboard",
|
||||
"id": "abc123"
|
||||
}],
|
||||
"rls": [
|
||||
{ "clause": "publisher = 'Nintendo'" }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, a guest token can be created directly in your app with a json like the following, and then signed
|
||||
with the secret set in configuration variable `GUEST_TOKEN_JWT_SECRET` (see configuration file config.py)
|
||||
|
||||
```
|
||||
{
|
||||
"user": {
|
||||
@@ -151,47 +142,7 @@ with the secret set in configuration variable `GUEST_TOKEN_JWT_SECRET` (see conf
|
||||
The Embedded SDK creates an iframe with [sandbox](https://developer.mozilla.org/es/docs/Web/HTML/Element/iframe#sandbox) mode by default
|
||||
which applies certain restrictions to the iframe's content.
|
||||
To pass additional sandbox attributes you can use `iframeSandboxExtras`:
|
||||
|
||||
```js
|
||||
// optional additional iframe sandbox attributes
|
||||
iframeSandboxExtras: ["allow-top-navigation", "allow-popups-to-escape-sandbox"];
|
||||
```
|
||||
|
||||
### Customizing Permalink URLs
|
||||
|
||||
When users click share buttons inside an embedded dashboard, Superset generates permalinks using Superset's domain. If you want to use your own domain and URL format for these permalinks, you can provide a `resolvePermalinkUrl` callback:
|
||||
|
||||
```js
|
||||
embedDashboard({
|
||||
id: "abc123",
|
||||
supersetDomain: "https://superset.example.com",
|
||||
mountPoint: document.getElementById("my-superset-container"),
|
||||
fetchGuestToken: () => fetchGuestTokenFromBackend(),
|
||||
|
||||
// Customize permalink URLs
|
||||
resolvePermalinkUrl: ({ key }) => {
|
||||
// key: the permalink key (e.g., "xyz789")
|
||||
return `https://my-app.com/analytics/share/${key}`;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
To restore the dashboard state from a permalink in your app:
|
||||
|
||||
```js
|
||||
// In your route handler for /analytics/share/:key
|
||||
const permalinkKey = routeParams.key;
|
||||
|
||||
embedDashboard({
|
||||
id: "abc123",
|
||||
supersetDomain: "https://superset.example.com",
|
||||
mountPoint: document.getElementById("my-superset-container"),
|
||||
fetchGuestToken: () => fetchGuestTokenFromBackend(),
|
||||
resolvePermalinkUrl: ({ key }) => `https://my-app.com/analytics/share/${key}`,
|
||||
dashboardUiConfig: {
|
||||
urlParams: {
|
||||
permalink_key: permalinkKey, // Restores filters, tabs, chart states, and scrolls to anchor
|
||||
},
|
||||
},
|
||||
});
|
||||
// optional additional iframe sandbox attributes
|
||||
iframeSandboxExtras: ['allow-top-navigation', 'allow-popups-to-escape-sandbox']
|
||||
```
|
||||
|
||||
@@ -19,12 +19,12 @@
|
||||
|
||||
import {
|
||||
DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY,
|
||||
IFRAME_COMMS_MESSAGE_TYPE,
|
||||
} from "./const";
|
||||
IFRAME_COMMS_MESSAGE_TYPE
|
||||
} from './const';
|
||||
|
||||
// We can swap this out for the actual switchboard package once it gets published
|
||||
import { Switchboard } from "@superset-ui/switchboard";
|
||||
import { getGuestTokenRefreshTiming } from "./guestTokenRefresh";
|
||||
import { Switchboard } from '@superset-ui/switchboard';
|
||||
import { getGuestTokenRefreshTiming } from './guestTokenRefresh';
|
||||
|
||||
/**
|
||||
* The function to fetch a guest token from your Host App's backend server.
|
||||
@@ -34,63 +34,48 @@ import { getGuestTokenRefreshTiming } from "./guestTokenRefresh";
|
||||
export type GuestTokenFetchFn = () => Promise<string>;
|
||||
|
||||
export type UiConfigType = {
|
||||
hideTitle?: boolean;
|
||||
hideTab?: boolean;
|
||||
hideChartControls?: boolean;
|
||||
hideTitle?: boolean
|
||||
hideTab?: boolean
|
||||
hideChartControls?: boolean
|
||||
filters?: {
|
||||
[key: string]: boolean | undefined;
|
||||
visible?: boolean;
|
||||
expanded?: boolean;
|
||||
};
|
||||
[key: string]: boolean | undefined
|
||||
visible?: boolean
|
||||
expanded?: boolean
|
||||
}
|
||||
urlParams?: {
|
||||
[key: string]: any;
|
||||
};
|
||||
};
|
||||
[key: string]: any
|
||||
}
|
||||
}
|
||||
|
||||
export type EmbedDashboardParams = {
|
||||
/** The id provided by the embed configuration UI in Superset */
|
||||
id: string;
|
||||
id: string
|
||||
/** The domain where Superset can be located, with protocol, such as: https://superset.example.com */
|
||||
supersetDomain: string;
|
||||
supersetDomain: string
|
||||
/** The html element within which to mount the iframe */
|
||||
mountPoint: HTMLElement;
|
||||
mountPoint: HTMLElement
|
||||
/** A function to fetch a guest token from the Host App's backend server */
|
||||
fetchGuestToken: GuestTokenFetchFn;
|
||||
fetchGuestToken: GuestTokenFetchFn
|
||||
/** The dashboard UI config: hideTitle, hideTab, hideChartControls, filters.visible, filters.expanded **/
|
||||
dashboardUiConfig?: UiConfigType;
|
||||
dashboardUiConfig?: UiConfigType
|
||||
/** Are we in debug mode? */
|
||||
debug?: boolean;
|
||||
debug?: boolean
|
||||
/** The iframe title attribute */
|
||||
iframeTitle?: string;
|
||||
iframeTitle?: string
|
||||
/** additional iframe sandbox attributes ex (allow-top-navigation, allow-popups-to-escape-sandbox) **/
|
||||
iframeSandboxExtras?: string[];
|
||||
/** iframe allow attribute for Permissions Policy (e.g., ['clipboard-write', 'fullscreen']) **/
|
||||
iframeAllowExtras?: string[];
|
||||
/** Callback to resolve permalink URLs. If provided, this will be called when generating permalinks
|
||||
* to allow the host app to customize the URL. If not provided, Superset's default URL is used. */
|
||||
resolvePermalinkUrl?: ResolvePermalinkUrlFn;
|
||||
};
|
||||
iframeSandboxExtras?: string[]
|
||||
}
|
||||
|
||||
export type Size = {
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Callback to resolve permalink URLs.
|
||||
* Receives the permalink key and returns the full URL to use for the permalink.
|
||||
*/
|
||||
export type ResolvePermalinkUrlFn = (params: {
|
||||
/** The permalink key (e.g., "xyz789") */
|
||||
key: string;
|
||||
}) => string | Promise<string>;
|
||||
width: number, height: number
|
||||
}
|
||||
|
||||
export type EmbeddedDashboard = {
|
||||
getScrollSize: () => Promise<Size>;
|
||||
unmount: () => void;
|
||||
getDashboardPermalink: (anchor: string) => Promise<string>;
|
||||
getActiveTabs: () => Promise<string[]>;
|
||||
};
|
||||
getScrollSize: () => Promise<Size>
|
||||
unmount: () => void
|
||||
getDashboardPermalink: (anchor: string) => Promise<string>
|
||||
getActiveTabs: () => Promise<string[]>
|
||||
}
|
||||
|
||||
/**
|
||||
* Embeds a Superset dashboard into the page using an iframe.
|
||||
@@ -103,9 +88,7 @@ export async function embedDashboard({
|
||||
dashboardUiConfig,
|
||||
debug = false,
|
||||
iframeTitle = "Embedded Dashboard",
|
||||
iframeSandboxExtras = [],
|
||||
iframeAllowExtras = [],
|
||||
resolvePermalinkUrl,
|
||||
iframeSandboxExtras = []
|
||||
}: EmbedDashboardParams): Promise<EmbeddedDashboard> {
|
||||
function log(...info: unknown[]) {
|
||||
if (debug) {
|
||||
@@ -113,52 +96,40 @@ export async function embedDashboard({
|
||||
}
|
||||
}
|
||||
|
||||
log("embedding");
|
||||
log('embedding');
|
||||
|
||||
if (supersetDomain.endsWith("/")) {
|
||||
supersetDomain = supersetDomain.slice(0, -1);
|
||||
}
|
||||
|
||||
function calculateConfig() {
|
||||
let configNumber = 0;
|
||||
if (dashboardUiConfig) {
|
||||
if (dashboardUiConfig.hideTitle) {
|
||||
configNumber += 1;
|
||||
let configNumber = 0
|
||||
if(dashboardUiConfig) {
|
||||
if(dashboardUiConfig.hideTitle) {
|
||||
configNumber += 1
|
||||
}
|
||||
if (dashboardUiConfig.hideTab) {
|
||||
configNumber += 2;
|
||||
if(dashboardUiConfig.hideTab) {
|
||||
configNumber += 2
|
||||
}
|
||||
if (dashboardUiConfig.hideChartControls) {
|
||||
configNumber += 8;
|
||||
if(dashboardUiConfig.hideChartControls) {
|
||||
configNumber += 8
|
||||
}
|
||||
}
|
||||
return configNumber;
|
||||
return configNumber
|
||||
}
|
||||
|
||||
async function mountIframe(): Promise<Switchboard> {
|
||||
return new Promise((resolve) => {
|
||||
const iframe = document.createElement("iframe");
|
||||
const dashboardConfigUrlParams = dashboardUiConfig
|
||||
? { uiConfig: `${calculateConfig()}` }
|
||||
: undefined;
|
||||
const filterConfig = dashboardUiConfig?.filters || {};
|
||||
const filterConfigKeys = Object.keys(filterConfig);
|
||||
const filterConfigUrlParams = Object.fromEntries(
|
||||
filterConfigKeys.map((key) => [
|
||||
DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY[key],
|
||||
filterConfig[key],
|
||||
])
|
||||
);
|
||||
return new Promise(resolve => {
|
||||
const iframe = document.createElement('iframe');
|
||||
const dashboardConfigUrlParams = dashboardUiConfig ? {uiConfig: `${calculateConfig()}`} : undefined;
|
||||
const filterConfig = dashboardUiConfig?.filters || {}
|
||||
const filterConfigKeys = Object.keys(filterConfig)
|
||||
const filterConfigUrlParams = Object.fromEntries(filterConfigKeys.map(
|
||||
key => [DASHBOARD_UI_FILTER_CONFIG_URL_PARAM_KEY[key], filterConfig[key]]))
|
||||
|
||||
// Allow url query parameters from dashboardUiConfig.urlParams to override the ones from filterConfig
|
||||
const urlParams = {
|
||||
...dashboardConfigUrlParams,
|
||||
...filterConfigUrlParams,
|
||||
...dashboardUiConfig?.urlParams,
|
||||
};
|
||||
const urlParamsString = Object.keys(urlParams).length
|
||||
? "?" + new URLSearchParams(urlParams).toString()
|
||||
: "";
|
||||
const urlParams = {...dashboardConfigUrlParams, ...filterConfigUrlParams, ...dashboardUiConfig?.urlParams}
|
||||
const urlParamsString = Object.keys(urlParams).length ? '?' + new URLSearchParams(urlParams).toString() : ''
|
||||
|
||||
// set up the iframe's sandbox configuration
|
||||
iframe.sandbox.add("allow-same-origin"); // needed for postMessage to work
|
||||
@@ -173,7 +144,7 @@ export async function embedDashboard({
|
||||
});
|
||||
|
||||
// add the event listener before setting src, to be 100% sure that we capture the load event
|
||||
iframe.addEventListener("load", () => {
|
||||
iframe.addEventListener('load', () => {
|
||||
// MessageChannel allows us to send and receive messages smoothly between our window and the iframe
|
||||
// See https://developer.mozilla.org/en-US/docs/Web/API/Channel_Messaging_API
|
||||
const commsChannel = new MessageChannel();
|
||||
@@ -186,27 +157,18 @@ export async function embedDashboard({
|
||||
iframe.contentWindow!.postMessage(
|
||||
{ type: IFRAME_COMMS_MESSAGE_TYPE, handshake: "port transfer" },
|
||||
supersetDomain,
|
||||
[theirPort]
|
||||
);
|
||||
log("sent message channel to the iframe");
|
||||
[theirPort],
|
||||
)
|
||||
log('sent message channel to the iframe');
|
||||
|
||||
// return our port from the promise
|
||||
resolve(
|
||||
new Switchboard({
|
||||
port: ourPort,
|
||||
name: "superset-embedded-sdk",
|
||||
debug,
|
||||
})
|
||||
);
|
||||
resolve(new Switchboard({ port: ourPort, name: 'superset-embedded-sdk', debug }));
|
||||
});
|
||||
iframe.src = `${supersetDomain}/embedded/${id}${urlParamsString}`;
|
||||
iframe.title = iframeTitle;
|
||||
if (iframeAllowExtras.length > 0) {
|
||||
iframe.setAttribute("allow", iframeAllowExtras.join("; "));
|
||||
}
|
||||
//@ts-ignore
|
||||
mountPoint.replaceChildren(iframe);
|
||||
log("placed the iframe");
|
||||
log('placed the iframe')
|
||||
});
|
||||
}
|
||||
|
||||
@@ -215,45 +177,27 @@ export async function embedDashboard({
|
||||
mountIframe(),
|
||||
]);
|
||||
|
||||
ourPort.emit("guestToken", { guestToken });
|
||||
log("sent guest token");
|
||||
ourPort.emit('guestToken', { guestToken });
|
||||
log('sent guest token');
|
||||
|
||||
async function refreshGuestToken() {
|
||||
const newGuestToken = await fetchGuestToken();
|
||||
ourPort.emit("guestToken", { guestToken: newGuestToken });
|
||||
ourPort.emit('guestToken', { guestToken: newGuestToken });
|
||||
setTimeout(refreshGuestToken, getGuestTokenRefreshTiming(newGuestToken));
|
||||
}
|
||||
|
||||
setTimeout(refreshGuestToken, getGuestTokenRefreshTiming(guestToken));
|
||||
|
||||
// Register the resolvePermalinkUrl method for the iframe to call
|
||||
// Returns null if no callback provided or on error, allowing iframe to use default URL
|
||||
ourPort.start();
|
||||
ourPort.defineMethod(
|
||||
"resolvePermalinkUrl",
|
||||
async ({ key }: { key: string }): Promise<string | null> => {
|
||||
if (!resolvePermalinkUrl) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
return await resolvePermalinkUrl({ key });
|
||||
} catch (error) {
|
||||
log("Error in resolvePermalinkUrl callback:", error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
function unmount() {
|
||||
log("unmounting");
|
||||
log('unmounting');
|
||||
//@ts-ignore
|
||||
mountPoint.replaceChildren();
|
||||
}
|
||||
|
||||
const getScrollSize = () => ourPort.get<Size>("getScrollSize");
|
||||
const getScrollSize = () => ourPort.get<Size>('getScrollSize');
|
||||
const getDashboardPermalink = (anchor: string) =>
|
||||
ourPort.get<string>("getDashboardPermalink", { anchor });
|
||||
const getActiveTabs = () => ourPort.get<string[]>("getActiveTabs");
|
||||
ourPort.get<string>('getDashboardPermalink', { anchor });
|
||||
const getActiveTabs = () => ourPort.get<string[]>('getActiveTabs')
|
||||
|
||||
return {
|
||||
getScrollSize,
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
## Change Log
|
||||
|
||||
Changelogs will be added once we have the first stable release.
|
||||
@@ -1,216 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
============================================================================
|
||||
APACHE SUPERSET SUBCOMPONENTS:
|
||||
|
||||
The Apache Superset project contains subcomponents with separate copyright
|
||||
notices and license terms. Your use of the source code for the these
|
||||
subcomponents is subject to the terms and conditions of the following
|
||||
licenses.
|
||||
|
||||
========================================================================
|
||||
Third party SIL Open Font License v1.1 (OFL-1.1)
|
||||
========================================================================
|
||||
|
||||
(SIL OPEN FONT LICENSE Version 1.1) The Inter font family (https://github.com/rsms/inter)
|
||||
(SIL OPEN FONT LICENSE Version 1.1) The Fira Code font family (https://github.com/tonsky/FiraCode)
|
||||
@@ -1,110 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# apache-superset-extensions-cli
|
||||
|
||||
[](https://badge.fury.io/py/apache-superset-extensions-cli)
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://www.python.org/downloads/)
|
||||
|
||||
Official command-line interface for building, bundling, and managing Apache Superset extensions. This CLI tool provides developers with everything needed to create, develop, and package extensions for the Superset ecosystem.
|
||||
|
||||
## 🚀 Features
|
||||
|
||||
- **Extension Scaffolding** - Generate initial folder structure and scaffold new extension projects
|
||||
- **Development Server** - Automatically rebuild extensions as files change during development
|
||||
- **Build System** - Build extension assets for production deployment
|
||||
- **Bundle Packaging** - Package extensions into distributable .supx files
|
||||
|
||||
## 📦 Installation
|
||||
|
||||
```bash
|
||||
pip install apache-superset-extensions-cli
|
||||
```
|
||||
|
||||
## 🛠️ Quick Start
|
||||
|
||||
### Available Commands
|
||||
|
||||
```bash
|
||||
# Generate initial folder structure and scaffold a new extension project
|
||||
superset-extensions init <extension-name>
|
||||
|
||||
# Automatically rebuild extension as files change during development
|
||||
superset-extensions dev
|
||||
|
||||
# Build extension assets for production
|
||||
superset-extensions build
|
||||
|
||||
# Package extension into a distributable .supx file
|
||||
superset-extensions bundle
|
||||
```
|
||||
|
||||
## 📋 Extension Structure
|
||||
|
||||
The CLI generates extensions with the following structure:
|
||||
|
||||
```
|
||||
extension_name/
|
||||
├── extension.json # Extension configuration and metadata
|
||||
├── frontend/ # Frontend code
|
||||
│ ├── src/ # TypeScript/React source files
|
||||
│ ├── webpack.config.js # Frontend build configuration
|
||||
│ ├── tsconfig.json # TypeScript configuration
|
||||
│ └── package.json # Frontend dependencies
|
||||
├── backend/ # Backend code
|
||||
│ ├── src/
|
||||
│ │ └── dataset_references/ # Python package source
|
||||
│ ├── tests/ # Backend tests
|
||||
│ ├── pyproject.toml # Python package configuration
|
||||
│ └── requirements.txt # Python dependencies
|
||||
├── dist/ # Built extension files (generated)
|
||||
│ ├── manifest.json # Generated extension manifest
|
||||
│ ├── frontend/
|
||||
│ │ └── dist/ # Built frontend assets
|
||||
│ │ ├── remoteEntry.*.js # Module federation entry
|
||||
│ │ └── *.js # Additional frontend bundles
|
||||
│ └── backend/
|
||||
│ └── dataset_references/ # Built backend package
|
||||
│ ├── __init__.py
|
||||
│ ├── api.py
|
||||
│ └── entrypoint.py
|
||||
├── dataset_references-1.0.0.supx # Packaged extension file (generated)
|
||||
└── README.md # Extension documentation
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
|
||||
|
||||
## 📄 License
|
||||
|
||||
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
|
||||
|
||||
## 🔗 Links
|
||||
|
||||
- [Apache Superset](https://superset.apache.org/)
|
||||
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
|
||||
- [API Documentation](https://superset.apache.org/docs/api/)
|
||||
- [GitHub Repository](https://github.com/apache/superset)
|
||||
- [Community](https://superset.apache.org/community/)
|
||||
|
||||
---
|
||||
|
||||
**Note**: This package is currently in early development. APIs and commands may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.
|
||||
@@ -1,125 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[project]
|
||||
name = "apache-superset-extensions-cli"
|
||||
version = "0.0.1rc2"
|
||||
description = "Official command-line interface for building, bundling, and managing Apache Superset extensions"
|
||||
readme = "README.md"
|
||||
authors = [
|
||||
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
|
||||
]
|
||||
license = { file="LICENSE.txt" }
|
||||
requires-python = ">=3.10"
|
||||
keywords = ["superset", "apache", "cli", "extensions", "analytics", "business-intelligence", "development-tools"]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Database",
|
||||
"Topic :: Scientific/Engineering :: Visualization",
|
||||
"Topic :: Software Development :: Build Tools",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: System :: Software Distribution",
|
||||
]
|
||||
dependencies = [
|
||||
# no bounds for apache-superset-core until we have a stable version
|
||||
"apache-superset-core",
|
||||
"click>=8.0.3",
|
||||
"jinja2>=3.1.4",
|
||||
"semver>=3.0.4",
|
||||
"tomli>=2.2.1; python_version < '3.11'",
|
||||
"watchdog>=6.0.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://superset.apache.org/"
|
||||
Documentation = "https://superset.apache.org/docs/"
|
||||
Repository = "https://github.com/apache/superset"
|
||||
"Bug Tracker" = "https://github.com/apache/superset/issues"
|
||||
Changelog = "https://github.com/apache/superset/blob/master/CHANGELOG.md"
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = [
|
||||
"pytest",
|
||||
"pytest-cov",
|
||||
"pytest-mock",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=76.0.0", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = { "" = "src" }
|
||||
include-package-data = true
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
superset_extensions_cli = ["templates/**/*"]
|
||||
|
||||
[project.scripts]
|
||||
superset-extensions = "superset_extensions_cli.cli:app"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = ["test_*.py", "*_test.py"]
|
||||
python_classes = ["Test*"]
|
||||
python_functions = ["test_*"]
|
||||
addopts = [
|
||||
"--strict-markers",
|
||||
"--strict-config",
|
||||
"--verbose",
|
||||
"--cov=superset_extensions_cli",
|
||||
"--cov-report=term-missing",
|
||||
"--cov-report=html:htmlcov"
|
||||
]
|
||||
markers = [
|
||||
"unit: Unit tests",
|
||||
"integration: Integration tests",
|
||||
"cli: CLI command tests",
|
||||
"slow: Slow running tests",
|
||||
]
|
||||
|
||||
[tool.coverage.run]
|
||||
source = ["src/superset_extensions_cli"]
|
||||
omit = ["*/tests/*", "*/test_*"]
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_lines = [
|
||||
"pragma: no cover",
|
||||
"def __repr__",
|
||||
"if self.debug:",
|
||||
"if settings.DEBUG",
|
||||
"raise AssertionError",
|
||||
"raise NotImplementedError",
|
||||
"if 0:",
|
||||
"if __name__ == .__main__.:",
|
||||
"class .*\\bProtocol\\):",
|
||||
"@(abc\\.)?abstractmethod",
|
||||
]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"src/superset_extensions_cli/*" = ["TID251"]
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,471 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json # noqa: TID251
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, cast
|
||||
|
||||
import click
|
||||
import semver
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from superset_core.extensions.types import Manifest, Metadata
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
from watchdog.observers import Observer
|
||||
|
||||
from superset_extensions_cli.constants import MIN_NPM_VERSION
|
||||
from superset_extensions_cli.utils import read_json, read_toml
|
||||
|
||||
REMOTE_ENTRY_REGEX = re.compile(r"^remoteEntry\..+\.js$")
|
||||
FRONTEND_DIST_REGEX = re.compile(r"/frontend/dist")
|
||||
|
||||
|
||||
def validate_npm() -> None:
|
||||
"""Abort if `npm` is not on PATH."""
|
||||
if shutil.which("npm") is None:
|
||||
click.secho(
|
||||
"❌ npm is not installed or not on your PATH.",
|
||||
err=True,
|
||||
fg="red",
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
result = subprocess.run( # noqa: S603
|
||||
["npm", "-v"], # noqa: S607
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
click.secho(
|
||||
f"❌ Failed to run `npm -v`: {result.stderr.strip()}",
|
||||
err=True,
|
||||
fg="red",
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
npm_version = result.stdout.strip()
|
||||
if semver.compare(npm_version, MIN_NPM_VERSION) < 0:
|
||||
click.secho(
|
||||
f"❌ npm version {npm_version} is lower than the required {MIN_NPM_VERSION}.", # noqa: E501
|
||||
err=True,
|
||||
fg="red",
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
except FileNotFoundError:
|
||||
click.secho(
|
||||
"❌ npm was not found when checking its version.",
|
||||
err=True,
|
||||
fg="red",
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def init_frontend_deps(frontend_dir: Path) -> None:
    """
    Install frontend dependencies when ``node_modules`` is missing.

    Uses `npm ci` when a package-lock.json is present (reproducible install),
    falling back to `npm i` otherwise. Exits the process on failure.
    """
    if (frontend_dir / "node_modules").exists():
        # Dependencies already installed; nothing to do.
        return

    use_lockfile = (frontend_dir / "package-lock.json").exists()
    command = ["npm", "ci"] if use_lockfile else ["npm", "i"]
    label = " ".join(command)
    click.secho(f"⚙️ node_modules not found, running `{label}`…", fg="cyan")

    validate_npm()
    completed = subprocess.run(  # noqa: S603
        command,  # noqa: S607
        cwd=frontend_dir,
        text=True,
    )
    if completed.returncode != 0:
        click.secho(f"❌ `{label}` failed. Aborting.", err=True, fg="red")
        sys.exit(1)
    click.secho("✅ Dependencies installed", fg="green")
|
||||
|
||||
|
||||
def clean_dist(cwd: Path) -> None:
    """Reset the ``dist`` output directory under *cwd* to an empty state,
    creating it if it does not exist."""
    out_dir = cwd / "dist"
    if out_dir.exists():
        shutil.rmtree(out_dir)
    out_dir.mkdir(parents=True)
|
||||
|
||||
|
||||
def clean_dist_frontend(cwd: Path) -> None:
    """Delete only the frontend portion of the dist output, if present.

    A no-op when ``dist/frontend`` does not exist.
    """
    target = cwd / "dist" / "frontend"
    if target.exists():
        shutil.rmtree(target)
|
||||
|
||||
|
||||
def build_manifest(cwd: Path, remote_entry: str | None) -> Manifest:
    """Assemble the manifest for dist/ from extension.json.

    The frontend section is included only when the metadata declares both
    contributions and moduleFederation AND a remote entry filename is known;
    the backend section only when entryPoints are declared. Exits the process
    when extension.json is missing or empty.
    """
    metadata = cast(Metadata, read_json(cwd / "extension.json"))
    if not metadata:
        click.secho("❌ extension.json not found.", err=True, fg="red")
        sys.exit(1)

    manifest: Manifest = {
        "id": metadata["id"],
        "name": metadata["name"],
        "version": metadata["version"],
        "permissions": metadata["permissions"],
        "dependencies": metadata.get("dependencies", []),
    }

    frontend = metadata.get("frontend") or {}
    contributions = frontend.get("contributions")
    module_federation = frontend.get("moduleFederation")
    if contributions and module_federation and remote_entry:
        manifest["frontend"] = {
            "contributions": contributions,
            "moduleFederation": module_federation,
            "remoteEntry": remote_entry,
        }

    if entry_points := metadata.get("backend", {}).get("entryPoints"):
        manifest["backend"] = {"entryPoints": entry_points}

    return manifest
|
||||
|
||||
|
||||
def write_manifest(cwd: Path, manifest: Manifest) -> None:
    """Serialize *manifest* to ``dist/manifest.json`` (pretty-printed, keys
    sorted for stable diffs)."""
    manifest_path = cwd / "dist" / "manifest.json"
    manifest_path.write_text(json.dumps(manifest, indent=2, sort_keys=True))
    click.secho("✅ Manifest updated", fg="green")
|
||||
|
||||
|
||||
def run_frontend_build(frontend_dir: Path) -> subprocess.CompletedProcess[str]:
    """Run ``npm run build`` inside *frontend_dir* and return the completed
    process (the caller inspects the return code)."""
    click.echo()
    click.secho("⚙️ Building frontend assets…", fg="cyan")
    build_cmd = ["npm", "run", "build"]
    return subprocess.run(  # noqa: S603
        build_cmd,  # noqa: S607
        cwd=frontend_dir,
        text=True,
    )
|
||||
|
||||
|
||||
def copy_frontend_dist(cwd: Path) -> str:
    """
    Mirror ``frontend/dist`` into ``dist/`` and return the remote-entry filename.

    Exits the process when no file matching ``REMOTE_ENTRY_REGEX`` is found,
    since the manifest cannot be built without it.
    """
    dist_dir = cwd / "dist"
    source_dir = cwd / "frontend" / "dist"
    remote_entry: str | None = None

    for src in source_dir.rglob("*"):
        if not src.is_file():
            continue
        if REMOTE_ENTRY_REGEX.match(src.name):
            remote_entry = src.name
        destination = dist_dir / src.relative_to(cwd)
        destination.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src, destination)

    if not remote_entry:
        click.secho("❌ No remote entry file found.", err=True, fg="red")
        sys.exit(1)
    return remote_entry
|
||||
|
||||
|
||||
def copy_backend_files(cwd: Path) -> None:
    """Copy the backend files declared by extension.json's ``backend.files``
    glob patterns into ``dist/``, preserving relative paths.

    Exits the process when extension.json is missing or empty.
    """
    dist_dir = cwd / "dist"
    metadata = read_json(cwd / "extension.json")
    if not metadata:
        click.secho("❌ No extension.json file found.", err=True, fg="red")
        sys.exit(1)

    for pattern in metadata.get("backend", {}).get("files", []):
        for src in cwd.glob(pattern):
            if src.is_file():
                destination = dist_dir / src.relative_to(cwd)
                destination.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(src, destination)
|
||||
|
||||
|
||||
def rebuild_frontend(cwd: Path, frontend_dir: Path) -> str | None:
    """Clean and rebuild the frontend, returning the remoteEntry filename,
    or None when the npm build fails."""
    clean_dist_frontend(cwd)

    build_result = run_frontend_build(frontend_dir)
    if build_result.returncode != 0:
        click.secho("❌ Frontend build failed", fg="red")
        return None

    entry_name = copy_frontend_dist(cwd)
    click.secho("✅ Frontend rebuilt", fg="green")
    return entry_name
|
||||
|
||||
|
||||
def rebuild_backend(cwd: Path) -> None:
    """Sync backend source files into ``dist/``; the manifest is left untouched."""
    copy_backend_files(cwd)
    click.secho("✅ Backend files synced", fg="green")
|
||||
|
||||
|
||||
class FrontendChangeHandler(FileSystemEventHandler):
    """Watchdog handler that triggers a frontend rebuild on source changes.

    Events under ``frontend/dist`` (the build's own output) are ignored to
    avoid an endless rebuild loop.
    """

    def __init__(self, trigger_build: Callable[[], None]):
        # Callback invoked on every relevant change; wired to the dev rebuild.
        self.trigger_build = trigger_build

    def on_any_event(self, event: Any) -> None:
        # Skip events emitted by the build writing into frontend/dist.
        if FRONTEND_DIST_REGEX.search(event.src_path):
            return
        click.secho(f"🔁 Frontend change detected: {event.src_path}", fg="yellow")
        self.trigger_build()
|
||||
|
||||
|
||||
@click.group(help="CLI for validating and bundling Superset extensions.")
def app() -> None:
    # Root command group; subcommands are registered via @app.command().
    pass
|
||||
|
||||
|
||||
@app.command()
def validate() -> None:
    # Validate the local toolchain; currently only checks npm presence/version.
    validate_npm()

    click.secho("✅ Validation successful", fg="green")
|
||||
|
||||
|
||||
@app.command()
@click.pass_context
def build(ctx: click.Context) -> None:
    # Full production build: validate the toolchain, reset dist/, build whichever
    # of frontend/ and backend/ exist, then write dist/manifest.json.
    ctx.invoke(validate)
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
    backend_dir = cwd / "backend"

    clean_dist(cwd)

    # Build frontend if it exists
    remote_entry = None
    if frontend_dir.exists():
        init_frontend_deps(frontend_dir)
        remote_entry = rebuild_frontend(cwd, frontend_dir)

    # Build backend independently if it exists
    if backend_dir.exists():
        # Only treat backend/ as buildable when it carries a pyproject.toml.
        pyproject = read_toml(backend_dir / "pyproject.toml")
        if pyproject:
            rebuild_backend(cwd)

    # Build manifest and write it
    manifest = build_manifest(cwd, remote_entry)
    write_manifest(cwd, manifest)

    click.secho("✅ Full build completed in dist/", fg="green")
|
||||
|
||||
|
||||
@app.command()
@click.option(
    "--output",
    "-o",
    type=click.Path(path_type=Path, dir_okay=True, file_okay=True, writable=True),
    help="Optional output path or filename for the bundle.",
)
@click.pass_context
def bundle(ctx: click.Context, output: Path | None) -> None:
    # Build, then zip the dist/ tree into a .supx archive. The default archive
    # name is "<id>-<version>.supx", derived from the generated manifest.
    ctx.invoke(build)

    cwd = Path.cwd()
    dist_dir = cwd / "dist"
    manifest_path = dist_dir / "manifest.json"

    if not manifest_path.exists():
        click.secho(
            "❌ dist/manifest.json not found. Run `build` first.", err=True, fg="red"
        )
        sys.exit(1)

    manifest = json.loads(manifest_path.read_text())
    id_ = manifest["id"]
    version = manifest["version"]
    default_filename = f"{id_}-{version}.supx"

    # Resolve the target path: bare default name in cwd, default name inside a
    # given directory, or an explicit file path.
    if output is None:
        zip_path = Path(default_filename)
    elif output.is_dir():
        zip_path = output / default_filename
    else:
        zip_path = output

    try:
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
            for file in dist_dir.rglob("*"):
                if file.is_file():
                    # Store paths relative to dist/ so the archive root is clean.
                    arcname = file.relative_to(dist_dir)
                    zipf.write(file, arcname)
    except Exception as ex:
        click.secho(f"❌ Failed to create bundle: {ex}", err=True, fg="red")
        sys.exit(1)

    click.secho(f"✅ Bundle created: {zip_path}", fg="green")
|
||||
|
||||
|
||||
@app.command()
@click.pass_context
def dev(ctx: click.Context) -> None:
    # Watch mode: perform an initial build, then rebuild on filesystem changes
    # under frontend/ and/or backend/ until interrupted with Ctrl-C.
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
    backend_dir = cwd / "backend"

    clean_dist(cwd)

    # Build frontend if it exists
    remote_entry = None
    if frontend_dir.exists():
        init_frontend_deps(frontend_dir)
        remote_entry = rebuild_frontend(cwd, frontend_dir)

    # Build backend if it exists
    if backend_dir.exists():
        rebuild_backend(cwd)

    manifest = build_manifest(cwd, remote_entry)
    write_manifest(cwd, manifest)

    def frontend_watcher() -> None:
        # Rebuild the frontend and refresh the manifest with the new
        # remoteEntry filename (it is content-hashed, so it changes per build).
        if frontend_dir.exists():
            if (remote_entry := rebuild_frontend(cwd, frontend_dir)) is not None:
                manifest = build_manifest(cwd, remote_entry)
                write_manifest(cwd, manifest)

    def backend_watcher() -> None:
        # Re-sync backend files and rewrite the existing manifest unchanged so
        # consumers watching dist/ see an update.
        if backend_dir.exists():
            rebuild_backend(cwd)
            dist_dir = cwd / "dist"
            manifest_path = dist_dir / "manifest.json"
            if manifest_path.exists():
                manifest = json.loads(manifest_path.read_text())
                write_manifest(cwd, manifest)

    # Build watch message based on existing directories
    watch_dirs = []
    if frontend_dir.exists():
        watch_dirs.append(str(frontend_dir))
    if backend_dir.exists():
        watch_dirs.append(str(backend_dir))

    if watch_dirs:
        click.secho(f"👀 Watching for changes in: {', '.join(watch_dirs)}", fg="green")
    else:
        click.secho("⚠️ No frontend or backend directories found to watch", fg="yellow")

    observer = Observer()

    # Only set up watchers for directories that exist
    if frontend_dir.exists():
        frontend_handler = FrontendChangeHandler(trigger_build=frontend_watcher)
        observer.schedule(frontend_handler, str(frontend_dir), recursive=True)

    if backend_dir.exists():
        # Backend events need no filtering, so a bare handler with the
        # on_any_event hook patched in is sufficient.
        backend_handler = FileSystemEventHandler()
        backend_handler.on_any_event = lambda event: backend_watcher()
        observer.schedule(backend_handler, str(backend_dir), recursive=True)

    if watch_dirs:
        observer.start()

        try:
            # Keep the main thread alive while the observer threads watch.
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            click.secho("\n🛑 Stopping watch mode", fg="blue")
            observer.stop()

        observer.join()
    else:
        click.secho("❌ No directories to watch. Exiting.", fg="red")
|
||||
|
||||
|
||||
@app.command()
def init() -> None:
    # Interactive scaffolder: prompts for extension metadata, then generates a
    # new extension directory from the bundled Jinja templates.
    id_ = click.prompt("Extension ID (unique identifier, alphanumeric only)", type=str)
    if not re.match(r"^[a-zA-Z0-9_]+$", id_):
        click.secho(
            "❌ ID must be alphanumeric (letters, digits, underscore).", fg="red"
        )
        sys.exit(1)

    name = click.prompt("Extension name (human-readable display name)", type=str)
    version = click.prompt("Initial version", default="0.1.0")
    # Named `license_` to avoid shadowing the `license` builtin.
    license_ = click.prompt("License", default="Apache-2.0")
    include_frontend = click.confirm("Include frontend?", default=True)
    include_backend = click.confirm("Include backend?", default=True)

    target_dir = Path.cwd() / id_
    if target_dir.exists():
        click.secho(f"❌ Directory {target_dir} already exists.", fg="red")
        sys.exit(1)

    # Set up Jinja environment
    templates_dir = Path(__file__).parent / "templates"
    env = Environment(loader=FileSystemLoader(templates_dir))  # noqa: S701
    ctx = {
        "id": id_,
        "name": name,
        "include_frontend": include_frontend,
        "include_backend": include_backend,
        "license": license_,
        "version": version,
    }

    # Create base directory
    target_dir.mkdir()
    extension_json = env.get_template("extension.json.j2").render(ctx)
    (target_dir / "extension.json").write_text(extension_json)
    click.secho("✅ Created extension.json", fg="green")

    # Copy frontend template
    if include_frontend:
        frontend_dir = target_dir / "frontend"
        frontend_dir.mkdir()

        # package.json
        package_json = env.get_template("frontend/package.json.j2").render(ctx)
        (frontend_dir / "package.json").write_text(package_json)
        click.secho("✅ Created frontend folder structure", fg="green")

    # Copy backend template
    if include_backend:
        backend_dir = target_dir / "backend"
        backend_dir.mkdir()

        # pyproject.toml
        pyproject_toml = env.get_template("backend/pyproject.toml.j2").render(ctx)
        (backend_dir / "pyproject.toml").write_text(pyproject_toml)

        click.secho("✅ Created backend folder structure", fg="green")

    click.secho(
        f"🎉 Extension {name} (ID: {id_}) initialized at {target_dir}", fg="cyan"
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the CLI directly (e.g. `python cli.py`).
    app()
|
||||
@@ -1,19 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
# Minimum npm version required to validate and build extension frontends.
MIN_NPM_VERSION = "10.8.2"
|
||||
@@ -1,4 +0,0 @@
|
||||
[project]
|
||||
name = "{{ id }}"
|
||||
version = "{{ version }}"
|
||||
license = "{{ license }}"
|
||||
@@ -1,25 +0,0 @@
|
||||
{
|
||||
"id": "{{ id }}",
|
||||
"name": "{{ name }}",
|
||||
"version": "{{ version }}",
|
||||
"license": "{{ license }}",
|
||||
{% if include_frontend -%}
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"commands": [],
|
||||
"views": [],
|
||||
"menus": []
|
||||
},
|
||||
"moduleFederation": {
|
||||
"exposes": ["./index"]
|
||||
}
|
||||
},
|
||||
{% endif -%}
|
||||
{% if include_backend -%}
|
||||
"backend": {
|
||||
"entryPoints": ["{{ id }}.entrypoint"],
|
||||
"files": ["backend/src/{{ id }}/**/*.py"]
|
||||
},
|
||||
{% endif -%}
|
||||
"permissions": []
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
{
|
||||
"name": "{{ id }}",
|
||||
"version": "{{ version }}",
|
||||
"main": "dist/main.js",
|
||||
"types": "dist/publicAPI.d.ts",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"start": "webpack serve --mode development",
|
||||
"build": "webpack --stats-error-details --mode production"
|
||||
},
|
||||
"keywords": [],
|
||||
"private": true,
|
||||
"author": "",
|
||||
"license": "{{ license }}",
|
||||
"description": "",
|
||||
"peerDependencies": {
|
||||
"@apache-superset/core": "file:../../../superset-frontend/packages/superset-core",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/preset-react": "^7.26.3",
|
||||
"@babel/preset-typescript": "^7.26.0",
|
||||
"@types/react": "^19.0.10",
|
||||
"copy-webpack-plugin": "^13.0.0",
|
||||
"install": "^0.13.0",
|
||||
"npm": "^11.1.0",
|
||||
"ts-loader": "^9.5.2",
|
||||
"typescript": "^5.8.2",
|
||||
"webpack": "^5.98.0",
|
||||
"webpack-cli": "^6.0.1",
|
||||
"webpack-dev-server": "^5.2.0"
|
||||
}
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json # noqa: TID251
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
import tomllib
|
||||
else:
|
||||
import tomli as tomllib
|
||||
|
||||
|
||||
def read_toml(path: Path) -> dict[str, Any] | None:
|
||||
if not path.is_file():
|
||||
return None
|
||||
|
||||
with path.open("rb") as f:
|
||||
return tomllib.load(f)
|
||||
|
||||
|
||||
def read_json(path: Path) -> dict[str, Any] | None:
|
||||
path = Path(path)
|
||||
if not path.is_file():
|
||||
return None
|
||||
|
||||
return json.loads(path.read_text())
|
||||
@@ -1,206 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
|
||||
# distributed with this work for additional information
|
||||
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
|
||||
# "License"); you may not use this file except in compliance
|
||||
|
||||
# with the License. You may obtain a copy of the License at
|
||||
|
||||
#
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
#
|
||||
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
|
||||
# software distributed under the License is distributed on an
|
||||
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
|
||||
# KIND, either express or implied. See the License for the
|
||||
|
||||
# specific language governing permissions and limitations
|
||||
|
||||
# under the License.
|
||||
|
||||
# Superset CLI Tests
|
||||
|
||||
This directory contains tests for the superset-extensions-cli package, focusing on the `init` command and other CLI functionality.
|
||||
|
||||
## Test Structure
|
||||
|
||||
### Core Test Files
|
||||
|
||||
- **`test_cli_init.py`**: Comprehensive tests for the `init` command scaffolder
|
||||
- **`test_templates.py`**: Unit tests for Jinja2 template rendering
|
||||
- **`conftest.py`**: Pytest fixtures and configuration
|
||||
- **`utils.py`**: Reusable testing utilities and helpers
|
||||
|
||||
### Test Categories
|
||||
|
||||
#### Unit Tests (`@pytest.mark.unit`)
|
||||
|
||||
- Template rendering functionality
|
||||
- Individual function testing
|
||||
- Input validation logic
|
||||
|
||||
#### Integration Tests (`@pytest.mark.integration`)
|
||||
|
||||
- Complete CLI command workflows
|
||||
- End-to-end scaffolding processes
|
||||
|
||||
#### CLI Tests (`@pytest.mark.cli`)
|
||||
|
||||
- Click command interface testing
|
||||
- User input simulation
|
||||
- Command output verification
|
||||
|
||||
## Testing Approach for Scaffolders/Generators
|
||||
|
||||
The tests use these patterns for testing code generators:
|
||||
|
||||
### 1. Isolated Environment Testing
|
||||
|
||||
```python
|
||||
@pytest.fixture
|
||||
def isolated_filesystem(tmp_path):
|
||||
"""Provide isolated temporary directory for each test."""
|
||||
```
|
||||
|
||||
### 2. Click CLI Testing Framework
|
||||
|
||||
```python
|
||||
from click.testing import CliRunner
|
||||
runner = CliRunner()
|
||||
result = runner.invoke(app, ["init"], input="...")
|
||||
```
|
||||
|
||||
### 3. File Structure Validation
|
||||
|
||||
```python
|
||||
from tests.utils import assert_file_structure, assert_directory_structure
|
||||
assert_file_structure(extension_path, expected_files)
|
||||
```
|
||||
|
||||
### 4. Template Content Verification
|
||||
|
||||
```python
|
||||
from tests.utils import assert_json_content
|
||||
assert_json_content(json_path, {"name": "expected_value"})
|
||||
```
|
||||
|
||||
### 5. Parametrized Testing
|
||||
|
||||
```python
|
||||
@pytest.mark.parametrize("include_frontend,include_backend", [
|
||||
(True, True), (True, False), (False, True), (False, False)
|
||||
])
|
||||
```
|
||||
|
||||
## Key Test Cases
|
||||
|
||||
### Init Command Tests
|
||||
|
||||
- ✅ Creates extension with both frontend and backend
|
||||
- ✅ Creates frontend-only extensions
|
||||
- ✅ Creates backend-only extensions
|
||||
- ✅ Validates extension naming (alphanumeric + underscore only)
|
||||
- ✅ Handles existing directory conflicts
|
||||
- ✅ Verifies generated file content accuracy
|
||||
- ✅ Tests custom version and license inputs
|
||||
- ✅ Integration test for complete workflow
|
||||
|
||||
### Template Rendering Tests
|
||||
|
||||
- ✅ Extension.json template with various configurations
|
||||
- ✅ Package.json template rendering
|
||||
- ✅ Pyproject.toml template rendering
|
||||
- ✅ Template validation with different names/versions/licenses
|
||||
- ✅ JSON validity verification
|
||||
- ✅ Whitespace and formatting checks
|
||||
|
||||
## Running Tests
|
||||
|
||||
### All tests
|
||||
|
||||
```bash
|
||||
pytest
|
||||
```
|
||||
|
||||
### Specific test categories
|
||||
|
||||
```bash
|
||||
pytest -m unit # Unit tests only
|
||||
pytest -m integration # Integration tests only
|
||||
pytest -m cli # CLI tests only
|
||||
```
|
||||
|
||||
### With coverage
|
||||
|
||||
```bash
|
||||
pytest --cov=superset_extensions_cli --cov-report=html
|
||||
```
|
||||
|
||||
### Specific test files
|
||||
|
||||
```bash
|
||||
pytest tests/test_cli_init.py
|
||||
pytest tests/test_templates.py
|
||||
```
|
||||
|
||||
## Reusable Testing Infrastructure
|
||||
|
||||
The testing infrastructure is designed for reusability:
|
||||
|
||||
### Test Utilities (`tests/utils.py`)
|
||||
|
||||
- `assert_file_exists()` / `assert_directory_exists()`
|
||||
- `assert_file_structure()` / `assert_directory_structure()`
|
||||
- `assert_json_content()` / `load_json_file()`
|
||||
- `create_test_extension_structure()` - Helper for expected structures
|
||||
|
||||
### Fixtures (`tests/conftest.py`)
|
||||
|
||||
- `cli_runner` - Click CLI runner
|
||||
- `isolated_filesystem` - Temporary directory with cleanup
|
||||
- `extension_params` - Default extension parameters
|
||||
- `cli_input_*` - Pre-configured user inputs
|
||||
|
||||
This infrastructure can be easily extended for testing additional CLI commands like `build`, `bundle`, `dev`, and `validate`.
|
||||
|
||||
## Best Practices Implemented
|
||||
|
||||
1. **Isolation**: Each test runs in its own temporary directory
|
||||
2. **Comprehensive Coverage**: Tests cover happy paths, edge cases, and error conditions
|
||||
3. **Realistic Testing**: Uses actual Click CLI runner with realistic user input
|
||||
4. **Content Verification**: Validates both file existence and content accuracy
|
||||
5. **Template Testing**: Separates template rendering logic from CLI integration
|
||||
6. **Reusable Components**: Utilities and fixtures designed for extension
|
||||
7. **Clear Documentation**: Well-documented test cases and helper functions
|
||||
8. **Type Safety**: Uses modern Python type annotations with `from __future__ import annotations`
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,136 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
|
||||
@pytest.fixture
def cli_runner():
    """Provide a Click CLI runner for testing commands."""
    # A fresh runner per test keeps captured output and exit codes isolated.
    return CliRunner()
|
||||
|
||||
|
||||
@pytest.fixture
def isolated_filesystem(tmp_path):
    """
    Provide an isolated temporary directory and change to it.

    This ensures tests don't interfere with each other. The original working
    directory is always restored, even if the test body raises.
    """
    original_cwd = Path.cwd()
    os.chdir(tmp_path)
    try:
        yield tmp_path
    finally:
        # Without the finally, a failing test would leave the process chdir'ed
        # into a deleted tmp dir and break every subsequent test.
        os.chdir(original_cwd)
|
||||
|
||||
|
||||
@pytest.fixture
def extension_params():
    """Default parameters for extension creation."""
    # These mirror the answers given to the `init` command's interactive prompts.
    return {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "0.1.0",
        "license": "Apache-2.0",
        "include_frontend": True,
        "include_backend": True,
    }
|
||||
|
||||
|
||||
@pytest.fixture
def cli_input_both():
    """CLI input for creating extension with both frontend and backend."""
    # Prompt order: id, name, version, license, include_frontend, include_backend.
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n"


@pytest.fixture
def cli_input_frontend_only():
    """CLI input for creating extension with frontend only."""
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\ny\nn\n"


@pytest.fixture
def cli_input_backend_only():
    """CLI input for creating extension with backend only."""
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\nn\ny\n"


@pytest.fixture
def cli_input_neither():
    """CLI input for creating extension with neither frontend nor backend."""
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\nn\nn\n"
|
||||
|
||||
|
||||
@pytest.fixture
def extension_setup_for_dev():
    """Set up extension structure for dev testing."""

    def _setup(base_path: Path) -> None:
        # Builds the minimal on-disk layout the `dev` command expects:
        # an extension.json plus empty frontend/ and backend/ directories.
        import json

        # Create extension.json
        extension_json = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        (base_path / "extension.json").write_text(json.dumps(extension_json))

        # Create frontend and backend directories
        (base_path / "frontend").mkdir()
        (base_path / "backend").mkdir()

    return _setup
|
||||
|
||||
|
||||
@pytest.fixture
def extension_setup_for_bundling():
    """Set up a complete extension structure ready for bundling."""

    def _setup(base_path: Path) -> None:
        # Produces the dist/ layout that `bundle` consumes: a manifest plus
        # representative frontend and backend build artifacts.
        import json

        # Create dist directory with manifest and files
        dist_dir = base_path / "dist"
        dist_dir.mkdir(parents=True)

        # Create manifest.json
        manifest = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        (dist_dir / "manifest.json").write_text(json.dumps(manifest))

        # Create some frontend files
        frontend_dir = dist_dir / "frontend" / "dist"
        frontend_dir.mkdir(parents=True)
        (frontend_dir / "remoteEntry.abc123.js").write_text("// remote entry")
        (frontend_dir / "main.js").write_text("// main js")

        # Create some backend files
        backend_dir = dist_dir / "backend" / "src" / "test_extension"
        backend_dir.mkdir(parents=True)
        (backend_dir / "__init__.py").write_text("# init")

    return _setup
|
||||
@@ -1,552 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import (
|
||||
app,
|
||||
build_manifest,
|
||||
clean_dist,
|
||||
copy_backend_files,
|
||||
copy_frontend_dist,
|
||||
init_frontend_deps,
|
||||
)
|
||||
|
||||
from tests.utils import (
|
||||
assert_directory_exists,
|
||||
assert_file_exists,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def extension_with_build_structure():
    """Create extension structure suitable for build testing."""

    def _create(base_path, include_frontend=True, include_backend=True):
        # Lays out frontend/, backend/, and an extension.json whose optional
        # sections match the requested flags; returns the created directories
        # (None for any part that was not requested).
        # Create required directories
        if include_frontend:
            frontend_dir = base_path / "frontend"
            frontend_dir.mkdir()

        if include_backend:
            backend_dir = base_path / "backend"
            backend_dir.mkdir()

        # Create extension.json
        extension_json = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }

        if include_frontend:
            extension_json["frontend"] = {
                "contributions": {"commands": []},
                "moduleFederation": {"exposes": ["./index"]},
            }

        if include_backend:
            extension_json["backend"] = {"entryPoints": ["test_extension.entrypoint"]}

        (base_path / "extension.json").write_text(json.dumps(extension_json))

        return {
            "frontend_dir": frontend_dir if include_frontend else None,
            "backend_dir": backend_dir if include_backend else None,
        }

    return _create
|
||||
|
||||
|
||||
# Build Command Tests
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.validate_npm")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.read_toml")
def test_build_command_success_flow(
    mock_read_toml,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_validate_npm,
    cli_runner,
    isolated_filesystem,
    extension_with_build_structure,
):
    """Test build command success flow."""
    # Setup mocks: a successful frontend build yields a remoteEntry filename,
    # and read_toml returns a minimal pyproject-style mapping.
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_read_toml.return_value = {"project": {"name": "test"}}

    # Create extension structure (frontend + backend + extension.json)
    dirs = extension_with_build_structure(isolated_filesystem)

    result = cli_runner.invoke(app, ["build"])

    assert result.exit_code == 0
    assert "✅ Full build completed in dist/" in result.output

    # Verify function calls — each build stage runs exactly once, and the
    # frontend dependency init receives the frontend directory created above.
    mock_validate_npm.assert_called_once()
    mock_init_frontend_deps.assert_called_once_with(dirs["frontend_dir"])
    mock_rebuild_frontend.assert_called_once()
    mock_rebuild_backend.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.validate_npm")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
def test_build_command_handles_frontend_build_failure(
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_validate_npm,
    cli_runner,
    isolated_filesystem,
    extension_with_build_structure,
):
    """Test build command handles frontend build failure."""
    # Setup mocks — rebuild_frontend returning None signals a failed build.
    mock_rebuild_frontend.return_value = None  # Indicates failure

    # Create extension structure
    extension_with_build_structure(isolated_filesystem)

    result = cli_runner.invoke(app, ["build"])

    # Command should complete and create manifest even with frontend failure
    assert result.exit_code == 0
    assert "✅ Full build completed in dist/" in result.output
|
||||
|
||||
|
||||
# Clean Dist Tests
|
||||
@pytest.mark.unit
def test_clean_dist_removes_existing_dist_directory(isolated_filesystem):
    """Test clean_dist removes existing dist directory and recreates it."""
    # Pre-populate dist/ with a nested directory and a file.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    (dist_dir / "subdir").mkdir()
    (dist_dir / "some_file.txt").write_text("test content")

    clean_dist(isolated_filesystem)

    # The directory must be recreated, but emptied of all prior content.
    assert_directory_exists(dist_dir)
    assert list(dist_dir.iterdir()) == []
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_clean_dist_creates_dist_directory_if_missing(isolated_filesystem):
    """Test clean_dist creates dist directory when it doesn't exist."""
    dist_dir = isolated_filesystem / "dist"

    # Precondition: nothing exists at dist/ yet.
    assert not dist_dir.exists()

    clean_dist(isolated_filesystem)

    # clean_dist must create the directory from scratch.
    assert_directory_exists(dist_dir)
|
||||
|
||||
|
||||
# Frontend Dependencies Tests
|
||||
@pytest.mark.unit
@patch("subprocess.run")
def test_init_frontend_deps_skips_when_node_modules_exists(
    mock_run, isolated_filesystem
):
    """Test init_frontend_deps skips npm install when node_modules exists."""
    # A pre-existing node_modules directory marks dependencies as installed.
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()
    (frontend_dir / "node_modules").mkdir()

    init_frontend_deps(frontend_dir)

    # No subprocess (npm) invocation should happen at all.
    mock_run.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("subprocess.run")
@patch("superset_extensions_cli.cli.validate_npm")
def test_init_frontend_deps_runs_npm_i_when_missing(
    mock_validate_npm, mock_run, isolated_filesystem
):
    """Test init_frontend_deps runs ``npm i`` when node_modules is missing."""
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()

    # Simulate a successful npm install.
    mock_run.return_value = Mock(returncode=0)

    init_frontend_deps(frontend_dir)

    # npm availability is validated first, then `npm i` runs in frontend/.
    mock_validate_npm.assert_called_once()
    mock_run.assert_called_once_with(["npm", "i"], cwd=frontend_dir, text=True)
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("subprocess.run")
@patch("superset_extensions_cli.cli.validate_npm")
def test_init_frontend_deps_exits_on_npm_ci_failure(
    mock_validate_npm, mock_run, isolated_filesystem
):
    """Test init_frontend_deps exits when npm install fails."""
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()

    # Simulate a failing npm install via a non-zero return code.
    mock_run.return_value = Mock(returncode=1)

    # The CLI should abort the whole process with exit code 1.
    with pytest.raises(SystemExit) as exc_info:
        init_frontend_deps(frontend_dir)

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
# Build Manifest Tests
|
||||
@pytest.mark.unit
def test_build_manifest_creates_correct_manifest_structure(isolated_filesystem):
    """Test build_manifest creates correct manifest from extension.json."""
    # Write a fully-populated extension.json (frontend + backend sections).
    extension_data = {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": ["read_data"],
        "dependencies": ["some_dep"],
        "frontend": {
            "contributions": {"commands": ["test_command"]},
            "moduleFederation": {"exposes": ["./index"]},
        },
        "backend": {"entryPoints": ["test_extension.entrypoint"]},
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_data))

    manifest = build_manifest(isolated_filesystem, "remoteEntry.abc123.js")

    # Top-level metadata is carried over verbatim.
    manifest_dict = dict(manifest)
    assert manifest_dict["id"] == "test_extension"
    assert manifest_dict["name"] == "Test Extension"
    assert manifest_dict["version"] == "1.0.0"
    assert manifest_dict["permissions"] == ["read_data"]
    assert manifest_dict["dependencies"] == ["some_dep"]

    # The frontend section keeps its config and gains the remoteEntry name.
    assert "frontend" in manifest
    frontend = manifest["frontend"]
    assert frontend["contributions"] == {"commands": ["test_command"]}
    assert frontend["moduleFederation"] == {"exposes": ["./index"]}
    assert frontend["remoteEntry"] == "remoteEntry.abc123.js"

    # The backend section passes through unchanged.
    assert "backend" in manifest
    assert manifest["backend"]["entryPoints"] == ["test_extension.entrypoint"]
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_build_manifest_handles_minimal_extension(isolated_filesystem):
    """Test build_manifest with minimal extension.json (no frontend/backend)."""
    # Only the four required metadata keys, no frontend/backend sections.
    extension_data = {
        "id": "minimal_extension",
        "name": "Minimal Extension",
        "version": "0.1.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_data))

    # remote_entry=None mirrors "no frontend build output".
    manifest = build_manifest(isolated_filesystem, None)

    manifest_dict = dict(manifest)
    assert manifest_dict["id"] == "minimal_extension"
    assert manifest_dict["name"] == "Minimal Extension"
    assert manifest_dict["version"] == "0.1.0"
    assert manifest_dict["permissions"] == []
    assert manifest_dict["dependencies"] == []  # Default empty list
    # Sections absent from extension.json stay absent from the manifest.
    assert "frontend" not in manifest
    assert "backend" not in manifest
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_build_manifest_exits_when_extension_json_missing(isolated_filesystem):
    """Test build_manifest exits when extension.json is missing."""
    # No extension.json was written — the CLI must abort with exit code 1.
    with pytest.raises(SystemExit) as exc_info:
        build_manifest(isolated_filesystem, "remoteEntry.js")

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
# Frontend Build Tests
|
||||
@pytest.mark.unit
def test_clean_dist_frontend_removes_frontend_dist(isolated_filesystem):
    """Test clean_dist_frontend removes frontend/dist directory specifically."""
    from superset_extensions_cli.cli import clean_dist_frontend

    # Build dist/frontend with one file inside it.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir(parents=True)
    frontend_dist = dist_dir / "frontend"
    frontend_dist.mkdir()
    (frontend_dist / "some_file.js").write_text("content")

    clean_dist_frontend(isolated_filesystem)

    # Only the frontend subtree is removed; dist/ itself survives.
    assert dist_dir.exists()
    assert not frontend_dist.exists()
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_clean_dist_frontend_handles_nonexistent_directory(isolated_filesystem):
    """Test clean_dist_frontend handles case where frontend dist doesn't exist."""
    from superset_extensions_cli.cli import clean_dist_frontend

    # With no dist directory at all, the call must be a silent no-op
    # (no exception raised).
    clean_dist_frontend(isolated_filesystem)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_run_frontend_build_with_output_messages(isolated_filesystem):
    """Test run_frontend_build produces expected output messages."""
    from superset_extensions_cli.cli import run_frontend_build

    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()

    with patch("subprocess.run") as mock_run:
        # Simulate a successful `npm run build`.
        mock_run.return_value = Mock(returncode=0)

        result = run_frontend_build(frontend_dir)

        assert result.returncode == 0
        # The build must be invoked inside the frontend directory.
        mock_run.assert_called_once_with(
            ["npm", "run", "build"], cwd=frontend_dir, text=True
        )
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "return_code,expected_result",
    [
        (0, "remoteEntry.abc123.js"),
        (1, None),
    ],
)
def test_rebuild_frontend_handles_build_results(
    isolated_filesystem, return_code, expected_result
):
    """Test rebuild_frontend handles different build results."""
    from superset_extensions_cli.cli import rebuild_frontend

    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()

    # Only the success case produces a frontend/dist with a remoteEntry file.
    if return_code == 0:
        frontend_dist = frontend_dir / "dist"
        frontend_dist.mkdir()
        (frontend_dist / "remoteEntry.abc123.js").write_text("content")

    # The top-level dist/ target must exist before rebuilding.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()

    with patch("superset_extensions_cli.cli.run_frontend_build") as mock_build:
        mock_build.return_value = Mock(returncode=return_code)

        result = rebuild_frontend(isolated_filesystem, frontend_dir)

        # Success yields the remoteEntry filename; failure yields None.
        assert result == expected_result
|
||||
|
||||
|
||||
# Backend Build Tests
|
||||
@pytest.mark.unit
def test_rebuild_backend_calls_copy_and_shows_message(isolated_filesystem):
    """Test rebuild_backend calls copy_backend_files and shows success message."""
    from superset_extensions_cli.cli import rebuild_backend

    # Minimal extension.json so rebuild_backend has metadata to read.
    extension_json = {
        "id": "test",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_json))

    with patch("superset_extensions_cli.cli.copy_backend_files") as mock_copy:
        rebuild_backend(isolated_filesystem)

        # The backend rebuild delegates file copying to copy_backend_files.
        mock_copy.assert_called_once_with(isolated_filesystem)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_skips_non_files(isolated_filesystem):
    """Test copy_backend_files skips directories and non-files."""
    # Backend source tree containing one real file …
    backend_src = isolated_filesystem / "backend" / "src" / "test_ext"
    backend_src.mkdir(parents=True)
    (backend_src / "__init__.py").write_text("# init")

    # … and one empty subdirectory, which the glob will also match.
    subdir = backend_src / "subdir"
    subdir.mkdir()

    # extension.json pattern deliberately matches both files and directories.
    extension_data = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
        "backend": {
            "files": ["backend/src/test_ext/**/*"]  # Will match both files and dirs
        },
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_data))

    # Prepare an empty dist/ target.
    clean_dist(isolated_filesystem)

    copy_backend_files(isolated_filesystem)

    # The real file must be copied.
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "__init__.py")

    # The matched directory must not be copied as a file; if recreated at all
    # it has to be empty.
    copied_subdir = dist_dir / "backend" / "src" / "test_ext" / "subdir"
    if copied_subdir.exists():
        assert list(copied_subdir.iterdir()) == []
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_copies_matched_files(isolated_filesystem):
    """Test copy_backend_files copies files matching patterns from extension.json."""
    # Two backend source files that the glob below should match.
    backend_src = isolated_filesystem / "backend" / "src" / "test_ext"
    backend_src.mkdir(parents=True)
    (backend_src / "__init__.py").write_text("# init")
    (backend_src / "main.py").write_text("# main")

    # extension.json restricts copying to *.py files under the backend tree.
    extension_data = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
        "backend": {"files": ["backend/src/test_ext/**/*.py"]},
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_data))

    # Prepare an empty dist/ target.
    clean_dist(isolated_filesystem)

    copy_backend_files(isolated_filesystem)

    # Both matched files land under dist/ with their tree preserved.
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "__init__.py")
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "main.py")
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_handles_no_backend_config(isolated_filesystem):
    """Test copy_backend_files handles extension.json without backend config."""
    # A frontend-only manifest: no "backend" key at all.
    extension_data = {
        "id": "frontend_only",
        "name": "Frontend Only Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_data))

    clean_dist(isolated_filesystem)

    # Missing backend config must be tolerated without raising.
    copy_backend_files(isolated_filesystem)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_exits_when_extension_json_missing(isolated_filesystem):
    """Test copy_backend_files exits when extension.json is missing."""
    clean_dist(isolated_filesystem)

    # Without an extension.json the CLI must abort with exit code 1.
    with pytest.raises(SystemExit) as exc_info:
        copy_backend_files(isolated_filesystem)

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
# Frontend Dist Copy Tests
|
||||
@pytest.mark.unit
def test_copy_frontend_dist_copies_files_correctly(isolated_filesystem):
    """Test copy_frontend_dist copies frontend build files to dist."""
    # Simulate a frontend build output: remoteEntry, a plain JS file, and a
    # nested assets directory.
    frontend_dist = isolated_filesystem / "frontend" / "dist"
    frontend_dist.mkdir(parents=True)
    (frontend_dist / "remoteEntry.abc123.js").write_text("remote entry content")
    (frontend_dist / "main.js").write_text("main js content")

    assets_dir = frontend_dist / "assets"
    assets_dir.mkdir()
    (assets_dir / "style.css").write_text("css content")

    # Prepare an empty dist/ target.
    clean_dist(isolated_filesystem)

    remote_entry = copy_frontend_dist(isolated_filesystem)

    # The detected remoteEntry filename is returned to the caller.
    assert remote_entry == "remoteEntry.abc123.js"

    # All build artifacts are mirrored under dist/frontend/dist, recursively.
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "frontend" / "dist" / "remoteEntry.abc123.js")
    assert_file_exists(dist_dir / "frontend" / "dist" / "main.js")
    assert_file_exists(dist_dir / "frontend" / "dist" / "assets" / "style.css")
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_frontend_dist_exits_when_no_remote_entry(isolated_filesystem):
    """Test copy_frontend_dist exits when no remoteEntry file found."""
    # Build output exists but lacks the mandatory remoteEntry.* file.
    frontend_dist = isolated_filesystem / "frontend" / "dist"
    frontend_dist.mkdir(parents=True)
    (frontend_dist / "main.js").write_text("main content")

    clean_dist(isolated_filesystem)

    # Missing remoteEntry is fatal: the CLI aborts with exit code 1.
    with pytest.raises(SystemExit) as exc_info:
        copy_frontend_dist(isolated_filesystem)

    assert exc_info.value.code == 1
|
||||
@@ -1,255 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import zipfile
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app
|
||||
|
||||
from tests.utils import assert_file_exists
|
||||
|
||||
|
||||
# Bundle Command Tests
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_creates_zip_with_default_name(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command creates zip file with default name."""
    # Build is stubbed out; the fixture prepares the dist/ tree by hand.
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    result = cli_runner.invoke(app, ["bundle"])

    assert result.exit_code == 0
    # Default name follows the "<id>-<version>.supx" convention.
    assert "✅ Bundle created: test_extension-1.0.0.supx" in result.output

    zip_path = isolated_filesystem / "test_extension-1.0.0.supx"
    assert_file_exists(zip_path)

    # The archive must contain the manifest plus frontend and backend files.
    with zipfile.ZipFile(zip_path, "r") as zipf:
        file_list = zipf.namelist()
        assert "manifest.json" in file_list
        assert "frontend/dist/remoteEntry.abc123.js" in file_list
        assert "frontend/dist/main.js" in file_list
        assert "backend/src/test_extension/__init__.py" in file_list
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_with_custom_output_filename(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command with custom output filename."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    custom_name = "my_custom_bundle.supx"
    result = cli_runner.invoke(app, ["bundle", "--output", custom_name])

    assert result.exit_code == 0
    # The success message and the file on disk both use the custom name.
    assert f"✅ Bundle created: {custom_name}" in result.output
    assert_file_exists(isolated_filesystem / custom_name)
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_with_output_directory(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command with output directory."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    # When --output names a directory, the default filename is used inside it.
    output_dir = isolated_filesystem / "output"
    output_dir.mkdir()

    result = cli_runner.invoke(app, ["bundle", "--output", str(output_dir)])

    assert result.exit_code == 0

    expected_path = output_dir / "test_extension-1.0.0.supx"
    assert_file_exists(expected_path)
    assert f"✅ Bundle created: {expected_path}" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_fails_without_manifest(
    mock_build, cli_runner, isolated_filesystem
):
    """Test bundle command fails when manifest.json doesn't exist."""
    # Build is stubbed and produces no manifest; dist/ is left empty.
    mock_build.return_value = None
    (isolated_filesystem / "dist").mkdir()

    result = cli_runner.invoke(app, ["bundle"])

    # Bundling without a manifest is a hard failure.
    assert result.exit_code == 1
    assert "dist/manifest.json not found" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_handles_zip_creation_error(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command handles zip file creation errors."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    # Target a path whose parent directory does not exist.
    invalid_path = isolated_filesystem / "nonexistent" / "bundle.supx"

    # Force the zip layer itself to fail regardless of the path.
    with patch("zipfile.ZipFile", side_effect=OSError("Permission denied")):
        result = cli_runner.invoke(app, ["bundle", "--output", str(invalid_path)])

    assert result.exit_code == 1
    assert "Failed to create bundle" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_includes_all_files_recursively(
    mock_build, cli_runner, isolated_filesystem
):
    """Test that bundle includes all files from dist directory recursively."""
    # Mock the build command — the dist/ tree below is constructed by hand.
    mock_build.return_value = None

    # Create complex dist structure
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir(parents=True)

    # Manifest — its id/version determine the default bundle filename.
    manifest = {
        "id": "complex_extension",
        "name": "Complex Extension",
        "version": "2.1.0",
        "permissions": [],
    }
    (dist_dir / "manifest.json").write_text(json.dumps(manifest))

    # Frontend files with nested structure (including a binary asset).
    frontend_dir = dist_dir / "frontend" / "dist"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "remoteEntry.xyz789.js").write_text("// entry")

    assets_dir = frontend_dir / "assets"
    assets_dir.mkdir()
    (assets_dir / "style.css").write_text("/* css */")
    (assets_dir / "image.png").write_bytes(b"fake image data")

    # Backend files with nested structure
    backend_dir = dist_dir / "backend" / "src" / "complex_extension"
    backend_dir.mkdir(parents=True)
    (backend_dir / "__init__.py").write_text("# init")
    (backend_dir / "core.py").write_text("# core")

    utils_dir = backend_dir / "utils"
    utils_dir.mkdir()
    (utils_dir / "helpers.py").write_text("# helpers")

    result = cli_runner.invoke(app, ["bundle"])

    assert result.exit_code == 0

    # Verify zip file and contents — name derives from manifest id/version.
    zip_path = isolated_filesystem / "complex_extension-2.1.0.supx"
    assert_file_exists(zip_path)

    with zipfile.ZipFile(zip_path, "r") as zipf:
        file_list = set(zipf.namelist())

        # Verify all files are included — every nesting level must appear.
        expected_files = {
            "manifest.json",
            "frontend/dist/remoteEntry.xyz789.js",
            "frontend/dist/assets/style.css",
            "frontend/dist/assets/image.png",
            "backend/src/complex_extension/__init__.py",
            "backend/src/complex_extension/core.py",
            "backend/src/complex_extension/utils/helpers.py",
        }

        assert expected_files.issubset(
            file_list
        ), f"Missing files: {expected_files - file_list}"
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_short_option(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command with short -o option."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    # "-o" must behave exactly like "--output".
    result = cli_runner.invoke(app, ["bundle", "-o", "short_option.supx"])

    assert result.exit_code == 0
    assert "✅ Bundle created: short_option.supx" in result.output
    assert_file_exists(isolated_filesystem / "short_option.supx")
|
||||
|
||||
|
||||
@pytest.mark.cli
@pytest.mark.parametrize("output_option", ["--output", "-o"])
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_output_options(
    mock_build,
    output_option,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_bundling,
):
    """Test bundle command with both long and short output options."""
    # Mock the build command
    mock_build.return_value = None

    extension_setup_for_bundling(isolated_filesystem)

    filename = f"test_{output_option.replace('-', '')}.supx"
    result = cli_runner.invoke(app, ["bundle", output_option, filename])

    assert result.exit_code == 0
    # Fix: the success message must be checked against the actual output
    # filename (the original asserted a literal "(unknown)" placeholder,
    # which could never match the CLI output).
    assert f"✅ Bundle created: {filename}" in result.output
    assert_file_exists(isolated_filesystem / filename)
|
||||
@@ -1,238 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app, FrontendChangeHandler
|
||||
|
||||
|
||||
# Dev Command Tests
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.Observer")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.build_manifest")
@patch("superset_extensions_cli.cli.write_manifest")
def test_dev_command_starts_watchers(
    mock_write_manifest,
    mock_build_manifest,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_observer_class,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_dev,
):
    """Test dev command starts file watchers."""
    # Setup mocks: successful frontend build and a minimal manifest.
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_build_manifest.return_value = {"name": "test", "version": "1.0.0"}

    # The watchdog Observer is replaced so no real filesystem watching starts.
    mock_observer = Mock()
    mock_observer_class.return_value = mock_observer

    extension_setup_for_dev(isolated_filesystem)

    # Run dev command in a thread since it's blocking (it loops until
    # interrupted).
    def run_dev():
        try:
            cli_runner.invoke(app, ["dev"], catch_exceptions=False)
        except KeyboardInterrupt:
            pass

    dev_thread = threading.Thread(target=run_dev)
    dev_thread.daemon = True  # don't block test-process exit
    dev_thread.start()

    # Let it start up — give the thread a moment to reach the watch loop.
    time.sleep(0.1)

    # Verify observer methods were called (watchers scheduled and started).
    mock_observer.schedule.assert_called()
    mock_observer.start.assert_called_once()

    # Initial setup calls — the dev command performs one full build up front.
    mock_init_frontend_deps.assert_called_once()
    mock_rebuild_frontend.assert_called()
    mock_rebuild_backend.assert_called()
    mock_build_manifest.assert_called()
    mock_write_manifest.assert_called()
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.build_manifest")
@patch("superset_extensions_cli.cli.write_manifest")
def test_dev_command_initial_build(
    mock_write_manifest,
    mock_build_manifest,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_dev,
):
    """Test dev command performs initial build setup."""
    # Setup mocks: successful frontend build and a minimal manifest.
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_build_manifest.return_value = {"name": "test", "version": "1.0.0"}

    extension_setup_for_dev(isolated_filesystem)

    with patch("superset_extensions_cli.cli.Observer") as mock_observer_class:
        mock_observer = Mock()
        mock_observer_class.return_value = mock_observer

        # Patching time.sleep to raise KeyboardInterrupt breaks the dev
        # command out of its otherwise-infinite watch loop immediately.
        with patch("time.sleep", side_effect=KeyboardInterrupt):
            try:
                cli_runner.invoke(app, ["dev"], catch_exceptions=False)
            except KeyboardInterrupt:
                pass

    # Verify initial build steps ran exactly once with the expected paths.
    frontend_dir = isolated_filesystem / "frontend"
    mock_init_frontend_deps.assert_called_once_with(frontend_dir)
    mock_rebuild_frontend.assert_called_once_with(isolated_filesystem, frontend_dir)
    mock_rebuild_backend.assert_called_once_with(isolated_filesystem)
|
||||
|
||||
|
||||
# FrontendChangeHandler Tests
|
||||
@pytest.mark.unit
|
||||
def test_frontend_change_handler_init():
|
||||
"""Test FrontendChangeHandler initialization."""
|
||||
mock_trigger = Mock()
|
||||
handler = FrontendChangeHandler(trigger_build=mock_trigger)
|
||||
|
||||
assert handler.trigger_build == mock_trigger
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_frontend_change_handler_ignores_dist_changes():
|
||||
"""Test FrontendChangeHandler ignores changes in dist directory."""
|
||||
mock_trigger = Mock()
|
||||
handler = FrontendChangeHandler(trigger_build=mock_trigger)
|
||||
|
||||
# Create mock event with dist path
|
||||
mock_event = Mock()
|
||||
mock_event.src_path = "/path/to/frontend/dist/file.js"
|
||||
|
||||
handler.on_any_event(mock_event)
|
||||
|
||||
# Should not trigger build for dist changes
|
||||
mock_trigger.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"source_path",
|
||||
[
|
||||
"/path/to/frontend/src/component.tsx",
|
||||
"/path/to/frontend/webpack.config.js",
|
||||
"/path/to/frontend/package.json",
|
||||
],
|
||||
)
|
||||
def test_frontend_change_handler_triggers_on_source_changes(source_path):
|
||||
"""Test FrontendChangeHandler triggers build on source changes."""
|
||||
mock_trigger = Mock()
|
||||
handler = FrontendChangeHandler(trigger_build=mock_trigger)
|
||||
|
||||
# Create mock event with source path
|
||||
mock_event = Mock()
|
||||
mock_event.src_path = source_path
|
||||
|
||||
handler.on_any_event(mock_event)
|
||||
|
||||
# Should trigger build for source changes
|
||||
mock_trigger.assert_called_once()
|
||||
|
||||
|
||||
# Dev Utility Functions Tests
|
||||
@pytest.mark.unit
|
||||
def test_frontend_watcher_function_coverage(isolated_filesystem):
|
||||
"""Test frontend watcher function for coverage."""
|
||||
# Create extension.json
|
||||
extension_json = {
|
||||
"id": "test_extension",
|
||||
"name": "Test Extension",
|
||||
"version": "1.0.0",
|
||||
"permissions": [],
|
||||
}
|
||||
(isolated_filesystem / "extension.json").write_text(json.dumps(extension_json))
|
||||
|
||||
# Create dist directory
|
||||
dist_dir = isolated_filesystem / "dist"
|
||||
dist_dir.mkdir()
|
||||
|
||||
with patch("superset_extensions_cli.cli.rebuild_frontend") as mock_rebuild:
|
||||
with patch("superset_extensions_cli.cli.build_manifest") as mock_build:
|
||||
with patch("superset_extensions_cli.cli.write_manifest") as mock_write:
|
||||
mock_rebuild.return_value = "remoteEntry.abc123.js"
|
||||
mock_build.return_value = {"name": "test", "version": "1.0.0"}
|
||||
|
||||
# Simulate frontend watcher function logic
|
||||
frontend_dir = isolated_filesystem / "frontend"
|
||||
frontend_dir.mkdir()
|
||||
|
||||
# Actually call the functions to simulate the frontend_watcher
|
||||
if (
|
||||
remote_entry := mock_rebuild(isolated_filesystem, frontend_dir)
|
||||
) is not None:
|
||||
manifest = mock_build(isolated_filesystem, remote_entry)
|
||||
mock_write(isolated_filesystem, manifest)
|
||||
|
||||
mock_rebuild.assert_called_once_with(isolated_filesystem, frontend_dir)
|
||||
mock_build.assert_called_once_with(
|
||||
isolated_filesystem, "remoteEntry.abc123.js"
|
||||
)
|
||||
mock_write.assert_called_once_with(
|
||||
isolated_filesystem, {"name": "test", "version": "1.0.0"}
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_backend_watcher_function_coverage(isolated_filesystem):
|
||||
"""Test backend watcher function for coverage."""
|
||||
# Create dist directory with manifest
|
||||
dist_dir = isolated_filesystem / "dist"
|
||||
dist_dir.mkdir()
|
||||
|
||||
manifest_data = {"name": "test", "version": "1.0.0"}
|
||||
(dist_dir / "manifest.json").write_text(json.dumps(manifest_data))
|
||||
|
||||
with patch("superset_extensions_cli.cli.rebuild_backend") as mock_rebuild:
|
||||
with patch("superset_extensions_cli.cli.write_manifest") as mock_write:
|
||||
# Simulate backend watcher function
|
||||
mock_rebuild(isolated_filesystem)
|
||||
|
||||
manifest_path = dist_dir / "manifest.json"
|
||||
if manifest_path.exists():
|
||||
manifest = json.loads(manifest_path.read_text())
|
||||
mock_write(isolated_filesystem, manifest)
|
||||
|
||||
mock_rebuild.assert_called_once_with(isolated_filesystem)
|
||||
mock_write.assert_called_once()
|
||||
@@ -1,362 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app
|
||||
|
||||
from tests.utils import (
|
||||
assert_directory_exists,
|
||||
assert_directory_structure,
|
||||
assert_file_exists,
|
||||
assert_file_structure,
|
||||
assert_json_content,
|
||||
create_test_extension_structure,
|
||||
load_json_file,
|
||||
)
|
||||
|
||||
|
||||
# Init Command Tests
|
||||
@pytest.mark.cli
|
||||
def test_init_creates_extension_with_both_frontend_and_backend(
|
||||
cli_runner, isolated_filesystem, cli_input_both
|
||||
):
|
||||
"""Test that init creates a complete extension with both frontend and backend."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
|
||||
|
||||
assert result.exit_code == 0, f"Command failed with output: {result.output}"
|
||||
assert (
|
||||
"🎉 Extension Test Extension (ID: test_extension) initialized" in result.output
|
||||
)
|
||||
|
||||
# Verify directory structure
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
assert_directory_exists(extension_path, "main extension directory")
|
||||
|
||||
expected_structure = create_test_extension_structure(
|
||||
isolated_filesystem,
|
||||
"test_extension",
|
||||
include_frontend=True,
|
||||
include_backend=True,
|
||||
)
|
||||
|
||||
# Check directories
|
||||
assert_directory_structure(extension_path, expected_structure["expected_dirs"])
|
||||
|
||||
# Check files
|
||||
assert_file_structure(extension_path, expected_structure["expected_files"])
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_creates_extension_with_frontend_only(
|
||||
cli_runner, isolated_filesystem, cli_input_frontend_only
|
||||
):
|
||||
"""Test that init creates extension with only frontend components."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_frontend_only)
|
||||
|
||||
assert result.exit_code == 0, f"Command failed with output: {result.output}"
|
||||
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
assert_directory_exists(extension_path)
|
||||
|
||||
# Should have frontend directory and package.json
|
||||
assert_directory_exists(extension_path / "frontend")
|
||||
assert_file_exists(extension_path / "frontend" / "package.json")
|
||||
|
||||
# Should NOT have backend directory
|
||||
backend_path = extension_path / "backend"
|
||||
assert (
|
||||
not backend_path.exists()
|
||||
), "Backend directory should not exist for frontend-only extension"
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_creates_extension_with_backend_only(
|
||||
cli_runner, isolated_filesystem, cli_input_backend_only
|
||||
):
|
||||
"""Test that init creates extension with only backend components."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_backend_only)
|
||||
|
||||
assert result.exit_code == 0, f"Command failed with output: {result.output}"
|
||||
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
assert_directory_exists(extension_path)
|
||||
|
||||
# Should have backend directory and pyproject.toml
|
||||
assert_directory_exists(extension_path / "backend")
|
||||
assert_file_exists(extension_path / "backend" / "pyproject.toml")
|
||||
|
||||
# Should NOT have frontend directory
|
||||
frontend_path = extension_path / "frontend"
|
||||
assert (
|
||||
not frontend_path.exists()
|
||||
), "Frontend directory should not exist for backend-only extension"
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_creates_extension_with_neither_frontend_nor_backend(
|
||||
cli_runner, isolated_filesystem, cli_input_neither
|
||||
):
|
||||
"""Test that init creates minimal extension with neither frontend nor backend."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_neither)
|
||||
|
||||
assert result.exit_code == 0, f"Command failed with output: {result.output}"
|
||||
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
assert_directory_exists(extension_path)
|
||||
|
||||
# Should only have extension.json
|
||||
assert_file_exists(extension_path / "extension.json")
|
||||
|
||||
# Should NOT have frontend or backend directories
|
||||
assert not (extension_path / "frontend").exists()
|
||||
assert not (extension_path / "backend").exists()
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
@pytest.mark.parametrize(
|
||||
"invalid_name,expected_error",
|
||||
[
|
||||
("test-extension", "must be alphanumeric"),
|
||||
("test extension", "must be alphanumeric"),
|
||||
("test.extension", "must be alphanumeric"),
|
||||
("test@extension", "must be alphanumeric"),
|
||||
("", "must be alphanumeric"),
|
||||
],
|
||||
)
|
||||
def test_init_validates_extension_name(
|
||||
cli_runner, isolated_filesystem, invalid_name, expected_error
|
||||
):
|
||||
"""Test that init validates extension names according to regex pattern."""
|
||||
cli_input = f"{invalid_name}\n0.1.0\nApache-2.0\ny\ny\n"
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input)
|
||||
|
||||
assert (
|
||||
result.exit_code == 1
|
||||
), f"Expected command to fail for invalid name '{invalid_name}'"
|
||||
assert expected_error in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_accepts_numeric_extension_name(cli_runner, isolated_filesystem):
|
||||
"""Test that init accepts numeric extension ids like '123'."""
|
||||
cli_input = "123\n123\n0.1.0\nApache-2.0\ny\ny\n"
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input)
|
||||
|
||||
assert result.exit_code == 0, f"Numeric id '123' should be valid: {result.output}"
|
||||
assert Path("123").exists(), "Directory for '123' should be created"
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
@pytest.mark.parametrize(
|
||||
"valid_id", ["test123", "TestExtension", "test_extension_123", "MyExt_1"]
|
||||
)
|
||||
def test_init_with_valid_alphanumeric_names(cli_runner, valid_id):
|
||||
"""Test that init accepts various valid alphanumeric names."""
|
||||
with cli_runner.isolated_filesystem():
|
||||
cli_input = f"{valid_id}\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n"
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input)
|
||||
|
||||
assert (
|
||||
result.exit_code == 0
|
||||
), f"Valid name '{valid_id}' was rejected: {result.output}"
|
||||
assert Path(valid_id).exists(), f"Directory for '{valid_id}' was not created"
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_fails_when_directory_already_exists(
|
||||
cli_runner, isolated_filesystem, cli_input_both
|
||||
):
|
||||
"""Test that init fails gracefully when target directory already exists."""
|
||||
# Create the directory first
|
||||
existing_dir = isolated_filesystem / "test_extension"
|
||||
existing_dir.mkdir()
|
||||
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
|
||||
|
||||
assert result.exit_code == 1, "Command should fail when directory already exists"
|
||||
assert "already exists" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_extension_json_content_is_correct(
|
||||
cli_runner, isolated_filesystem, cli_input_both
|
||||
):
|
||||
"""Test that the generated extension.json has the correct content."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
|
||||
assert result.exit_code == 0
|
||||
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
extension_json_path = extension_path / "extension.json"
|
||||
|
||||
# Verify the JSON structure and values
|
||||
assert_json_content(
|
||||
extension_json_path,
|
||||
{
|
||||
"id": "test_extension",
|
||||
"name": "Test Extension",
|
||||
"version": "0.1.0",
|
||||
"license": "Apache-2.0",
|
||||
"permissions": [],
|
||||
},
|
||||
)
|
||||
|
||||
# Load and verify more complex nested structures
|
||||
content = load_json_file(extension_json_path)
|
||||
|
||||
# Verify frontend section exists and has correct structure
|
||||
assert "frontend" in content
|
||||
frontend = content["frontend"]
|
||||
assert "contributions" in frontend
|
||||
assert "moduleFederation" in frontend
|
||||
assert frontend["contributions"] == {"commands": [], "views": [], "menus": []}
|
||||
assert frontend["moduleFederation"] == {"exposes": ["./index"]}
|
||||
|
||||
# Verify backend section exists and has correct structure
|
||||
assert "backend" in content
|
||||
backend = content["backend"]
|
||||
assert "entryPoints" in backend
|
||||
assert "files" in backend
|
||||
assert backend["entryPoints"] == ["test_extension.entrypoint"]
|
||||
assert backend["files"] == ["backend/src/test_extension/**/*.py"]
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_frontend_package_json_content_is_correct(
|
||||
cli_runner, isolated_filesystem, cli_input_both
|
||||
):
|
||||
"""Test that the generated frontend/package.json has the correct content."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
|
||||
assert result.exit_code == 0
|
||||
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
package_json_path = extension_path / "frontend" / "package.json"
|
||||
|
||||
# Verify the package.json structure and values
|
||||
assert_json_content(
|
||||
package_json_path,
|
||||
{
|
||||
"name": "test_extension",
|
||||
"version": "0.1.0",
|
||||
"license": "Apache-2.0",
|
||||
},
|
||||
)
|
||||
|
||||
# Verify more complex structures
|
||||
content = load_json_file(package_json_path)
|
||||
assert "scripts" in content
|
||||
assert "build" in content["scripts"]
|
||||
assert "peerDependencies" in content
|
||||
assert "@apache-superset/core" in content["peerDependencies"]
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_backend_pyproject_toml_is_created(
|
||||
cli_runner, isolated_filesystem, cli_input_both
|
||||
):
|
||||
"""Test that the generated backend/pyproject.toml file is created."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
|
||||
assert result.exit_code == 0
|
||||
|
||||
extension_path = isolated_filesystem / "test_extension"
|
||||
pyproject_path = extension_path / "backend" / "pyproject.toml"
|
||||
|
||||
assert_file_exists(pyproject_path, "backend pyproject.toml")
|
||||
|
||||
# Basic content verification (without parsing TOML for now)
|
||||
content = pyproject_path.read_text()
|
||||
assert "test_extension" in content
|
||||
assert "0.1.0" in content
|
||||
assert "Apache-2.0" in content
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_command_output_messages(cli_runner, isolated_filesystem, cli_input_both):
|
||||
"""Test that init command produces expected output messages."""
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input_both)
|
||||
|
||||
assert result.exit_code == 0
|
||||
output = result.output
|
||||
|
||||
# Check for expected success messages
|
||||
assert "✅ Created extension.json" in output
|
||||
assert "✅ Created frontend folder structure" in output
|
||||
assert "✅ Created backend folder structure" in output
|
||||
assert "🎉 Extension Test Extension (ID: test_extension) initialized" in output
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_init_with_custom_version_and_license(cli_runner, isolated_filesystem):
|
||||
"""Test init with custom version and license parameters."""
|
||||
cli_input = "my_extension\nMy Extension\n2.1.0\nMIT\ny\nn\n"
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input)
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
extension_path = isolated_filesystem / "my_extension"
|
||||
extension_json_path = extension_path / "extension.json"
|
||||
|
||||
assert_json_content(
|
||||
extension_json_path,
|
||||
{
|
||||
"id": "my_extension",
|
||||
"name": "My Extension",
|
||||
"version": "2.1.0",
|
||||
"license": "MIT",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.integration
|
||||
@pytest.mark.cli
|
||||
def test_full_init_workflow_integration(cli_runner, isolated_filesystem):
|
||||
"""Integration test for the complete init workflow."""
|
||||
# Test the complete flow with realistic user input
|
||||
cli_input = "awesome_charts\nAwesome Charts\n1.0.0\nApache-2.0\ny\ny\n"
|
||||
result = cli_runner.invoke(app, ["init"], input=cli_input)
|
||||
|
||||
# Verify success
|
||||
assert result.exit_code == 0
|
||||
|
||||
# Verify complete directory structure
|
||||
extension_path = isolated_filesystem / "awesome_charts"
|
||||
expected_structure = create_test_extension_structure(
|
||||
isolated_filesystem,
|
||||
"awesome_charts",
|
||||
include_frontend=True,
|
||||
include_backend=True,
|
||||
)
|
||||
|
||||
# Comprehensive structure verification
|
||||
assert_directory_structure(extension_path, expected_structure["expected_dirs"])
|
||||
assert_file_structure(extension_path, expected_structure["expected_files"])
|
||||
|
||||
# Verify all generated files have correct content
|
||||
extension_json = load_json_file(extension_path / "extension.json")
|
||||
assert extension_json["id"] == "awesome_charts"
|
||||
assert extension_json["name"] == "Awesome Charts"
|
||||
assert extension_json["version"] == "1.0.0"
|
||||
assert extension_json["license"] == "Apache-2.0"
|
||||
|
||||
package_json = load_json_file(extension_path / "frontend" / "package.json")
|
||||
assert package_json["name"] == "awesome_charts"
|
||||
|
||||
pyproject_content = (extension_path / "backend" / "pyproject.toml").read_text()
|
||||
assert "awesome_charts" in pyproject_content
|
||||
@@ -1,195 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app, validate_npm
|
||||
|
||||
|
||||
# Validate Command Tests
|
||||
@pytest.mark.cli
|
||||
def test_validate_command_success(cli_runner):
|
||||
"""Test validate command succeeds when npm is available and valid."""
|
||||
with patch("superset_extensions_cli.cli.validate_npm") as mock_validate:
|
||||
result = cli_runner.invoke(app, ["validate"])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "✅ Validation successful" in result.output
|
||||
mock_validate.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
def test_validate_command_calls_npm_validation(cli_runner):
|
||||
"""Test that validate command calls the npm validation function."""
|
||||
with patch("superset_extensions_cli.cli.validate_npm") as mock_validate:
|
||||
cli_runner.invoke(app, ["validate"])
|
||||
mock_validate.assert_called_once()
|
||||
|
||||
|
||||
# Validate NPM Function Tests
|
||||
@pytest.mark.unit
|
||||
@patch("shutil.which")
|
||||
def test_validate_npm_fails_when_npm_not_on_path(mock_which):
|
||||
"""Test validate_npm fails when npm is not on PATH."""
|
||||
mock_which.return_value = None
|
||||
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
validate_npm()
|
||||
|
||||
assert exc_info.value.code == 1
|
||||
mock_which.assert_called_once_with("npm")
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_fails_when_npm_command_fails(mock_run, mock_which):
|
||||
"""Test validate_npm fails when npm -v command fails."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.return_value = Mock(returncode=1, stderr="Command failed")
|
||||
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
validate_npm()
|
||||
|
||||
assert exc_info.value.code == 1
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_fails_when_version_too_low(mock_run, mock_which):
|
||||
"""Test validate_npm fails when npm version is below minimum."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.return_value = Mock(returncode=0, stdout="9.0.0\n", stderr="")
|
||||
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
validate_npm()
|
||||
|
||||
assert exc_info.value.code == 1
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"npm_version",
|
||||
[
|
||||
"10.8.2", # Exact minimum version
|
||||
"11.0.0", # Higher version
|
||||
"10.9.0-alpha.1", # Pre-release version higher than minimum
|
||||
],
|
||||
)
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_succeeds_with_valid_versions(mock_run, mock_which, npm_version):
|
||||
"""Test validate_npm succeeds when npm version is valid."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.return_value = Mock(returncode=0, stdout=f"{npm_version}\n", stderr="")
|
||||
|
||||
# Should not raise SystemExit
|
||||
validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"npm_version,should_pass",
|
||||
[
|
||||
("10.8.2", True), # Exact minimum version
|
||||
("10.8.1", False), # Slightly lower version
|
||||
("10.9.0-alpha.1", True), # Pre-release version higher than minimum
|
||||
("9.9.9", False), # Much lower version
|
||||
("11.0.0", True), # Much higher version
|
||||
],
|
||||
)
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_version_comparison_edge_cases(
|
||||
mock_run, mock_which, npm_version, should_pass
|
||||
):
|
||||
"""Test npm version comparison with edge cases."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.return_value = Mock(returncode=0, stdout=f"{npm_version}\n", stderr="")
|
||||
|
||||
if should_pass:
|
||||
# Should not raise SystemExit
|
||||
validate_npm()
|
||||
else:
|
||||
with pytest.raises(SystemExit):
|
||||
validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_handles_file_not_found_exception(mock_run, mock_which):
|
||||
"""Test validate_npm handles FileNotFoundError gracefully."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.side_effect = FileNotFoundError("Test error")
|
||||
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
validate_npm()
|
||||
|
||||
assert exc_info.value.code == 1
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"exception_type",
|
||||
[
|
||||
OSError,
|
||||
PermissionError,
|
||||
],
|
||||
)
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_does_not_catch_other_subprocess_exceptions(
|
||||
mock_run, mock_which, exception_type
|
||||
):
|
||||
"""Test validate_npm does not catch OSError and PermissionError (they propagate up)."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.side_effect = exception_type("Test error")
|
||||
|
||||
# These exceptions should propagate up, not be caught
|
||||
with pytest.raises(exception_type):
|
||||
validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_with_malformed_version_output_raises_error(mock_run, mock_which):
|
||||
"""Test validate_npm raises ValueError with malformed version output."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.return_value = Mock(returncode=0, stdout="not-a-version\n", stderr="")
|
||||
|
||||
# semver.compare will raise ValueError for malformed version
|
||||
with pytest.raises(ValueError):
|
||||
validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@patch("shutil.which")
|
||||
@patch("subprocess.run")
|
||||
def test_validate_npm_with_empty_version_output_raises_error(mock_run, mock_which):
|
||||
"""Test validate_npm raises ValueError with empty version output."""
|
||||
mock_which.return_value = "/usr/bin/npm"
|
||||
mock_run.return_value = Mock(returncode=0, stdout="", stderr="")
|
||||
|
||||
# semver.compare will raise ValueError for empty version
|
||||
with pytest.raises(ValueError):
|
||||
validate_npm()
|
||||
@@ -1,331 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def templates_dir():
|
||||
"""Get the templates directory path."""
|
||||
return (
|
||||
Path(__file__).parent.parent / "src" / "superset_extensions_cli" / "templates"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def jinja_env(templates_dir):
|
||||
"""Create a Jinja2 environment for testing templates."""
|
||||
return Environment(loader=FileSystemLoader(templates_dir))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def template_context():
|
||||
"""Default template context for testing."""
|
||||
return {
|
||||
"id": "test_extension",
|
||||
"name": "Test Extension",
|
||||
"version": "0.1.0",
|
||||
"license": "Apache-2.0",
|
||||
"include_frontend": True,
|
||||
"include_backend": True,
|
||||
}
|
||||
|
||||
|
||||
# Extension JSON Template Tests
|
||||
@pytest.mark.unit
|
||||
def test_extension_json_template_renders_with_both_frontend_and_backend(
|
||||
jinja_env, template_context
|
||||
):
|
||||
"""Test extension.json template renders correctly with both frontend and backend."""
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(template_context)
|
||||
|
||||
# Parse the rendered JSON to ensure it's valid
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
# Verify basic fields
|
||||
assert parsed["id"] == "test_extension"
|
||||
assert parsed["name"] == "Test Extension"
|
||||
assert parsed["version"] == "0.1.0"
|
||||
assert parsed["license"] == "Apache-2.0"
|
||||
assert parsed["permissions"] == []
|
||||
|
||||
# Verify frontend section exists
|
||||
assert "frontend" in parsed
|
||||
frontend = parsed["frontend"]
|
||||
assert "contributions" in frontend
|
||||
assert "moduleFederation" in frontend
|
||||
assert frontend["contributions"] == {"commands": [], "views": [], "menus": []}
|
||||
assert frontend["moduleFederation"] == {"exposes": ["./index"]}
|
||||
|
||||
# Verify backend section exists
|
||||
assert "backend" in parsed
|
||||
backend = parsed["backend"]
|
||||
assert backend["entryPoints"] == ["test_extension.entrypoint"]
|
||||
assert backend["files"] == ["backend/src/test_extension/**/*.py"]
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"include_frontend,include_backend,expected_sections",
|
||||
[
|
||||
(True, False, ["frontend"]),
|
||||
(False, True, ["backend"]),
|
||||
(False, False, []),
|
||||
],
|
||||
)
|
||||
def test_extension_json_template_renders_with_different_configurations(
|
||||
jinja_env, template_context, include_frontend, include_backend, expected_sections
|
||||
):
|
||||
"""Test extension.json template renders correctly with different configurations."""
|
||||
template_context["include_frontend"] = include_frontend
|
||||
template_context["include_backend"] = include_backend
|
||||
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(template_context)
|
||||
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
# Check for expected sections
|
||||
for section in expected_sections:
|
||||
assert section in parsed, f"Expected section '{section}' not found"
|
||||
|
||||
# Check that unexpected sections are not present
|
||||
all_sections = ["frontend", "backend"]
|
||||
for section in all_sections:
|
||||
if section not in expected_sections:
|
||||
assert section not in parsed, f"Unexpected section '{section}' found"
|
||||
|
||||
|
||||
# Frontend Package JSON Template Tests
|
||||
@pytest.mark.unit
|
||||
def test_frontend_package_json_template_renders_correctly(jinja_env, template_context):
|
||||
"""Test frontend/package.json template renders correctly."""
|
||||
template = jinja_env.get_template("frontend/package.json.j2")
|
||||
rendered = template.render(template_context)
|
||||
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
# Verify basic package info
|
||||
assert parsed["name"] == "test_extension"
|
||||
assert parsed["version"] == "0.1.0"
|
||||
assert parsed["license"] == "Apache-2.0"
|
||||
assert parsed["private"] is True
|
||||
|
||||
# Verify scripts section
|
||||
assert "scripts" in parsed
|
||||
scripts = parsed["scripts"]
|
||||
assert "start" in scripts
|
||||
assert "build" in scripts
|
||||
assert "webpack" in scripts["build"]
|
||||
|
||||
# Verify dependencies
|
||||
assert "peerDependencies" in parsed
|
||||
peer_deps = parsed["peerDependencies"]
|
||||
assert "@apache-superset/core" in peer_deps
|
||||
assert "react" in peer_deps
|
||||
assert "react-dom" in peer_deps
|
||||
|
||||
# Verify dev dependencies
|
||||
assert "devDependencies" in parsed
|
||||
dev_deps = parsed["devDependencies"]
|
||||
assert "webpack" in dev_deps
|
||||
assert "typescript" in dev_deps
|
||||
|
||||
|
||||
# Backend Pyproject TOML Template Tests
|
||||
@pytest.mark.unit
|
||||
def test_backend_pyproject_toml_template_renders_correctly(jinja_env, template_context):
|
||||
"""Test backend/pyproject.toml template renders correctly."""
|
||||
template = jinja_env.get_template("backend/pyproject.toml.j2")
|
||||
rendered = template.render(template_context)
|
||||
|
||||
# Basic content verification (without full TOML parsing)
|
||||
assert "test_extension" in rendered
|
||||
assert "0.1.0" in rendered
|
||||
assert "Apache-2.0" in rendered
|
||||
|
||||
|
||||
# Template Rendering with Different Parameters Tests
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"id_,name",
|
||||
[
|
||||
("simple_extension", "Simple Extension"),
|
||||
("MyExtension123", "My Extension 123"),
|
||||
("complex_extension_name_123", "Complex Extension Name 123"),
|
||||
("ext", "Ext"),
|
||||
],
|
||||
)
|
||||
def test_template_rendering_with_different_ids(jinja_env, id_, name):
|
||||
"""Test templates render correctly with various extension ids/names."""
|
||||
context = {
|
||||
"id": id_,
|
||||
"name": name,
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"include_frontend": True,
|
||||
"include_backend": True,
|
||||
}
|
||||
|
||||
# Test extension.json template
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(context)
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
assert parsed["id"] == id_
|
||||
assert parsed["name"] == name
|
||||
assert parsed["backend"]["entryPoints"] == [f"{id_}.entrypoint"]
|
||||
assert parsed["backend"]["files"] == [f"backend/src/{id_}/**/*.py"]
|
||||
|
||||
# Test package.json template
|
||||
template = jinja_env.get_template("frontend/package.json.j2")
|
||||
rendered = template.render(context)
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
assert parsed["name"] == id_
|
||||
|
||||
# Test pyproject.toml template
|
||||
template = jinja_env.get_template("backend/pyproject.toml.j2")
|
||||
rendered = template.render(context)
|
||||
|
||||
assert id_ in rendered
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize("version", ["0.1.0", "1.0.0", "2.1.3-alpha", "10.20.30"])
|
||||
def test_template_rendering_with_different_versions(jinja_env, version):
|
||||
"""Test templates render correctly with various version formats."""
|
||||
context = {
|
||||
"id": "test_ext",
|
||||
"name": "Test Extension",
|
||||
"version": version,
|
||||
"license": "Apache-2.0",
|
||||
"include_frontend": True,
|
||||
"include_backend": False,
|
||||
}
|
||||
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(context)
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
assert parsed["version"] == version
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"license_type",
|
||||
[
|
||||
"Apache-2.0",
|
||||
"MIT",
|
||||
"BSD-3-Clause",
|
||||
"GPL-3.0",
|
||||
"Custom License",
|
||||
],
|
||||
)
|
||||
def test_template_rendering_with_different_licenses(jinja_env, license_type):
|
||||
"""Test templates render correctly with various license types."""
|
||||
context = {
|
||||
"id": "test_ext",
|
||||
"name": "Test Extension",
|
||||
"version": "1.0.0",
|
||||
"license": license_type,
|
||||
"include_frontend": True,
|
||||
"include_backend": True,
|
||||
}
|
||||
|
||||
# Test extension.json template
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(context)
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
assert parsed["license"] == license_type
|
||||
|
||||
# Test package.json template
|
||||
template = jinja_env.get_template("frontend/package.json.j2")
|
||||
rendered = template.render(context)
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
assert parsed["license"] == license_type
|
||||
|
||||
|
||||
# Template Validation Tests
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"template_name", ["extension.json.j2", "frontend/package.json.j2"]
|
||||
)
|
||||
def test_templates_produce_valid_json(jinja_env, template_context, template_name):
|
||||
"""Test that all JSON templates produce valid JSON output."""
|
||||
template = jinja_env.get_template(template_name)
|
||||
rendered = template.render(template_context)
|
||||
|
||||
# This will raise an exception if the JSON is invalid
|
||||
try:
|
||||
json.loads(rendered)
|
||||
except json.JSONDecodeError as e:
|
||||
pytest.fail(f"Template {template_name} produced invalid JSON: {e}")
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_template_whitespace_handling(jinja_env, template_context):
|
||||
"""Test that templates handle whitespace correctly and produce clean output."""
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(template_context)
|
||||
|
||||
# Should not have excessive empty lines
|
||||
lines = rendered.split("\n")
|
||||
empty_line_count = sum(1 for line in lines if line.strip() == "")
|
||||
|
||||
# Some empty lines are OK for formatting, but not excessive
|
||||
assert (
|
||||
empty_line_count < len(lines) / 2
|
||||
), "Too many empty lines in rendered template"
|
||||
|
||||
# Should be properly formatted JSON
|
||||
parsed = json.loads(rendered)
|
||||
# Re-serialize to check it's valid structure
|
||||
json.dumps(parsed, indent=2)
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_template_context_edge_cases(jinja_env):
|
||||
"""Test template rendering with edge case contexts."""
|
||||
# Test with minimal context
|
||||
minimal_context = {
|
||||
"id": "minimal",
|
||||
"name": "Minimal",
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"include_frontend": False,
|
||||
"include_backend": False,
|
||||
}
|
||||
|
||||
template = jinja_env.get_template("extension.json.j2")
|
||||
rendered = template.render(minimal_context)
|
||||
parsed = json.loads(rendered)
|
||||
|
||||
# Should still be valid JSON with basic fields
|
||||
assert parsed["id"] == "minimal"
|
||||
assert parsed["name"] == "Minimal"
|
||||
assert "frontend" not in parsed
|
||||
assert "backend" not in parsed
|
||||
@@ -1,271 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.utils import read_json, read_toml
|
||||
|
||||
|
||||
# Read JSON Tests
|
||||
@pytest.mark.unit
|
||||
def test_read_json_with_valid_file(isolated_filesystem):
|
||||
"""Test read_json with valid JSON file."""
|
||||
json_data = {"name": "test", "version": "1.0.0"}
|
||||
json_file = isolated_filesystem / "test.json"
|
||||
json_file.write_text(json.dumps(json_data))
|
||||
|
||||
result = read_json(json_file)
|
||||
|
||||
assert result == json_data
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_json_with_nonexistent_file(isolated_filesystem):
|
||||
"""Test read_json returns None when file doesn't exist."""
|
||||
nonexistent_file = isolated_filesystem / "nonexistent.json"
|
||||
|
||||
result = read_json(nonexistent_file)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_json_with_invalid_json(isolated_filesystem):
|
||||
"""Test read_json with invalid JSON content."""
|
||||
invalid_json_file = isolated_filesystem / "invalid.json"
|
||||
invalid_json_file.write_text("{ invalid json content")
|
||||
|
||||
with pytest.raises(json.JSONDecodeError):
|
||||
read_json(invalid_json_file)
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_json_with_directory_instead_of_file(isolated_filesystem):
|
||||
"""Test read_json returns None when path is a directory."""
|
||||
directory = isolated_filesystem / "test_dir"
|
||||
directory.mkdir()
|
||||
|
||||
result = read_json(directory)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"json_content,expected",
|
||||
[
|
||||
({"simple": "value"}, {"simple": "value"}),
|
||||
({"nested": {"key": "value"}}, {"nested": {"key": "value"}}),
|
||||
({"array": [1, 2, 3]}, {"array": [1, 2, 3]}),
|
||||
({}, {}), # Empty JSON object
|
||||
],
|
||||
)
|
||||
def test_read_json_with_various_valid_content(
|
||||
isolated_filesystem, json_content, expected
|
||||
):
|
||||
"""Test read_json with various valid JSON content types."""
|
||||
json_file = isolated_filesystem / "test.json"
|
||||
json_file.write_text(json.dumps(json_content))
|
||||
|
||||
result = read_json(json_file)
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
# Read TOML Tests
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_valid_file(isolated_filesystem):
|
||||
"""Test read_toml with valid TOML file."""
|
||||
toml_content = '[project]\nname = "test"\nversion = "1.0.0"'
|
||||
toml_file = isolated_filesystem / "pyproject.toml"
|
||||
toml_file.write_text(toml_content)
|
||||
|
||||
result = read_toml(toml_file)
|
||||
|
||||
assert result is not None
|
||||
assert result["project"]["name"] == "test"
|
||||
assert result["project"]["version"] == "1.0.0"
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_nonexistent_file(isolated_filesystem):
|
||||
"""Test read_toml returns None when file doesn't exist."""
|
||||
nonexistent_file = isolated_filesystem / "nonexistent.toml"
|
||||
|
||||
result = read_toml(nonexistent_file)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_directory_instead_of_file(isolated_filesystem):
|
||||
"""Test read_toml returns None when path is a directory."""
|
||||
directory = isolated_filesystem / "test_dir"
|
||||
directory.mkdir()
|
||||
|
||||
result = read_toml(directory)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_invalid_toml(isolated_filesystem):
|
||||
"""Test read_toml with invalid TOML content."""
|
||||
invalid_toml_file = isolated_filesystem / "invalid.toml"
|
||||
invalid_toml_file.write_text("[ invalid toml content")
|
||||
|
||||
with pytest.raises(Exception): # tomli raises various exceptions for invalid TOML
|
||||
read_toml(invalid_toml_file)
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"toml_content,expected_keys",
|
||||
[
|
||||
('[project]\nname = "test"', ["project"]),
|
||||
('[build-system]\nrequires = ["setuptools"]', ["build-system"]),
|
||||
(
|
||||
'[project]\nname = "test"\n[build-system]\nrequires = ["setuptools"]',
|
||||
["project", "build-system"],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_read_toml_with_various_valid_content(
|
||||
isolated_filesystem, toml_content, expected_keys
|
||||
):
|
||||
"""Test read_toml with various valid TOML content types."""
|
||||
toml_file = isolated_filesystem / "test.toml"
|
||||
toml_file.write_text(toml_content)
|
||||
|
||||
result = read_toml(toml_file)
|
||||
|
||||
assert result is not None
|
||||
for key in expected_keys:
|
||||
assert key in result
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_complex_structure(isolated_filesystem):
|
||||
"""Test read_toml with complex TOML structure."""
|
||||
complex_toml = """
|
||||
[project]
|
||||
name = "my-package"
|
||||
version = "1.0.0"
|
||||
authors = [
|
||||
{name = "Author Name", email = "author@example.com"}
|
||||
]
|
||||
|
||||
[project.dependencies]
|
||||
requests = "^2.25.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
"""
|
||||
toml_file = isolated_filesystem / "complex.toml"
|
||||
toml_file.write_text(complex_toml)
|
||||
|
||||
result = read_toml(toml_file)
|
||||
|
||||
assert result is not None
|
||||
assert result["project"]["name"] == "my-package"
|
||||
assert result["project"]["version"] == "1.0.0"
|
||||
assert len(result["project"]["authors"]) == 1
|
||||
assert result["project"]["authors"][0]["name"] == "Author Name"
|
||||
assert result["build-system"]["requires"] == ["setuptools", "wheel"]
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_empty_file(isolated_filesystem):
|
||||
"""Test read_toml with empty TOML file."""
|
||||
toml_file = isolated_filesystem / "empty.toml"
|
||||
toml_file.write_text("")
|
||||
|
||||
result = read_toml(toml_file)
|
||||
|
||||
assert result == {}
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
@pytest.mark.parametrize(
|
||||
"invalid_content",
|
||||
[
|
||||
"[ invalid section",
|
||||
"key = ",
|
||||
"key = unquoted string",
|
||||
"[section\nkey = value",
|
||||
],
|
||||
)
|
||||
def test_read_toml_with_various_invalid_content(isolated_filesystem, invalid_content):
|
||||
"""Test read_toml with various types of invalid TOML content."""
|
||||
toml_file = isolated_filesystem / "invalid.toml"
|
||||
toml_file.write_text(invalid_content)
|
||||
|
||||
with pytest.raises(Exception): # Various TOML parsing exceptions
|
||||
read_toml(toml_file)
|
||||
|
||||
|
||||
# File System Edge Cases
|
||||
@pytest.mark.unit
|
||||
def test_read_json_with_permission_denied(isolated_filesystem):
|
||||
"""Test read_json behavior when file permissions are denied."""
|
||||
json_file = isolated_filesystem / "restricted.json"
|
||||
json_file.write_text('{"test": "value"}')
|
||||
|
||||
# This test may not work on all systems, so we'll skip it if chmod doesn't work
|
||||
try:
|
||||
json_file.chmod(0o000) # No permissions
|
||||
result = read_json(json_file)
|
||||
# If we get here without exception, the file was still readable
|
||||
# This is system-dependent behavior
|
||||
assert result is None or result == {"test": "value"}
|
||||
except (OSError, PermissionError):
|
||||
# Expected on some systems
|
||||
pass
|
||||
finally:
|
||||
# Restore permissions for cleanup
|
||||
try:
|
||||
json_file.chmod(0o644)
|
||||
except (OSError, PermissionError):
|
||||
pass
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
def test_read_toml_with_permission_denied(isolated_filesystem):
|
||||
"""Test read_toml behavior when file permissions are denied."""
|
||||
toml_file = isolated_filesystem / "restricted.toml"
|
||||
toml_file.write_text('[test]\nkey = "value"')
|
||||
|
||||
# This test may not work on all systems, so we'll skip it if chmod doesn't work
|
||||
try:
|
||||
toml_file.chmod(0o000) # No permissions
|
||||
result = read_toml(toml_file)
|
||||
# If we get here without exception, the file was still readable
|
||||
# This is system-dependent behavior
|
||||
assert result is None or "test" in result
|
||||
except (OSError, PermissionError):
|
||||
# Expected on some systems
|
||||
pass
|
||||
finally:
|
||||
# Restore permissions for cleanup
|
||||
try:
|
||||
toml_file.chmod(0o644)
|
||||
except (OSError, PermissionError):
|
||||
pass
|
||||
@@ -1,211 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def assert_file_exists(path: Path, description: str = "") -> None:
|
||||
"""
|
||||
Assert that a file exists with a descriptive error message.
|
||||
|
||||
Args:
|
||||
path: Path to the file that should exist
|
||||
description: Optional description for better error messages
|
||||
"""
|
||||
desc_msg = f" ({description})" if description else ""
|
||||
assert path.exists(), f"Expected file {path}{desc_msg} to exist, but it doesn't"
|
||||
assert path.is_file(), f"Expected {path}{desc_msg} to be a file, but it's not"
|
||||
|
||||
|
||||
def assert_directory_exists(path: Path, description: str = "") -> None:
|
||||
"""
|
||||
Assert that a directory exists with a descriptive error message.
|
||||
|
||||
Args:
|
||||
path: Path to the directory that should exist
|
||||
description: Optional description for better error messages
|
||||
"""
|
||||
desc_msg = f" ({description})" if description else ""
|
||||
assert (
|
||||
path.exists()
|
||||
), f"Expected directory {path}{desc_msg} to exist, but it doesn't"
|
||||
assert path.is_dir(), f"Expected {path}{desc_msg} to be a directory, but it's not"
|
||||
|
||||
|
||||
def assert_file_structure(base_path: Path, expected_files: list[str]) -> None:
|
||||
"""
|
||||
Assert that all expected files exist under the base path.
|
||||
|
||||
Args:
|
||||
base_path: Base directory path
|
||||
expected_files: List of relative file paths that should exist
|
||||
"""
|
||||
for file_path in expected_files:
|
||||
full_path = base_path / file_path
|
||||
assert_file_exists(full_path, "part of expected structure")
|
||||
|
||||
|
||||
def assert_directory_structure(base_path: Path, expected_dirs: list[str]) -> None:
|
||||
"""
|
||||
Assert that all expected directories exist under the base path.
|
||||
|
||||
Args:
|
||||
base_path: Base directory path
|
||||
expected_dirs: List of relative directory paths that should exist
|
||||
"""
|
||||
for dir_path in expected_dirs:
|
||||
full_path = base_path / dir_path
|
||||
assert_directory_exists(full_path, "part of expected structure")
|
||||
|
||||
|
||||
def get_directory_tree(path: Path, ignore: set[str] | None = None) -> set[str]:
|
||||
"""
|
||||
Get all files and directories under a path as relative string paths.
|
||||
|
||||
Args:
|
||||
path: Base path to scan
|
||||
ignore: Set of file/directory names to ignore
|
||||
|
||||
Returns:
|
||||
Set of relative path strings
|
||||
"""
|
||||
ignore = ignore or {".DS_Store", "__pycache__", ".pytest_cache"}
|
||||
tree: set[str] = set()
|
||||
|
||||
if not path.exists():
|
||||
return tree
|
||||
|
||||
for item in path.rglob("*"):
|
||||
if any(ignored in item.parts for ignored in ignore):
|
||||
continue
|
||||
relative = item.relative_to(path)
|
||||
tree.add(str(relative))
|
||||
|
||||
return tree
|
||||
|
||||
|
||||
def load_json_file(path: Path) -> dict[str, Any]:
|
||||
"""
|
||||
Load and parse a JSON file.
|
||||
|
||||
Args:
|
||||
path: Path to the JSON file
|
||||
|
||||
Returns:
|
||||
Parsed JSON content
|
||||
|
||||
Raises:
|
||||
AssertionError: If file doesn't exist or isn't valid JSON
|
||||
"""
|
||||
assert_file_exists(path, "JSON file")
|
||||
try:
|
||||
content = json.loads(path.read_text())
|
||||
return content
|
||||
except json.JSONDecodeError as e:
|
||||
raise AssertionError(f"File {path} contains invalid JSON: {e}")
|
||||
|
||||
|
||||
def assert_json_content(path: Path, expected_values: dict[str, Any]) -> None:
|
||||
"""
|
||||
Assert that a JSON file contains expected key-value pairs.
|
||||
|
||||
Args:
|
||||
path: Path to the JSON file
|
||||
expected_values: Dictionary of expected key-value pairs
|
||||
"""
|
||||
content = load_json_file(path)
|
||||
|
||||
for key, expected_value in expected_values.items():
|
||||
assert key in content, f"Expected key '{key}' not found in {path}"
|
||||
actual_value = content[key]
|
||||
assert (
|
||||
actual_value == expected_value
|
||||
), f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}"
|
||||
|
||||
|
||||
def assert_file_contains(path: Path, text: str) -> None:
|
||||
"""
|
||||
Assert that a file contains specific text.
|
||||
|
||||
Args:
|
||||
path: Path to the file
|
||||
text: Text that should be present in the file
|
||||
"""
|
||||
assert_file_exists(path, "text file")
|
||||
content = path.read_text()
|
||||
assert text in content, f"Expected text '{text}' not found in {path}"
|
||||
|
||||
|
||||
def assert_file_content_matches(path: Path, expected_content: str) -> None:
|
||||
"""
|
||||
Assert that a file's content exactly matches expected content.
|
||||
|
||||
Args:
|
||||
path: Path to the file
|
||||
expected_content: Expected file content
|
||||
"""
|
||||
assert_file_exists(path, "content file")
|
||||
actual_content = path.read_text()
|
||||
assert actual_content == expected_content, (
|
||||
f"File content mismatch in {path}\n"
|
||||
f"Expected:\n{expected_content}\n"
|
||||
f"Actual:\n{actual_content}"
|
||||
)
|
||||
|
||||
|
||||
def create_test_extension_structure(
|
||||
base_path: Path,
|
||||
id_: str,
|
||||
include_frontend: bool = True,
|
||||
include_backend: bool = True,
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Helper to create expected extension structure for testing.
|
||||
|
||||
Args:
|
||||
base_path: Base path where extension should be created
|
||||
id_: Unique identifier for extension
|
||||
name: Extension name
|
||||
include_frontend: Whether frontend should be included
|
||||
include_backend: Whether backend should be included
|
||||
|
||||
Returns:
|
||||
Dictionary with expected paths and metadata
|
||||
"""
|
||||
extension_path = base_path / id_
|
||||
expected_files = ["extension.json"]
|
||||
expected_dirs: list[str] = []
|
||||
|
||||
if include_frontend:
|
||||
expected_dirs.append("frontend")
|
||||
expected_files.append("frontend/package.json")
|
||||
|
||||
if include_backend:
|
||||
expected_dirs.append("backend")
|
||||
expected_files.append("backend/pyproject.toml")
|
||||
|
||||
expected = {
|
||||
"extension_path": extension_path,
|
||||
"expected_files": expected_files,
|
||||
"expected_dirs": expected_dirs,
|
||||
}
|
||||
|
||||
return expected
|
||||
@@ -46,7 +46,6 @@ module.exports = {
|
||||
plugins: [
|
||||
'lodash',
|
||||
'@babel/plugin-syntax-dynamic-import',
|
||||
'@babel/plugin-transform-export-namespace-from',
|
||||
['@babel/plugin-proposal-class-properties', { loose: true }],
|
||||
['@babel/plugin-proposal-optional-chaining', { loose: true }],
|
||||
['@babel/plugin-proposal-private-methods', { loose: true }],
|
||||
@@ -90,7 +89,6 @@ module.exports = {
|
||||
plugins: [
|
||||
'babel-plugin-dynamic-import-node',
|
||||
'@babel/plugin-transform-modules-commonjs',
|
||||
'@babel/plugin-transform-export-namespace-from',
|
||||
],
|
||||
},
|
||||
// build instrumented code for testing code coverage with Cypress
|
||||
|
||||
@@ -31,8 +31,6 @@ module.exports = {
|
||||
'^spec/(.*)$': '<rootDir>/spec/$1',
|
||||
// mapping plugins of superset-ui to source code
|
||||
'@superset-ui/(.*)$': '<rootDir>/node_modules/@superset-ui/$1/src',
|
||||
// mapping @apache-superset/core to local package
|
||||
'^@apache-superset/core$': '<rootDir>/packages/superset-core/src',
|
||||
},
|
||||
testEnvironment: 'jsdom',
|
||||
modulePathIgnorePatterns: ['<rootDir>/packages/generator-superset'],
|
||||
|
||||
4341
superset-frontend/package-lock.json
generated
4341
superset-frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -44,7 +44,7 @@
|
||||
"build-storybook": "storybook build",
|
||||
"build-translation": "scripts/po2json.sh",
|
||||
"bundle-stats": "cross-env BUNDLE_ANALYZER=true npm run build && npx open-cli ../superset/static/stats/statistics.html",
|
||||
"core:cover": "cross-env NODE_ENV=test NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage --coverageThreshold='{\"global\":{\"statements\":100,\"branches\":100,\"functions\":100,\"lines\":100}}' --collectCoverageFrom='[\"packages/**/src/**/*.{js,ts}\", \"!packages/superset-ui-demo/**/*\", \"!packages/superset-core/**/*\"]' packages",
|
||||
"core:cover": "cross-env NODE_ENV=test NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage --coverageThreshold='{\"global\":{\"statements\":100,\"branches\":100,\"functions\":100,\"lines\":100}}' --collectCoverageFrom='[\"packages/**/src/**/*.{js,ts}\", \"!packages/superset-ui-demo/**/*\"]' packages",
|
||||
"cover": "cross-env NODE_ENV=test NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage",
|
||||
"dev": "webpack --mode=development --color --watch",
|
||||
"dev-server": "cross-env NODE_ENV=development BABEL_ENV=development node --max_old_space_size=4096 ./node_modules/webpack-dev-server/bin/webpack-dev-server.js --mode=development",
|
||||
@@ -82,7 +82,6 @@
|
||||
],
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^5.2.6",
|
||||
"@apache-superset/core": "file:packages/superset-core",
|
||||
"@emotion/cache": "^11.4.0",
|
||||
"@emotion/react": "^11.13.3",
|
||||
"@emotion/styled": "^11.3.0",
|
||||
@@ -228,7 +227,6 @@
|
||||
"@babel/plugin-proposal-optional-chaining": "^7.21.0",
|
||||
"@babel/plugin-proposal-private-methods": "^7.18.6",
|
||||
"@babel/plugin-syntax-dynamic-import": "^7.8.3",
|
||||
"@babel/plugin-transform-export-namespace-from": "^7.27.1",
|
||||
"@babel/plugin-transform-modules-commonjs": "^7.26.3",
|
||||
"@babel/plugin-transform-runtime": "^7.25.9",
|
||||
"@babel/preset-env": "^7.26.0",
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"presets": [
|
||||
"@babel/preset-env",
|
||||
"@babel/preset-react",
|
||||
"@babel/preset-typescript"
|
||||
]
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
## Change Log
|
||||
|
||||
Changelogs will be added once we have the first stable release.
|
||||
@@ -1,116 +0,0 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# @apache-superset/core
|
||||
|
||||
[](https://badge.fury.io/js/%40apache-superset%2Fcore)
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
|
||||
The official core package for building Apache Superset extensions and integrations. This package provides essential building blocks including shared UI components, utility functions, APIs, and type definitions for both the host application and extensions.
|
||||
|
||||
## 📦 Installation
|
||||
|
||||
```bash
|
||||
npm install @apache-superset/core
|
||||
```
|
||||
|
||||
## 🏗️ Architecture
|
||||
|
||||
The package is organized into logical namespaces, each providing specific functionality:
|
||||
|
||||
- **`authentication`** - User authentication and authorization APIs
|
||||
- **`commands`** - Command registration and execution system
|
||||
- **`contributions`** - UI contribution points and customization APIs
|
||||
- **`core`** - Fundamental types, utilities, and lifecycle management
|
||||
- **`environment`** - Environment detection and configuration APIs
|
||||
- **`extensions`** - Extension management and metadata APIs
|
||||
- **`sqlLab`** - SQL Lab integration and event handling
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### Basic Extension Structure
|
||||
|
||||
```typescript
|
||||
import {
|
||||
core,
|
||||
commands,
|
||||
sqlLab,
|
||||
authentication,
|
||||
} from '@apache-superset/core';
|
||||
|
||||
export function activate(context: core.ExtensionContext) {
|
||||
// Register a command to save current query
|
||||
const commandDisposable = commands.registerCommand(
|
||||
'my_extension.save_query',
|
||||
async () => {
|
||||
const currentTab = sqlLab.getCurrentTab();
|
||||
if (currentTab?.editor.content) {
|
||||
const token = await authentication.getCSRFToken();
|
||||
// Use token for secure API calls
|
||||
console.log('Saving query with CSRF token:', token);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Listen for query execution events
|
||||
const eventDisposable = sqlLab.onDidQueryRun(editor => {
|
||||
console.log('Query executed:', editor.content.substring(0, 50) + '...');
|
||||
});
|
||||
|
||||
// Register a simple view
|
||||
const viewDisposable = core.registerViewProvider(
|
||||
'my_extension.panel',
|
||||
() => (
|
||||
<div>
|
||||
<h3>My Extension</h3>
|
||||
<button onClick={() => commands.executeCommand('my_extension.save_query')}>
|
||||
Save Query
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
|
||||
// Cleanup registration
|
||||
context.subscriptions.push(commandDisposable, eventDisposable, viewDisposable);
|
||||
}
|
||||
|
||||
export function deactivate() {
|
||||
// Cleanup handled automatically via disposables
|
||||
}
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
|
||||
|
||||
## 📄 License
|
||||
|
||||
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
|
||||
|
||||
## 🔗 Links
|
||||
|
||||
- [Apache Superset](https://superset.apache.org/)
|
||||
- [Documentation](https://superset.apache.org/docs/)
|
||||
- [Community](https://superset.apache.org/community/)
|
||||
- [GitHub Repository](https://github.com/apache/superset)
|
||||
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
|
||||
|
||||
---
|
||||
|
||||
**Note**: This package is currently in release candidate status. APIs may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.
|
||||
@@ -1,35 +0,0 @@
|
||||
{
|
||||
"name": "@apache-superset/core",
|
||||
"version": "0.0.1-rc4",
|
||||
"description": "This package contains UI elements, APIs, and utility functions used by Superset.",
|
||||
"sideEffects": false,
|
||||
"main": "lib/index.js",
|
||||
"module": "esm/index.js",
|
||||
"files": [
|
||||
"esm",
|
||||
"lib"
|
||||
],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.26.4",
|
||||
"@babel/core": "^7.26.9",
|
||||
"@babel/preset-env": "^7.26.9",
|
||||
"@babel/preset-react": "^7.26.3",
|
||||
"@babel/preset-typescript": "^7.26.0",
|
||||
"@types/react": "^17.0.83",
|
||||
"install": "^0.13.0",
|
||||
"npm": "^11.1.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"antd": "4.10.3",
|
||||
"react": "^17.0.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "babel src --out-dir lib --extensions \".ts,.tsx\"",
|
||||
"type": "tsc --noEmit"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
}
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @fileoverview Authentication API for Superset extensions.
|
||||
*
|
||||
* This module provides functions for handling user authentication and security
|
||||
* within Superset extensions.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Retrieves the CSRF token used for securing requests against cross-site request forgery attacks.
|
||||
* This token should be included in the headers of POST, PUT, DELETE, and other state-changing
|
||||
* HTTP requests to ensure they are authorized.
|
||||
*
|
||||
* @returns A promise that resolves to the CSRF token as a string, or undefined if not available.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const csrfToken = await getCSRFToken();
|
||||
* if (csrfToken) {
|
||||
* // Include in request headers
|
||||
* headers['X-CSRFToken'] = csrfToken;
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export declare function getCSRFToken(): Promise<string | undefined>;
|
||||
@@ -1,70 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @fileoverview Command system API for Superset extensions.
|
||||
*
|
||||
* This module provides a command registry and execution system that allows extensions
|
||||
* to register custom commands and invoke them programmatically. Commands can be triggered
|
||||
* via keyboard shortcuts, menu items, programmatic calls, or other user interactions.
|
||||
*/
|
||||
|
||||
import { Disposable } from './core';
|
||||
|
||||
/**
|
||||
* Registers a command that can be invoked via a keyboard shortcut,
|
||||
* a menu item, an action, or directly.
|
||||
*
|
||||
* Registering a command with an existing command identifier twice
|
||||
* will cause an error.
|
||||
*
|
||||
* @param command A unique identifier for the command.
|
||||
* @param callback A command handler function.
|
||||
* @param thisArg The `this` context used when invoking the handler function.
|
||||
* @returns Disposable which unregisters this command on disposal.
|
||||
*/
|
||||
export declare function registerCommand(
|
||||
command: string,
|
||||
callback: (...args: any[]) => any,
|
||||
thisArg?: any,
|
||||
): Disposable;
|
||||
|
||||
/**
|
||||
* Executes the command denoted by the given command identifier.
|
||||
*
|
||||
* @param command Identifier of the command to execute.
|
||||
* @param rest Parameters passed to the command function.
|
||||
* @returns A promise that resolves to the returned value of the given command. Returns `undefined` when
|
||||
* the command handler function doesn't return anything.
|
||||
*/
|
||||
export declare function executeCommand<T = unknown>(
|
||||
command: string,
|
||||
...rest: any[]
|
||||
): Promise<T>;
|
||||
|
||||
/**
|
||||
* Retrieve the list of all available commands. Commands starting with an underscore are
|
||||
* treated as internal commands.
|
||||
*
|
||||
* @param filterInternal Set `true` to not see internal commands (starting with an underscore)
|
||||
* @returns Promise that resolves to a list of command ids.
|
||||
*/
|
||||
export declare function getCommands(
|
||||
filterInternal?: boolean,
|
||||
): Promise<string[]>;
|
||||
@@ -1,90 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @fileoverview Contributions API for Superset extension UI integration.
|
||||
*
|
||||
* This module defines the interfaces and types for extension contributions to the
|
||||
* Superset user interface. Extensions use these contribution types to register
|
||||
* commands, menu items, and custom views that integrate seamlessly with the
|
||||
* Superset platform. The contribution system allows extensions to extend the
|
||||
* application's functionality while maintaining a consistent user experience.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Describes a command that can be contributed to the application.
|
||||
*/
|
||||
export interface CommandContribution {
|
||||
/** The unique identifier for the command. */
|
||||
command: string;
|
||||
/** The icon associated with the command. */
|
||||
icon: string;
|
||||
/** The display title of the command. */
|
||||
title: string;
|
||||
/** A description of what the command does. */
|
||||
description: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a menu item that links a view to a command.
|
||||
*/
|
||||
export interface MenuItem {
|
||||
/** The identifier of the view associated with this menu item. */
|
||||
view: string;
|
||||
/** The command to execute when this menu item is selected. */
|
||||
command: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the structure of menu contributions, allowing for primary, secondary, and context menus.
|
||||
*/
|
||||
export interface MenuContribution {
|
||||
/** Items to appear in the primary menu. */
|
||||
primary?: MenuItem[];
|
||||
/** Items to appear in the secondary menu. */
|
||||
secondary?: MenuItem[];
|
||||
/** Items to appear in the context menu. */
|
||||
context?: MenuItem[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a contributed view in the application.
|
||||
*/
|
||||
export interface ViewContribution {
|
||||
/** The unique identifier for the view. */
|
||||
id: string;
|
||||
/** The display name of the view. */
|
||||
name: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Aggregates all contributions (commands, menus, and views) provided by an extension or module.
|
||||
*/
|
||||
export interface Contributions {
|
||||
/** List of command contributions. */
|
||||
commands: CommandContribution[];
|
||||
/** Mapping of menu contributions by menu key. */
|
||||
menus: {
|
||||
[key: string]: MenuContribution;
|
||||
};
|
||||
/** Mapping of view contributions by view key. */
|
||||
views: {
|
||||
[key: string]: ViewContribution[];
|
||||
};
|
||||
}
|
||||
@@ -1,304 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @fileoverview Core types and utilities for Superset extensions.
|
||||
*
|
||||
* This module provides fundamental types and interfaces used throughout the
|
||||
* Superset extension API. It includes database metadata types, event handling,
|
||||
* resource management, and extension lifecycle definitions.
|
||||
*/
|
||||
|
||||
import { ReactElement } from 'react';
|
||||
import { Contributions } from './contributions';
|
||||
|
||||
/**
|
||||
* Represents a database column with its name and data type.
|
||||
*/
|
||||
export type Column = {
|
||||
/**
|
||||
* Label of the column
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* Column name defined
|
||||
*/
|
||||
column_name: string;
|
||||
|
||||
/**
|
||||
* The data type of the column (e.g., 'INTEGER', 'VARCHAR', 'TIMESTAMP')
|
||||
*/
|
||||
type: string;
|
||||
|
||||
/**
|
||||
* Generic data type format
|
||||
*/
|
||||
type_generic: GenericDataType;
|
||||
|
||||
/**
|
||||
* True if the column is date format
|
||||
*/
|
||||
is_dttm: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents a database table with its name and column definitions.
|
||||
*/
|
||||
export declare interface Table {
|
||||
/** The name of the table */
|
||||
name: string;
|
||||
/** Array of columns in this table */
|
||||
columns: Column[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a database catalog.
|
||||
* @todo This interface needs to be expanded with catalog-specific properties.
|
||||
*/
|
||||
export declare interface Catalog {} // eslint-disable-line @typescript-eslint/no-empty-interface
|
||||
|
||||
/**
|
||||
* Represents a database schema containing tables.
|
||||
*/
|
||||
export declare interface Schema {
|
||||
/** Array of tables in this schema */
|
||||
tables: Table[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a database connection with its metadata.
|
||||
*/
|
||||
export declare interface Database {
|
||||
/** Unique identifier for the database */
|
||||
id: number;
|
||||
/** Display name of the database */
|
||||
name: string;
|
||||
/** Array of catalogs available in this database */
|
||||
catalogs: Catalog[];
|
||||
/** Array of schemas available in this database */
|
||||
schemas: Schema[];
|
||||
}
|
||||
|
||||
// Keep in sync with superset/errors.py
|
||||
export type ErrorLevel = 'info' | 'warning' | 'error';
|
||||
|
||||
/**
|
||||
* Superset error object structure.
|
||||
* Contains details about an error that occurred within Superset.
|
||||
*/
|
||||
export type SupersetError = {
|
||||
/**
|
||||
* Error types, see enum of SupersetErrorType in superset/errors.py
|
||||
*/
|
||||
error_type: string;
|
||||
|
||||
/**
|
||||
* Extra properties based on the error types
|
||||
*/
|
||||
extra: Record<string, any>;
|
||||
|
||||
/**
|
||||
* Level of the error type
|
||||
*/
|
||||
level: ErrorLevel;
|
||||
|
||||
/**
|
||||
* Detail description for the error
|
||||
*/
|
||||
message: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Generic data types, see enum of the same name in superset/utils/core.py.
|
||||
*/
|
||||
export enum GenericDataType {
|
||||
Numeric = 0,
|
||||
String = 1,
|
||||
Temporal = 2,
|
||||
Boolean = 3,
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a type which can release resources, such
|
||||
* as event listening or a timer.
|
||||
*/
|
||||
export declare class Disposable {
|
||||
/**
|
||||
* Combine many disposable-likes into one. You can use this method when having objects with
|
||||
* a dispose function which aren't instances of `Disposable`.
|
||||
*
|
||||
* @param disposableLikes Objects that have at least a `dispose`-function member. Note that asynchronous
|
||||
* dispose-functions aren't awaited.
|
||||
* @returns Returns a new disposable which, upon dispose, will
|
||||
* dispose all provided disposables.
|
||||
*/
|
||||
static from(
|
||||
...disposableLikes: {
|
||||
/**
|
||||
* Function to clean up resources.
|
||||
*/
|
||||
dispose: () => any;
|
||||
}[]
|
||||
): Disposable;
|
||||
|
||||
/**
|
||||
* Creates a new disposable that calls the provided function
|
||||
* on dispose.
|
||||
*
|
||||
* *Note* that an asynchronous function is not awaited.
|
||||
*
|
||||
* @param callOnDispose Function that disposes something.
|
||||
*/
|
||||
constructor(callOnDispose: () => any);
|
||||
|
||||
/**
|
||||
* Dispose this object.
|
||||
*/
|
||||
dispose(): any;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a typed event system for handling asynchronous notifications.
|
||||
*
|
||||
* A function that represents an event to which you subscribe by calling it with
|
||||
* a listener function as argument. This provides a type-safe way to handle
|
||||
* events throughout the Superset extension system.
|
||||
*
|
||||
* @template T The type of data that will be passed to event listeners.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Subscribe to an event
|
||||
* const disposable = myEvent((data) => {
|
||||
* console.log("Event happened:", data);
|
||||
* });
|
||||
*
|
||||
* // Unsubscribe when done
|
||||
* disposable.dispose();
|
||||
* ```
|
||||
*/
|
||||
export declare interface Event<T> {
|
||||
/**
|
||||
* Subscribe to this event by providing a listener function.
|
||||
*
|
||||
* @param listener The listener function that will be called when the event is fired.
|
||||
* The function receives the event data as its parameter.
|
||||
* @param thisArgs Optional `this` context that will be used when calling the event listener.
|
||||
* @returns A Disposable object that can be used to unsubscribe from the event.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const subscription = onSomeEvent((data) => {
|
||||
* console.log('Received:', data);
|
||||
* });
|
||||
*
|
||||
* // Later, clean up the subscription
|
||||
* subscription.dispose();
|
||||
* ```
|
||||
*/
|
||||
(listener: (e: T) => any, thisArgs?: any): Disposable;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a Superset extension with its metadata and lifecycle methods.
|
||||
* Extensions are modular components that can extend Superset's functionality.
|
||||
*/
|
||||
export interface Extension {
|
||||
/** Function called when the extension is activated */
|
||||
activate: Function;
|
||||
/** UI contributions provided by this extension */
|
||||
contributions: Contributions;
|
||||
/** Function called when the extension is deactivated */
|
||||
deactivate: Function;
|
||||
/** List of other extensions that this extension depends on */
|
||||
dependencies: string[];
|
||||
/** Human-readable description of the extension */
|
||||
description: string;
|
||||
/** List of modules exposed by this extension for use by other extensions */
|
||||
exposedModules: string[];
|
||||
/** List of other extensions that this extension depends on */
|
||||
extensionDependencies: string[];
|
||||
/** Unique identifier for the extension */
|
||||
id: string;
|
||||
/** Human-readable name of the extension */
|
||||
name: string;
|
||||
/** URL or path to the extension's remote entry point */
|
||||
remoteEntry: string;
|
||||
/** Version of the extension */
|
||||
version: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Context object provided to extensions during activation.
|
||||
* Contains utilities and resources that extensions can use during their lifecycle.
|
||||
*/
|
||||
export interface ExtensionContext {
|
||||
/**
|
||||
* Array of disposable objects that will be automatically disposed when the extension is deactivated.
|
||||
* Extensions should add any resources that need cleanup to this array.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* export function activate(context: ExtensionContext) {
|
||||
* // Register an event listener
|
||||
* const disposable = onSomeEvent(() => { ... });
|
||||
*
|
||||
* // Add to context so it's cleaned up automatically
|
||||
* context.disposables.push(disposable);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
disposables: Disposable[];
|
||||
|
||||
/**
|
||||
* @todo We might want to add more properties to this interface in the future like
|
||||
* storage, configuration, logging, etc. For now, it serves as a placeholder
|
||||
* to allow for future extensibility without breaking existing extensions.
|
||||
*/
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a view provider that can render custom React components in Superset.
|
||||
* View providers allow extensions to contribute custom UI components that can be
|
||||
* displayed in various parts of the Superset interface.
|
||||
*
|
||||
* @param id Unique identifier for the view provider. This ID is used to reference
|
||||
* the view provider from other parts of the system.
|
||||
* @param viewProvider Function that returns a React element to be rendered.
|
||||
* This function will be called whenever the view needs to be displayed.
|
||||
* @returns A Disposable object that can be used to unregister the view provider.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const disposable = registerViewProvider('my-extension.custom-view', () => (
|
||||
* <div>
|
||||
* <h1>My Custom View</h1>
|
||||
* <p>This is a custom component from my extension.</p>
|
||||
* </div>
|
||||
* ));
|
||||
*
|
||||
* // Later, unregister the view provider
|
||||
* disposable.dispose();
|
||||
* ```
|
||||
*/
|
||||
export declare const registerViewProvider: (
|
||||
id: string,
|
||||
viewProvider: () => ReactElement,
|
||||
) => Disposable;
|
||||
@@ -1,153 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @fileoverview Environment API for Superset extensions.
|
||||
*
|
||||
* This module provides access to the execution environment, including system
|
||||
* clipboard operations, logging capabilities, internationalization features,
|
||||
* and environment variables. It allows extensions to interact with the host
|
||||
* system and platform in a controlled manner.
|
||||
*/
|
||||
|
||||
import { Event } from './core';
|
||||
|
||||
/**
|
||||
* Interface for system clipboard operations.
|
||||
* Provides methods to read from and write to the system clipboard.
|
||||
*/
|
||||
export interface Clipboard {
|
||||
/**
|
||||
* Read the current clipboard contents as text.
|
||||
*
|
||||
* @returns A promise that resolves to the clipboard text content.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const clipboardText = await clipboard.readText();
|
||||
* console.log('Clipboard contains:', clipboardText);
|
||||
* ```
|
||||
*/
|
||||
readText(): Promise<string>;
|
||||
|
||||
/**
|
||||
* Writes text into the clipboard, replacing any existing content.
|
||||
*
|
||||
* @param value The text to write to the clipboard.
|
||||
* @returns A promise that resolves when the write operation completes.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* await clipboard.writeText('Hello, world!');
|
||||
* console.log('Text copied to clipboard');
|
||||
* ```
|
||||
*/
|
||||
writeText(value: string): Promise<void>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Logging levels for controlling the verbosity of log output.
|
||||
* Higher numeric values indicate more restrictive logging levels.
|
||||
*/
|
||||
export enum LogLevel {
|
||||
/**
|
||||
* No messages are logged with this level.
|
||||
* Use this to completely disable logging.
|
||||
*/
|
||||
Off = 0,
|
||||
|
||||
/**
|
||||
* All messages are logged with this level.
|
||||
* Most verbose logging level, includes all types of messages.
|
||||
*/
|
||||
Trace = 1,
|
||||
|
||||
/**
|
||||
* Messages with debug and higher log level are logged with this level.
|
||||
* Useful for development and troubleshooting.
|
||||
*/
|
||||
Debug = 2,
|
||||
|
||||
/**
|
||||
* Messages with info and higher log level are logged with this level.
|
||||
* General informational messages about application flow.
|
||||
*/
|
||||
Info = 3,
|
||||
|
||||
/**
|
||||
* Messages with warning and higher log level are logged with this level.
|
||||
* Indicates potential issues that don't prevent operation.
|
||||
*/
|
||||
Warning = 4,
|
||||
|
||||
/**
|
||||
* Only error messages are logged with this level.
|
||||
* Most restrictive level, shows only critical failures.
|
||||
*/
|
||||
Error = 5,
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents the preferred user-language, like `de-CH`, `fr`, or `en-US`.
|
||||
*/
|
||||
export declare const language: string;
|
||||
|
||||
/**
|
||||
* The system clipboard.
|
||||
*/
|
||||
export declare const clipboard: Clipboard;
|
||||
|
||||
/**
|
||||
* The current log level of the editor.
|
||||
*/
|
||||
export declare const logLevel: LogLevel;
|
||||
|
||||
/**
|
||||
* An {@link Event} which fires when the log level of the editor changes.
|
||||
*/
|
||||
export declare const onDidChangeLogLevel: Event<LogLevel>;
|
||||
|
||||
/**
|
||||
* Opens an external URL in the default system browser or application.
|
||||
* This function provides a secure way to open external resources while
|
||||
* respecting user security preferences.
|
||||
*
|
||||
* @param target The URL to open externally.
|
||||
* @returns A promise that resolves to true if the URL was successfully opened, false otherwise.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const success = await openExternal(new URL('https://superset.apache.org'));
|
||||
* if (success) {
|
||||
* console.log('URL opened successfully');
|
||||
* } else {
|
||||
* console.log('Failed to open URL');
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export declare function openExternal(target: URL): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Gets an environment variable value.
|
||||
* @param name The name of the environment variable
|
||||
* @returns The value of the environment variable or undefined if not found
|
||||
*/
|
||||
export declare function getEnvironmentVariable(
|
||||
name: string,
|
||||
): string | undefined;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user