Compare commits

..

60 Commits

Author SHA1 Message Date
Đỗ Trọng Hải
ff83620354 feat(sec): harden GHA ref by using its SHA ID to prevent accidental usage of compromised actions (#38782)
Signed-off-by: hainenber <dotronghai96@gmail.com>
(cherry picked from commit 83823911b5)
2026-04-15 08:48:43 -03:00
Luiz Otavio
9549cbb8ba fix(native-filters): prevent infinite recursion in filter scope tree traversal (#39355)
(cherry picked from commit 86575e129b)
2026-04-15 08:22:08 -03:00
Michael S. Molina
1e3c46038c chore: Install setuptools 2026-03-18 13:39:00 -03:00
Michael S. Molina
5731355ff2 fix(chart): prevent chart list from failing when a datasource lacks explore_url (#38721)
(cherry picked from commit fd1c423826)
2026-03-18 13:09:02 -03:00
justinpark
ccaf2c921b remove spec 2026-02-09 10:54:30 -08:00
justinpark
03214c1ca8 theme color change 2026-02-09 10:28:24 -08:00
justinpark
5159c27bdc lint fix 2026-02-09 09:12:08 -08:00
JUST.in DO IT
f3b21ecaa2 fix(world-map): reset hover highlight on mouse out (#37716)
Co-authored-by: Arunodoy18 <arunodoy630@gmail.com>
(cherry picked from commit a04571fa20)
2026-02-06 10:36:34 -08:00
justinpark
677e440a94 remove duplicated configs 2026-02-03 13:38:30 -08:00
JUST.in DO IT
e30a0b0a8f fix(dashboard): Avoid calling loadData for invisible charts on virtual rendering (#37452)
(cherry picked from commit be404f9b84)
2026-02-02 14:36:42 -08:00
justinpark
3f45502bb3 pylint and current_app related change in spec 2026-01-06 08:56:35 -08:00
justinpark
3a6bd39dd7 fix(trino): update query progress using cursor stats (#36872)
(cherry picked from commit 12a266fd2f)
2026-01-05 14:05:42 -08:00
JUST.in DO IT
34ebb8a7dc chore(sqllab): add logging for switching south panel tabs (#36168) (#36369) 2025-12-02 10:32:55 -08:00
Michael S. Molina
853143ad4a fix: Parsing error handling 2025-11-26 10:43:12 -05:00
Michael S. Molina
8060b01b68 fix: Changes ResultSet to include sqlEditorImmutableId when fetching results (#35773)
(cherry picked from commit 337da13ba7)
2025-10-21 14:32:33 -03:00
Michael S. Molina
5f0059bf68 fix: Extension events not fired in SQL Lab tabs (#35753)
(cherry picked from commit fa90ba976c)
2025-10-20 16:46:09 -03:00
Ville Brofeldt
bfd31ccba7 fix: no fs logging of extensions unless flag is set (#35612)
Co-authored-by: Ville Brofeldt <v_brofeldt@apple.com>
(cherry picked from commit bd48e87eeb)
2025-10-15 08:42:35 -03:00
Luiz Otavio
fc9e94e82d fix(csv upload): Correctly casting to string numbers with floating points (e+) (#35586)
(cherry picked from commit 17ebbdd966)
2025-10-13 15:41:05 -03:00
Michael S. Molina
5b39116dea refactor: Organizes the src/core folder (#35119)
(cherry picked from commit bc6859a99d)
2025-09-19 09:29:24 -03:00
Michael S. Molina
84b81b814e fix: SQL Lab tab events (#35105)
(cherry picked from commit e729b2dbb4)
2025-09-11 18:25:49 -03:00
JUST.in DO IT
47485093b9 feat(sqllab extension): core query interface api (#35077)
(cherry picked from commit 078c1701f4)
2025-09-11 18:24:18 -03:00
Michael S. Molina
5c43b13c1e fix: Change database event in core (#35071)
(cherry picked from commit 4c2b27e7f0)
2025-09-09 11:59:09 -03:00
Luiz Otavio
bf7c97d807 fix: Upload CSV as Dataset (#34763)
(cherry picked from commit 1c2b9db4f0)
2025-09-09 10:13:10 -03:00
JUST.in DO IT
fea386309a fix(echarts): rename time series shifted for isTimeComparisonValue (#35022)
(cherry picked from commit bc54b7970a)
2025-09-09 10:09:43 -03:00
Michael S. Molina
e7296ea709 fix: apache-superset-extensions-cli exported files
(cherry picked from commit b0ca61cbe0)
2025-09-02 16:04:49 -03:00
Michael S. Molina
ad81dc6d9a fix: apache-superset-core exported packages (#34977)
(cherry picked from commit 322442d5be)
2025-09-02 15:25:15 -03:00
Michael S. Molina
63612096c1 fix: Add tests to improve test coverage 2025-09-01 10:57:25 -03:00
Michael S. Molina
9bb5a39ec7 fix: Footer test mock 2025-09-01 10:50:09 -03:00
Michael S. Molina
1f5452179c fix: Skip coverage for @apache-superset/core 2025-09-01 10:23:14 -03:00
Michael S. Molina
150d0a3439 fix: Remove useTheme from test files 2025-09-01 10:01:23 -03:00
Michael S. Molina
c5463d6675 fix: Update @apache-superset/core reference in package-lock.json 2025-09-01 09:26:56 -03:00
Michael S. Molina
383f5389f3 fix: Rename apache-superset-cli to apache-superset-extensions-cli (#34883)
(cherry picked from commit bcf156c969)
2025-08-28 15:45:55 -03:00
Joe Li
ccd0ca3c32 fix: Update apache-superset-core dependency to accept rc1 version (#34872)
Co-authored-by: Claude <noreply@anthropic.com>
(cherry picked from commit b558b34faf)
2025-08-28 10:11:56 -03:00
Michael S. Molina
cc62e437dc chore: Adds README and CHANGELOG to apache-superset-core and apache-superset-cli (#34867)
(cherry picked from commit 30c72ba0a3)
2025-08-28 10:08:53 -03:00
Michael S. Molina
9b5634356a chore: Adds a README and CHANGELOG to @apache-superset/core (#34866)
(cherry picked from commit d8a3d29ad9)
2025-08-28 10:08:40 -03:00
Michael S. Molina
8c25808299 chore: Extensions architecture POC (#31934)
Co-authored-by: Ville Brofeldt <ville.brofeldt@apple.com>
Co-authored-by: Ville Brofeldt <ville@Villes-MacBook-Pro-2024.local>
Co-authored-by: Ville Brofeldt <v_brofeldt@apple.com>
(cherry picked from commit a8be5a5a0c)
2025-08-26 13:47:13 -03:00
Michael S. Molina
aa69ce43d9 fix: User-provided Jinja template parameters causing SQL parsing errors (#34802)
(cherry picked from commit e1234b2264)
2025-08-22 15:02:44 -03:00
JUST.in DO IT
878289a2e6 fix: customize column description limit size in db_engine_spec (#34808)
(cherry picked from commit 75af53dc3d)
2025-08-22 14:40:39 -03:00
Vitor Avila
097f576244 feat(file uploads): List only allowed schemas in the file uploads dialog (#32702)
(cherry picked from commit e35145c816)
2025-08-22 11:30:09 -03:00
Michael S. Molina
e986496ef4 fix: Timeseries annotation layers (#34709)
(cherry picked from commit fc95c4fc89)
2025-08-15 13:10:03 -03:00
JUST.in DO IT
752f7aa80c chore(saved_query): Copy link to clipboard before redirect to edit (#34567) 2025-08-07 11:10:05 -07:00
JUST.in DO IT
882aba67d6 fix(echart): broken aggregator due to bigint value (#34580)
(cherry picked from commit 3e12d97e8e)
2025-08-06 15:22:45 -03:00
JUST.in DO IT
2696d3e800 fix: navigate to SQL Lab due to router api updates (#34569)
(cherry picked from commit 53e9cf6d17)
2025-08-06 11:56:00 -03:00
JUST.in DO IT
ab886db246 fix(echarts): rename time series shifted without dimensions (#34541)
(cherry picked from commit 6f5d9c989a)
2025-08-06 11:55:50 -03:00
JUST.in DO IT
8828e59dca fix(table chart): render bigint value in a raw mode (#34556)
(cherry picked from commit 8700a0b939)
2025-08-05 17:15:47 -03:00
JUST.in DO IT
d1ded45633 chore(explore): Add format sql and view in SQL Lab option in View Query (#33341)
(cherry picked from commit 3a3984006c)
2025-08-05 16:11:54 -03:00
JUST.in DO IT
ee2a5915b2 fix(native filters): throws an error when a chart containing a bigint value (#34539)
(cherry picked from commit 2f8939d229)
2025-08-04 16:18:14 -03:00
JUST.in DO IT
bcbf17bdf3 fix(echart): initial chart animation (#34516)
(cherry picked from commit 1a7a381bd5)
2025-08-02 09:39:59 -03:00
Michael S. Molina
c2238b92cc fix: Users can't skip column sync when saving virtual datasets (#34509) 2025-08-02 09:38:48 -03:00
JUST.in DO IT
32accfff7d fix(sqllab): access legacy kv record (#34411)
(cherry picked from commit 762a11b0bb)
2025-07-31 09:29:38 -03:00
jqqin
8a5dff8491 fix(dataset): prevent metric duplication error when editing SQL and adding metric (#33523)
Co-authored-by: QinQin <qinqin@geotab.com>
(cherry picked from commit 2fba789e8d)
2025-07-30 09:11:39 -03:00
Michael S. Molina
c6e628a384 fix: Charts list is displaying empty dataset names when there's no schema (#34315)
(cherry picked from commit 5f11f9097a)
2025-07-25 14:15:34 -03:00
JUST.in DO IT
5eb61ec1b6 fix: extract tables from jinja (#34307) 2025-07-25 09:06:15 -03:00
Joe Li
fb035f4210 fix: Update spacing on echart legends (#34018)
(cherry picked from commit cb6342fc73)
2025-07-24 10:53:58 -03:00
Michael S. Molina
1926e90e12 fix: Saved queries list break if one query can't be parsed (#34289)
(cherry picked from commit 1e5a4e9bdc)
2025-07-24 08:31:14 -03:00
Michael S. Molina
21f77f3717 fix: Bulk select is not respecting the TAGGING_SYSTEM feature flag (#34282)
(cherry picked from commit 11324607d0)
2025-07-23 11:35:34 -03:00
JUST.in DO IT
31098dee40 fix: Matching errorType on superset api error with SupersetError (#34261)
(cherry picked from commit 229d92590a)
2025-07-23 09:41:37 -03:00
JUST.in DO IT
ffcc8a2c53 fix(explore): Display missing dataset for denied access (#34129)
(cherry picked from commit 96cb6030c8)
2025-07-21 08:37:47 -03:00
Michael S. Molina
f313554040 fix: Annotation layer errors (#34074)
(cherry picked from commit d6ed819fe2)
2025-07-14 08:47:15 -03:00
Beto Dealmeida
41c82337b1 chore: pin Marshmallow < 4 (#33978) 2025-07-14 08:45:50 -03:00
248 changed files with 17678 additions and 941 deletions

10
.github/CODEOWNERS vendored
View File

@@ -30,3 +30,13 @@
**/*.geojson @villebro @rusackas
/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas
# Notify PMC members of changes to extension-related files
/superset-core/ @michael-s-molina @villebro
/superset-extensions-cli/ @michael-s-molina @villebro
/superset/core/ @michael-s-molina @villebro
/superset/extensions/ @michael-s-molina @villebro
/superset-frontend/src/packages/superset-core/ @michael-s-molina @villebro
/superset-frontend/src/core/ @michael-s-molina @villebro
/superset-frontend/src/extensions/ @michael-s-molina @villebro

View File

@@ -1,24 +1,27 @@
name: 'Change Detector'
description: 'Detects file changes for pull request and push events'
name: Change Detector
description: Detects file changes for pull request and push events
inputs:
token:
description: 'GitHub token for authentication'
description: GitHub token for authentication
required: true
outputs:
python:
description: 'Whether Python-related files were changed'
description: Whether Python-related files were changed
value: ${{ steps.change-detector.outputs.python }}
frontend:
description: 'Whether frontend-related files were changed'
description: Whether frontend-related files were changed
value: ${{ steps.change-detector.outputs.frontend }}
docker:
description: 'Whether docker-related files were changed'
description: Whether docker-related files were changed
value: ${{ steps.change-detector.outputs.docker }}
docs:
description: 'Whether docs-related files were changed'
description: Whether docs-related files were changed
value: ${{ steps.change-detector.outputs.docs }}
superset-extensions-cli:
description: Whether superset-extensions-cli package-related files were changed
value: ${{ steps.change-detector.outputs.superset-extensions-cli }}
runs:
using: 'composite'
using: composite
steps:
- name: Detect file changes
id: change-detector

View File

@@ -26,16 +26,16 @@ runs:
- name: Set up QEMU
if: ${{ inputs.build == 'true' }}
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
if: ${{ inputs.build == 'true' }}
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Try to login to DockerHub
if: ${{ inputs.login-to-dockerhub == 'true' }}
continue-on-error: true
uses: docker/login-action@v3
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ inputs.dockerhub-user }}
password: ${{ inputs.dockerhub-token }}

View File

@@ -30,9 +30,8 @@ jobs:
pull-requests: write
checks: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: true
ref: master
@@ -41,7 +40,7 @@ jobs:
uses: ./.github/actions/setup-supersetbot/
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.10"

View File

@@ -31,7 +31,7 @@ jobs:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
if: steps.check_queued.outputs.count >= 20
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Cancel duplicate workflow runs
if: steps.check_queued.outputs.count >= 20

View File

@@ -17,9 +17,8 @@ jobs:
check-python-deps:
runs-on: ubuntu-22.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -25,9 +25,9 @@ jobs:
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Check and notify
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ github.token }}
script: |

View File

@@ -31,7 +31,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Check for file changes
id: check

View File

@@ -27,9 +27,9 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: "Dependency Review"
uses: actions/dependency-review-action@v4
uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0
continue-on-error: true
with:
fail-on-severity: critical
@@ -51,16 +51,17 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: "Checkout Repository"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Setup Python
uses: ./.github/actions/setup-backend/
- name: Set up Python
uses: actions/setup-python@v5
with:
requirements-type: base
python-version: "3.11"
cache: "pip"
- name: "Install dependencies and liccheck"
run: pip install -r requirements/base.txt liccheck
- name: "Set up liccheck"
run: |
uv pip install --system liccheck
- name: "Run liccheck"
run: |
# run the checks

View File

@@ -14,7 +14,6 @@ concurrency:
cancel-in-progress: true
jobs:
setup_matrix:
runs-on: ubuntu-24.04
outputs:
@@ -40,9 +39,8 @@ jobs:
IMAGE_TAG: apache/superset:GHA-${{ matrix.build_preset }}-${{ github.run_id }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
@@ -91,7 +89,7 @@ jobs:
# in the context of push (using multi-platform build), we need to pull the image locally
- name: Docker pull
if: github.event_name == 'push' && (steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker)
run: docker pull $IMAGE_TAG
run: docker pull $IMAGE_TAG
- name: Print docker stats
if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker
@@ -114,7 +112,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Check for file changes

View File

@@ -28,11 +28,11 @@ jobs:
run:
working-directory: superset-embedded-sdk
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: "20"
registry-url: 'https://registry.npmjs.org'
registry-url: "https://registry.npmjs.org"
- run: npm ci
- run: npm run ci:release
env:

View File

@@ -18,11 +18,11 @@ jobs:
run:
working-directory: superset-embedded-sdk
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: "20"
registry-url: 'https://registry.npmjs.org'
registry-url: "https://registry.npmjs.org"
- run: npm ci
- run: npm test
- run: npm run build

View File

@@ -63,7 +63,7 @@ jobs:
- name: Comment (success)
if: steps.describe-services.outputs.active == 'true'
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{github.token}}
script: |

View File

@@ -10,11 +10,11 @@ on:
workflow_dispatch:
inputs:
label_name:
description: 'Label name to simulate label-based /testenv trigger'
description: "Label name to simulate label-based /testenv trigger"
required: true
default: 'testenv-up'
default: "testenv-up"
issue_number:
description: 'Issue or PR number'
description: "Issue or PR number"
required: true
jobs:
@@ -55,7 +55,7 @@ jobs:
- name: Get event SHA
id: get-sha
if: steps.eval-label.outputs.result == 'up'
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -86,7 +86,7 @@ jobs:
core.setOutput("sha", prSha);
- name: Looking for feature flags in PR description
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
id: eval-feature-flags
if: steps.eval-label.outputs.result == 'up'
with:
@@ -108,7 +108,7 @@ jobs:
return results;
- name: Reply with confirmation comment
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
if: steps.eval-label.outputs.result == 'up'
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
@@ -145,7 +145,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ needs.ephemeral-env-label.outputs.sha }} : ${{steps.get-sha.outputs.sha}} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ needs.ephemeral-env-label.outputs.sha }}
persist-credentials: false
@@ -174,7 +174,7 @@ jobs:
--extra-flags "--build-arg INCLUDE_CHROMIUM=false"
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -182,7 +182,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
uses: aws-actions/amazon-ecr-login@c962da2960ed15f492addc26fffa274485265950 # v2
- name: Load, tag and push image to ECR
id: push-image
@@ -205,12 +205,12 @@ jobs:
pull-requests: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -218,7 +218,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
uses: aws-actions/amazon-ecr-login@c962da2960ed15f492addc26fffa274485265950 # v2
- name: Check target image exists in ECR
id: check-image
@@ -233,7 +233,7 @@ jobs:
- name: Fail on missing container image
if: steps.check-image.outcome == 'failure'
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ github.token }}
script: |
@@ -248,7 +248,7 @@ jobs:
- name: Fill in the new image ID in the Amazon ECS task definition
id: task-def
uses: aws-actions/amazon-ecs-render-task-definition@v1
uses: aws-actions/amazon-ecs-render-task-definition@77954e213ba1f9f9cb016b86a1d4f6fcdea0d57e # v1
with:
task-definition: .github/workflows/ecs-task-definition.json
container-name: superset-ci
@@ -281,7 +281,7 @@ jobs:
--tags key=pr,value=$PR_NUMBER key=github_user,value=${{ github.actor }}
- name: Deploy Amazon ECS task definition
id: deploy-task
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
uses: aws-actions/amazon-ecs-deploy-task-definition@cbf54ec46642b86ff78c2f5793da6746954cf8ff # v2
with:
task-definition: ${{ steps.task-def.outputs.task-definition }}
service: pr-${{ github.event.inputs.issue_number || github.event.pull_request.number }}-service
@@ -303,7 +303,7 @@ jobs:
echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT
- name: Comment (success)
if: ${{ success() }}
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{github.token}}
script: |
@@ -316,7 +316,7 @@ jobs:
});
- name: Comment (failure)
if: ${{ failure() }}
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{github.token}}
script: |

View File

@@ -27,12 +27,12 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Setup Java
uses: actions/setup-java@v4
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
with:
distribution: "temurin"
java-version: "11"

View File

@@ -9,17 +9,16 @@ on:
types: [synchronize, opened, reopened, ready_for_review]
jobs:
validate-all-ghas:
runs-on: ubuntu-24.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '20'
node-version: "20"
- name: Install Dependencies
run: npm install -g @action-validator/core @action-validator/cli --save-dev

View File

@@ -15,9 +15,8 @@ jobs:
pull-requests: write
issues: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false

View File

@@ -11,27 +11,27 @@ jobs:
contents: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: recursive
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Check for latest tag
id: latest-tag
run: |
source ./scripts/tag_latest_release.sh $(echo ${{ github.event.release.tag_name }}) --dry-run
- name: Check for latest tag
id: latest-tag
run: |
source ./scripts/tag_latest_release.sh $(echo ${{ github.event.release.tag_name }}) --dry-run
- name: Configure Git
run: |
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Configure Git
run: |
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Run latest-tag
uses: ./.github/actions/latest-tag
if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
with:
description: Superset latest release
tag-name: latest
env:
GITHUB_TOKEN: ${{ github.token }}
- name: Run latest-tag
uses: ./.github/actions/latest-tag
if: (! ${{ steps.latest-tag.outputs.SKIP_TAG }} )
with:
description: Superset latest release
tag-name: latest
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -15,14 +15,14 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Setup Java
uses: actions/setup-java@v4
uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
with:
distribution: 'temurin'
java-version: '11'
distribution: "temurin"
java-version: "11"
- name: Run license check
run: ./scripts/check_license.sh

View File

@@ -13,13 +13,13 @@ jobs:
check-hold-label:
runs-on: ubuntu-24.04
steps:
- name: Check for 'hold' label
uses: actions/github-script@v7
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const payload = context.payload.pull_request
const holdLabelPresent = !!payload.labels.find(label => label.name.includes('hold'))
if (holdLabelPresent) {
core.setFailed('Hold label is present, merge is blocked.')
}
- name: Check for 'hold' label
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const payload = context.payload.pull_request
const holdLabelPresent = !!payload.labels.find(label => label.name.includes('hold'))
if (holdLabelPresent) {
core.setFailed('Hold label is present, merge is blocked.')
}

View File

@@ -16,7 +16,7 @@ jobs:
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -26,6 +26,5 @@ jobs:
on-failed-regex-fail-action: true
on-failed-regex-request-changes: false
on-failed-regex-create-review: false
on-failed-regex-comment:
"Please format your PR title to match: `%regex%`!"
on-failed-regex-comment: "Please format your PR title to match: `%regex%`!"
repo-token: "${{ github.token }}"

View File

@@ -21,7 +21,7 @@ jobs:
python-version: ["current", "previous"]
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -32,7 +32,7 @@ jobs:
node-version: [20]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
# pulls all commits (needed for lerna / semantic release to correctly version)
fetch-depth: 0
@@ -48,13 +48,13 @@ jobs:
- name: Use Node.js ${{ matrix.node-version }}
if: env.HAS_TAGS
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: ${{ matrix.node-version }}
- name: Cache npm
if: env.HAS_TAGS
uses: actions/cache@v4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -68,7 +68,7 @@ jobs:
run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
- name: Cache npm
if: env.HAS_TAGS
uses: actions/cache@v4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.npm-cache-dir-path.outputs.dir }}

View File

@@ -1,4 +1,4 @@
name: Superset CLI tests
name: Superset App CLI tests
on:
push:
@@ -23,7 +23,7 @@ jobs:
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
services:
postgres:
image: postgres:15-alpine
image: postgres:16-alpine
env:
POSTGRES_USER: superset
POSTGRES_PASSWORD: superset
@@ -37,7 +37,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -30,21 +30,21 @@ jobs:
name: Build & Deploy
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
- name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js 20
uses: actions/setup-node@v4
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '20'
node-version: "20"
- name: Setup Python
uses: ./.github/actions/setup-backend/
- uses: actions/setup-java@v4
- uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5
with:
distribution: 'zulu'
java-version: '21'
distribution: "zulu"
java-version: "21"
- name: Install Graphviz
run: sudo apt-get install -y graphviz
- name: Compute Entity Relationship diagram (ERD)

View File

@@ -18,7 +18,7 @@ jobs:
name: Link Checking
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
# Do not bump this linkinator-action version without opening
# an ASF Infra ticket to allow the new version first!
- uses: JustinBeckwith/linkinator-action@v1.11.0
@@ -56,14 +56,14 @@ jobs:
working-directory: docs
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Set up Node.js 20
uses: actions/setup-node@v4
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '20'
node-version: "20"
- name: yarn install
run: |
yarn install --check-cache

View File

@@ -10,17 +10,17 @@ on:
workflow_dispatch:
inputs:
use_dashboard:
description: 'Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]. You MUST provide a branch and/or PR number below for this to work.'
description: "Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]. You MUST provide a branch and/or PR number below for this to work."
required: false
default: 'false'
default: "false"
ref:
description: 'The branch or tag to checkout'
description: "The branch or tag to checkout"
required: false
default: ''
default: ""
pr_id:
description: 'The pull request ID to checkout'
description: "The pull request ID to checkout"
required: false
default: ''
default: ""
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
@@ -68,20 +68,20 @@ jobs:
# Conditional checkout based on context
- name: Checkout for push or pull_request event
if: github.event_name == 'push' || github.event_name == 'pull_request'
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Checkout using ref (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: ${{ github.event.inputs.ref }}
submodules: recursive
- name: Checkout using PR ID (workflow_dispatch)
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
@@ -107,7 +107,7 @@ jobs:
run: testdata
- name: Setup Node.js
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: "20"
- name: Install npm dependencies
@@ -137,8 +137,8 @@ jobs:
with:
run: cypress-run-all ${{ env.USE_DASHBOARD }}
- name: Upload Artifacts
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
if: failure()
with:
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}
name: cypress-artifact-${{ github.run_id }}-${{ github.job }}-${{ matrix.browser }}-${{ matrix.parallel_id }}--${{ steps.set-safe-app-root.outputs.safe_app_root }}

View File

@@ -0,0 +1,64 @@
name: Superset Extensions CLI Package Tests
on:
push:
branches:
- "master"
- "[0-9].[0-9]*"
pull_request:
types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
jobs:
test-superset-extensions-cli-package:
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ["previous", "current", "next"]
defaults:
run:
working-directory: superset-extensions-cli
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
- name: Check for file changes
id: check
uses: ./.github/actions/change-detector/
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Python
if: steps.check.outputs.superset-extensions-cli
uses: ./.github/actions/setup-backend/
with:
python-version: ${{ matrix.python-version }}
requirements-type: dev
- name: Run pytest with coverage
if: steps.check.outputs.superset-extensions-cli
run: |
pytest --cov=superset_extensions_cli --cov-report=xml --cov-report=term-missing --cov-report=html -v --tb=short
- name: Upload coverage reports to Codecov
if: steps.check.outputs.superset-extensions-cli
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
file: ./coverage.xml
flags: superset-extensions-cli
name: superset-extensions-cli-coverage
fail_ci_if_error: false
- name: Upload HTML coverage report
if: steps.check.outputs.superset-extensions-cli
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: superset-extensions-cli-coverage-html
path: htmlcov/

View File

@@ -23,7 +23,7 @@ jobs:
should-run: ${{ steps.check.outputs.frontend }}
steps:
- name: Checkout Code
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
@@ -52,7 +52,7 @@ jobs:
- name: Upload Docker Image Artifact
if: steps.check.outputs.frontend
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: docker-image
path: docker-image.tar.gz
@@ -67,7 +67,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v4
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image
@@ -84,7 +84,7 @@ jobs:
"npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary"
- name: Upload Coverage Artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: coverage-artifacts-${{ matrix.shard }}
path: superset-frontend/coverage
@@ -95,7 +95,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Coverage Artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
pattern: coverage-artifacts-*
path: coverage/
@@ -107,7 +107,7 @@ jobs:
run: npx nyc merge coverage/ merged-output/coverage-summary.json
- name: Upload Code Coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: javascript
token: ${{ secrets.CODECOV_TOKEN }}
@@ -139,7 +139,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v4
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image
@@ -162,7 +162,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Download Docker Image Artifact
uses: actions/download-artifact@v4
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: docker-image
@@ -174,7 +174,7 @@ jobs:
docker run --rm $TAG bash -c \
"npm run plugins:build"
- name: Build Plugins Storybook
- name: Build Storybook and Run Tests
run: |
docker run --rm $TAG bash -c \
"npm run plugins:build-storybook"

View File

@@ -16,21 +16,21 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4
with:
version: v3.16.4
- name: Setup Python
uses: ./.github/actions/setup-backend/
with:
install-superset: 'false'
install-superset: "false"
- name: Set up chart-testing
uses: ./.github/actions/chart-testing-action

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ inputs.ref || github.ref_name }}
persist-credentials: true
@@ -42,7 +42,7 @@ jobs:
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Install Helm
uses: azure/setup-helm@v4
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4
with:
version: v3.5.4
@@ -101,7 +101,7 @@ jobs:
CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}"
- name: Open Pull Request
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const branchName = '${{ env.branch_name }}';

View File

@@ -41,7 +41,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -68,7 +68,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,mysql
token: ${{ secrets.CODECOV_TOKEN }}
@@ -99,7 +99,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -129,7 +129,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,postgres
token: ${{ secrets.CODECOV_TOKEN }}
@@ -152,7 +152,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -181,7 +181,7 @@ jobs:
run: |
./scripts/python_tests.sh
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,sqlite
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -48,7 +48,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -77,7 +77,7 @@ jobs:
run: |
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,presto
token: ${{ secrets.CODECOV_TOKEN }}
@@ -108,7 +108,7 @@ jobs:
- 16379:6379
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -145,7 +145,7 @@ jobs:
pip install -e .[hive]
./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,hive
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -24,7 +24,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -46,7 +46,7 @@ jobs:
run: |
pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear
- name: Upload code coverage
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
flags: python,unit
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive
@@ -31,9 +31,9 @@ jobs:
- name: Setup Node.js
if: steps.check.outputs.frontend
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '18'
node-version: "18"
- name: Install dependencies
if: steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
submodules: recursive

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false
- name: Install dependencies

View File

@@ -9,7 +9,7 @@ on:
workflow_dispatch:
inputs:
comment_body:
description: 'Comment Body'
description: "Comment Body"
required: true
type: string
@@ -26,7 +26,7 @@ jobs:
steps:
- name: Quickly add thumbs up!
if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')
uses: actions/github-script@v7
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/')
@@ -38,7 +38,7 @@ jobs:
});
- name: "Checkout ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
persist-credentials: false

View File

@@ -16,11 +16,11 @@ on:
force-latest:
required: true
type: choice
default: 'false'
default: "false"
description: Whether to force a latest tag on the release
options:
- 'true'
- 'false'
- "true"
- "false"
jobs:
config:
runs-on: ubuntu-24.04
@@ -42,12 +42,12 @@ jobs:
runs-on: ubuntu-24.04
strategy:
matrix:
build_preset: ["dev", "lean", "py310", "websocket", "dockerize", "py311"]
build_preset:
["dev", "lean", "py310", "websocket", "dockerize", "py311"]
fail-fast: false
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
@@ -60,7 +60,7 @@ jobs:
build: "true"
- name: Use Node.js 20
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 20
@@ -105,14 +105,13 @@ jobs:
contents: read
pull-requests: write
steps:
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
- name: Use Node.js 20
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 20

View File

@@ -27,12 +27,12 @@ jobs:
name: Generate Reports
steps:
- name: Checkout Repository
uses: actions/checkout@v4
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: '20'
node-version: "20"
- name: Install Dependencies
run: npm install

2
.gitignore vendored
View File

@@ -42,7 +42,7 @@ _modules
_static
build
app.db
apache_superset.egg-info/
*.egg-info/
changelog.sh
dist
dump.rdb

View File

@@ -23,7 +23,9 @@ repos:
rev: v1.13.0
hooks:
- id: mypy
name: mypy (main)
args: [--check-untyped-defs]
exclude: ^superset-extensions-cli/
additional_dependencies: [
types-simplejson,
types-python-dateutil,
@@ -38,6 +40,10 @@ repos:
types-paramiko,
types-Markdown,
]
- id: mypy
name: mypy (superset-extensions-cli)
args: [--check-untyped-defs]
files: ^superset-extensions-cli/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
@@ -83,5 +89,5 @@ repos:
rev: v0.8.0
hooks:
- id: ruff
args: [ --fix ]
args: [--fix]
- id: ruff-format

View File

@@ -32,6 +32,8 @@ apache_superset.egg-info
# json and csv in general cannot have comments
.*json
.*csv
# jinja templates often need to be as-is
.*j2
# Generated doc files
env/*
docs/.htaccess*

View File

@@ -219,6 +219,10 @@ FROM python-common AS lean
# Install Python dependencies using docker/pip-install.sh
COPY requirements/base.txt requirements/
# Copy superset-core package needed for editable install in base.txt
COPY superset-core superset-core
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/base.txt
# Install the superset package
@@ -241,6 +245,11 @@ RUN /app/docker/apt-install.sh \
# Copy development requirements and install them
COPY requirements/*.txt requirements/
# Copy local packages needed for editable installs in development.txt
COPY superset-core superset-core
COPY superset-extensions-cli superset-extensions-cli
# Install Python dependencies using docker/pip-install.sh
RUN --mount=type=cache,target=${SUPERSET_HOME}/.cache/uv \
/app/docker/pip-install.sh --requires-build-essential -r requirements/development.txt

View File

@@ -34,6 +34,8 @@ classifiers = [
"Programming Language :: Python :: 3.11",
]
dependencies = [
# no bounds for apache-superset-core until we have a stable version
"apache-superset-core",
"backoff>=1.8.0",
"celery>=5.3.6, <6.0.0",
"click>=8.0.3",
@@ -64,6 +66,8 @@ dependencies = [
"jsonpath-ng>=1.6.1, <2",
"Mako>=1.2.2",
"markdown>=3.0",
# marshmallow>=4 has issues: https://github.com/apache/superset/issues/33162
"marshmallow>=3.0, <4",
"msgpack>=1.0.0, <1.1",
"nh3>=0.2.11, <0.3",
"numpy>1.23.5, <2",
@@ -98,6 +102,7 @@ dependencies = [
"tabulate>=0.8.9, <0.9",
"typing-extensions>=4, <5",
"waitress; sys_platform == 'win32'",
"watchdog>=6.0.0",
"wtforms>=2.3.3, <4",
"wtforms-json",
"xlsxwriter>=3.0.7, <3.1",
@@ -182,6 +187,8 @@ doris = ["pydoris>=1.0.0, <2.0.0"]
oceanbase = ["oceanbase_py>=0.0.1"]
ydb = ["ydb-sqlalchemy>=0.1.2"]
development = [
# no bounds for apache-superset-extensions-cli until a stable version
"apache-superset-extensions-cli",
"docker",
"flask-testing",
"freezegun",
@@ -211,7 +218,7 @@ documentation = "https://superset.apache.org/docs/intro"
combine_as_imports = true
include_trailing_comma = true
line_length = 88
known_first_party = "superset"
known_first_party = "superset, apache-superset-core, apache-superset-extensions-cli"
known_third_party = "alembic, apispec, backoff, celery, click, colorama, cron_descriptor, croniter, cryptography, dateutil, deprecation, flask, flask_appbuilder, flask_babel, flask_caching, flask_compress, flask_jwt_extended, flask_login, flask_migrate, flask_sqlalchemy, flask_talisman, flask_testing, flask_wtf, freezegun, geohash, geopy, holidays, humanize, isodate, jinja2, jwt, markdown, markupsafe, marshmallow, msgpack, nh3, numpy, pandas, parameterized, parsedatetime, pgsanity, polyline, prison, progress, pyarrow, sqlalchemy_bigquery, pyhive, pyparsing, pytest, pytest_mock, pytz, redis, requests, selenium, setuptools, shillelagh, simplejson, slack, sqlalchemy, sqlalchemy_utils, sqlparse, typing_extensions, urllib3, werkzeug, wtforms, wtforms_json, yaml"
multi_line_output = 3
order_by_type = false
@@ -390,3 +397,7 @@ python-geohash = "0"
# TODO REMOVE THESE DEPS FROM CODEBASE
paramiko = "3" # GPL
pyxlsb = "1" # GPL
[tool.uv.sources]
apache-superset-core = { path = "./superset-core", editable = true }
apache-superset-extensions-cli = { path = "./superset-extensions-cli", editable = true }

View File

@@ -1,5 +1,7 @@
# This file was autogenerated by uv via the following command:
# uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt
-e ./superset-core
# via apache-superset (pyproject.toml)
alembic==1.14.0
# via flask-migrate
amqp==5.3.1
@@ -9,7 +11,9 @@ apispec==6.3.0
apsw==3.46.0.0
# via shillelagh
async-timeout==4.0.3
# via -r requirements/base.in
# via
# -r requirements/base.in
# redis
attrs==24.2.0
# via
# cattrs
@@ -94,6 +98,11 @@ email-validator==2.2.0
# via flask-appbuilder
et-xmlfile==2.0.0
# via openpyxl
exceptiongroup==1.3.0
# via
# cattrs
# trio
# trio-websocket
flask==2.3.3
# via
# apache-superset (pyproject.toml)
@@ -109,7 +118,9 @@ flask==2.3.3
# flask-sqlalchemy
# flask-wtf
flask-appbuilder==4.5.5
# via apache-superset (pyproject.toml)
# via
# apache-superset (pyproject.toml)
# apache-superset-core
flask-babel==2.0.0
# via flask-appbuilder
flask-caching==2.3.0
@@ -148,7 +159,6 @@ greenlet==3.0.3
# via
# apache-superset (pyproject.toml)
# shillelagh
# sqlalchemy
gunicorn==23.0.0
# via apache-superset (pyproject.toml)
h11==0.16.0
@@ -204,6 +214,7 @@ markupsafe==3.0.2
# wtforms
marshmallow==3.23.1
# via
# apache-superset (pyproject.toml)
# flask-appbuilder
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==0.28.2
@@ -224,6 +235,7 @@ numpy==1.26.4
# bottleneck
# numexpr
# pandas
# pyarrow
odfpy==1.4.1
# via pandas
openpyxl==3.1.5
@@ -376,8 +388,11 @@ typing-extensions==4.12.2
# via
# apache-superset (pyproject.toml)
# alembic
# cattrs
# exceptiongroup
# flask-limiter
# limits
# rich
# selenium
# shillelagh
tzdata==2024.2
@@ -398,6 +413,8 @@ vine==5.1.0
# amqp
# celery
# kombu
watchdog==6.0.0
# via apache-superset (pyproject.toml)
wcwidth==0.2.13
# via prompt-toolkit
websocket-client==1.8.0

View File

@@ -17,3 +17,4 @@
# under the License.
#
-e .[development,bigquery,cors,druid,gevent,gsheets,mysql,postgres,presto,prophet,trino,thumbnails]
-e ./superset-extensions-cli[test]

View File

@@ -1,26 +1,38 @@
# This file was autogenerated by uv via the following command:
# uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt
# uv pip compile requirements/development.in -c requirements/base-constraint.txt -o requirements/development.txt
-e .
# via -r requirements/development.in
-e ./superset-core
# via
# apache-superset
# apache-superset-extensions-cli
-e ./superset-extensions-cli
# via
# -r requirements/development.in
# apache-superset
alembic==1.14.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-migrate
amqp==5.3.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# kombu
apispec==6.3.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
apsw==3.46.0.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# shillelagh
async-timeout==4.0.3
# via
# -c requirements/base-constraint.txt
# redis
attrs==24.2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# cattrs
# jsonschema
# outcome
@@ -28,69 +40,70 @@ attrs==24.2.0
# trio
babel==2.16.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-babel
backoff==2.2.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
bcrypt==4.2.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# paramiko
billiard==4.2.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# celery
blinker==1.9.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask
bottleneck==1.4.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
brotli==1.1.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-compress
cachelib==0.9.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-caching
# flask-session
cachetools==5.5.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# google-auth
cattrs==24.1.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# requests-cache
celery==5.4.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
certifi==2024.8.30
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# requests
# selenium
cffi==1.17.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# cryptography
# pynacl
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.4.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# requests
click==8.1.7
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# apache-superset-extensions-cli
# celery
# click-didyoumean
# click-option-group
@@ -100,25 +113,25 @@ click==8.1.7
# flask-appbuilder
click-didyoumean==0.3.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# celery
click-option-group==0.5.6
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
click-plugins==1.1.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# celery
click-repl==0.3.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# celery
cmdstanpy==1.1.0
# via prophet
colorama==0.4.6
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
contourpy==1.0.7
@@ -127,15 +140,15 @@ coverage==7.6.8
# via pytest-cov
cron-descriptor==1.4.5
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
croniter==5.0.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
cryptography==43.0.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# paramiko
# pyopenssl
@@ -145,37 +158,44 @@ db-dtypes==1.3.1
# via pandas-gbq
defusedxml==0.7.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# odfpy
deprecated==1.2.15
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# limits
deprecation==2.1.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
distlib==0.3.8
# via virtualenv
dnspython==2.7.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# email-validator
docker==7.0.0
# via apache-superset
email-validator==2.2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
et-xmlfile==2.0.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# openpyxl
exceptiongroup==1.3.0
# via
# -c requirements/base-constraint.txt
# cattrs
# pytest
# trio
# trio-websocket
filelock==3.12.2
# via virtualenv
flask==2.3.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
# flask-babel
@@ -192,57 +212,58 @@ flask==2.3.3
# flask-wtf
flask-appbuilder==4.5.5
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# apache-superset-core
flask-babel==2.0.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
flask-caching==2.3.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
flask-compress==1.17
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
flask-cors==4.0.0
# via apache-superset
flask-jwt-extended==4.7.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
flask-limiter==3.8.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
flask-login==0.6.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
flask-migrate==3.1.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
flask-session==0.8.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
flask-sqlalchemy==2.5.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
# flask-migrate
flask-talisman==1.1.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
flask-testing==0.8.1
# via apache-superset
flask-wtf==1.2.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
fonttools==4.55.0
@@ -253,11 +274,11 @@ future==1.0.0
# via pyhive
geographiclib==2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# geopy
geopy==2.4.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
gevent==24.2.1
# via apache-superset
@@ -270,7 +291,7 @@ google-api-core==2.23.0
# sqlalchemy-bigquery
google-auth==2.36.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# google-api-core
# google-auth-oauthlib
# google-cloud-bigquery
@@ -302,11 +323,10 @@ googleapis-common-protos==1.66.0
# grpcio-status
greenlet==3.0.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# gevent
# shillelagh
# sqlalchemy
grpcio==1.68.0
# via
# apache-superset
@@ -316,65 +336,66 @@ grpcio-status==1.60.1
# via google-api-core
gunicorn==23.0.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
h11==0.16.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# wsproto
hashids==1.3.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
holidays==0.25
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# prophet
humanize==4.11.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
identify==2.5.36
# via pre-commit
idna==3.10
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# email-validator
# requests
# trio
importlib-metadata==8.5.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
importlib-resources==6.4.5
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# limits
# prophet
iniconfig==2.0.0
# via pytest
isodate==0.7.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
itsdangerous==2.2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask
# flask-wtf
jinja2==3.1.4
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset-extensions-cli
# flask
# flask-babel
jsonpath-ng==1.7.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
jsonschema==4.17.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
# jsonschema-spec
# openapi-schema-validator
@@ -385,72 +406,73 @@ kiwisolver==1.4.7
# via matplotlib
kombu==5.4.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# celery
korean-lunar-calendar==0.3.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# holidays
lazy-object-proxy==1.10.0
# via openapi-spec-validator
limits==3.13.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-limiter
mako==1.3.6
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# alembic
# apache-superset
markdown==3.7
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
markdown-it-py==3.0.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# rich
markupsafe==3.0.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# jinja2
# mako
# werkzeug
# wtforms
marshmallow==3.23.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
# marshmallow-sqlalchemy
marshmallow-sqlalchemy==0.28.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
matplotlib==3.9.0
# via prophet
mdurl==0.1.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# markdown-it-py
msgpack==1.0.8
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
msgspec==0.18.6
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-session
mysqlclient==2.2.6
# via apache-superset
nh3==0.2.19
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
nodeenv==1.8.0
# via pre-commit
numpy==1.26.4
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# bottleneck
# cmdstanpy
@@ -460,11 +482,12 @@ numpy==1.26.4
# pandas
# pandas-gbq
# prophet
# pyarrow
oauthlib==3.2.2
# via requests-oauthlib
odfpy==1.4.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# pandas
openapi-schema-validator==0.4.4
# via openapi-spec-validator
@@ -472,19 +495,19 @@ openapi-spec-validator==0.5.6
# via apache-superset
openpyxl==3.1.5
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# pandas
ordered-set==4.1.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-limiter
outcome==1.3.0.post0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# trio
packaging==24.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# apispec
# db-dtypes
@@ -501,7 +524,7 @@ packaging==24.2
# sqlalchemy-bigquery
pandas==2.0.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# cmdstanpy
# db-dtypes
@@ -513,18 +536,18 @@ parameterized==0.9.0
# via apache-superset
paramiko==3.5.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# sshtunnel
parsedatetime==2.6
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
pathable==0.4.3
# via jsonschema-spec
pgsanity==0.2.9
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
pillow==10.3.0
# via
@@ -532,30 +555,30 @@ pillow==10.3.0
# matplotlib
platformdirs==3.8.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# requests-cache
# virtualenv
pluggy==1.5.0
# via pytest
ply==3.11
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# jsonpath-ng
polyline==2.0.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
pre-commit==4.0.1
# via apache-superset
prison==0.2.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-appbuilder
progress==1.6
# via apache-superset
prompt-toolkit==3.0.48
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# click-repl
prophet==1.1.5
# via apache-superset
@@ -576,24 +599,24 @@ psycopg2-binary==2.9.6
# via apache-superset
pyarrow==14.0.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# db-dtypes
# pandas-gbq
pyasn1==0.6.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# pyasn1-modules
# python-ldap
# rsa
pyasn1-modules==0.4.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# google-auth
# python-ldap
pycparser==2.22
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# cffi
pydata-google-auth==1.9.0
# via pandas-gbq
@@ -603,7 +626,7 @@ pyfakefs==5.3.5
# via apache-superset
pygments==2.18.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# rich
pyhive==0.7.0
# via apache-superset
@@ -611,43 +634,48 @@ pyinstrument==4.4.0
# via apache-superset
pyjwt==2.10.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
# flask-jwt-extended
pynacl==1.5.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# paramiko
pyopenssl==24.2.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# shillelagh
pyparsing==3.2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# matplotlib
pyrsistent==0.20.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# jsonschema
pysocks==1.7.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# urllib3
pytest==7.4.4
# via
# apache-superset
# apache-superset-extensions-cli
# pytest-cov
# pytest-mock
pytest-cov==6.0.0
# via apache-superset
# via
# apache-superset
# apache-superset-extensions-cli
pytest-mock==3.10.0
# via apache-superset
# via
# apache-superset
# apache-superset-extensions-cli
python-dateutil==2.9.0.post0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# celery
# croniter
@@ -662,39 +690,39 @@ python-dateutil==2.9.0.post0
# trino
python-dotenv==1.0.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
python-geohash==0.8.5
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
python-ldap==3.4.4
# via apache-superset
pytz==2024.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# croniter
# flask-babel
# pandas
# trino
pyxlsb==1.0.10
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# pandas
pyyaml==6.0.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# apispec
# jsonschema-spec
# pre-commit
redis==4.6.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
requests==2.32.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# docker
# google-api-core
# google-cloud-bigquery
@@ -707,7 +735,7 @@ requests==2.32.2
# trino
requests-cache==1.2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# shillelagh
requests-oauthlib==2.0.0
# via google-auth-oauthlib
@@ -715,18 +743,20 @@ rfc3339-validator==0.1.4
# via openapi-schema-validator
rich==13.9.4
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-limiter
rsa==4.9
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# google-auth
ruff==0.8.0
# via apache-superset
selenium==4.27.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
semver==3.0.4
# via apache-superset-extensions-cli
setuptools==75.6.0
# via
# nodeenv
@@ -736,19 +766,19 @@ setuptools==75.6.0
# zope-interface
shillelagh==1.2.18
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
shortid==0.1.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
simplejson==3.19.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
six==1.16.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# prison
# python-dateutil
# rfc3339-validator
@@ -756,19 +786,19 @@ six==1.16.0
# wtforms-json
slack-sdk==3.33.4
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
sniffio==1.3.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# trio
sortedcontainers==2.4.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# trio
sqlalchemy==1.4.54
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# alembic
# apache-superset
# flask-appbuilder
@@ -781,29 +811,34 @@ sqlalchemy-bigquery==1.12.0
# via apache-superset
sqlalchemy-utils==0.38.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
sqlglot==26.11.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
sqloxide==0.1.51
# via apache-superset
sqlparse==0.5.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
sshtunnel==0.4.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
statsd==4.0.1
# via apache-superset
tabulate==0.8.10
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
tomli==2.2.1
# via
# apache-superset-extensions-cli
# coverage
# pytest
tqdm==4.67.1
# via
# cmdstanpy
@@ -812,25 +847,28 @@ trino==0.330.0
# via apache-superset
trio==0.28.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# selenium
# trio-websocket
trio-websocket==0.11.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# selenium
typing-extensions==4.12.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# alembic
# apache-superset
# cattrs
# exceptiongroup
# flask-limiter
# limits
# rich
# selenium
# shillelagh
tzdata==2024.2
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# celery
# kombu
# pandas
@@ -838,69 +876,74 @@ tzlocal==5.2
# via trino
url-normalize==1.4.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# requests-cache
urllib3==1.26.18
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# docker
# requests
# requests-cache
# selenium
vine==5.1.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# amqp
# celery
# kombu
virtualenv==20.23.1
# via pre-commit
watchdog==6.0.0
# via
# -c requirements/base-constraint.txt
# apache-superset
# apache-superset-extensions-cli
wcwidth==0.2.13
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# prompt-toolkit
websocket-client==1.8.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# selenium
werkzeug==3.1.3
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask
# flask-appbuilder
# flask-jwt-extended
# flask-login
wrapt==1.17.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# deprecated
wsproto==1.2.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# trio-websocket
wtforms==3.2.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# flask-appbuilder
# flask-wtf
# wtforms-json
wtforms-json==0.3.5
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
xlrd==2.0.1
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# pandas
xlsxwriter==3.0.9
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# apache-superset
# pandas
zipp==3.21.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# importlib-metadata
zope-event==5.0
# via gevent
@@ -908,5 +951,5 @@ zope-interface==5.4.0
# via gevent
zstandard==0.23.0
# via
# -c requirements/base.txt
# -c requirements/base-constraint.txt
# flask-compress

View File

@@ -45,6 +45,11 @@ PATTERNS = {
"docs": [
r"^docs/",
],
"superset-extensions-cli": [
r"^\.github/workflows/superset-extensions-cli\.yml",
r"^superset-extensions-cli/",
r"^superset-core/",
],
}
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")

View File

@@ -24,7 +24,13 @@ ADDITIONAL_ARGS="$@"
# Generate the requirements/base.txt file
uv pip compile pyproject.toml requirements/base.in -o requirements/base.txt $ADDITIONAL_ARGS
# Generate the requirements/development.txt file, making sure requirements/base.txt is a constraint to keep the versions in sync
uv pip compile requirements/development.in -c requirements/base.txt -o requirements/development.txt $ADDITIONAL_ARGS
# Hack to remove "Unnamed requirements are not allowed as constraints" error from base requirements
grep --invert-match "./superset-core" requirements/base.txt > requirements/base-constraint.txt
# Generate the requirements/development.txt file, making sure the base requirements are used as a constraint to keep the versions in sync. Note that `development.txt` is a superset of `base.txt`, so the versions of the shared libs must match.
uv pip compile requirements/development.in -c requirements/base-constraint.txt -o requirements/development.txt $ADDITIONAL_ARGS
# Remove temporary base requirement file
rm requirements/base-constraint.txt
uv pip compile requirements/translations.in -o requirements/translations.txt $ADDITIONAL_ARGS

1
superset-core/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
apache_superset_primitives.egg-info/

View File

@@ -0,0 +1,22 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
Changelogs will be added once we have the first stable release.

216
superset-core/LICENSE.txt Normal file
View File

@@ -0,0 +1,216 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
============================================================================
APACHE SUPERSET SUBCOMPONENTS:
The Apache Superset project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Third party SIL Open Font License v1.1 (OFL-1.1)
========================================================================
(SIL OPEN FONT LICENSE Version 1.1) The Inter font family (https://github.com/rsms/inter)
(SIL OPEN FONT LICENSE Version 1.1) The Fira Code font family (https://github.com/tonsky/FiraCode)

113
superset-core/README.md Normal file
View File

@@ -0,0 +1,113 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# apache-superset-core
[![PyPI version](https://badge.fury.io/py/apache-superset-core.svg)](https://badge.fury.io/py/apache-superset-core)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
[![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
The official core package for building Apache Superset backend extensions and integrations. This package provides essential building blocks including base classes, API utilities, type definitions, and decorators for both the host application and extensions.
## 📦 Installation
```bash
pip install apache-superset-core
```
## 🏗️ Architecture
The package is organized into logical modules, each providing specific functionality:
- **`api`** - REST API base classes, models access, query utilities, and registration
- **`api.models`** - Access to Superset's database models (datasets, databases, etc.)
- **`api.query`** - Database query utilities and SQL dialect handling
- **`api.rest_api`** - Extension API registration and management
- **`api.types.rest_api`** - REST API base classes and type definitions
## 🚀 Quick Start
### Basic Extension Structure
```python
from flask import request, Response
from flask_appbuilder.api import expose, permission_name, protect, safe
from superset_core.api import models, query, rest_api
from superset_core.api.types.rest_api import RestApi
class DatasetReferencesAPI(RestApi):
"""Example extension API demonstrating core functionality."""
resource_name = "dataset_references"
openapi_spec_tag = "Dataset references"
class_permission_name = "dataset_references"
@expose("/metadata", methods=("POST",))
@protect()
@safe
@permission_name("read")
def metadata(self) -> Response:
"""Get dataset metadata for tables referenced in SQL."""
sql: str = request.json.get("sql")
database_id: int = request.json.get("databaseId")
# Access Superset's models using core APIs
databases = models.get_databases(id=database_id)
if not databases:
return self.response_404()
database = databases[0]
dialect = query.get_sqlglot_dialect(database)
# Access datasets to get owner information
datasets = models.get_datasets()
owners_map = {
dataset.table_name: [
f"{owner.first_name} {owner.last_name}"
for owner in dataset.owners
]
for dataset in datasets
}
# Process SQL and return dataset metadata
return self.response(200, result=owners_map)
# Register the extension API
rest_api.add_extension_api(DatasetReferencesAPI)
```
## 🤝 Contributing
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
## 📄 License
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
## 🔗 Links
- [Apache Superset](https://superset.apache.org/)
- [Documentation](https://superset.apache.org/docs/)
- [Community](https://superset.apache.org/community/)
- [GitHub Repository](https://github.com/apache/superset)
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
---
**Note**: This package is currently in release candidate status. APIs may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.

View File

@@ -0,0 +1,63 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
[project]
name = "apache-superset-core"
version = "0.0.1rc2"
description = "Core Python package for building Apache Superset backend extensions and integrations"
readme = "README.md"
authors = [
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
]
license = { file="LICENSE.txt" }
requires-python = ">=3.10"
keywords = ["superset", "apache", "analytics", "business-intelligence", "extensions", "visualization"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Database",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
"flask-appbuilder>=4.5.3, <5.0.0",
]
[project.urls]
Homepage = "https://superset.apache.org/"
Documentation = "https://superset.apache.org/docs/"
Repository = "https://github.com/apache/superset"
"Bug Tracker" = "https://github.com/apache/superset/issues"
Changelog = "https://github.com/apache/superset/blob/master/CHANGELOG.md"
[build-system]
requires = ["setuptools>=76.0.0", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
package-dir = { "" = "src" }
[tool.setuptools.packages.find]
where = ["src"]

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,24 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .types.models import CoreModelsApi
from .types.query import CoreQueryApi
from .types.rest_api import CoreRestApi
# Module-level declarations of the core API access points. Note these are
# bare annotations: no value is assigned in this module. Presumably the host
# application injects the concrete implementations at runtime before
# extensions do `from superset_core.api import models` — TODO confirm
# against the host-side wiring.
models: CoreModelsApi
rest_api: CoreRestApi
query: CoreQueryApi

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,90 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from abc import ABC, abstractmethod
from typing import Any, Type
from flask_sqlalchemy import BaseQuery
from sqlalchemy.orm import scoped_session
class CoreModelsApi(ABC):
    """
    Abstract interface for accessing Superset data models.

    This class defines the contract for retrieving SQLAlchemy sessions
    and model instances for datasets and databases within Superset.
    Every method is a static abstract method; the host application
    provides the concrete implementation.
    """

    @staticmethod
    @abstractmethod
    def get_session() -> scoped_session:
        """
        Retrieve the SQLAlchemy session to directly interface with the
        Superset models.

        :returns: The SQLAlchemy scoped session instance.
        """
        ...

    @staticmethod
    @abstractmethod
    def get_dataset_model() -> type[Any]:
        """
        Retrieve the Dataset (SqlaTable) SQLAlchemy model.

        :returns: The Dataset SQLAlchemy model class.
        """
        ...

    @staticmethod
    @abstractmethod
    def get_database_model() -> type[Any]:
        """
        Retrieve the Database SQLAlchemy model.

        :returns: The Database SQLAlchemy model class.
        """
        ...

    @staticmethod
    @abstractmethod
    def get_datasets(query: BaseQuery | None = None, **kwargs: Any) -> list[Any]:
        """
        Retrieve Dataset (SqlaTable) entities.

        :param query: A query with the Dataset model as the primary entity for
            complex queries.
        :param kwargs: Optional keyword arguments to filter datasets using
            SQLAlchemy's ``filter_by()``.
        :returns: SqlaTable entities.
        """
        ...

    @staticmethod
    @abstractmethod
    def get_databases(query: BaseQuery | None = None, **kwargs: Any) -> list[Any]:
        """
        Retrieve Database entities.

        :param query: A query with the Database model as the primary entity for
            complex queries.
        :param kwargs: Optional keyword arguments to filter databases using
            SQLAlchemy's ``filter_by()``.
        :returns: Database entities.
        """
        ...

View File

@@ -0,0 +1,41 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from abc import ABC, abstractmethod
from typing import Any
from sqlglot import Dialects
class CoreQueryApi(ABC):
    """
    Abstract contract for query-related operations.

    Implementations provide database query capabilities, such as resolving
    the SQL dialect used when parsing queries for a given database.
    """

    @staticmethod
    @abstractmethod
    def get_sqlglot_dialect(database: Any) -> Dialects:
        """
        Resolve the SQLGlot dialect associated with a database.

        :param database: The database instance whose dialect should be resolved.
        :returns: The SQLGlot dialect enum member for the database.
        """
        ...

View File

@@ -0,0 +1,64 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from abc import ABC, abstractmethod
from typing import Type
from flask_appbuilder.api import BaseApi
class RestApi(BaseApi):
    """
    Base REST API class for Superset with browser login support.

    This class extends Flask-AppBuilder's BaseApi and enables browser-based
    authentication by default.
    """

    # Allow session-cookie (browser) authentication in addition to token
    # auth, so endpoints built on this class are usable from the web UI.
    allow_browser_login = True
class CoreRestApi(ABC):
    """
    Abstract interface for managing REST APIs in Superset.

    This class defines the contract for adding and managing REST APIs,
    including both core APIs and extension APIs.
    """

    @staticmethod
    @abstractmethod
    def add_api(api: type[RestApi]) -> None:
        """
        Add a REST API to the Superset API.

        :param api: A REST API class (not an instance).
        :returns: None.
        """
        ...

    @staticmethod
    @abstractmethod
    def add_extension_api(api: type[RestApi]) -> None:
        """
        Add an extension REST API to the Superset API.

        :param api: An extension REST API class (not an instance). These are
            placed under the /extensions resource.
        :returns: None.
        """
        ...

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,63 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import TypedDict
class ModuleFederationConfig(TypedDict):
    """Module-federation build settings for an extension's frontend bundle."""

    # Exposed module name -> source path (presumably mirrors webpack Module
    # Federation's `exposes` option — TODO confirm against the build config).
    exposes: dict[str, str]
    # Output filename of the federation entry bundle.
    filename: str
    # Shared dependency name -> version requirement.
    shared: dict[str, str]
    # Remote name -> remote entry location.
    remotes: dict[str, str]
class FrontendContributionConfig(TypedDict):
    """Contribution points an extension's frontend registers with the host."""

    # Each mapping is presumably keyed by contribution-point id, with a list
    # of string-valued descriptor dicts per key — confirm against the host.
    commands: dict[str, list[dict[str, str]]]
    views: dict[str, list[dict[str, str]]]
    menus: dict[str, list[dict[str, str]]]
class FrontendManifest(TypedDict):
    """Frontend section of a built extension's manifest.json."""

    contributions: FrontendContributionConfig
    moduleFederation: ModuleFederationConfig
    # Filename of the built remote entry bundle (e.g. "remoteEntry.<hash>.js"),
    # discovered by the CLI when copying the frontend dist output.
    remoteEntry: str
class BackendManifest(TypedDict):
    """Backend section of a built extension's manifest.json."""

    # Entry points loaded by the host — presumably Python import paths;
    # TODO confirm against the extension loader.
    entryPoints: list[str]
class SharedBase(TypedDict, total=False):
    """
    Fields shared by the source metadata (extension.json) and the built
    manifest. Declared with `total=False`, so every key is optional at the
    type level, even though the build tooling indexes `id`, `name`, and
    `version` directly and thus requires them in practice.
    """

    id: str
    name: str
    dependencies: list[str]
    description: str
    version: str
    frontend: FrontendManifest
    permissions: list[str]
class Manifest(SharedBase, total=False):
    """Shape of the generated dist/manifest.json for a built extension."""

    # Optional: only present when the extension declares backend entry points.
    backend: BackendManifest
class BackendMetadata(BackendManifest):
    """Backend section of extension.json; extends the manifest backend shape."""

    # Glob patterns (relative to the project root) selecting backend files
    # to copy into the built bundle.
    files: list[str]
class Metadata(SharedBase):
    """
    Shape of an extension's source extension.json file.

    Note: declared without `total=False`, so the newly added `backend` key is
    required by the type checker here (unlike `Manifest.backend`).
    """

    backend: BackendMetadata

View File

@@ -0,0 +1,22 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
Changelogs will be added once we have the first stable release.

View File

@@ -0,0 +1,216 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
============================================================================
APACHE SUPERSET SUBCOMPONENTS:
The Apache Superset project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Third party SIL Open Font License v1.1 (OFL-1.1)
========================================================================
(SIL OPEN FONT LICENSE Version 1.1) The Inter font family (https://github.com/rsms/inter)
(SIL OPEN FONT LICENSE Version 1.1) The Fira Code font family (https://github.com/tonsky/FiraCode)

View File

@@ -0,0 +1,110 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# apache-superset-extensions-cli
[![PyPI version](https://badge.fury.io/py/apache-superset-extensions-cli.svg)](https://badge.fury.io/py/apache-superset-extensions-cli)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
[![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
Official command-line interface for building, bundling, and managing Apache Superset extensions. This CLI tool provides developers with everything needed to create, develop, and package extensions for the Superset ecosystem.
## 🚀 Features
- **Extension Scaffolding** - Generate initial folder structure and scaffold new extension projects
- **Development Server** - Automatically rebuild extensions as files change during development
- **Build System** - Build extension assets for production deployment
- **Bundle Packaging** - Package extensions into distributable .supx files
## 📦 Installation
```bash
pip install apache-superset-extensions-cli
```
## 🛠️ Quick Start
### Available Commands
```bash
# Generate initial folder structure and scaffold a new extension project
superset-extensions init <extension-name>
# Automatically rebuild extension as files change during development
superset-extensions dev
# Build extension assets for production
superset-extensions build
# Package extension into a distributable .supx file
superset-extensions bundle
```
## 📋 Extension Structure
The CLI generates extensions with the following structure:
```
extension_name/
├── extension.json # Extension configuration and metadata
├── frontend/ # Frontend code
│ ├── src/ # TypeScript/React source files
│ ├── webpack.config.js # Frontend build configuration
│ ├── tsconfig.json # TypeScript configuration
│ └── package.json # Frontend dependencies
├── backend/ # Backend code
│ ├── src/
│ │ └── dataset_references/ # Python package source
│ ├── tests/ # Backend tests
│ ├── pyproject.toml # Python package configuration
│ └── requirements.txt # Python dependencies
├── dist/ # Built extension files (generated)
│ ├── manifest.json # Generated extension manifest
│ ├── frontend/
│ │ └── dist/ # Built frontend assets
│ │ ├── remoteEntry.*.js # Module federation entry
│ │ └── *.js # Additional frontend bundles
│ └── backend/
│ └── dataset_references/ # Built backend package
│ ├── __init__.py
│ ├── api.py
│ └── entrypoint.py
├── dataset_references-1.0.0.supx # Packaged extension file (generated)
└── README.md # Extension documentation
```
## 🤝 Contributing
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
## 📄 License
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
## 🔗 Links
- [Apache Superset](https://superset.apache.org/)
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
- [API Documentation](https://superset.apache.org/docs/api/)
- [GitHub Repository](https://github.com/apache/superset)
- [Community](https://superset.apache.org/community/)
---
**Note**: This package is currently in early development. APIs and commands may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.

View File

@@ -0,0 +1,125 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
[project]
name = "apache-superset-extensions-cli"
version = "0.0.1rc2"
description = "Official command-line interface for building, bundling, and managing Apache Superset extensions"
readme = "README.md"
authors = [
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
]
license = { file="LICENSE.txt" }
requires-python = ">=3.10"
keywords = ["superset", "apache", "cli", "extensions", "analytics", "business-intelligence", "development-tools"]
classifiers = [
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Database",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Software Distribution",
]
dependencies = [
# no bounds for apache-superset-core until we have a stable version
"apache-superset-core",
"click>=8.0.3",
"jinja2>=3.1.4",
"semver>=3.0.4",
"tomli>=2.2.1; python_version < '3.11'",
"watchdog>=6.0.0",
]
[project.urls]
Homepage = "https://superset.apache.org/"
Documentation = "https://superset.apache.org/docs/"
Repository = "https://github.com/apache/superset"
"Bug Tracker" = "https://github.com/apache/superset/issues"
Changelog = "https://github.com/apache/superset/blob/master/CHANGELOG.md"
[project.optional-dependencies]
test = [
"pytest",
"pytest-cov",
"pytest-mock",
]
[build-system]
requires = ["setuptools>=76.0.0", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
package-dir = { "" = "src" }
include-package-data = true
[tool.setuptools.packages.find]
where = ["src"]
[tool.setuptools.package-data]
superset_extensions_cli = ["templates/**/*"]
[project.scripts]
superset-extensions = "superset_extensions_cli.cli:app"
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py", "*_test.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
addopts = [
"--strict-markers",
"--strict-config",
"--verbose",
"--cov=superset_extensions_cli",
"--cov-report=term-missing",
"--cov-report=html:htmlcov"
]
markers = [
"unit: Unit tests",
"integration: Integration tests",
"cli: CLI command tests",
"slow: Slow running tests",
]
[tool.coverage.run]
source = ["src/superset_extensions_cli"]
omit = ["*/tests/*", "*/test_*"]
[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"def __repr__",
"if self.debug:",
"if settings.DEBUG",
"raise AssertionError",
"raise NotImplementedError",
"if 0:",
"if __name__ == .__main__.:",
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod",
]
[tool.ruff.lint.per-file-ignores]
"src/superset_extensions_cli/*" = ["TID251"]

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,471 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json # noqa: TID251
import re
import shutil
import subprocess
import sys
import time
import zipfile
from pathlib import Path
from typing import Any, Callable, cast
import click
import semver
from jinja2 import Environment, FileSystemLoader
from superset_core.extensions.types import Manifest, Metadata
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
from superset_extensions_cli.constants import MIN_NPM_VERSION
from superset_extensions_cli.utils import read_json, read_toml
# Matches a built module-federation remote entry filename,
# e.g. "remoteEntry.abc123.js".
REMOTE_ENTRY_REGEX = re.compile(r"^remoteEntry\..+\.js$")
# Matches paths under the frontend build output directory; used by the file
# watcher to skip events generated by the build itself.
FRONTEND_DIST_REGEX = re.compile(r"/frontend/dist")
def validate_npm() -> None:
    """Abort the CLI (exit code 1) unless a suitable `npm` is on PATH."""

    def fail(message: str) -> None:
        # All failure paths print to stderr in red and terminate the process.
        click.secho(message, err=True, fg="red")
        sys.exit(1)

    if shutil.which("npm") is None:
        fail("❌ npm is not installed or not on your PATH.")
    try:
        proc = subprocess.run(  # noqa: S603
            ["npm", "-v"],  # noqa: S607
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
    except FileNotFoundError:
        fail("❌ npm was not found when checking its version.")
        return  # unreachable; keeps control flow explicit for type checkers
    if proc.returncode != 0:
        fail(f"❌ Failed to run `npm -v`: {proc.stderr.strip()}")
    installed = proc.stdout.strip()
    # Enforce the minimum npm version required by the extension toolchain.
    if semver.compare(installed, MIN_NPM_VERSION) < 0:
        fail(
            f"❌ npm version {installed} is lower than the required {MIN_NPM_VERSION}."  # noqa: E501
        )
def init_frontend_deps(frontend_dir: Path) -> None:
    """
    If node_modules is missing under `frontend_dir`, run `npm ci` if package-lock.json
    exists, otherwise run `npm i`.
    """
    if (frontend_dir / "node_modules").exists():
        # Dependencies already present; nothing to do.
        return
    if (frontend_dir / "package-lock.json").exists():
        click.secho("⚙️ node_modules not found, running `npm ci`…", fg="cyan")
        npm_command = ["npm", "ci"]
        error_msg = "❌ `npm ci` failed. Aborting."
    else:
        click.secho("⚙️ node_modules not found, running `npm i`…", fg="cyan")
        npm_command = ["npm", "i"]
        error_msg = "❌ `npm i` failed. Aborting."
    validate_npm()
    result = subprocess.run(  # noqa: S603
        npm_command,  # noqa: S607
        cwd=frontend_dir,
        text=True,
    )
    if result.returncode != 0:
        click.secho(error_msg, err=True, fg="red")
        sys.exit(1)
    click.secho("✅ Dependencies installed", fg="green")
def clean_dist(cwd: Path) -> None:
    """Remove any existing `dist/` under `cwd` and recreate it empty."""
    target = cwd / "dist"
    if target.exists():
        shutil.rmtree(target)
    target.mkdir(parents=True)
def clean_dist_frontend(cwd: Path) -> None:
    """Delete the packaged frontend assets under `dist/frontend`, if present."""
    target = cwd / "dist" / "frontend"
    if target.exists():
        shutil.rmtree(target)
def build_manifest(cwd: Path, remote_entry: str | None) -> Manifest:
    """
    Assemble the manifest payload from the project's extension.json.

    :param cwd: Extension project root containing `extension.json`.
    :param remote_entry: Filename of the built module-federation remote entry,
        or None when no frontend was built.
    :returns: The manifest dictionary. Exits the process (code 1) when
        `extension.json` is missing or empty.
    """
    extension: Metadata = cast(Metadata, read_json(cwd / "extension.json"))
    if not extension:
        click.secho("❌ extension.json not found.", err=True, fg="red")
        sys.exit(1)
    manifest: Manifest = {
        "id": extension["id"],
        "name": extension["name"],
        "version": extension["version"],
        # `permissions` is optional in the metadata schema (SharedBase is
        # total=False); default to an empty list instead of raising KeyError.
        "permissions": extension.get("permissions", []),
        "dependencies": extension.get("dependencies", []),
    }
    # Only advertise a frontend when contributions, module-federation config,
    # and a built remote entry are all present.
    if (
        (frontend := extension.get("frontend"))
        and (contributions := frontend.get("contributions"))
        and (module_federation := frontend.get("moduleFederation"))
        and remote_entry
    ):
        manifest["frontend"] = {
            "contributions": contributions,
            "moduleFederation": module_federation,
            "remoteEntry": remote_entry,
        }
    if entry_points := extension.get("backend", {}).get("entryPoints"):
        manifest["backend"] = {"entryPoints": entry_points}
    return manifest
def write_manifest(cwd: Path, manifest: Manifest) -> None:
    """Serialize `manifest` to dist/manifest.json (pretty-printed, sorted keys)."""
    manifest_path = cwd / "dist" / "manifest.json"
    payload = json.dumps(manifest, indent=2, sort_keys=True)
    manifest_path.write_text(payload)
    click.secho("✅ Manifest updated", fg="green")
def run_frontend_build(frontend_dir: Path) -> subprocess.CompletedProcess[str]:
    """Run `npm run build` in `frontend_dir` and return the completed process."""
    click.echo()
    click.secho("⚙️ Building frontend assets…", fg="cyan")
    build_cmd = ["npm", "run", "build"]
    return subprocess.run(  # noqa: S603
        build_cmd,  # noqa: S607
        cwd=frontend_dir,
        text=True,
    )
def copy_frontend_dist(cwd: Path) -> str:
    """
    Mirror frontend/dist into dist/ (preserving the relative layout) and
    return the name of the module-federation remote entry file; exits the
    process if no remote entry is found.
    """
    dist_dir = cwd / "dist"
    source_dir = cwd / "frontend" / "dist"
    remote_entry: str | None = None
    for path in source_dir.rglob("*"):
        if not path.is_file():
            continue
        # Remember the remote entry filename for the manifest.
        if REMOTE_ENTRY_REGEX.match(path.name):
            remote_entry = path.name
        destination = dist_dir / path.relative_to(cwd)
        destination.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(path, destination)
    if remote_entry is None:
        click.secho("❌ No remote entry file found.", err=True, fg="red")
        sys.exit(1)
    return remote_entry
def copy_backend_files(cwd: Path) -> None:
    """Copy the backend files declared in extension.json into dist/."""
    extension = read_json(cwd / "extension.json")
    if not extension:
        click.secho("❌ No extension.json file found.", err=True, fg="red")
        sys.exit(1)
    dist_dir = cwd / "dist"
    # `backend.files` holds glob patterns relative to the project root.
    for pattern in extension.get("backend", {}).get("files", []):
        for source in cwd.glob(pattern):
            if source.is_file():
                destination = dist_dir / source.relative_to(cwd)
                destination.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(source, destination)
def rebuild_frontend(cwd: Path, frontend_dir: Path) -> str | None:
    """Clean and rebuild the frontend; return the remoteEntry filename, or
    None when the npm build fails."""
    clean_dist_frontend(cwd)
    if run_frontend_build(frontend_dir).returncode != 0:
        click.secho("❌ Frontend build failed", fg="red")
        return None
    entry_name = copy_frontend_dist(cwd)
    click.secho("✅ Frontend rebuilt", fg="green")
    return entry_name
def rebuild_backend(cwd: Path) -> None:
    """Sync backend files into dist/; the manifest is intentionally not touched."""
    copy_backend_files(cwd)
    click.secho("✅ Backend files synced", fg="green")
class FrontendChangeHandler(FileSystemEventHandler):
    """Watchdog handler that fires a rebuild callback on frontend source changes."""

    def __init__(self, trigger_build: Callable[[], None]):
        # Callback invoked once per relevant filesystem event.
        self.trigger_build = trigger_build

    def on_any_event(self, event: Any) -> None:
        # Skip events originating from build output, otherwise every rebuild
        # would itself trigger another rebuild.
        if FRONTEND_DIST_REGEX.search(event.src_path):
            return
        click.secho(f"🔁 Frontend change detected: {event.src_path}", fg="yellow")
        self.trigger_build()
@click.group(help="CLI for validating and bundling Superset extensions.")
def app() -> None:
    # Root command group; subcommands are registered below via @app.command().
    pass
@app.command()
def validate() -> None:
    # Check the local npm toolchain; validate_npm is defined elsewhere in this
    # module and presumably exits on failure — confirm against its definition.
    validate_npm()
    click.secho("✅ Validation successful", fg="green")
@app.command()
@click.pass_context
def build(ctx: click.Context) -> None:
    """Run a full build: validate npm, build frontend/backend, write manifest."""
    ctx.invoke(validate)
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
    backend_dir = cwd / "backend"
    clean_dist(cwd)
    # The frontend is optional; only build it when the directory is present.
    remote_entry = None
    if frontend_dir.exists():
        init_frontend_deps(frontend_dir)
        remote_entry = rebuild_frontend(cwd, frontend_dir)
    # The backend is built independently, and only when pyproject.toml parses
    # to a truthy document.
    if backend_dir.exists() and read_toml(backend_dir / "pyproject.toml"):
        rebuild_backend(cwd)
    # Manifest is written even if the frontend build failed (remote_entry=None).
    write_manifest(cwd, build_manifest(cwd, remote_entry))
    click.secho("✅ Full build completed in dist/", fg="green")
@app.command()
@click.option(
    "--output",
    "-o",
    type=click.Path(path_type=Path, dir_okay=True, file_okay=True, writable=True),
    help="Optional output path or filename for the bundle.",
)
@click.pass_context
def bundle(ctx: click.Context, output: Path | None) -> None:
    """Build the extension and zip the dist/ directory into a .supx archive."""
    ctx.invoke(build)
    cwd = Path.cwd()
    dist_dir = cwd / "dist"
    manifest_path = dist_dir / "manifest.json"
    if not manifest_path.exists():
        click.secho(
            "❌ dist/manifest.json not found. Run `build` first.", err=True, fg="red"
        )
        sys.exit(1)
    manifest = json.loads(manifest_path.read_text())
    default_filename = f"{manifest['id']}-{manifest['version']}.supx"
    # Resolve the archive path: default name in cwd, default name inside a
    # user-supplied directory, or exactly the file the user supplied.
    if output is None:
        zip_path = Path(default_filename)
    elif output.is_dir():
        zip_path = output / default_filename
    else:
        zip_path = output
    try:
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as archive:
            for entry in dist_dir.rglob("*"):
                if entry.is_file():
                    # Archive paths are relative to dist/ so the bundle
                    # unpacks without the dist/ prefix.
                    archive.write(entry, entry.relative_to(dist_dir))
    except Exception as ex:
        click.secho(f"❌ Failed to create bundle: {ex}", err=True, fg="red")
        sys.exit(1)
    click.secho(f"✅ Bundle created: {zip_path}", fg="green")
@app.command()
@click.pass_context
def dev(ctx: click.Context) -> None:
    """Watch mode: build once, then rebuild on frontend/backend file changes."""
    # ctx is injected by @click.pass_context but not used in this command.
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
    backend_dir = cwd / "backend"
    clean_dist(cwd)
    # Build frontend if it exists
    remote_entry = None
    if frontend_dir.exists():
        init_frontend_deps(frontend_dir)
        remote_entry = rebuild_frontend(cwd, frontend_dir)
    # Build backend if it exists
    if backend_dir.exists():
        rebuild_backend(cwd)
    manifest = build_manifest(cwd, remote_entry)
    write_manifest(cwd, manifest)
    def frontend_watcher() -> None:
        # Rebuild the frontend; the walrus binding skips the manifest refresh
        # entirely when the rebuild fails (returns None).
        if frontend_dir.exists():
            if (remote_entry := rebuild_frontend(cwd, frontend_dir)) is not None:
                manifest = build_manifest(cwd, remote_entry)
                write_manifest(cwd, manifest)
    def backend_watcher() -> None:
        # Re-copy backend files, then rewrite the existing dist/manifest.json.
        # NOTE(review): this reloads the previously-written manifest rather
        # than rebuilding it from extension.json, so backend metadata changes
        # are not picked up here — confirm whether that is intentional.
        if backend_dir.exists():
            rebuild_backend(cwd)
            dist_dir = cwd / "dist"
            manifest_path = dist_dir / "manifest.json"
            if manifest_path.exists():
                manifest = json.loads(manifest_path.read_text())
                write_manifest(cwd, manifest)
    # Build watch message based on existing directories
    watch_dirs = []
    if frontend_dir.exists():
        watch_dirs.append(str(frontend_dir))
    if backend_dir.exists():
        watch_dirs.append(str(backend_dir))
    if watch_dirs:
        click.secho(f"👀 Watching for changes in: {', '.join(watch_dirs)}", fg="green")
    else:
        click.secho("⚠️ No frontend or backend directories found to watch", fg="yellow")
    observer = Observer()
    # Only set up watchers for directories that exist
    if frontend_dir.exists():
        frontend_handler = FrontendChangeHandler(trigger_build=frontend_watcher)
        observer.schedule(frontend_handler, str(frontend_dir), recursive=True)
    if backend_dir.exists():
        # A bare handler with on_any_event monkey-patched; every backend
        # filesystem event routes through backend_watcher.
        backend_handler = FileSystemEventHandler()
        backend_handler.on_any_event = lambda event: backend_watcher()
        observer.schedule(backend_handler, str(backend_dir), recursive=True)
    if watch_dirs:
        observer.start()
        try:
            # Keep the main thread alive while observer threads do the work.
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            click.secho("\n🛑 Stopping watch mode", fg="blue")
            observer.stop()
        observer.join()
    else:
        click.secho("❌ No directories to watch. Exiting.", fg="red")
@app.command()
def init() -> None:
    """Interactively scaffold a new extension project under ./<id>."""
    id_ = click.prompt("Extension ID (unique identifier, alphanumeric only)", type=str)
    if not re.match(r"^[a-zA-Z0-9_]+$", id_):
        click.secho(
            "❌ ID must be alphanumeric (letters, digits, underscore).", fg="red"
        )
        sys.exit(1)
    name = click.prompt("Extension name (human-readable display name)", type=str)
    version = click.prompt("Initial version", default="0.1.0")
    license = click.prompt("License", default="Apache-2.0")
    include_frontend = click.confirm("Include frontend?", default=True)
    include_backend = click.confirm("Include backend?", default=True)
    target_dir = Path.cwd() / id_
    if target_dir.exists():
        click.secho(f"❌ Directory {target_dir} already exists.", fg="red")
        sys.exit(1)
    # Templates ship inside this package's templates/ directory.
    templates_dir = Path(__file__).parent / "templates"
    env = Environment(loader=FileSystemLoader(templates_dir))  # noqa: S701
    render_ctx = {
        "id": id_,
        "name": name,
        "include_frontend": include_frontend,
        "include_backend": include_backend,
        "license": license,
        "version": version,
    }
    # Project root plus the extension manifest template.
    target_dir.mkdir()
    (target_dir / "extension.json").write_text(
        env.get_template("extension.json.j2").render(render_ctx)
    )
    click.secho("✅ Created extension.json", fg="green")
    # Optional frontend scaffold (package.json only).
    if include_frontend:
        frontend_dir = target_dir / "frontend"
        frontend_dir.mkdir()
        (frontend_dir / "package.json").write_text(
            env.get_template("frontend/package.json.j2").render(render_ctx)
        )
        click.secho("✅ Created frontend folder structure", fg="green")
    # Optional backend scaffold (pyproject.toml only).
    if include_backend:
        backend_dir = target_dir / "backend"
        backend_dir.mkdir()
        (backend_dir / "pyproject.toml").write_text(
            env.get_template("backend/pyproject.toml.j2").render(render_ctx)
        )
        click.secho("✅ Created backend folder structure", fg="green")
    click.secho(
        f"🎉 Extension {name} (ID: {id_}) initialized at {target_dir}", fg="cyan"
    )
if __name__ == "__main__":
    # Allow running this module directly as a script.
    app()

View File

@@ -0,0 +1,19 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Minimum supported npm version (consumed by the CLI's npm validation,
# defined elsewhere in the package).
MIN_NPM_VERSION = "10.8.2"

View File

@@ -0,0 +1,4 @@
[project]
name = "{{ id }}"
version = "{{ version }}"
license = "{{ license }}"

View File

@@ -0,0 +1,25 @@
{
"id": "{{ id }}",
"name": "{{ name }}",
"version": "{{ version }}",
"license": "{{ license }}",
{% if include_frontend -%}
"frontend": {
"contributions": {
"commands": [],
"views": [],
"menus": []
},
"moduleFederation": {
"exposes": ["./index"]
}
},
{% endif -%}
{% if include_backend -%}
"backend": {
"entryPoints": ["{{ id }}.entrypoint"],
"files": ["backend/src/{{ id }}/**/*.py"]
},
{% endif -%}
"permissions": []
}

View File

@@ -0,0 +1,34 @@
{
"name": "{{ id }}",
"version": "{{ version }}",
"main": "dist/main.js",
"types": "dist/publicAPI.d.ts",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "webpack serve --mode development",
"build": "webpack --stats-error-details --mode production"
},
"keywords": [],
"private": true,
"author": "",
"license": "{{ license }}",
"description": "",
"peerDependencies": {
"@apache-superset/core": "file:../../../superset-frontend/packages/superset-core",
"react": "^17.0.2",
"react-dom": "^17.0.2"
},
"devDependencies": {
"@babel/preset-react": "^7.26.3",
"@babel/preset-typescript": "^7.26.0",
"@types/react": "^19.0.10",
"copy-webpack-plugin": "^13.0.0",
"install": "^0.13.0",
"npm": "^11.1.0",
"ts-loader": "^9.5.2",
"typescript": "^5.8.2",
"webpack": "^5.98.0",
"webpack-cli": "^6.0.1",
"webpack-dev-server": "^5.2.0"
}
}

View File

@@ -0,0 +1,42 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json # noqa: TID251
import sys
from pathlib import Path
from typing import Any
# tomllib entered the standard library in Python 3.11; older interpreters
# fall back to the API-compatible tomli backport under the same name.
if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib
def read_toml(path: Path) -> dict[str, Any] | None:
if not path.is_file():
return None
with path.open("rb") as f:
return tomllib.load(f)
def read_json(path: Path) -> dict[str, Any] | None:
path = Path(path)
if not path.is_file():
return None
return json.loads(path.read_text())

View File

@@ -0,0 +1,206 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Superset CLI Tests
This directory contains tests for the superset-extensions-cli package, focusing on the `init` command and other CLI functionality.
## Test Structure
### Core Test Files
- **`test_cli_init.py`**: Comprehensive tests for the `init` command scaffolder
- **`test_templates.py`**: Unit tests for Jinja2 template rendering
- **`conftest.py`**: Pytest fixtures and configuration
- **`utils.py`**: Reusable testing utilities and helpers
### Test Categories
#### Unit Tests (`@pytest.mark.unit`)
- Template rendering functionality
- Individual function testing
- Input validation logic
#### Integration Tests (`@pytest.mark.integration`)
- Complete CLI command workflows
- End-to-end scaffolding processes
#### CLI Tests (`@pytest.mark.cli`)
- Click command interface testing
- User input simulation
- Command output verification
## Testing Approach for Scaffolders/Generators
The tests use these patterns for testing code generators:
### 1. Isolated Environment Testing
```python
@pytest.fixture
def isolated_filesystem(tmp_path):
"""Provide isolated temporary directory for each test."""
```
### 2. Click CLI Testing Framework
```python
from click.testing import CliRunner
runner = CliRunner()
result = runner.invoke(app, ["init"], input="...")
```
### 3. File Structure Validation
```python
from tests.utils import assert_file_structure, assert_directory_structure
assert_file_structure(extension_path, expected_files)
```
### 4. Template Content Verification
```python
from tests.utils import assert_json_content
assert_json_content(json_path, {"name": "expected_value"})
```
### 5. Parametrized Testing
```python
@pytest.mark.parametrize("include_frontend,include_backend", [
(True, True), (True, False), (False, True), (False, False)
])
```
## Key Test Cases
### Init Command Tests
- ✅ Creates extension with both frontend and backend
- ✅ Creates frontend-only extensions
- ✅ Creates backend-only extensions
- ✅ Validates extension naming (alphanumeric + underscore only)
- ✅ Handles existing directory conflicts
- ✅ Verifies generated file content accuracy
- ✅ Tests custom version and license inputs
- ✅ Integration test for complete workflow
### Template Rendering Tests
- ✅ Extension.json template with various configurations
- ✅ Package.json template rendering
- ✅ Pyproject.toml template rendering
- ✅ Template validation with different names/versions/licenses
- ✅ JSON validity verification
- ✅ Whitespace and formatting checks
## Running Tests
### All tests
```bash
pytest
```
### Specific test categories
```bash
pytest -m unit # Unit tests only
pytest -m integration # Integration tests only
pytest -m cli # CLI tests only
```
### With coverage
```bash
pytest --cov=superset_extensions_cli --cov-report=html
```
### Specific test files
```bash
pytest tests/test_cli_init.py
pytest tests/test_templates.py
```
## Reusable Testing Infrastructure
The testing infrastructure is designed for reusability:
### Test Utilities (`tests/utils.py`)
- `assert_file_exists()` / `assert_directory_exists()`
- `assert_file_structure()` / `assert_directory_structure()`
- `assert_json_content()` / `load_json_file()`
- `create_test_extension_structure()` - Helper for expected structures
### Fixtures (`tests/conftest.py`)
- `cli_runner` - Click CLI runner
- `isolated_filesystem` - Temporary directory with cleanup
- `extension_params` - Default extension parameters
- `cli_input_*` - Pre-configured user inputs
This infrastructure can be easily extended for testing additional CLI commands like `build`, `bundle`, `dev`, and `validate`.
## Best Practices Implemented
1. **Isolation**: Each test runs in its own temporary directory
2. **Comprehensive Coverage**: Tests cover happy paths, edge cases, and error conditions
3. **Realistic Testing**: Uses actual Click CLI runner with realistic user input
4. **Content Verification**: Validates both file existence and content accuracy
5. **Template Testing**: Separates template rendering logic from CLI integration
6. **Reusable Components**: Utilities and fixtures designed for extension
7. **Clear Documentation**: Well-documented test cases and helper functions
8. **Type Safety**: Uses modern Python type annotations with `from __future__ import annotations`

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,136 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
import pytest
from click.testing import CliRunner
@pytest.fixture
def cli_runner():
    """Provide a Click CLI runner for testing commands."""
    # CliRunner captures output and exit codes of invoked commands in-process.
    return CliRunner()
@pytest.fixture
def isolated_filesystem(tmp_path):
    """
    Provide an isolated temporary directory and change to it.

    This ensures tests don't interfere with each other. The original working
    directory is restored via try/finally, so an exception thrown into the
    generator (e.g. during fixture teardown after a test error) cannot leave
    the process stranded inside a deleted tmp directory.
    """
    original_cwd = Path.cwd()
    os.chdir(tmp_path)
    try:
        yield tmp_path
    finally:
        # Without the finally, a GeneratorExit/exception injected at the yield
        # point would skip this restore.
        os.chdir(original_cwd)
@pytest.fixture
def extension_params():
    """Default parameters for extension creation."""
    return dict(
        id="test_extension",
        name="Test Extension",
        version="0.1.0",
        license="Apache-2.0",
        include_frontend=True,
        include_backend=True,
    )
@pytest.fixture
def cli_input_both():
    """CLI input for creating extension with both frontend and backend."""
    # Prompt order: id, name, version, license, frontend? (y), backend? (y).
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n"
@pytest.fixture
def cli_input_frontend_only():
    """CLI input for creating extension with frontend only."""
    # Prompt order: id, name, version, license, frontend? (y), backend? (n).
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\ny\nn\n"
@pytest.fixture
def cli_input_backend_only():
    """CLI input for creating extension with backend only."""
    # Prompt order: id, name, version, license, frontend? (n), backend? (y).
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\nn\ny\n"
@pytest.fixture
def cli_input_neither():
    """CLI input for creating extension with neither frontend nor backend."""
    # Prompt order: id, name, version, license, frontend? (n), backend? (n).
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\nn\nn\n"
@pytest.fixture
def extension_setup_for_dev():
    """Set up extension structure for dev testing."""
    def _setup(base_path: Path) -> None:
        import json
        # Minimal extension metadata for the dev command to read.
        metadata = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        (base_path / "extension.json").write_text(json.dumps(metadata))
        # Empty source directories so the watchers have something to observe.
        for subdir in ("frontend", "backend"):
            (base_path / subdir).mkdir()
    return _setup
@pytest.fixture
def extension_setup_for_bundling():
    """Set up a complete extension structure ready for bundling."""
    def _setup(base_path: Path) -> None:
        import json
        dist_dir = base_path / "dist"
        dist_dir.mkdir(parents=True)
        # Manifest the bundle command expects to find in dist/.
        manifest = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        (dist_dir / "manifest.json").write_text(json.dumps(manifest))
        # Frontend build artifacts, including a remote-entry file.
        frontend_dir = dist_dir / "frontend" / "dist"
        frontend_dir.mkdir(parents=True)
        (frontend_dir / "remoteEntry.abc123.js").write_text("// remote entry")
        (frontend_dir / "main.js").write_text("// main js")
        # Backend package files.
        backend_dir = dist_dir / "backend" / "src" / "test_extension"
        backend_dir.mkdir(parents=True)
        (backend_dir / "__init__.py").write_text("# init")
    return _setup

View File

@@ -0,0 +1,552 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from unittest.mock import Mock, patch
import pytest
from superset_extensions_cli.cli import (
app,
build_manifest,
clean_dist,
copy_backend_files,
copy_frontend_dist,
init_frontend_deps,
)
from tests.utils import (
assert_directory_exists,
assert_file_exists,
)
@pytest.fixture
def extension_with_build_structure():
    """Create extension structure suitable for build testing."""
    def _create(base_path, include_frontend=True, include_backend=True):
        # Create required directories
        if include_frontend:
            frontend_dir = base_path / "frontend"
            frontend_dir.mkdir()
        if include_backend:
            backend_dir = base_path / "backend"
            backend_dir.mkdir()
        # Create extension.json
        extension_json = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        if include_frontend:
            extension_json["frontend"] = {
                "contributions": {"commands": []},
                "moduleFederation": {"exposes": ["./index"]},
            }
        if include_backend:
            extension_json["backend"] = {"entryPoints": ["test_extension.entrypoint"]}
        (base_path / "extension.json").write_text(json.dumps(extension_json))
        # NOTE: frontend_dir/backend_dir are only bound when the matching flag
        # is True; the conditional expressions below guard against reading an
        # unbound local.
        return {
            "frontend_dir": frontend_dir if include_frontend else None,
            "backend_dir": backend_dir if include_backend else None,
        }
    return _create
# Build Command Tests
@pytest.mark.cli
@patch("superset_extensions_cli.cli.validate_npm")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.read_toml")
def test_build_command_success_flow(
    mock_read_toml,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_validate_npm,
    cli_runner,
    isolated_filesystem,
    extension_with_build_structure,
):
    """Test build command success flow.

    @patch decorators apply bottom-up, so the first mock parameter corresponds
    to the last decorator (read_toml).
    """
    # Setup mocks: a successful frontend build and a parseable pyproject.toml.
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_read_toml.return_value = {"project": {"name": "test"}}
    # Create extension structure
    dirs = extension_with_build_structure(isolated_filesystem)
    result = cli_runner.invoke(app, ["build"])
    assert result.exit_code == 0
    assert "✅ Full build completed in dist/" in result.output
    # Verify function calls
    mock_validate_npm.assert_called_once()
    mock_init_frontend_deps.assert_called_once_with(dirs["frontend_dir"])
    mock_rebuild_frontend.assert_called_once()
    mock_rebuild_backend.assert_called_once()
@pytest.mark.cli
@patch("superset_extensions_cli.cli.validate_npm")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
def test_build_command_handles_frontend_build_failure(
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_validate_npm,
    cli_runner,
    isolated_filesystem,
    extension_with_build_structure,
):
    """Test build command handles frontend build failure."""
    # Setup mocks: rebuild_frontend returning None signals a failed npm build.
    mock_rebuild_frontend.return_value = None  # Indicates failure
    # Create extension structure
    extension_with_build_structure(isolated_filesystem)
    result = cli_runner.invoke(app, ["build"])
    # Command should complete and create manifest even with frontend failure;
    # this pins the intentional "best effort" behavior of the build command.
    assert result.exit_code == 0
    assert "✅ Full build completed in dist/" in result.output
# Clean Dist Tests
@pytest.mark.unit
def test_clean_dist_removes_existing_dist_directory(isolated_filesystem):
    """Test clean_dist removes existing dist directory and recreates it."""
    # Populate dist/ with a file and a subdirectory so removal is observable.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    (dist_dir / "some_file.txt").write_text("test content")
    (dist_dir / "subdir").mkdir()
    clean_dist(isolated_filesystem)
    # The directory is recreated, but empty.
    assert_directory_exists(dist_dir)
    assert not any(dist_dir.iterdir())
@pytest.mark.unit
def test_clean_dist_creates_dist_directory_if_missing(isolated_filesystem):
    """Test clean_dist creates dist directory when it doesn't exist."""
    dist_dir = isolated_filesystem / "dist"
    # Precondition: a fresh tmp dir has no dist/ yet.
    assert dist_dir.exists() is False
    clean_dist(isolated_filesystem)
    assert_directory_exists(dist_dir)
# Frontend Dependencies Tests
@pytest.mark.unit
@patch("subprocess.run")
def test_init_frontend_deps_skips_when_node_modules_exists(
    mock_run, isolated_filesystem
):
    """Test init_frontend_deps skips npm ci when node_modules exists."""
    frontend_dir = isolated_filesystem / "frontend"
    # parents=True creates frontend/ and node_modules/ in one call.
    (frontend_dir / "node_modules").mkdir(parents=True)
    init_frontend_deps(frontend_dir)
    # With deps already installed, no subprocess should be spawned.
    mock_run.assert_not_called()
@pytest.mark.unit
@patch("subprocess.run")
@patch("superset_extensions_cli.cli.validate_npm")
def test_init_frontend_deps_runs_npm_i_when_missing(
    mock_validate_npm, mock_run, isolated_filesystem
):
    """Test init_frontend_deps runs npm ci when node_modules is missing."""
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()
    # Mock failed npm ci
    mock_run.return_value = Mock(returncode=0)
    init_frontend_deps(frontend_dir)
    # Should validate npm and run npm ci
    mock_validate_npm.assert_called_once()
    # NOTE(review): the docstring says "npm ci" but the asserted command is
    # `npm i` — confirm which one is intended.
    mock_run.assert_called_once_with(["npm", "i"], cwd=frontend_dir, text=True)
@pytest.mark.unit
@patch("subprocess.run")
@patch("superset_extensions_cli.cli.validate_npm")
def test_init_frontend_deps_exits_on_npm_ci_failure(
    mock_validate_npm, mock_run, isolated_filesystem
):
    """Test init_frontend_deps exits when npm ci fails."""
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()
    # Mock failed npm ci: a non-zero return code must abort the process.
    mock_run.return_value = Mock(returncode=1)
    with pytest.raises(SystemExit) as exc_info:
        init_frontend_deps(frontend_dir)
    # Exit status 1 is part of the CLI's contract.
    assert exc_info.value.code == 1
# Build Manifest Tests
@pytest.mark.unit
def test_build_manifest_creates_correct_manifest_structure(isolated_filesystem):
    """Test build_manifest creates correct manifest from extension.json."""
    # Create extension.json with every optional section populated.
    extension_data = {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": ["read_data"],
        "dependencies": ["some_dep"],
        "frontend": {
            "contributions": {"commands": ["test_command"]},
            "moduleFederation": {"exposes": ["./index"]},
        },
        "backend": {"entryPoints": ["test_extension.entrypoint"]},
    }
    extension_json = isolated_filesystem / "extension.json"
    extension_json.write_text(json.dumps(extension_data))
    manifest = build_manifest(isolated_filesystem, "remoteEntry.abc123.js")
    # Verify manifest structure: top-level fields are carried over verbatim.
    manifest_dict = dict(manifest)
    assert manifest_dict["id"] == "test_extension"
    assert manifest_dict["name"] == "Test Extension"
    assert manifest_dict["version"] == "1.0.0"
    assert manifest_dict["permissions"] == ["read_data"]
    assert manifest_dict["dependencies"] == ["some_dep"]
    # Verify frontend section: contributions/moduleFederation are copied and
    # the remoteEntry filename passed to build_manifest is injected.
    assert "frontend" in manifest
    frontend = manifest["frontend"]
    assert frontend["contributions"] == {"commands": ["test_command"]}
    assert frontend["moduleFederation"] == {"exposes": ["./index"]}
    assert frontend["remoteEntry"] == "remoteEntry.abc123.js"
    # Verify backend section
    assert "backend" in manifest
    assert manifest["backend"]["entryPoints"] == ["test_extension.entrypoint"]
@pytest.mark.unit
def test_build_manifest_handles_minimal_extension(isolated_filesystem):
    """Test build_manifest with minimal extension.json (no frontend/backend)."""
    extension_data = {
        "id": "minimal_extension",
        "name": "Minimal Extension",
        "version": "0.1.0",
        "permissions": [],
    }
    extension_json = isolated_filesystem / "extension.json"
    extension_json.write_text(json.dumps(extension_data))
    # remote_entry=None mirrors a build with no frontend at all.
    manifest = build_manifest(isolated_filesystem, None)
    manifest_dict = dict(manifest)
    assert manifest_dict["id"] == "minimal_extension"
    assert manifest_dict["name"] == "Minimal Extension"
    assert manifest_dict["version"] == "0.1.0"
    assert manifest_dict["permissions"] == []
    assert manifest_dict["dependencies"] == []  # Default empty list
    # Optional sections must be omitted entirely, not present-but-empty.
    assert "frontend" not in manifest
    assert "backend" not in manifest
@pytest.mark.unit
def test_build_manifest_exits_when_extension_json_missing(isolated_filesystem):
    """Test build_manifest exits when extension.json is missing."""
    # No extension.json was written into the isolated tmp dir.
    with pytest.raises(SystemExit) as exc_info:
        build_manifest(isolated_filesystem, "remoteEntry.js")
    assert exc_info.value.code == 1
# Frontend Build Tests
@pytest.mark.unit
def test_clean_dist_frontend_removes_frontend_dist(isolated_filesystem):
    """Test clean_dist_frontend removes frontend/dist directory specifically."""
    from superset_extensions_cli.cli import clean_dist_frontend
    # Create dist/frontend structure
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir(parents=True)
    frontend_dist = dist_dir / "frontend"
    frontend_dist.mkdir()
    (frontend_dist / "some_file.js").write_text("content")
    clean_dist_frontend(isolated_filesystem)
    # Frontend dist should be removed, but dist should remain — the function
    # must only clear the frontend portion of the output tree.
    assert dist_dir.exists()
    assert not frontend_dist.exists()
@pytest.mark.unit
def test_clean_dist_frontend_handles_nonexistent_directory(isolated_filesystem):
    """Test clean_dist_frontend handles case where frontend dist doesn't exist."""
    from superset_extensions_cli.cli import clean_dist_frontend
    # No dist directory exists; the call must be a silent no-op.
    clean_dist_frontend(isolated_filesystem)
    # Should not raise error
@pytest.mark.unit
def test_run_frontend_build_with_output_messages(isolated_filesystem):
    """Test run_frontend_build produces expected output messages."""
    from superset_extensions_cli.cli import run_frontend_build
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()
    with patch("subprocess.run") as mock_run:
        mock_result = Mock(returncode=0)
        mock_run.return_value = mock_result
        result = run_frontend_build(frontend_dir)
        # The CompletedProcess from subprocess.run is returned unchanged.
        assert result.returncode == 0
        mock_run.assert_called_once_with(
            ["npm", "run", "build"], cwd=frontend_dir, text=True
        )
@pytest.mark.unit
@pytest.mark.parametrize(
    "return_code,expected_result",
    [
        (0, "remoteEntry.abc123.js"),
        (1, None),
    ],
)
def test_rebuild_frontend_handles_build_results(
    isolated_filesystem, return_code, expected_result
):
    """Test rebuild_frontend handles different build results.

    A zero return code yields the remoteEntry filename; a non-zero one
    yields None.
    """
    from superset_extensions_cli.cli import rebuild_frontend
    # Create frontend structure
    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()
    if return_code == 0:
        # Create frontend/dist with remoteEntry for success case; the failure
        # case never reaches copy_frontend_dist, so no artifacts are needed.
        frontend_dist = frontend_dir / "dist"
        frontend_dist.mkdir()
        (frontend_dist / "remoteEntry.abc123.js").write_text("content")
    # Create dist directory
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    with patch("superset_extensions_cli.cli.run_frontend_build") as mock_build:
        mock_build.return_value = Mock(returncode=return_code)
        result = rebuild_frontend(isolated_filesystem, frontend_dir)
        assert result == expected_result
# Backend Build Tests
@pytest.mark.unit
def test_rebuild_backend_calls_copy_and_shows_message(isolated_filesystem):
    """Test rebuild_backend calls copy_backend_files and shows success message."""
    from superset_extensions_cli.cli import rebuild_backend
    # Create extension.json (not strictly read here since copy_backend_files
    # is mocked, but keeps the fixture realistic).
    extension_json = {
        "id": "test",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_json))
    with patch("superset_extensions_cli.cli.copy_backend_files") as mock_copy:
        rebuild_backend(isolated_filesystem)
        # rebuild_backend must delegate with the cwd it was given.
        mock_copy.assert_called_once_with(isolated_filesystem)
@pytest.mark.unit
def test_copy_backend_files_skips_non_files(isolated_filesystem):
    """Test copy_backend_files skips directories and non-files."""
    # Create backend structure with directory
    backend_src = isolated_filesystem / "backend" / "src" / "test_ext"
    backend_src.mkdir(parents=True)
    (backend_src / "__init__.py").write_text("# init")
    # Create a subdirectory (should be skipped)
    subdir = backend_src / "subdir"
    subdir.mkdir()
    # Create extension.json with backend file patterns.  The broad "**/*"
    # glob deliberately matches the subdirectory too, so this test proves
    # copy_backend_files filters out non-file matches.
    extension_data = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
        "backend": {
            "files": ["backend/src/test_ext/**/*"]  # Will match both files and dirs
        },
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_data))
    # Create dist directory
    clean_dist(isolated_filesystem)
    copy_backend_files(isolated_filesystem)
    # Verify only files were copied, not directories
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "__init__.py")
    # Directory should not be copied as a file
    copied_subdir = dist_dir / "backend" / "src" / "test_ext" / "subdir"
    # The directory might exist but should be empty since we skip non-files
    if copied_subdir.exists():
        assert list(copied_subdir.iterdir()) == []
@pytest.mark.unit
def test_copy_backend_files_copies_matched_files(isolated_filesystem):
    """Test copy_backend_files copies files matching patterns from extension.json."""
    # Lay down two backend source files that the glob pattern should match.
    src_pkg = isolated_filesystem / "backend" / "src" / "test_ext"
    src_pkg.mkdir(parents=True)
    for fname, body in (("__init__.py", "# init"), ("main.py", "# main")):
        (src_pkg / fname).write_text(body)

    meta = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
        "backend": {"files": ["backend/src/test_ext/**/*.py"]},
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(meta))

    clean_dist(isolated_filesystem)
    copy_backend_files(isolated_filesystem)

    # Both matched files must land under dist/ with the tree preserved.
    out_pkg = isolated_filesystem / "dist" / "backend" / "src" / "test_ext"
    assert_file_exists(out_pkg / "__init__.py")
    assert_file_exists(out_pkg / "main.py")
@pytest.mark.unit
def test_copy_backend_files_handles_no_backend_config(isolated_filesystem):
    """Test copy_backend_files handles extension.json without backend config."""
    meta = {
        "id": "frontend_only",
        "name": "Frontend Only Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(meta))
    clean_dist(isolated_filesystem)
    # A manifest with no "backend" section is valid; the copy must be a no-op
    # rather than raising.
    copy_backend_files(isolated_filesystem)
@pytest.mark.unit
def test_copy_backend_files_exits_when_extension_json_missing(isolated_filesystem):
    """Test copy_backend_files exits when extension.json is missing."""
    clean_dist(isolated_filesystem)
    # With no extension.json present the helper must abort the process
    # with exit code 1.
    with pytest.raises(SystemExit) as exc:
        copy_backend_files(isolated_filesystem)
    assert exc.value.code == 1
# Frontend Dist Copy Tests
@pytest.mark.unit
def test_copy_frontend_dist_copies_files_correctly(isolated_filesystem):
    """Test copy_frontend_dist copies frontend build files to dist."""
    # Build a fake frontend/dist tree: hashed entry file, sibling bundle,
    # and one nested asset, to exercise recursive copying.
    build_dir = isolated_filesystem / "frontend" / "dist"
    (build_dir / "assets").mkdir(parents=True)
    (build_dir / "remoteEntry.abc123.js").write_text("remote entry content")
    (build_dir / "main.js").write_text("main js content")
    (build_dir / "assets" / "style.css").write_text("css content")

    clean_dist(isolated_filesystem)
    remote_entry = copy_frontend_dist(isolated_filesystem)

    # The helper reports the remoteEntry filename it discovered.
    assert remote_entry == "remoteEntry.abc123.js"
    copied = isolated_filesystem / "dist" / "frontend" / "dist"
    assert_file_exists(copied / "remoteEntry.abc123.js")
    assert_file_exists(copied / "main.js")
    assert_file_exists(copied / "assets" / "style.css")
@pytest.mark.unit
def test_copy_frontend_dist_exits_when_no_remote_entry(isolated_filesystem):
    """Test copy_frontend_dist exits when no remoteEntry file found."""
    # frontend/dist exists but deliberately lacks any remoteEntry*.js file.
    build_dir = isolated_filesystem / "frontend" / "dist"
    build_dir.mkdir(parents=True)
    (build_dir / "main.js").write_text("main content")

    clean_dist(isolated_filesystem)
    # Missing remoteEntry must abort with exit code 1.
    with pytest.raises(SystemExit) as exc:
        copy_frontend_dist(isolated_filesystem)
    assert exc.value.code == 1

View File

@@ -0,0 +1,255 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import zipfile
from unittest.mock import patch
import pytest
from superset_extensions_cli.cli import app
from tests.utils import assert_file_exists
# Bundle Command Tests
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_creates_zip_with_default_name(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command creates zip file with default name."""
    # Mock the build command to do nothing (we'll set up dist manually)
    mock_build.return_value = None
    # Setup extension for bundling (this creates the dist structure)
    extension_setup_for_bundling(isolated_filesystem)
    result = cli_runner.invoke(app, ["bundle"])
    assert result.exit_code == 0
    # Default bundle name appears to be "<id>-<version>.supx" derived from
    # the manifest created by the fixture — confirm against the fixture.
    assert "✅ Bundle created: test_extension-1.0.0.supx" in result.output
    # Verify zip file was created
    zip_path = isolated_filesystem / "test_extension-1.0.0.supx"
    assert_file_exists(zip_path)
    # Verify zip contents: manifest plus frontend and backend artifacts,
    # stored with paths relative to the dist/ root.
    with zipfile.ZipFile(zip_path, "r") as zipf:
        file_list = zipf.namelist()
        assert "manifest.json" in file_list
        assert "frontend/dist/remoteEntry.abc123.js" in file_list
        assert "frontend/dist/main.js" in file_list
        assert "backend/src/test_extension/__init__.py" in file_list
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_with_custom_output_filename(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command with custom output filename."""
    # Stub the build step; the fixture provides a ready-made dist tree.
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    custom_name = "my_custom_bundle.supx"
    outcome = cli_runner.invoke(app, ["bundle", "--output", custom_name])

    assert outcome.exit_code == 0
    assert f"✅ Bundle created: {custom_name}" in outcome.output
    # The archive must be written under the requested custom name.
    assert_file_exists(isolated_filesystem / custom_name)
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_with_output_directory(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command with output directory."""
    # Stub the build step; the fixture provides a ready-made dist tree.
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    target_dir = isolated_filesystem / "output"
    target_dir.mkdir()

    outcome = cli_runner.invoke(app, ["bundle", "--output", str(target_dir)])
    assert outcome.exit_code == 0

    # When --output names a directory, the default bundle filename is
    # placed inside it.
    bundle_path = target_dir / "test_extension-1.0.0.supx"
    assert_file_exists(bundle_path)
    assert f"✅ Bundle created: {bundle_path}" in outcome.output
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_fails_without_manifest(
    mock_build, cli_runner, isolated_filesystem
):
    """Test bundle command fails when manifest.json doesn't exist."""
    # The (stubbed) build succeeds but leaves dist/ empty, so no
    # manifest.json is ever produced.
    mock_build.return_value = None
    (isolated_filesystem / "dist").mkdir()

    outcome = cli_runner.invoke(app, ["bundle"])
    assert outcome.exit_code == 1
    assert "dist/manifest.json not found" in outcome.output
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_handles_zip_creation_error(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command handles zip file creation errors."""
    # Mock the build command
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)
    # Output path under a directory that doesn't exist.  Note the actual
    # failure is forced by patching zipfile.ZipFile below, so the path
    # itself is only illustrative.
    invalid_path = isolated_filesystem / "nonexistent" / "bundle.supx"
    with patch("zipfile.ZipFile", side_effect=OSError("Permission denied")):
        result = cli_runner.invoke(app, ["bundle", "--output", str(invalid_path)])
    # The OSError must be surfaced as a clean CLI failure, not a traceback.
    assert result.exit_code == 1
    assert "Failed to create bundle" in result.output
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_includes_all_files_recursively(
    mock_build, cli_runner, isolated_filesystem
):
    """Test that bundle includes all files from dist directory recursively."""
    # Mock the build command
    mock_build.return_value = None
    # Create complex dist structure by hand (no fixture) so the test
    # controls every nested path that should end up in the archive.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir(parents=True)
    # Manifest
    manifest = {
        "id": "complex_extension",
        "name": "Complex Extension",
        "version": "2.1.0",
        "permissions": [],
    }
    (dist_dir / "manifest.json").write_text(json.dumps(manifest))
    # Frontend files with nested structure
    frontend_dir = dist_dir / "frontend" / "dist"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "remoteEntry.xyz789.js").write_text("// entry")
    assets_dir = frontend_dir / "assets"
    assets_dir.mkdir()
    (assets_dir / "style.css").write_text("/* css */")
    # Binary content too, to confirm non-text files are archived intact.
    (assets_dir / "image.png").write_bytes(b"fake image data")
    # Backend files with nested structure
    backend_dir = dist_dir / "backend" / "src" / "complex_extension"
    backend_dir.mkdir(parents=True)
    (backend_dir / "__init__.py").write_text("# init")
    (backend_dir / "core.py").write_text("# core")
    utils_dir = backend_dir / "utils"
    utils_dir.mkdir()
    (utils_dir / "helpers.py").write_text("# helpers")
    result = cli_runner.invoke(app, ["bundle"])
    assert result.exit_code == 0
    # Verify zip file and contents; the bundle name is derived from the
    # manifest's id and version.
    zip_path = isolated_filesystem / "complex_extension-2.1.0.supx"
    assert_file_exists(zip_path)
    with zipfile.ZipFile(zip_path, "r") as zipf:
        file_list = set(zipf.namelist())
        # Verify all files are included
        expected_files = {
            "manifest.json",
            "frontend/dist/remoteEntry.xyz789.js",
            "frontend/dist/assets/style.css",
            "frontend/dist/assets/image.png",
            "backend/src/complex_extension/__init__.py",
            "backend/src/complex_extension/core.py",
            "backend/src/complex_extension/utils/helpers.py",
        }
        # Subset check (not equality) so extra metadata entries don't fail
        # the test; the message lists exactly what is missing.
        assert expected_files.issubset(
            file_list
        ), f"Missing files: {expected_files - file_list}"
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_short_option(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """Test bundle command with short -o option."""
    # Stub the build step; the fixture provides a ready-made dist tree.
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    outcome = cli_runner.invoke(app, ["bundle", "-o", "short_option.supx"])

    # "-o" must behave exactly like "--output".
    assert outcome.exit_code == 0
    assert "✅ Bundle created: short_option.supx" in outcome.output
    assert_file_exists(isolated_filesystem / "short_option.supx")
@pytest.mark.cli
@pytest.mark.parametrize("output_option", ["--output", "-o"])
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_output_options(
    mock_build,
    output_option,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_bundling,
):
    """Test bundle command with both long and short output options."""
    # Stub the build step; the fixture provides a ready-made dist tree.
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)
    filename = f"test_{output_option.replace('-', '')}.supx"
    result = cli_runner.invoke(app, ["bundle", output_option, filename])
    assert result.exit_code == 0
    # Fix: assert against the actual requested filename.  The previous
    # assertion used a literal "(unknown)" placeholder, which can never
    # appear in the CLI output, so the message check always failed (or,
    # if the suite passed, was never exercised as intended).
    assert f"✅ Bundle created: {filename}" in result.output
    assert_file_exists(isolated_filesystem / filename)

View File

@@ -0,0 +1,238 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import threading
import time
from unittest.mock import Mock, patch
import pytest
from superset_extensions_cli.cli import app, FrontendChangeHandler
# Dev Command Tests
@pytest.mark.cli
@patch("superset_extensions_cli.cli.Observer")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.build_manifest")
@patch("superset_extensions_cli.cli.write_manifest")
def test_dev_command_starts_watchers(
    # NOTE: mock parameters arrive in reverse order of the stacked
    # @patch decorators (bottom-most decorator -> first parameter).
    mock_write_manifest,
    mock_build_manifest,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_observer_class,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_dev,
):
    """Test dev command starts file watchers."""
    # Setup mocks
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_build_manifest.return_value = {"name": "test", "version": "1.0.0"}
    mock_observer = Mock()
    mock_observer_class.return_value = mock_observer
    extension_setup_for_dev(isolated_filesystem)
    # Run dev command in a thread since it's blocking
    def run_dev():
        try:
            cli_runner.invoke(app, ["dev"], catch_exceptions=False)
        except KeyboardInterrupt:
            pass
    dev_thread = threading.Thread(target=run_dev)
    dev_thread.daemon = True  # don't block test-process exit
    dev_thread.start()
    # Let it start up.  NOTE(review): a fixed 0.1 s sleep is a race —
    # on a slow machine the dev command may not have reached the watcher
    # setup yet; consider polling the mocks instead.
    time.sleep(0.1)
    # Verify observer methods were called: watches scheduled and started.
    mock_observer.schedule.assert_called()
    mock_observer.start.assert_called_once()
    # Initial setup calls: deps install once, then build + manifest write.
    mock_init_frontend_deps.assert_called_once()
    mock_rebuild_frontend.assert_called()
    mock_rebuild_backend.assert_called()
    mock_build_manifest.assert_called()
    mock_write_manifest.assert_called()
@pytest.mark.cli
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.build_manifest")
@patch("superset_extensions_cli.cli.write_manifest")
def test_dev_command_initial_build(
    # Mock parameters are in reverse order of the stacked @patch decorators.
    mock_write_manifest,
    mock_build_manifest,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_dev,
):
    """Test dev command performs initial build setup."""
    # Setup mocks
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_build_manifest.return_value = {"name": "test", "version": "1.0.0"}
    extension_setup_for_dev(isolated_filesystem)
    with patch("superset_extensions_cli.cli.Observer") as mock_observer_class:
        mock_observer = Mock()
        mock_observer_class.return_value = mock_observer
        # Patching time.sleep to raise KeyboardInterrupt breaks the dev
        # command out of its (otherwise infinite) watch loop after the
        # initial build completes — presumably the loop calls time.sleep;
        # confirm against the dev command implementation.
        with patch("time.sleep", side_effect=KeyboardInterrupt):
            try:
                cli_runner.invoke(app, ["dev"], catch_exceptions=False)
            except KeyboardInterrupt:
                pass
    # Verify initial build steps ran exactly once, with the expected paths.
    frontend_dir = isolated_filesystem / "frontend"
    mock_init_frontend_deps.assert_called_once_with(frontend_dir)
    mock_rebuild_frontend.assert_called_once_with(isolated_filesystem, frontend_dir)
    mock_rebuild_backend.assert_called_once_with(isolated_filesystem)
# FrontendChangeHandler Tests
@pytest.mark.unit
def test_frontend_change_handler_init():
    """Test FrontendChangeHandler initialization."""
    build_cb = Mock()
    watcher = FrontendChangeHandler(trigger_build=build_cb)
    # The callback must be stored verbatim on the handler instance.
    assert watcher.trigger_build == build_cb
@pytest.mark.unit
def test_frontend_change_handler_ignores_dist_changes():
    """Test FrontendChangeHandler ignores changes in dist directory."""
    build_cb = Mock()
    watcher = FrontendChangeHandler(trigger_build=build_cb)

    # Simulate a filesystem event originating under dist/ (a build artifact).
    event = Mock()
    event.src_path = "/path/to/frontend/dist/file.js"
    watcher.on_any_event(event)

    # Artifact changes must not retrigger a build (would cause a loop).
    build_cb.assert_not_called()
@pytest.mark.unit
@pytest.mark.parametrize(
    "source_path",
    [
        "/path/to/frontend/src/component.tsx",
        "/path/to/frontend/webpack.config.js",
        "/path/to/frontend/package.json",
    ],
)
def test_frontend_change_handler_triggers_on_source_changes(source_path):
    """Test FrontendChangeHandler triggers build on source changes."""
    build_cb = Mock()
    watcher = FrontendChangeHandler(trigger_build=build_cb)

    # Any change outside dist/ (sources, configs) should schedule a build.
    event = Mock()
    event.src_path = source_path
    watcher.on_any_event(event)

    build_cb.assert_called_once()
# Dev Utility Functions Tests
@pytest.mark.unit
def test_frontend_watcher_function_coverage(isolated_filesystem):
    """Test frontend watcher function for coverage."""
    # NOTE(review): this test re-implements the frontend_watcher logic
    # inline and calls the *mocks* directly — it does not execute any
    # production code, so it verifies only the expected call wiring.
    # Create extension.json
    extension_json = {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(extension_json))
    # Create dist directory
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    with patch("superset_extensions_cli.cli.rebuild_frontend") as mock_rebuild:
        with patch("superset_extensions_cli.cli.build_manifest") as mock_build:
            with patch("superset_extensions_cli.cli.write_manifest") as mock_write:
                mock_rebuild.return_value = "remoteEntry.abc123.js"
                mock_build.return_value = {"name": "test", "version": "1.0.0"}
                # Simulate frontend watcher function logic
                frontend_dir = isolated_filesystem / "frontend"
                frontend_dir.mkdir()
                # Actually call the functions to simulate the frontend_watcher:
                # rebuild -> (on success) build manifest -> write manifest.
                if (
                    remote_entry := mock_rebuild(isolated_filesystem, frontend_dir)
                ) is not None:
                    manifest = mock_build(isolated_filesystem, remote_entry)
                    mock_write(isolated_filesystem, manifest)
                mock_rebuild.assert_called_once_with(isolated_filesystem, frontend_dir)
                mock_build.assert_called_once_with(
                    isolated_filesystem, "remoteEntry.abc123.js"
                )
                mock_write.assert_called_once_with(
                    isolated_filesystem, {"name": "test", "version": "1.0.0"}
                )
@pytest.mark.unit
def test_backend_watcher_function_coverage(isolated_filesystem):
    """Test backend watcher function for coverage."""
    # NOTE(review): like the frontend variant above, this simulates the
    # backend watcher inline and exercises only the mocks, not production
    # code.
    # Create dist directory with manifest
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    manifest_data = {"name": "test", "version": "1.0.0"}
    (dist_dir / "manifest.json").write_text(json.dumps(manifest_data))
    with patch("superset_extensions_cli.cli.rebuild_backend") as mock_rebuild:
        with patch("superset_extensions_cli.cli.write_manifest") as mock_write:
            # Simulate backend watcher function: rebuild backend, then
            # re-write the manifest if one already exists on disk.
            mock_rebuild(isolated_filesystem)
            manifest_path = dist_dir / "manifest.json"
            if manifest_path.exists():
                manifest = json.loads(manifest_path.read_text())
                mock_write(isolated_filesystem, manifest)
            mock_rebuild.assert_called_once_with(isolated_filesystem)
            mock_write.assert_called_once()

View File

@@ -0,0 +1,362 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pathlib import Path
import pytest
from superset_extensions_cli.cli import app
from tests.utils import (
assert_directory_exists,
assert_directory_structure,
assert_file_exists,
assert_file_structure,
assert_json_content,
create_test_extension_structure,
load_json_file,
)
# Init Command Tests
@pytest.mark.cli
def test_init_creates_extension_with_both_frontend_and_backend(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that init creates a complete extension with both frontend and backend."""
    # cli_input_both feeds the interactive prompts (id/name/version/license
    # plus yes to both frontend and backend) — see the fixture definition.
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"
    assert (
        "🎉 Extension Test Extension (ID: test_extension) initialized" in result.output
    )
    # Verify directory structure
    extension_path = isolated_filesystem / "test_extension"
    assert_directory_exists(extension_path, "main extension directory")
    # The helper returns the canonical expected dirs/files for a full
    # (frontend + backend) scaffold.
    expected_structure = create_test_extension_structure(
        isolated_filesystem,
        "test_extension",
        include_frontend=True,
        include_backend=True,
    )
    # Check directories
    assert_directory_structure(extension_path, expected_structure["expected_dirs"])
    # Check files
    assert_file_structure(extension_path, expected_structure["expected_files"])
@pytest.mark.cli
def test_init_creates_extension_with_frontend_only(
    cli_runner, isolated_filesystem, cli_input_frontend_only
):
    """Test that init creates extension with only frontend components."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_frontend_only)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"

    ext_root = isolated_filesystem / "test_extension"
    assert_directory_exists(ext_root)
    # Frontend scaffolding must be present...
    assert_directory_exists(ext_root / "frontend")
    assert_file_exists(ext_root / "frontend" / "package.json")
    # ...while no backend tree should have been generated.
    assert (
        not (ext_root / "backend").exists()
    ), "Backend directory should not exist for frontend-only extension"
@pytest.mark.cli
def test_init_creates_extension_with_backend_only(
    cli_runner, isolated_filesystem, cli_input_backend_only
):
    """Test that init creates extension with only backend components."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_backend_only)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"

    ext_root = isolated_filesystem / "test_extension"
    assert_directory_exists(ext_root)
    # Backend scaffolding must be present...
    assert_directory_exists(ext_root / "backend")
    assert_file_exists(ext_root / "backend" / "pyproject.toml")
    # ...while no frontend tree should have been generated.
    assert (
        not (ext_root / "frontend").exists()
    ), "Frontend directory should not exist for backend-only extension"
@pytest.mark.cli
def test_init_creates_extension_with_neither_frontend_nor_backend(
    cli_runner, isolated_filesystem, cli_input_neither
):
    """Test that init creates minimal extension with neither frontend nor backend."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_neither)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"

    ext_root = isolated_filesystem / "test_extension"
    assert_directory_exists(ext_root)
    # A bare extension gets only its manifest — no frontend/backend trees.
    assert_file_exists(ext_root / "extension.json")
    assert not (ext_root / "frontend").exists()
    assert not (ext_root / "backend").exists()
@pytest.mark.cli
@pytest.mark.parametrize(
    "invalid_name,expected_error",
    [
        ("test-extension", "must be alphanumeric"),
        ("test extension", "must be alphanumeric"),
        ("test.extension", "must be alphanumeric"),
        ("test@extension", "must be alphanumeric"),
        ("", "must be alphanumeric"),
    ],
)
def test_init_validates_extension_name(
    cli_runner, isolated_filesystem, invalid_name, expected_error
):
    """Test that init validates extension names according to regex pattern."""
    # NOTE(review): the input stream supplies only the id and later prompt
    # answers — presumably the invalid id aborts init before the remaining
    # prompts (name, etc.) consume input; confirm against the init prompts.
    cli_input = f"{invalid_name}\n0.1.0\nApache-2.0\ny\ny\n"
    result = cli_runner.invoke(app, ["init"], input=cli_input)
    assert (
        result.exit_code == 1
    ), f"Expected command to fail for invalid name '{invalid_name}'"
    assert expected_error in result.output
@pytest.mark.cli
def test_init_accepts_numeric_extension_name(cli_runner, isolated_filesystem):
    """Test that init accepts numeric extension ids like '123'."""
    # Prompt answers: id, name, version, license, frontend? yes, backend? yes.
    cli_input = "123\n123\n0.1.0\nApache-2.0\ny\ny\n"
    result = cli_runner.invoke(app, ["init"], input=cli_input)
    assert result.exit_code == 0, f"Numeric id '123' should be valid: {result.output}"
    # Relative path — relies on isolated_filesystem also being the CWD;
    # presumably the fixture chdirs into the temp dir (confirm in conftest).
    assert Path("123").exists(), "Directory for '123' should be created"
@pytest.mark.cli
@pytest.mark.parametrize(
    "valid_id", ["test123", "TestExtension", "test_extension_123", "MyExt_1"]
)
def test_init_with_valid_alphanumeric_names(cli_runner, valid_id):
    """Test that init accepts various valid alphanumeric names."""
    # Uses the runner's own isolated filesystem (not the fixture) so each
    # parametrized case starts in a fresh CWD.
    with cli_runner.isolated_filesystem():
        # Prompt answers: id, display name, version, license, frontend?, backend?.
        cli_input = f"{valid_id}\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n"
        result = cli_runner.invoke(app, ["init"], input=cli_input)
        assert (
            result.exit_code == 0
        ), f"Valid name '{valid_id}' was rejected: {result.output}"
        assert Path(valid_id).exists(), f"Directory for '{valid_id}' was not created"
@pytest.mark.cli
def test_init_fails_when_directory_already_exists(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that init fails gracefully when target directory already exists."""
    # Pre-create the target directory so init collides with it.
    (isolated_filesystem / "test_extension").mkdir()

    result = cli_runner.invoke(app, ["init"], input=cli_input_both)

    assert result.exit_code == 1, "Command should fail when directory already exists"
    assert "already exists" in result.output
@pytest.mark.cli
def test_extension_json_content_is_correct(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that the generated extension.json has the correct content."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0
    extension_path = isolated_filesystem / "test_extension"
    extension_json_path = extension_path / "extension.json"
    # Verify the JSON structure and values: top-level metadata mirrors the
    # prompt answers from the cli_input_both fixture.
    assert_json_content(
        extension_json_path,
        {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "0.1.0",
            "license": "Apache-2.0",
            "permissions": [],
        },
    )
    # Load and verify more complex nested structures
    content = load_json_file(extension_json_path)
    # Verify frontend section exists and has correct structure:
    # empty contribution lists plus the module-federation entry point.
    assert "frontend" in content
    frontend = content["frontend"]
    assert "contributions" in frontend
    assert "moduleFederation" in frontend
    assert frontend["contributions"] == {"commands": [], "views": [], "menus": []}
    assert frontend["moduleFederation"] == {"exposes": ["./index"]}
    # Verify backend section exists and has correct structure:
    # the entry point and file glob are derived from the extension id.
    assert "backend" in content
    backend = content["backend"]
    assert "entryPoints" in backend
    assert "files" in backend
    assert backend["entryPoints"] == ["test_extension.entrypoint"]
    assert backend["files"] == ["backend/src/test_extension/**/*.py"]
@pytest.mark.cli
def test_frontend_package_json_content_is_correct(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that the generated frontend/package.json has the correct content."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    pkg_path = (
        isolated_filesystem / "test_extension" / "frontend" / "package.json"
    )
    # Top-level metadata mirrors the values entered at the prompts.
    assert_json_content(
        pkg_path,
        {
            "name": "test_extension",
            "version": "0.1.0",
            "license": "Apache-2.0",
        },
    )
    # The scaffold must wire up a build script and the core peer dependency.
    pkg = load_json_file(pkg_path)
    assert "scripts" in pkg
    assert "build" in pkg["scripts"]
    assert "peerDependencies" in pkg
    assert "@apache-superset/core" in pkg["peerDependencies"]
@pytest.mark.cli
def test_backend_pyproject_toml_is_created(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that the generated backend/pyproject.toml file is created."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    toml_path = (
        isolated_filesystem / "test_extension" / "backend" / "pyproject.toml"
    )
    assert_file_exists(toml_path, "backend pyproject.toml")

    # Spot-check the raw text (no TOML parsing) for the prompted metadata.
    text = toml_path.read_text()
    for expected in ("test_extension", "0.1.0", "Apache-2.0"):
        assert expected in text
@pytest.mark.cli
def test_init_command_output_messages(cli_runner, isolated_filesystem, cli_input_both):
    """Test that init command produces expected output messages."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    # Each scaffolding step announces itself, followed by a final summary.
    for message in (
        "✅ Created extension.json",
        "✅ Created frontend folder structure",
        "✅ Created backend folder structure",
        "🎉 Extension Test Extension (ID: test_extension) initialized",
    ):
        assert message in result.output
@pytest.mark.cli
def test_init_with_custom_version_and_license(cli_runner, isolated_filesystem):
    """Test init with custom version and license parameters."""
    # Prompt answers: id, name, version, license, frontend? yes, backend? no.
    answers = "my_extension\nMy Extension\n2.1.0\nMIT\ny\nn\n"
    result = cli_runner.invoke(app, ["init"], input=answers)
    assert result.exit_code == 0

    # The manifest must echo the custom version and license verbatim.
    assert_json_content(
        isolated_filesystem / "my_extension" / "extension.json",
        {
            "id": "my_extension",
            "name": "My Extension",
            "version": "2.1.0",
            "license": "MIT",
        },
    )
@pytest.mark.integration
@pytest.mark.cli
def test_full_init_workflow_integration(cli_runner, isolated_filesystem):
    """Integration test for the complete init workflow."""
    # Test the complete flow with realistic user input:
    # id, name, version, license, frontend? yes, backend? yes.
    cli_input = "awesome_charts\nAwesome Charts\n1.0.0\nApache-2.0\ny\ny\n"
    result = cli_runner.invoke(app, ["init"], input=cli_input)
    # Verify success
    assert result.exit_code == 0
    # Verify complete directory structure
    extension_path = isolated_filesystem / "awesome_charts"
    expected_structure = create_test_extension_structure(
        isolated_filesystem,
        "awesome_charts",
        include_frontend=True,
        include_backend=True,
    )
    # Comprehensive structure verification
    assert_directory_structure(extension_path, expected_structure["expected_dirs"])
    assert_file_structure(extension_path, expected_structure["expected_files"])
    # Verify all generated files have correct content: the manifest echoes
    # the prompt answers, and the frontend/backend scaffolds embed the id.
    extension_json = load_json_file(extension_path / "extension.json")
    assert extension_json["id"] == "awesome_charts"
    assert extension_json["name"] == "Awesome Charts"
    assert extension_json["version"] == "1.0.0"
    assert extension_json["license"] == "Apache-2.0"
    package_json = load_json_file(extension_path / "frontend" / "package.json")
    assert package_json["name"] == "awesome_charts"
    pyproject_content = (extension_path / "backend" / "pyproject.toml").read_text()
    assert "awesome_charts" in pyproject_content

View File

@@ -0,0 +1,195 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest.mock import Mock, patch
import pytest
from superset_extensions_cli.cli import app, validate_npm
# Validate Command Tests
@pytest.mark.cli
def test_validate_command_success(cli_runner):
    """Test validate command succeeds when npm is available and valid."""
    # Stub the npm check so only the CLI command path is under test.
    with patch("superset_extensions_cli.cli.validate_npm") as npm_check:
        outcome = cli_runner.invoke(app, ["validate"])
        assert outcome.exit_code == 0
        assert "✅ Validation successful" in outcome.output
        npm_check.assert_called_once()
@pytest.mark.cli
def test_validate_command_calls_npm_validation(cli_runner):
    """Test that validate command calls the npm validation function."""
    with patch("superset_extensions_cli.cli.validate_npm") as npm_check:
        cli_runner.invoke(app, ["validate"])
        # The command must delegate to validate_npm exactly once.
        npm_check.assert_called_once()
# Validate NPM Function Tests
@pytest.mark.unit
@patch("shutil.which")
def test_validate_npm_fails_when_npm_not_on_path(mock_which):
    """Test validate_npm fails when npm is not on PATH."""
    mock_which.return_value = None  # simulate npm missing from PATH
    with pytest.raises(SystemExit) as caught:
        validate_npm()
    assert caught.value.code == 1
    mock_which.assert_called_once_with("npm")
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_fails_when_npm_command_fails(mock_run, mock_which):
    """Test validate_npm fails when npm -v command fails."""
    mock_which.return_value = "/usr/bin/npm"
    # Non-zero return code from `npm -v` should abort with exit code 1.
    mock_run.return_value = Mock(returncode=1, stderr="Command failed")
    with pytest.raises(SystemExit) as caught:
        validate_npm()
    assert caught.value.code == 1
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_fails_when_version_too_low(mock_run, mock_which):
    """Test validate_npm fails when npm version is below minimum."""
    mock_which.return_value = "/usr/bin/npm"
    # 9.x is below the required minimum, so validation must exit with 1.
    mock_run.return_value = Mock(returncode=0, stdout="9.0.0\n", stderr="")
    with pytest.raises(SystemExit) as caught:
        validate_npm()
    assert caught.value.code == 1
@pytest.mark.unit
@pytest.mark.parametrize(
    "npm_version",
    [
        "10.8.2",  # exact minimum version
        "11.0.0",  # higher version
        "10.9.0-alpha.1",  # pre-release version above the minimum
    ],
)
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_succeeds_with_valid_versions(mock_run, mock_which, npm_version):
    """Test validate_npm succeeds when npm version is valid."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout=f"{npm_version}\n", stderr="")
    validate_npm()  # must complete without raising SystemExit
@pytest.mark.unit
@pytest.mark.parametrize(
    "npm_version,should_pass",
    [
        ("10.8.2", True),  # exact minimum version
        ("10.8.1", False),  # slightly lower version
        ("10.9.0-alpha.1", True),  # pre-release above the minimum
        ("9.9.9", False),  # much lower version
        ("11.0.0", True),  # much higher version
    ],
)
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_version_comparison_edge_cases(
    mock_run, mock_which, npm_version, should_pass
):
    """Test npm version comparison with edge cases."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout=f"{npm_version}\n", stderr="")
    if not should_pass:
        with pytest.raises(SystemExit):
            validate_npm()
    else:
        validate_npm()  # valid version: no SystemExit expected
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_handles_file_not_found_exception(mock_run, mock_which):
    """Test validate_npm handles FileNotFoundError gracefully."""
    mock_which.return_value = "/usr/bin/npm"
    # A vanished binary should be converted into a clean exit, not a traceback.
    mock_run.side_effect = FileNotFoundError("Test error")
    with pytest.raises(SystemExit) as caught:
        validate_npm()
    assert caught.value.code == 1
@pytest.mark.unit
@pytest.mark.parametrize(
    "exception_type",
    [
        OSError,
        PermissionError,
    ],
)
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_does_not_catch_other_subprocess_exceptions(
    mock_run, mock_which, exception_type
):
    """Test validate_npm does not catch OSError and PermissionError (they propagate up)."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.side_effect = exception_type("Test error")
    # Unlike FileNotFoundError, these must propagate to the caller unchanged.
    with pytest.raises(exception_type):
        validate_npm()
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_with_malformed_version_output_raises_error(mock_run, mock_which):
    """Test validate_npm raises ValueError with malformed version output."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout="not-a-version\n", stderr="")
    # semver.compare rejects strings that are not valid semantic versions.
    with pytest.raises(ValueError):
        validate_npm()
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_with_empty_version_output_raises_error(mock_run, mock_which):
    """Test validate_npm raises ValueError with empty version output."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout="", stderr="")
    # An empty string cannot be parsed as a semantic version.
    with pytest.raises(ValueError):
        validate_npm()

View File

@@ -0,0 +1,331 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from pathlib import Path
import pytest
from jinja2 import Environment, FileSystemLoader
@pytest.fixture
def templates_dir():
    """Get the templates directory path."""
    package_root = Path(__file__).parent.parent
    return package_root / "src" / "superset_extensions_cli" / "templates"
@pytest.fixture
def jinja_env(templates_dir):
    """Create a Jinja2 environment for testing templates."""
    loader = FileSystemLoader(templates_dir)
    return Environment(loader=loader)
@pytest.fixture
def template_context():
    """Default template context for testing."""
    # Both optional sections enabled so templates render their full output.
    return dict(
        id="test_extension",
        name="Test Extension",
        version="0.1.0",
        license="Apache-2.0",
        include_frontend=True,
        include_backend=True,
    )
# Extension JSON Template Tests
@pytest.mark.unit
def test_extension_json_template_renders_with_both_frontend_and_backend(
    jinja_env, template_context
):
    """Test extension.json template renders correctly with both frontend and backend."""
    rendered = jinja_env.get_template("extension.json.j2").render(template_context)
    manifest = json.loads(rendered)  # must be valid JSON

    # Top-level metadata fields.
    assert manifest["id"] == "test_extension"
    assert manifest["name"] == "Test Extension"
    assert manifest["version"] == "0.1.0"
    assert manifest["license"] == "Apache-2.0"
    assert manifest["permissions"] == []

    # Frontend section with empty contribution points and MF exposes.
    assert "frontend" in manifest
    frontend = manifest["frontend"]
    assert "contributions" in frontend
    assert "moduleFederation" in frontend
    assert frontend["contributions"] == {"commands": [], "views": [], "menus": []}
    assert frontend["moduleFederation"] == {"exposes": ["./index"]}

    # Backend section derived from the extension id.
    assert "backend" in manifest
    backend = manifest["backend"]
    assert backend["entryPoints"] == ["test_extension.entrypoint"]
    assert backend["files"] == ["backend/src/test_extension/**/*.py"]
@pytest.mark.unit
@pytest.mark.parametrize(
    "include_frontend,include_backend,expected_sections",
    [
        (True, False, ["frontend"]),
        (False, True, ["backend"]),
        (False, False, []),
    ],
)
def test_extension_json_template_renders_with_different_configurations(
    jinja_env, template_context, include_frontend, include_backend, expected_sections
):
    """Test extension.json template renders correctly with different configurations."""
    template_context["include_frontend"] = include_frontend
    template_context["include_backend"] = include_backend
    rendered = jinja_env.get_template("extension.json.j2").render(template_context)
    manifest = json.loads(rendered)

    # Exactly the requested sections appear — nothing more, nothing less.
    for section in ("frontend", "backend"):
        if section in expected_sections:
            assert section in manifest, f"Expected section '{section}' not found"
        else:
            assert section not in manifest, f"Unexpected section '{section}' found"
# Frontend Package JSON Template Tests
@pytest.mark.unit
def test_frontend_package_json_template_renders_correctly(jinja_env, template_context):
    """Test frontend/package.json template renders correctly."""
    rendered = jinja_env.get_template("frontend/package.json.j2").render(template_context)
    pkg = json.loads(rendered)

    # Basic package metadata.
    assert pkg["name"] == "test_extension"
    assert pkg["version"] == "0.1.0"
    assert pkg["license"] == "Apache-2.0"
    assert pkg["private"] is True

    # Build scripts, with a webpack-based build.
    assert "scripts" in pkg
    build_scripts = pkg["scripts"]
    assert "start" in build_scripts
    assert "build" in build_scripts
    assert "webpack" in build_scripts["build"]

    # Runtime peer dependencies.
    assert "peerDependencies" in pkg
    for dep in ("@apache-superset/core", "react", "react-dom"):
        assert dep in pkg["peerDependencies"]

    # Tooling dev dependencies.
    assert "devDependencies" in pkg
    for dep in ("webpack", "typescript"):
        assert dep in pkg["devDependencies"]
# Backend Pyproject TOML Template Tests
@pytest.mark.unit
def test_backend_pyproject_toml_template_renders_correctly(jinja_env, template_context):
    """Test backend/pyproject.toml template renders correctly."""
    rendered = jinja_env.get_template("backend/pyproject.toml.j2").render(template_context)
    # Spot-check key values without a full TOML parse.
    for expected in ("test_extension", "0.1.0", "Apache-2.0"):
        assert expected in rendered
# Template Rendering with Different Parameters Tests
@pytest.mark.unit
@pytest.mark.parametrize(
    "id_,name",
    [
        ("simple_extension", "Simple Extension"),
        ("MyExtension123", "My Extension 123"),
        ("complex_extension_name_123", "Complex Extension Name 123"),
        ("ext", "Ext"),
    ],
)
def test_template_rendering_with_different_ids(jinja_env, id_, name):
    """Test templates render correctly with various extension ids/names."""
    context = {
        "id": id_,
        "name": name,
        "version": "1.0.0",
        "license": "MIT",
        "include_frontend": True,
        "include_backend": True,
    }

    # extension.json: id/name propagate into backend entry points and globs.
    manifest = json.loads(jinja_env.get_template("extension.json.j2").render(context))
    assert manifest["id"] == id_
    assert manifest["name"] == name
    assert manifest["backend"]["entryPoints"] == [f"{id_}.entrypoint"]
    assert manifest["backend"]["files"] == [f"backend/src/{id_}/**/*.py"]

    # frontend/package.json: npm package takes the extension id as its name.
    pkg = json.loads(jinja_env.get_template("frontend/package.json.j2").render(context))
    assert pkg["name"] == id_

    # backend/pyproject.toml: id appears somewhere in the rendered TOML.
    assert id_ in jinja_env.get_template("backend/pyproject.toml.j2").render(context)
@pytest.mark.unit
@pytest.mark.parametrize("version", ["0.1.0", "1.0.0", "2.1.3-alpha", "10.20.30"])
def test_template_rendering_with_different_versions(jinja_env, version):
    """Test templates render correctly with various version formats."""
    context = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": version,
        "license": "Apache-2.0",
        "include_frontend": True,
        "include_backend": False,
    }
    rendered = jinja_env.get_template("extension.json.j2").render(context)
    # The version string must survive rendering verbatim.
    assert json.loads(rendered)["version"] == version
@pytest.mark.unit
@pytest.mark.parametrize(
    "license_type",
    [
        "Apache-2.0",
        "MIT",
        "BSD-3-Clause",
        "GPL-3.0",
        "Custom License",
    ],
)
def test_template_rendering_with_different_licenses(jinja_env, license_type):
    """Test templates render correctly with various license types."""
    context = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "license": license_type,
        "include_frontend": True,
        "include_backend": True,
    }
    # The license string must survive rendering in both JSON templates.
    for template_name in ("extension.json.j2", "frontend/package.json.j2"):
        rendered = jinja_env.get_template(template_name).render(context)
        assert json.loads(rendered)["license"] == license_type
# Template Validation Tests
@pytest.mark.unit
@pytest.mark.parametrize(
    "template_name", ["extension.json.j2", "frontend/package.json.j2"]
)
def test_templates_produce_valid_json(jinja_env, template_context, template_name):
    """Test that all JSON templates produce valid JSON output."""
    rendered = jinja_env.get_template(template_name).render(template_context)
    try:
        json.loads(rendered)
    except json.JSONDecodeError as e:
        # Fail with a readable message instead of a raw traceback.
        pytest.fail(f"Template {template_name} produced invalid JSON: {e}")
@pytest.mark.unit
def test_template_whitespace_handling(jinja_env, template_context):
    """Test that templates handle whitespace correctly and produce clean output."""
    rendered = jinja_env.get_template("extension.json.j2").render(template_context)

    # A few blank lines are fine for formatting, but not more than half the file.
    lines = rendered.split("\n")
    blank_lines = [line for line in lines if not line.strip()]
    assert (
        len(blank_lines) < len(lines) / 2
    ), "Too many empty lines in rendered template"

    # Output must parse as JSON and round-trip through the serializer.
    parsed = json.loads(rendered)
    json.dumps(parsed, indent=2)
@pytest.mark.unit
def test_template_context_edge_cases(jinja_env):
    """Test template rendering with edge case contexts."""
    # Minimal context: neither frontend nor backend requested.
    minimal_context = {
        "id": "minimal",
        "name": "Minimal",
        "version": "1.0.0",
        "license": "MIT",
        "include_frontend": False,
        "include_backend": False,
    }
    rendered = jinja_env.get_template("extension.json.j2").render(minimal_context)
    parsed = json.loads(rendered)

    # Basic fields survive; optional sections are omitted entirely.
    assert parsed["id"] == "minimal"
    assert parsed["name"] == "Minimal"
    assert "frontend" not in parsed
    assert "backend" not in parsed

View File

@@ -0,0 +1,271 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import pytest
from superset_extensions_cli.utils import read_json, read_toml
# Read JSON Tests
@pytest.mark.unit
def test_read_json_with_valid_file(isolated_filesystem):
    """Test read_json with valid JSON file."""
    payload = {"name": "test", "version": "1.0.0"}
    target = isolated_filesystem / "test.json"
    target.write_text(json.dumps(payload))
    assert read_json(target) == payload
@pytest.mark.unit
def test_read_json_with_nonexistent_file(isolated_filesystem):
    """Test read_json returns None when file doesn't exist."""
    missing = isolated_filesystem / "nonexistent.json"
    assert read_json(missing) is None
@pytest.mark.unit
def test_read_json_with_invalid_json(isolated_filesystem):
    """Test read_json with invalid JSON content."""
    broken = isolated_filesystem / "invalid.json"
    broken.write_text("{ invalid json content")
    # Malformed content propagates the decoder error rather than returning None.
    with pytest.raises(json.JSONDecodeError):
        read_json(broken)
@pytest.mark.unit
def test_read_json_with_directory_instead_of_file(isolated_filesystem):
    """Test read_json returns None when path is a directory."""
    folder = isolated_filesystem / "test_dir"
    folder.mkdir()
    assert read_json(folder) is None
@pytest.mark.unit
@pytest.mark.parametrize(
    "json_content,expected",
    [
        ({"simple": "value"}, {"simple": "value"}),
        ({"nested": {"key": "value"}}, {"nested": {"key": "value"}}),
        ({"array": [1, 2, 3]}, {"array": [1, 2, 3]}),
        ({}, {}),  # empty JSON object
    ],
)
def test_read_json_with_various_valid_content(
    isolated_filesystem, json_content, expected
):
    """Test read_json with various valid JSON content types."""
    target = isolated_filesystem / "test.json"
    target.write_text(json.dumps(json_content))
    assert read_json(target) == expected
# Read TOML Tests
@pytest.mark.unit
def test_read_toml_with_valid_file(isolated_filesystem):
    """Test read_toml with valid TOML file."""
    target = isolated_filesystem / "pyproject.toml"
    target.write_text('[project]\nname = "test"\nversion = "1.0.0"')
    parsed = read_toml(target)
    assert parsed is not None
    assert parsed["project"]["name"] == "test"
    assert parsed["project"]["version"] == "1.0.0"
@pytest.mark.unit
def test_read_toml_with_nonexistent_file(isolated_filesystem):
    """Test read_toml returns None when file doesn't exist."""
    missing = isolated_filesystem / "nonexistent.toml"
    assert read_toml(missing) is None
@pytest.mark.unit
def test_read_toml_with_directory_instead_of_file(isolated_filesystem):
    """Test read_toml returns None when path is a directory."""
    folder = isolated_filesystem / "test_dir"
    folder.mkdir()
    assert read_toml(folder) is None
@pytest.mark.unit
def test_read_toml_with_invalid_toml(isolated_filesystem):
    """Test read_toml with invalid TOML content."""
    broken = isolated_filesystem / "invalid.toml"
    broken.write_text("[ invalid toml content")
    # tomli raises various exceptions for invalid TOML, so accept any.
    with pytest.raises(Exception):
        read_toml(broken)
@pytest.mark.unit
@pytest.mark.parametrize(
    "toml_content,expected_keys",
    [
        ('[project]\nname = "test"', ["project"]),
        ('[build-system]\nrequires = ["setuptools"]', ["build-system"]),
        (
            '[project]\nname = "test"\n[build-system]\nrequires = ["setuptools"]',
            ["project", "build-system"],
        ),
    ],
)
def test_read_toml_with_various_valid_content(
    isolated_filesystem, toml_content, expected_keys
):
    """Test read_toml with various valid TOML content types."""
    target = isolated_filesystem / "test.toml"
    target.write_text(toml_content)
    parsed = read_toml(target)
    assert parsed is not None
    # Every expected top-level table must be present in the parsed result.
    for key in expected_keys:
        assert key in parsed
@pytest.mark.unit
def test_read_toml_with_complex_structure(isolated_filesystem):
    """Test read_toml with complex TOML structure."""
    complex_toml = """
[project]
name = "my-package"
version = "1.0.0"
authors = [
    {name = "Author Name", email = "author@example.com"}
]
[project.dependencies]
requests = "^2.25.0"
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
"""
    target = isolated_filesystem / "complex.toml"
    target.write_text(complex_toml)
    parsed = read_toml(target)
    assert parsed is not None
    # Nested tables, arrays, and inline tables all survive parsing.
    assert parsed["project"]["name"] == "my-package"
    assert parsed["project"]["version"] == "1.0.0"
    assert len(parsed["project"]["authors"]) == 1
    assert parsed["project"]["authors"][0]["name"] == "Author Name"
    assert parsed["build-system"]["requires"] == ["setuptools", "wheel"]
@pytest.mark.unit
def test_read_toml_with_empty_file(isolated_filesystem):
    """Test read_toml with empty TOML file."""
    target = isolated_filesystem / "empty.toml"
    target.write_text("")
    # An empty document parses to an empty mapping, not None.
    assert read_toml(target) == {}
@pytest.mark.unit
@pytest.mark.parametrize(
    "invalid_content",
    [
        "[ invalid section",
        "key = ",
        "key = unquoted string",
        "[section\nkey = value",
    ],
)
def test_read_toml_with_various_invalid_content(isolated_filesystem, invalid_content):
    """Test read_toml with various types of invalid TOML content."""
    target = isolated_filesystem / "invalid.toml"
    target.write_text(invalid_content)
    # The parser raises different exception types, so accept any.
    with pytest.raises(Exception):
        read_toml(target)
# File System Edge Cases
@pytest.mark.unit
def test_read_json_with_permission_denied(isolated_filesystem):
    """Test read_json behavior when file permissions are denied."""
    locked = isolated_filesystem / "restricted.json"
    locked.write_text('{"test": "value"}')
    # chmod semantics are system-dependent (e.g. root can still read),
    # so accept either outcome and treat permission errors as expected.
    try:
        locked.chmod(0o000)  # drop all permissions
        outcome = read_json(locked)
        assert outcome is None or outcome == {"test": "value"}
    except (OSError, PermissionError):
        # Expected on some systems.
        pass
    finally:
        # Restore permissions so the temp directory can be cleaned up.
        try:
            locked.chmod(0o644)
        except (OSError, PermissionError):
            pass
@pytest.mark.unit
def test_read_toml_with_permission_denied(isolated_filesystem):
    """Test read_toml behavior when file permissions are denied."""
    locked = isolated_filesystem / "restricted.toml"
    locked.write_text('[test]\nkey = "value"')
    # chmod semantics are system-dependent (e.g. root can still read),
    # so accept either outcome and treat permission errors as expected.
    try:
        locked.chmod(0o000)  # drop all permissions
        outcome = read_toml(locked)
        assert outcome is None or "test" in outcome
    except (OSError, PermissionError):
        # Expected on some systems.
        pass
    finally:
        # Restore permissions so the temp directory can be cleaned up.
        try:
            locked.chmod(0o644)
        except (OSError, PermissionError):
            pass

View File

@@ -0,0 +1,211 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from pathlib import Path
from typing import Any
def assert_file_exists(path: Path, description: str = "") -> None:
    """
    Assert that a file exists with a descriptive error message.
    Args:
        path: Path to the file that should exist
        description: Optional description for better error messages
    """
    suffix = f" ({description})" if description else ""
    assert path.exists(), f"Expected file {path}{suffix} to exist, but it doesn't"
    assert path.is_file(), f"Expected {path}{suffix} to be a file, but it's not"
def assert_directory_exists(path: Path, description: str = "") -> None:
    """
    Assert that a directory exists with a descriptive error message.
    Args:
        path: Path to the directory that should exist
        description: Optional description for better error messages
    """
    suffix = f" ({description})" if description else ""
    exists_msg = f"Expected directory {path}{suffix} to exist, but it doesn't"
    assert path.exists(), exists_msg
    is_dir_msg = f"Expected {path}{suffix} to be a directory, but it's not"
    assert path.is_dir(), is_dir_msg
def assert_file_structure(base_path: Path, expected_files: list[str]) -> None:
    """
    Assert that all expected files exist under the base path.
    Args:
        base_path: Base directory path
        expected_files: List of relative file paths that should exist
    """
    for relative in expected_files:
        # Delegate each check so failures name the offending file.
        assert_file_exists(base_path / relative, "part of expected structure")
def assert_directory_structure(base_path: Path, expected_dirs: list[str]) -> None:
    """
    Assert that all expected directories exist under the base path.
    Args:
        base_path: Base directory path
        expected_dirs: List of relative directory paths that should exist
    """
    for relative in expected_dirs:
        # Delegate each check so failures name the offending directory.
        assert_directory_exists(base_path / relative, "part of expected structure")
def get_directory_tree(path: Path, ignore: set[str] | None = None) -> set[str]:
    """
    Get all files and directories under a path as relative string paths.
    Args:
        path: Base path to scan
        ignore: Set of file/directory names to ignore
    Returns:
        Set of relative path strings
    """
    skip_names = ignore or {".DS_Store", "__pycache__", ".pytest_cache"}
    if not path.exists():
        return set()
    # Skip any entry whose path contains an ignored component.
    return {
        str(entry.relative_to(path))
        for entry in path.rglob("*")
        if not any(name in entry.parts for name in skip_names)
    }
def load_json_file(path: Path) -> dict[str, Any]:
    """
    Load and parse a JSON file.
    Args:
        path: Path to the JSON file
    Returns:
        Parsed JSON content
    Raises:
        AssertionError: If file doesn't exist or isn't valid JSON
    """
    assert_file_exists(path, "JSON file")
    try:
        return json.loads(path.read_text())
    except json.JSONDecodeError as e:
        # Chain the decode error so its line/column context is preserved
        # in the traceback (was previously raised without `from e`).
        raise AssertionError(f"File {path} contains invalid JSON: {e}") from e
def assert_json_content(path: Path, expected_values: dict[str, Any]) -> None:
    """
    Assert that a JSON file contains expected key-value pairs.
    Args:
        path: Path to the JSON file
        expected_values: Dictionary of expected key-value pairs
    """
    actual = load_json_file(path)
    for key, expected_value in expected_values.items():
        assert key in actual, f"Expected key '{key}' not found in {path}"
        actual_value = actual[key]
        assert (
            actual_value == expected_value
        ), f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}"
def assert_file_contains(path: Path, text: str) -> None:
    """
    Assert that a file contains specific text.
    Args:
        path: Path to the file
        text: Text that should be present in the file
    """
    assert_file_exists(path, "text file")
    file_body = path.read_text()
    assert text in file_body, f"Expected text '{text}' not found in {path}"
def assert_file_content_matches(path: Path, expected_content: str) -> None:
    """
    Assert that a file's content exactly matches expected content.
    Args:
        path: Path to the file
        expected_content: Expected file content
    """
    assert_file_exists(path, "content file")
    actual_content = path.read_text()
    mismatch_report = (
        f"File content mismatch in {path}\n"
        f"Expected:\n{expected_content}\n"
        f"Actual:\n{actual_content}"
    )
    assert actual_content == expected_content, mismatch_report
def create_test_extension_structure(
    base_path: Path,
    id_: str,
    include_frontend: bool = True,
    include_backend: bool = True,
) -> dict[str, Any]:
    """
    Helper to create expected extension structure for testing.
    Args:
        base_path: Base path where extension should be created
        id_: Unique identifier for extension
        include_frontend: Whether frontend should be included
        include_backend: Whether backend should be included
    Returns:
        Dictionary with expected paths and metadata
    """
    # NOTE: the previous docstring documented a nonexistent `name` parameter;
    # only the id, flags, and base path determine the expected structure.
    expected_files = ["extension.json"]
    expected_dirs: list[str] = []
    if include_frontend:
        expected_dirs.append("frontend")
        expected_files.append("frontend/package.json")
    if include_backend:
        expected_dirs.append("backend")
        expected_files.append("backend/pyproject.toml")
    return {
        "extension_path": base_path / id_,
        "expected_files": expected_files,
        "expected_dirs": expected_dirs,
    }

View File

@@ -46,6 +46,7 @@ module.exports = {
plugins: [
'lodash',
'@babel/plugin-syntax-dynamic-import',
'@babel/plugin-transform-export-namespace-from',
['@babel/plugin-proposal-class-properties', { loose: true }],
['@babel/plugin-proposal-optional-chaining', { loose: true }],
['@babel/plugin-proposal-private-methods', { loose: true }],
@@ -89,6 +90,7 @@ module.exports = {
plugins: [
'babel-plugin-dynamic-import-node',
'@babel/plugin-transform-modules-commonjs',
'@babel/plugin-transform-export-namespace-from',
],
},
// build instrumented code for testing code coverage with Cypress

View File

@@ -31,6 +31,8 @@ module.exports = {
'^spec/(.*)$': '<rootDir>/spec/$1',
// mapping plugins of superset-ui to source code
'@superset-ui/(.*)$': '<rootDir>/node_modules/@superset-ui/$1/src',
// mapping @apache-superset/core to local package
'^@apache-superset/core$': '<rootDir>/packages/superset-core/src',
},
testEnvironment: 'jsdom',
modulePathIgnorePatterns: ['<rootDir>/packages/generator-superset'],

File diff suppressed because it is too large Load Diff

View File

@@ -44,7 +44,7 @@
"build-storybook": "storybook build",
"build-translation": "scripts/po2json.sh",
"bundle-stats": "cross-env BUNDLE_ANALYZER=true npm run build && npx open-cli ../superset/static/stats/statistics.html",
"core:cover": "cross-env NODE_ENV=test NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage --coverageThreshold='{\"global\":{\"statements\":100,\"branches\":100,\"functions\":100,\"lines\":100}}' --collectCoverageFrom='[\"packages/**/src/**/*.{js,ts}\", \"!packages/superset-ui-demo/**/*\"]' packages",
"core:cover": "cross-env NODE_ENV=test NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage --coverageThreshold='{\"global\":{\"statements\":100,\"branches\":100,\"functions\":100,\"lines\":100}}' --collectCoverageFrom='[\"packages/**/src/**/*.{js,ts}\", \"!packages/superset-ui-demo/**/*\", \"!packages/superset-core/**/*\"]' packages",
"cover": "cross-env NODE_ENV=test NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage",
"dev": "webpack --mode=development --color --watch",
"dev-server": "cross-env NODE_ENV=development BABEL_ENV=development node --max_old_space_size=4096 ./node_modules/webpack-dev-server/bin/webpack-dev-server.js --mode=development",
@@ -82,6 +82,7 @@
],
"dependencies": {
"@ant-design/icons": "^5.2.6",
"@apache-superset/core": "file:packages/superset-core",
"@emotion/cache": "^11.4.0",
"@emotion/react": "^11.13.3",
"@emotion/styled": "^11.3.0",
@@ -227,6 +228,7 @@
"@babel/plugin-proposal-optional-chaining": "^7.21.0",
"@babel/plugin-proposal-private-methods": "^7.18.6",
"@babel/plugin-syntax-dynamic-import": "^7.8.3",
"@babel/plugin-transform-export-namespace-from": "^7.27.1",
"@babel/plugin-transform-modules-commonjs": "^7.26.3",
"@babel/plugin-transform-runtime": "^7.25.9",
"@babel/preset-env": "^7.26.0",

View File

@@ -0,0 +1,7 @@
{
"presets": [
"@babel/preset-env",
"@babel/preset-react",
"@babel/preset-typescript"
]
}

View File

@@ -0,0 +1,22 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## Change Log
Changelogs will be added once we have the first stable release.

View File

@@ -0,0 +1,116 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# @apache-superset/core
[![npm version](https://badge.fury.io/js/%40apache-superset%2Fcore.svg)](https://badge.fury.io/js/%40apache-superset%2Fcore)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
The official core package for building Apache Superset extensions and integrations. This package provides essential building blocks including shared UI components, utility functions, APIs, and type definitions for both the host application and extensions.
## 📦 Installation
```bash
npm install @apache-superset/core
```
## 🏗️ Architecture
The package is organized into logical namespaces, each providing specific functionality:
- **`authentication`** - User authentication and authorization APIs
- **`commands`** - Command registration and execution system
- **`contributions`** - UI contribution points and customization APIs
- **`core`** - Fundamental types, utilities, and lifecycle management
- **`environment`** - Environment detection and configuration APIs
- **`extensions`** - Extension management and metadata APIs
- **`sqlLab`** - SQL Lab integration and event handling
## 🚀 Quick Start
### Basic Extension Structure
```typescript
import {
core,
commands,
sqlLab,
authentication,
} from '@apache-superset/core';
export function activate(context: core.ExtensionContext) {
// Register a command to save current query
const commandDisposable = commands.registerCommand(
'my_extension.save_query',
async () => {
const currentTab = sqlLab.getCurrentTab();
if (currentTab?.editor.content) {
const token = await authentication.getCSRFToken();
// Use token for secure API calls
console.log('Saving query with CSRF token:', token);
}
},
);
// Listen for query execution events
const eventDisposable = sqlLab.onDidQueryRun(editor => {
console.log('Query executed:', editor.content.substring(0, 50) + '...');
});
// Register a simple view
const viewDisposable = core.registerViewProvider(
'my_extension.panel',
() => (
<div>
<h3>My Extension</h3>
<button onClick={() => commands.executeCommand('my_extension.save_query')}>
Save Query
</button>
</div>
)
);
// Cleanup registration
context.subscriptions.push(commandDisposable, eventDisposable, viewDisposable);
}
export function deactivate() {
// Cleanup handled automatically via disposables
}
```
## 🤝 Contributing
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
## 📄 License
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
## 🔗 Links
- [Apache Superset](https://superset.apache.org/)
- [Documentation](https://superset.apache.org/docs/)
- [Community](https://superset.apache.org/community/)
- [GitHub Repository](https://github.com/apache/superset)
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
---
**Note**: This package is currently in release candidate status. APIs may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.

View File

@@ -0,0 +1,35 @@
{
"name": "@apache-superset/core",
"version": "0.0.1-rc4",
"description": "This package contains UI elements, APIs, and utility functions used by Superset.",
"sideEffects": false,
"main": "lib/index.js",
"module": "esm/index.js",
"files": [
"esm",
"lib"
],
"author": "",
"license": "ISC",
"devDependencies": {
"@babel/cli": "^7.26.4",
"@babel/core": "^7.26.9",
"@babel/preset-env": "^7.26.9",
"@babel/preset-react": "^7.26.3",
"@babel/preset-typescript": "^7.26.0",
"@types/react": "^17.0.83",
"install": "^0.13.0",
"npm": "^11.1.0"
},
"peerDependencies": {
"antd": "4.10.3",
"react": "^17.0.2"
},
"scripts": {
"build": "babel src --out-dir lib --extensions \".ts,.tsx\"",
"type": "tsc --noEmit"
},
"publishConfig": {
"access": "public"
}
}

View File

@@ -0,0 +1,43 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview Authentication API for Superset extensions.
*
* This module provides functions for handling user authentication and security
* within Superset extensions.
*/
/**
 * Retrieves the CSRF token used for securing requests against cross-site request forgery attacks.
 * This token should be included in the headers of POST, PUT, DELETE, and other state-changing
 * HTTP requests to ensure they are authorized.
 *
 * NOTE(review): the token is supplied by the host application; it is presumably
 * `undefined` when no authenticated session is available — confirm the exact
 * conditions against the host implementation.
 *
 * @returns A promise that resolves to the CSRF token as a string, or undefined if not available.
 *
 * @example
 * ```typescript
 * const csrfToken = await getCSRFToken();
 * if (csrfToken) {
 *   // Include in request headers
 *   headers['X-CSRFToken'] = csrfToken;
 * }
 * ```
 */
export declare function getCSRFToken(): Promise<string | undefined>;

View File

@@ -0,0 +1,70 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview Command system API for Superset extensions.
*
* This module provides a command registry and execution system that allows extensions
* to register custom commands and invoke them programmatically. Commands can be triggered
* via keyboard shortcuts, menu items, programmatic calls, or other user interactions.
*/
import { Disposable } from './core';
/**
 * Registers a command that can be invoked via a keyboard shortcut,
 * a menu item, an action, or directly.
 *
 * Registering a command with an existing command identifier twice
 * will cause an error.
 *
 * @param command A unique identifier for the command.
 * @param callback A command handler function. May be synchronous or return a promise.
 * @param thisArg The `this` context used when invoking the handler function.
 * @returns Disposable which unregisters this command on disposal.
 */
export declare function registerCommand(
  command: string,
  callback: (...args: any[]) => any,
  thisArg?: any,
): Disposable;
/**
 * Executes the command denoted by the given command identifier.
 *
 * NOTE(review): behavior for an unknown command identifier is not specified
 * here — presumably the returned promise rejects; confirm with the host.
 *
 * @param command Identifier of the command to execute.
 * @param rest Parameters passed to the command function.
 * @returns A promise that resolves to the returned value of the given command. Returns `undefined` when
 * the command handler function doesn't return anything.
 */
export declare function executeCommand<T = unknown>(
  command: string,
  ...rest: any[]
): Promise<T>;
/**
 * Retrieve the list of all available commands. Commands starting with an underscore are
 * treated as internal commands.
 *
 * @param filterInternal Set `true` to not see internal commands (starting with an underscore)
 * @returns Promise that resolves to a list of command ids.
 */
export declare function getCommands(
  filterInternal?: boolean,
): Promise<string[]>;

View File

@@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview Contributions API for Superset extension UI integration.
*
* This module defines the interfaces and types for extension contributions to the
* Superset user interface. Extensions use these contribution types to register
* commands, menu items, and custom views that integrate seamlessly with the
* Superset platform. The contribution system allows extensions to extend the
* application's functionality while maintaining a consistent user experience.
*/
/**
 * Describes a command that can be contributed to the application.
 */
export interface CommandContribution {
  /** The unique identifier for the command. */
  command: string;
  /**
   * The icon associated with the command.
   * NOTE(review): whether this is an icon name or a path/URL is not specified
   * here — confirm the expected format with the host application.
   */
  icon: string;
  /** The display title of the command. */
  title: string;
  /** A description of what the command does. */
  description: string;
}
/**
 * Represents a menu item that links a view to a command.
 */
export interface MenuItem {
  /** The identifier of the view associated with this menu item. */
  view: string;
  /** The command to execute when this menu item is selected. */
  command: string;
}
/**
 * Defines the structure of menu contributions, allowing for primary, secondary, and context menus.
 */
export interface MenuContribution {
  /** Items to appear in the primary menu. */
  primary?: MenuItem[];
  /** Items to appear in the secondary menu. */
  secondary?: MenuItem[];
  /** Items to appear in the context menu. */
  context?: MenuItem[];
}
/**
 * Represents a contributed view in the application.
 */
export interface ViewContribution {
  /** The unique identifier for the view. */
  id: string;
  /** The display name of the view. */
  name: string;
}
/**
 * Aggregates all contributions (commands, menus, and views) provided by an extension or module.
 */
export interface Contributions {
  /** List of command contributions. */
  commands: CommandContribution[];
  /** Mapping of menu contributions, keyed by menu identifier. */
  menus: {
    [key: string]: MenuContribution;
  };
  /** Mapping of view contributions, keyed by view-container identifier. */
  views: {
    [key: string]: ViewContribution[];
  };
}

View File

@@ -0,0 +1,304 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview Core types and utilities for Superset extensions.
*
* This module provides fundamental types and interfaces used throughout the
* Superset extension API. It includes database metadata types, event handling,
* resource management, and extension lifecycle definitions.
*/
import { ReactElement } from 'react';
import { Contributions } from './contributions';
/**
 * Represents a database column with its name and data type.
 */
export type Column = {
  /**
   * Label of the column as presented to users (assumed to be the display
   * label, as distinct from the physical `column_name` below — confirm
   * against the host implementation).
   */
  name: string;
  /**
   * Column name as defined in the database/datasource.
   */
  column_name: string;
  /**
   * The data type of the column (e.g., 'INTEGER', 'VARCHAR', 'TIMESTAMP')
   */
  type: string;
  /**
   * Generic (engine-agnostic) classification of the column's data type.
   */
  type_generic: GenericDataType;
  /**
   * True if the column holds date/time values.
   */
  is_dttm: boolean;
};
/**
 * Represents a database table with its name and column definitions.
 */
export declare interface Table {
  /** The name of the table */
  name: string;
  /** Array of columns in this table */
  columns: Column[];
}
/**
 * Represents a database catalog.
 * @todo This interface needs to be expanded with catalog-specific properties.
 */
export declare interface Catalog {} // eslint-disable-line @typescript-eslint/no-empty-interface
/**
 * Represents a database schema containing tables.
 */
export declare interface Schema {
  /** Array of tables in this schema */
  tables: Table[];
}
/**
 * Represents a database connection with its metadata.
 */
export declare interface Database {
  /** Unique identifier for the database */
  id: number;
  /** Display name of the database */
  name: string;
  /** Array of catalogs available in this database */
  catalogs: Catalog[];
  /** Array of schemas available in this database */
  schemas: Schema[];
}
// Keep in sync with superset/errors.py
export type ErrorLevel = 'info' | 'warning' | 'error';
/**
 * Superset error object structure.
 * Contains details about an error that occurred within Superset.
 */
export type SupersetError = {
  /**
   * Error type identifier; see the SupersetErrorType enum in superset/errors.py
   */
  error_type: string;
  /**
   * Extra properties whose shape depends on the error type
   */
  extra: Record<string, any>;
  /**
   * Severity level of the error
   */
  level: ErrorLevel;
  /**
   * Detailed, human-readable description of the error
   */
  message: string;
};
/**
 * Generic data types, see enum of the same name in superset/utils/core.py.
 * Member values must stay numerically in sync with the Python enum.
 */
export enum GenericDataType {
  /** Numeric values (integers, floats, decimals). */
  Numeric = 0,
  /** Textual values. */
  String = 1,
  /** Date/time values. */
  Temporal = 2,
  /** True/false values. */
  Boolean = 3,
}
/**
 * Represents a type which can release resources, such
 * as event listening or a timer.
 *
 * This is an ambient declaration: the concrete implementation is supplied
 * by the host application at runtime.
 */
export declare class Disposable {
  /**
   * Combine many disposable-likes into one. You can use this method when having objects with
   * a dispose function which aren't instances of `Disposable`.
   *
   * @param disposableLikes Objects that have at least a `dispose`-function member. Note that asynchronous
   * dispose-functions aren't awaited.
   * @returns Returns a new disposable which, upon dispose, will
   * dispose all provided disposables.
   */
  static from(
    ...disposableLikes: {
      /**
       * Function to clean up resources.
       */
      dispose: () => any;
    }[]
  ): Disposable;
  /**
   * Creates a new disposable that calls the provided function
   * on dispose.
   *
   * *Note* that an asynchronous function is not awaited.
   *
   * @param callOnDispose Function that disposes something.
   */
  constructor(callOnDispose: () => any);
  /**
   * Dispose this object.
   */
  dispose(): any;
}
/**
 * Represents a typed event system for handling asynchronous notifications.
 *
 * An event is a function: you subscribe by calling it with a listener
 * function as argument, and it returns a {@link Disposable} that removes
 * the subscription. This provides a type-safe way to handle events
 * throughout the Superset extension system.
 *
 * @template T The type of data that will be passed to event listeners.
 *
 * @example
 * ```typescript
 * // Subscribe to an event
 * const disposable = myEvent((data) => {
 *   console.log("Event happened:", data);
 * });
 *
 * // Unsubscribe when done
 * disposable.dispose();
 * ```
 */
export declare interface Event<T> {
  /**
   * Subscribe to this event by providing a listener function.
   *
   * @param listener The listener function that will be called when the event is fired.
   * The function receives the event data as its parameter.
   * @param thisArgs Optional `this` context that will be used when calling the event listener.
   * @returns A Disposable object that can be used to unsubscribe from the event.
   *
   * @example
   * ```typescript
   * const subscription = onSomeEvent((data) => {
   *   console.log('Received:', data);
   * });
   *
   * // Later, clean up the subscription
   * subscription.dispose();
   * ```
   */
  (listener: (e: T) => any, thisArgs?: any): Disposable;
}
/**
 * Represents a Superset extension with its metadata and lifecycle methods.
 * Extensions are modular components that can extend Superset's functionality.
 */
export interface Extension {
  /** Function called when the extension is activated */
  activate: Function;
  /** UI contributions provided by this extension */
  contributions: Contributions;
  /** Function called when the extension is deactivated */
  deactivate: Function;
  /**
   * List of dependencies for this extension.
   * NOTE(review): presumably module/package dependencies, since
   * extension-to-extension dependencies are listed separately in
   * `extensionDependencies` below — confirm.
   */
  dependencies: string[];
  /** Human-readable description of the extension */
  description: string;
  /** List of modules exposed by this extension for use by other extensions */
  exposedModules: string[];
  /** List of other extensions that this extension depends on */
  extensionDependencies: string[];
  /** Unique identifier for the extension */
  id: string;
  /** Human-readable name of the extension */
  name: string;
  /** URL or path to the extension's remote entry point */
  remoteEntry: string;
  /** Version of the extension */
  version: string;
}
/**
 * Context object provided to extensions during activation.
 * Contains utilities and resources that extensions can use during their lifecycle.
 *
 * @see Disposable
 */
export interface ExtensionContext {
  /**
   * Array of disposable objects that will be automatically disposed when the extension is deactivated.
   * Extensions should add any resources that need cleanup to this array.
   *
   * @example
   * ```typescript
   * export function activate(context: ExtensionContext) {
   *   // Register an event listener
   *   const disposable = onSomeEvent(() => { ... });
   *
   *   // Add to context so it's cleaned up automatically
   *   context.disposables.push(disposable);
   * }
   * ```
   */
  disposables: Disposable[];
  /**
   * @todo We might want to add more properties to this interface in the future like
   * storage, configuration, logging, etc. For now, it serves as a placeholder
   * to allow for future extensibility without breaking existing extensions.
   */
}
/**
 * Registers a view provider that can render custom React components in Superset.
 * View providers allow extensions to contribute custom UI components that can be
 * displayed in various parts of the Superset interface.
 *
 * NOTE(review): the behavior when registering the same id twice is not
 * specified here — confirm with the host implementation.
 *
 * @param id Unique identifier for the view provider, conventionally in the
 * form `extension-id.view-name`. This ID is used to reference
 * the view provider from other parts of the system.
 * @param viewProvider Function that returns a React element to be rendered.
 * This function will be called whenever the view needs to be displayed.
 * @returns A Disposable object that can be used to unregister the view provider.
 *
 * @example
 * ```typescript
 * const disposable = registerViewProvider('my-extension.custom-view', () => (
 *   <div>
 *     <h1>My Custom View</h1>
 *     <p>This is a custom component from my extension.</p>
 *   </div>
 * ));
 *
 * // Later, unregister the view provider
 * disposable.dispose();
 * ```
 */
export declare const registerViewProvider: (
  id: string,
  viewProvider: () => ReactElement,
) => Disposable;

View File

@@ -0,0 +1,153 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview Environment API for Superset extensions.
*
* This module provides access to the execution environment, including system
* clipboard operations, logging capabilities, internationalization features,
* and environment variables. It allows extensions to interact with the host
* system and platform in a controlled manner.
*/
import { Event } from './core';
/**
 * Interface for system clipboard operations.
 * Provides methods to read from and write to the system clipboard.
 *
 * NOTE(review): in browser hosts, clipboard access may require user
 * permission and a secure context — confirm how the host surfaces denials.
 */
export interface Clipboard {
  /**
   * Read the current clipboard contents as text.
   *
   * @returns A promise that resolves to the clipboard text content.
   *
   * @example
   * ```typescript
   * const clipboardText = await clipboard.readText();
   * console.log('Clipboard contains:', clipboardText);
   * ```
   */
  readText(): Promise<string>;
  /**
   * Writes text into the clipboard, replacing any existing content.
   *
   * @param value The text to write to the clipboard.
   * @returns A promise that resolves when the write operation completes.
   *
   * @example
   * ```typescript
   * await clipboard.writeText('Hello, world!');
   * console.log('Text copied to clipboard');
   * ```
   */
  writeText(value: string): Promise<void>;
}
/**
 * Logging levels for controlling the verbosity of log output.
 * Higher numeric values indicate more restrictive logging levels,
 * with the exception of `Off` (0), which disables logging entirely —
 * so do not compare levels numerically assuming `Off` is the highest.
 */
export enum LogLevel {
  /**
   * No messages are logged with this level.
   * Use this to completely disable logging.
   */
  Off = 0,
  /**
   * All messages are logged with this level.
   * Most verbose logging level, includes all types of messages.
   */
  Trace = 1,
  /**
   * Messages with debug and higher log level are logged with this level.
   * Useful for development and troubleshooting.
   */
  Debug = 2,
  /**
   * Messages with info and higher log level are logged with this level.
   * General informational messages about application flow.
   */
  Info = 3,
  /**
   * Messages with warning and higher log level are logged with this level.
   * Indicates potential issues that don't prevent operation.
   */
  Warning = 4,
  /**
   * Only error messages are logged with this level.
   * Most restrictive level, shows only critical failures.
   */
  Error = 5,
}
/**
 * Represents the preferred user-language, like `de-CH`, `fr`, or `en-US`.
 */
export declare const language: string;
/**
 * The system clipboard.
 */
export declare const clipboard: Clipboard;
/**
 * The current log level of the application.
 */
export declare const logLevel: LogLevel;
/**
 * An {@link Event} which fires when the log level of the application changes.
 */
export declare const onDidChangeLogLevel: Event<LogLevel>;
/**
 * Opens an external URL in the default system browser or application.
 * This function provides a secure way to open external resources while
 * respecting user security preferences.
 *
 * @param target The URL to open externally.
 * @returns A promise that resolves to true if the URL was successfully opened, false otherwise.
 *
 * @example
 * ```typescript
 * const success = await openExternal(new URL('https://superset.apache.org'));
 * if (success) {
 *   console.log('URL opened successfully');
 * } else {
 *   console.log('Failed to open URL');
 * }
 * ```
 */
export declare function openExternal(target: URL): Promise<boolean>;
/**
 * Gets an environment variable value.
 *
 * NOTE(review): in a browser-hosted extension there is no OS process
 * environment; presumably this surfaces host-provided configuration
 * values — confirm with the host implementation.
 *
 * @param name The name of the environment variable
 * @returns The value of the environment variable or undefined if not found
 */
export declare function getEnvironmentVariable(
  name: string,
): string | undefined;

View File

@@ -0,0 +1,69 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview Extensions API for Superset extension management.
*
* This module provides functions and events for managing Superset extensions,
* including querying extension metadata and monitoring extension lifecycle events.
* Extensions can use this API to discover other extensions and react to changes
* in the extension ecosystem.
*/
import { Extension } from './core';
/**
 * Get an extension by its full identifier in the form of: `publisher.name`.
 * This function allows extensions to discover and interact with other extensions
 * in the Superset ecosystem.
 *
 * @param extensionId An extension identifier in the format "publisher.name".
 * @returns The extension object if found, or `undefined` if no extension matches the identifier.
 *
 * @example
 * ```typescript
 * const chartExtension = getExtension('superset.chart-plugins');
 * if (chartExtension) {
 *   console.log('Chart extension is available:', chartExtension.name);
 * } else {
 *   console.log('Chart extension not found');
 * }
 * ```
 */
export declare function getExtension(
  extensionId: string,
): Extension | undefined;
/**
 * Get all extensions currently known to the system.
 * This function returns a readonly array containing all extensions that are installed
 * and available, regardless of their activation status.
 *
 * @returns A readonly array of all extension objects in the system.
 *
 * @example
 * ```typescript
 * const extensions = getAllExtensions();
 * console.log(`Total extensions: ${extensions.length}`);
 * extensions.forEach(ext => {
 *   console.log(`- ${ext.id}: ${ext.name} (version: ${ext.version})`);
 * });
 * ```
 */
export declare function getAllExtensions(): readonly Extension[];

View File

@@ -0,0 +1,42 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * @fileoverview Main entry point for the Superset Extension API.
 *
 * This module exports all public APIs for Superset extensions, providing
 * a unified interface for extension developers to interact with the Superset
 * platform. Each namespace corresponds to a sibling module in this package;
 * see those modules for detailed API documentation. The API includes:
 *
 * - `authentication`: Handle user authentication and authorization
 * - `commands`: Execute Superset commands and operations
 * - `contributions`: Register UI contributions and customizations
 * - `core`: Access fundamental Superset types and utilities
 * - `environment`: Interact with the execution environment
 * - `extensions`: Manage extension lifecycle and metadata
 * - `sqlLab`: Integrate with SQL Lab functionality
 */
export * as authentication from './authentication';
export * as commands from './commands';
export * as contributions from './contributions';
export * as core from './core';
export * as environment from './environment';
export * as extensions from './extensions';
export * as sqlLab from './sqlLab';

View File

@@ -0,0 +1,541 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @fileoverview SQL Lab API for Superset extensions.
*
* This module provides interfaces and functions for integrating with Superset's SQL Lab,
* allowing extensions to interact with SQL editors, tabs, panels, and query execution.
* Extensions can listen to various events and access current state information.
*
* The API is organized into two main categories:
* - Tab-scoped APIs: Functions and events available within the context of a specific tab
* - Global APIs: Functions and events available across the entire SQL Lab interface
*/
import { Event, Database, SupersetError, Column } from './core';
/**
 * Represents an SQL editor instance within a SQL Lab tab.
 * Contains the editor content and associated database connection information.
 */
export interface Editor {
  /**
   * The SQL content of the editor.
   * This represents the current text in the SQL editor.
   */
  content: string;
  /**
   * The database identifier associated with the editor.
   * This determines which database the queries will be executed against.
   */
  databaseId: number;
  /**
   * The catalog name associated with the editor.
   * Can be null if no specific catalog is selected (e.g., presumably for
   * engines without catalog support — confirm).
   */
  catalog: string | null;
  /**
   * The schema name associated with the editor.
   * Defines the database schema context for the editor.
   */
  schema: string;
  /**
   * The table name associated with the editor.
   * Can be null if no specific table is selected.
   *
   * @todo Revisit if we actually need the table property
   */
  table: string | null;
}
/**
 * Represents a panel within a SQL Lab tab.
 * Panels can display query results, database schema information, or other tools.
 */
export interface Panel {
  /**
   * The unique identifier of the panel.
   * Used to distinguish between different panels in the same tab.
   */
  id: string;
}
/**
 * Represents a tab in the SQL Lab interface.
 * Each tab contains an SQL editor and can have multiple associated panels.
 */
export interface Tab {
  /**
   * The unique identifier of the tab.
   * Used to identify and manage specific tabs.
   */
  id: string;
  /**
   * The display title of the tab.
   * This is what users see in the tab header.
   */
  title: string;
  /**
   * The SQL editor instance associated with this tab.
   * Contains the editor content and database connection settings.
   */
  editor: Editor;
  /**
   * The panels associated with the tab.
   * Panels provide additional functionality like result display and schema browsing.
   */
  panels: Panel[];
}
/**
 * The kind of object created by a CTAS ("CREATE TABLE AS SELECT") request.
 * Used as the `method` discriminator on {@link CTAS}.
 */
export enum CTASMethod {
/** Materialize the query result as a table (CREATE TABLE AS). */
Table = 'TABLE',
/** Create a view over the query (CREATE VIEW AS). */
View = 'VIEW',
}
/**
 * Describes a "CREATE TABLE/VIEW AS SELECT" request attached to a query.
 * Present on {@link QueryContext.ctas} only when the query asks for creation.
 */
export interface CTAS {
/**
 * Create method for CTAS creation request (table vs. view).
 */
method: CTASMethod;
/**
 * Temporary table name for creation using a CTAS query.
 * Null when no explicit name was provided — TODO confirm what the backend
 * does in that case (auto-generated name vs. error).
 */
tempTable: string | null;
}
/**
 * Snapshot of a query request at execution time.
 * This is the payload of {@link onDidQueryRun} and {@link onDidQueryStop},
 * and the base shape extended by the success/error result contexts.
 */
export interface QueryContext {
/**
 * Unique query ID on client side.
 */
clientId: string;
/**
 * Contains CTAS if the query requests table creation.
 * Null for plain SELECT-style execution.
 */
ctas: CTAS | null;
/**
 * Requested row limit for the query.
 * Null when no explicit limit was requested.
 */
requestedLimit: number | null;
/**
 * True if the query execution result will be/was delivered asynchronously
 */
runAsync?: boolean;
/**
 * Start datetime for the query in a numerical timestamp
 * (presumably epoch milliseconds — TODO confirm the unit).
 */
startDttm: number;
/**
 * The tab instance associated with the request query
 */
tab: Tab;
/**
 * A key-value JSON associated with Jinja template variables.
 * NOTE(review): values are typed `any`; callers should treat them as
 * `unknown` and narrow before use.
 */
templateParameters: Record<string, any>;
}
/**
 * Query context enriched with failure details.
 * This is the payload of {@link onDidQueryFail}.
 */
export interface QueryErrorResultContext extends QueryContext {
/**
 * Finished datetime for the query in a numerical timestamp
 * (same unit as {@link QueryContext.startDttm}).
 */
endDttm: number;
/**
 * Error message returned from DB engine
 */
errorMessage: string;
/**
 * Error details in a SupersetError structure.
 * Null when no structured error information is available.
 */
errors: SupersetError[] | null;
/**
 * Executed SQL after parsing Jinja templates.
 * Null when the failure happened before the SQL could be rendered.
 */
executedSql: string | null;
}
/**
 * Query context enriched with success details and result data.
 * This is the payload of {@link onDidQuerySuccess}.
 */
export interface QueryResultContext extends QueryContext {
/**
 * Actual number of rows returned by the query.
 */
appliedLimit: number;
/**
 * Major factor that is determining the row limit of the query results
 * (e.g. the user-requested limit vs. a server-enforced cap — values are
 * backend-defined; TODO document the possible strings).
 */
appliedLimitingFactor: string;
/**
 * Finished datetime for the query in a numerical timestamp
 * (same unit as {@link QueryContext.startDttm}).
 */
endDttm: number;
/**
 * Executed SQL after parsing Jinja templates.
 * Always present on success, unlike the nullable counterpart on
 * {@link QueryErrorResultContext}.
 */
executedSql: string;
/**
 * Remote query id stored in backend.
 */
remoteId: number;
/**
 * Query result data and metadata.
 */
result: QueryResult;
}
/**
 * The tabular outcome of a successful query: column metadata plus row data.
 */
export interface QueryResult {
/**
 * Column metadata associated with the query result.
 */
columns: Column[];
/**
 * Query result data, one record per row keyed by column name.
 * NOTE(review): cell values are typed `any`; narrow before use.
 */
data: Record<string, any>[];
}
/**
* Tab-scoped Events and Functions
*
* These APIs are available within the context of a specific SQL Lab tab and provide
* access to tab-specific state and events.
*/
/**
 * Gets the currently active tab in SQL Lab.
 *
 * @returns The current tab object, or undefined if no tab is active.
 *
 * @example
 * ```typescript
 * const tab = getCurrentTab();
 * if (tab) {
 *   console.log(`Active tab: ${tab.title}`);
 *   console.log(`Database ID: ${tab.editor.databaseId}`);
 * }
 * ```
 *
 * @see onDidChangeActiveTab to be notified when the active tab changes.
 */
export declare const getCurrentTab: () => Tab | undefined;
/**
 * Event fired when the content of the SQL editor changes.
 * Provides the full new editor content (not a diff) as the event payload.
 *
 * @example
 * ```typescript
 * onDidChangeEditorContent.event((newContent) => {
 *   console.log('Editor content changed:', newContent.length, 'characters');
 * });
 * ```
 */
export declare const onDidChangeEditorContent: Event<string>;
/**
 * Event fired when the database selection changes in the editor.
 * Provides the new database ID (see {@link Editor.databaseId}) as the
 * event payload.
 *
 * @example
 * ```typescript
 * onDidChangeEditorDatabase.event((databaseId) => {
 *   console.log('Database changed to:', databaseId);
 * });
 * ```
 */
export declare const onDidChangeEditorDatabase: Event<number>;
/**
 * Event fired when the catalog selection changes in the editor.
 * Provides the new catalog name as the event payload.
 *
 * NOTE(review): payload is typed `string`, but {@link Editor.catalog} is
 * `string | null` — confirm whether clearing the catalog fires this event
 * (and with what value).
 *
 * @example
 * ```typescript
 * onDidChangeEditorCatalog.event((catalog) => {
 *   console.log('Catalog changed to:', catalog);
 * });
 * ```
 */
export declare const onDidChangeEditorCatalog: Event<string>;
/**
 * Event fired when the schema selection changes in the editor.
 * Provides the new schema name (see {@link Editor.schema}) as the event payload.
 *
 * @example
 * ```typescript
 * onDidChangeEditorSchema.event((schema) => {
 *   console.log('Schema changed to:', schema);
 * });
 * ```
 */
export declare const onDidChangeEditorSchema: Event<string>;
/**
 * Event fired when the table selection changes in the editor.
 * Provides the new table name as the event payload.
 *
 * NOTE(review): payload is typed `string`, but {@link Editor.table} is
 * `string | null` — confirm whether clearing the table selection fires
 * this event (and with what value).
 *
 * @example
 * ```typescript
 * onDidChangeEditorTable.event((table) => {
 *   console.log('Table changed to:', table);
 * });
 * ```
 */
export declare const onDidChangeEditorTable: Event<string>;
/**
 * Event fired when a panel is closed in the current tab.
 * Provides the closed {@link Panel} object as the event payload.
 *
 * @example
 * ```typescript
 * onDidClosePanel.event((panel) => {
 *   console.log('Panel closed:', panel.id);
 * });
 * ```
 */
export declare const onDidClosePanel: Event<Panel>;
/**
 * Event fired when the active panel changes in the current tab.
 * Provides the newly active {@link Panel} object as the event payload.
 *
 * @example
 * ```typescript
 * onDidChangeActivePanel.event((panel) => {
 *   console.log('Active panel changed to:', panel.id);
 * });
 * ```
 */
export declare const onDidChangeActivePanel: Event<Panel>;
/**
 * Event fired when the title of the current tab changes.
 * Provides the new title (see {@link Tab.title}) as the event payload.
 *
 * @example
 * ```typescript
 * onDidChangeTabTitle.event((title) => {
 *   console.log('Tab title changed to:', title);
 * });
 * ```
 */
export declare const onDidChangeTabTitle: Event<string>;
/**
 * Event fired when a query starts running in the current tab.
 * Provides the {@link QueryContext} snapshot taken at execution time,
 * including the tab, editor state, CTAS info, and template parameters.
 *
 * @example
 * ```typescript
 * onDidQueryRun.event((query) => {
 *   console.log('Query started on database:', query.tab.editor.databaseId);
 *   console.log('Query content:', query.tab.editor.content);
 * });
 * ```
 */
export declare const onDidQueryRun: Event<QueryContext>;
/**
 * Event fired when a running query is stopped in the current tab.
 * Provides the {@link QueryContext} snapshot of the query that was stopped.
 *
 * @example
 * ```typescript
 * onDidQueryStop.event((query) => {
 *   console.log('Query stopped for database:', query.tab.editor.databaseId);
 * });
 * ```
 */
export declare const onDidQueryStop: Event<QueryContext>;
/**
 * Event fired when a query fails in the current tab.
 * Provides a {@link QueryErrorResultContext} with timing, the engine error
 * message, structured SupersetError details, and the executed SQL (when the
 * failure happened after Jinja rendering).
 *
 * (An earlier @todo here claimed the payload was a string; the declaration
 * below already carries the full error-result context.)
 *
 * @example
 * ```typescript
 * onDidQueryFail.event((result) => {
 *   console.error('Query failed:', result.errorMessage);
 * });
 * ```
 */
export declare const onDidQueryFail: Event<QueryErrorResultContext>;
/**
 * Event fired when a query succeeds in the current tab.
 * Provides a {@link QueryResultContext} with timing, the executed SQL,
 * applied row limits, and the result rows/columns.
 *
 * (An earlier @todo here claimed the payload was a string; the declaration
 * below already carries the full query-result context.)
 *
 * @example
 * ```typescript
 * onDidQuerySuccess.event((query) => {
 *   console.log('Query succeeded:', query.result.data);
 *   console.log('Query executed content:', query.executedSql);
 * });
 * ```
 */
export declare const onDidQuerySuccess: Event<QueryResultContext>;
/**
* Global Events and Functions
*
* These APIs are available across the entire SQL Lab interface and provide
* access to global state and events that affect the overall SQL Lab experience.
*/
/**
 * Gets all available databases in the Superset instance.
 *
 * @returns An array of database objects that the current user has access to.
 *
 * @example
 * ```typescript
 * const databases = getDatabases();
 * console.log(`Available databases: ${databases.length}`);
 * databases.forEach(db => {
 *   console.log(`- ${db.database_name} (ID: ${db.id})`);
 * });
 * ```
 *
 * @see onDidRefreshDatabases to be notified when this list changes.
 */
export declare const getDatabases: () => Database[];
/**
 * Gets all tabs currently open in SQL Lab.
 *
 * @returns An array of all open tab objects.
 *
 * @example
 * ```typescript
 * const tabs = getTabs();
 * console.log(`Open tabs: ${tabs.length}`);
 * tabs.forEach(tab => {
 *   console.log(`- ${tab.title} (ID: ${tab.id})`);
 * });
 * ```
 *
 * @see getCurrentTab for just the active tab.
 * @see onDidCloseTab to be notified when a tab is removed.
 */
export declare const getTabs: () => Tab[];
/**
 * Event fired when a tab is closed in SQL Lab.
 * Provides the closed {@link Tab} object as the event payload, so listeners
 * can clean up any tab-scoped resources they hold.
 *
 * @example
 * ```typescript
 * onDidCloseTab.event((tab) => {
 *   console.log('Tab closed:', tab.title);
 *   // Clean up any tab-specific resources
 * });
 * ```
 */
export declare const onDidCloseTab: Event<Tab>;
/**
 * Event fired when the active tab changes in SQL Lab.
 * Provides the newly active {@link Tab} object as the event payload.
 *
 * @example
 * ```typescript
 * onDidChangeActiveTab.event((tab) => {
 *   console.log('Active tab changed to:', tab.title);
 *   // Update UI based on new active tab
 * });
 * ```
 *
 * @see getCurrentTab to read the active tab on demand.
 */
export declare const onDidChangeActiveTab: Event<Tab>;
/**
 * Event fired when the databases list is refreshed.
 * This can happen when new databases are added or existing ones are modified.
 * The payload is empty; call {@link getDatabases} to read the updated list.
 *
 * @example
 * ```typescript
 * onDidRefreshDatabases.event(() => {
 *   console.log('Databases refreshed, updating UI...');
 *   const updatedDatabases = getDatabases();
 *   // Update UI with new database list
 * });
 * ```
 */
export declare const onDidRefreshDatabases: Event<void>;
/**
 * Event fired when the catalogs list is refreshed for the current database.
 * This typically happens when switching databases or when catalog metadata
 * is updated. The payload is empty; re-query the current state as needed.
 *
 * @example
 * ```typescript
 * onDidRefreshCatalogs.event(() => {
 *   console.log('Catalogs refreshed');
 *   // Update catalog dropdown or related UI
 * });
 * ```
 */
export declare const onDidRefreshCatalogs: Event<void>;
/**
 * Event fired when the schemas list is refreshed for the current database/catalog.
 * This happens when switching databases/catalogs or when schema metadata is
 * updated. The payload is empty; re-query the current state as needed.
 *
 * @example
 * ```typescript
 * onDidRefreshSchemas.event(() => {
 *   console.log('Schemas refreshed');
 *   // Update schema dropdown or related UI
 * });
 * ```
 */
export declare const onDidRefreshSchemas: Event<void>;
/**
 * Event fired when the tables list is refreshed for the current
 * database/catalog/schema. This happens when switching schema contexts or
 * when table metadata is updated. The payload is empty; re-query the
 * current state as needed.
 *
 * @example
 * ```typescript
 * onDidRefreshTables.event(() => {
 *   console.log('Tables refreshed');
 *   // Update table browser or autocomplete suggestions
 * });
 * ```
 */
export declare const onDidRefreshTables: Event<void>;
