mirror of
https://github.com/apache/superset.git
synced 2026-04-19 08:04:53 +00:00
fix: Rename apache-superset-cli to apache-superset-extensions-cli (#34883)
This commit is contained in:
committed by
GitHub
parent
ebfb14c353
commit
bcf156c969
22
superset-extensions-cli/CHANGELOG.md
Normal file
22
superset-extensions-cli/CHANGELOG.md
Normal file
@@ -0,0 +1,22 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
## Change Log
|
||||
|
||||
Changelogs will be added once we have the first stable release.
|
||||
216
superset-extensions-cli/LICENSE.txt
Normal file
216
superset-extensions-cli/LICENSE.txt
Normal file
@@ -0,0 +1,216 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
============================================================================
|
||||
APACHE SUPERSET SUBCOMPONENTS:
|
||||
|
||||
The Apache Superset project contains subcomponents with separate copyright
|
||||
notices and license terms. Your use of the source code for these
|
||||
subcomponents is subject to the terms and conditions of the following
|
||||
licenses.
|
||||
|
||||
========================================================================
|
||||
Third party SIL Open Font License v1.1 (OFL-1.1)
|
||||
========================================================================
|
||||
|
||||
(SIL OPEN FONT LICENSE Version 1.1) The Inter font family (https://github.com/rsms/inter)
|
||||
(SIL OPEN FONT LICENSE Version 1.1) The Fira Code font family (https://github.com/tonsky/FiraCode)
|
||||
110
superset-extensions-cli/README.md
Normal file
110
superset-extensions-cli/README.md
Normal file
@@ -0,0 +1,110 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# apache-superset-extensions-cli
|
||||
|
||||
[](https://badge.fury.io/py/apache-superset-extensions-cli)
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://www.python.org/downloads/)
|
||||
|
||||
Official command-line interface for building, bundling, and managing Apache Superset extensions. This CLI tool provides developers with everything needed to create, develop, and package extensions for the Superset ecosystem.
|
||||
|
||||
## 🚀 Features
|
||||
|
||||
- **Extension Scaffolding** - Generate initial folder structure and scaffold new extension projects
|
||||
- **Development Server** - Automatically rebuild extensions as files change during development
|
||||
- **Build System** - Build extension assets for production deployment
|
||||
- **Bundle Packaging** - Package extensions into distributable .supx files
|
||||
|
||||
## 📦 Installation
|
||||
|
||||
```bash
|
||||
pip install apache-superset-extensions-cli
|
||||
```
|
||||
|
||||
## 🛠️ Quick Start
|
||||
|
||||
### Available Commands
|
||||
|
||||
```bash
|
||||
# Generate initial folder structure and scaffold a new extension project
|
||||
superset-extensions init <extension-name>
|
||||
|
||||
# Automatically rebuild extension as files change during development
|
||||
superset-extensions dev
|
||||
|
||||
# Build extension assets for production
|
||||
superset-extensions build
|
||||
|
||||
# Package extension into a distributable .supx file
|
||||
superset-extensions bundle
|
||||
```
|
||||
|
||||
## 📋 Extension Structure
|
||||
|
||||
The CLI generates extensions with the following structure:
|
||||
|
||||
```
|
||||
extension_name/
|
||||
├── extension.json # Extension configuration and metadata
|
||||
├── frontend/ # Frontend code
|
||||
│ ├── src/ # TypeScript/React source files
|
||||
│ ├── webpack.config.js # Frontend build configuration
|
||||
│ ├── tsconfig.json # TypeScript configuration
|
||||
│ └── package.json # Frontend dependencies
|
||||
├── backend/ # Backend code
|
||||
│ ├── src/
|
||||
│ │ └── dataset_references/ # Python package source
|
||||
│ ├── tests/ # Backend tests
|
||||
│ ├── pyproject.toml # Python package configuration
|
||||
│ └── requirements.txt # Python dependencies
|
||||
├── dist/ # Built extension files (generated)
|
||||
│ ├── manifest.json # Generated extension manifest
|
||||
│ ├── frontend/
|
||||
│ │ └── dist/ # Built frontend assets
|
||||
│ │ ├── remoteEntry.*.js # Module federation entry
|
||||
│ │ └── *.js # Additional frontend bundles
|
||||
│ └── backend/
|
||||
│ └── dataset_references/ # Built backend package
|
||||
│ ├── __init__.py
|
||||
│ ├── api.py
|
||||
│ └── entrypoint.py
|
||||
├── dataset_references-1.0.0.supx # Packaged extension file (generated)
|
||||
└── README.md # Extension documentation
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
We welcome contributions! Please see the [Contributing Guide](https://github.com/apache/superset/blob/master/CONTRIBUTING.md) for details.
|
||||
|
||||
## 📄 License
|
||||
|
||||
Licensed under the Apache License, Version 2.0. See [LICENSE](https://github.com/apache/superset/blob/master/LICENSE.txt) for details.
|
||||
|
||||
## 🔗 Links
|
||||
|
||||
- [Apache Superset](https://superset.apache.org/)
|
||||
- [Extension Development Guide](https://superset.apache.org/docs/extensions/)
|
||||
- [API Documentation](https://superset.apache.org/docs/api/)
|
||||
- [GitHub Repository](https://github.com/apache/superset)
|
||||
- [Community](https://superset.apache.org/community/)
|
||||
|
||||
---
|
||||
|
||||
**Note**: This package is currently in early development. APIs and commands may change before the 1.0.0 release. Please check the [changelog](CHANGELOG.md) for breaking changes between versions.
|
||||
118
superset-extensions-cli/pyproject.toml
Normal file
118
superset-extensions-cli/pyproject.toml
Normal file
@@ -0,0 +1,118 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[project]
|
||||
name = "apache-superset-extensions-cli"
|
||||
version = "0.0.1rc1"
|
||||
description = "Official command-line interface for building, bundling, and managing Apache Superset extensions"
|
||||
readme = "README.md"
|
||||
authors = [
|
||||
{ name = "Apache Software Foundation", email = "dev@superset.apache.org" },
|
||||
]
|
||||
license = { file="LICENSE.txt" }
|
||||
requires-python = ">=3.10"
|
||||
keywords = ["superset", "apache", "cli", "extensions", "analytics", "business-intelligence", "development-tools"]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Database",
|
||||
"Topic :: Scientific/Engineering :: Visualization",
|
||||
"Topic :: Software Development :: Build Tools",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: System :: Software Distribution",
|
||||
]
|
||||
dependencies = [
|
||||
"apache-superset-core>=0.0.1rc1, <0.2",
|
||||
"click>=8.0.3",
|
||||
"jinja2>=3.1.6",
|
||||
"semver>=3.0.4",
|
||||
"tomli>=2.2.1; python_version < '3.11'",
|
||||
"watchdog>=6.0.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://superset.apache.org/"
|
||||
Documentation = "https://superset.apache.org/docs/"
|
||||
Repository = "https://github.com/apache/superset"
|
||||
"Bug Tracker" = "https://github.com/apache/superset/issues"
|
||||
Changelog = "https://github.com/apache/superset/blob/master/CHANGELOG.md"
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = [
|
||||
"pytest",
|
||||
"pytest-cov",
|
||||
"pytest-mock",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=76.0.0", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.setuptools]
|
||||
packages = ["superset_extensions_cli"]
|
||||
package-dir = { "" = "src" }
|
||||
|
||||
[project.scripts]
|
||||
superset-extensions = "superset_extensions_cli.cli:app"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = ["test_*.py", "*_test.py"]
|
||||
python_classes = ["Test*"]
|
||||
python_functions = ["test_*"]
|
||||
addopts = [
|
||||
"--strict-markers",
|
||||
"--strict-config",
|
||||
"--verbose",
|
||||
"--cov=superset_extensions_cli",
|
||||
"--cov-report=term-missing",
|
||||
"--cov-report=html:htmlcov"
|
||||
]
|
||||
markers = [
|
||||
"unit: Unit tests",
|
||||
"integration: Integration tests",
|
||||
"cli: CLI command tests",
|
||||
"slow: Slow running tests",
|
||||
]
|
||||
|
||||
[tool.coverage.run]
|
||||
source = ["src/superset_extensions_cli"]
|
||||
omit = ["*/tests/*", "*/test_*"]
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_lines = [
|
||||
"pragma: no cover",
|
||||
"def __repr__",
|
||||
"if self.debug:",
|
||||
"if settings.DEBUG",
|
||||
"raise AssertionError",
|
||||
"raise NotImplementedError",
|
||||
"if 0:",
|
||||
"if __name__ == .__main__.:",
|
||||
"class .*\\bProtocol\\):",
|
||||
"@(abc\\.)?abstractmethod",
|
||||
]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"src/superset_extensions_cli/*" = ["TID251"]
|
||||
@@ -0,0 +1,16 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
471
superset-extensions-cli/src/superset_extensions_cli/cli.py
Normal file
471
superset-extensions-cli/src/superset_extensions_cli/cli.py
Normal file
@@ -0,0 +1,471 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json # noqa: TID251
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, cast
|
||||
|
||||
import click
|
||||
import semver
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from superset_core.extensions.types import Manifest, Metadata
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
from watchdog.observers import Observer
|
||||
|
||||
from superset_extensions_cli.constants import MIN_NPM_VERSION
|
||||
from superset_extensions_cli.utils import read_json, read_toml
|
||||
|
||||
# Matches webpack module-federation entry filenames, e.g. "remoteEntry.abc123.js".
REMOTE_ENTRY_REGEX = re.compile(r"^remoteEntry\..+\.js$")
# Matches paths inside the generated frontend build output ("/frontend/dist"),
# used to skip build artifacts when watching for source changes.
FRONTEND_DIST_REGEX = re.compile(r"/frontend/dist")
|
||||
|
||||
|
||||
def validate_npm() -> None:
    """Abort if `npm` is not on PATH."""

    def _fail(message: str) -> None:
        # Report the error on stderr in red and stop the CLI with exit code 1.
        click.secho(message, err=True, fg="red")
        sys.exit(1)

    if shutil.which("npm") is None:
        _fail("❌ npm is not installed or not on your PATH.")

    try:
        result = subprocess.run(  # noqa: S603
            ["npm", "-v"],  # noqa: S607
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        if result.returncode != 0:
            _fail(f"❌ Failed to run `npm -v`: {result.stderr.strip()}")

        npm_version = result.stdout.strip()
        if semver.compare(npm_version, MIN_NPM_VERSION) < 0:
            _fail(
                f"❌ npm version {npm_version} is lower than the required {MIN_NPM_VERSION}."  # noqa: E501
            )

    except FileNotFoundError:
        # npm could vanish between the `which` probe and the run (TOCTOU).
        _fail("❌ npm was not found when checking its version.")
|
||||
|
||||
|
||||
def init_frontend_deps(frontend_dir: Path) -> None:
    """
    If node_modules is missing under `frontend_dir`, run `npm ci` if package-lock.json
    exists, otherwise run `npm i`.
    """
    if (frontend_dir / "node_modules").exists():
        # Dependencies are already installed; nothing to do.
        return

    # `npm ci` gives reproducible installs but requires a lockfile.
    has_lockfile = (frontend_dir / "package-lock.json").exists()
    subcommand = "ci" if has_lockfile else "i"
    click.secho(f"⚙️ node_modules not found, running `npm {subcommand}`…", fg="cyan")

    validate_npm()
    result = subprocess.run(  # noqa: S603
        ["npm", subcommand],  # noqa: S607
        cwd=frontend_dir,
        text=True,
    )
    if result.returncode != 0:
        click.secho(f"❌ `npm {subcommand}` failed. Aborting.", err=True, fg="red")
        sys.exit(1)
    click.secho("✅ Dependencies installed", fg="green")
|
||||
|
||||
|
||||
def clean_dist(cwd: Path) -> None:
    """Remove any existing ``dist/`` directory under *cwd* and recreate it empty."""
    target = cwd / "dist"
    if target.exists():
        shutil.rmtree(target)
    target.mkdir(parents=True)
|
||||
|
||||
|
||||
def clean_dist_frontend(cwd: Path) -> None:
    """Delete the built frontend assets under ``dist/frontend``, if present."""
    stale_assets = cwd / "dist" / "frontend"
    if stale_assets.exists():
        shutil.rmtree(stale_assets)
|
||||
|
||||
|
||||
def build_manifest(cwd: Path, remote_entry: str | None) -> Manifest:
    """Assemble the dist manifest from ``extension.json`` and build outputs.

    The ``frontend`` section is emitted only when extension.json declares both
    ``contributions`` and ``moduleFederation`` AND a remoteEntry asset exists;
    the ``backend`` section only when ``entryPoints`` are declared. Aborts the
    process when extension.json is missing or empty.
    """
    extension: Metadata = cast(Metadata, read_json(cwd / "extension.json"))
    if not extension:
        click.secho("❌ extension.json not found.", err=True, fg="red")
        sys.exit(1)

    manifest: Manifest = {
        "id": extension["id"],
        "name": extension["name"],
        "version": extension["version"],
        "permissions": extension["permissions"],
        "dependencies": extension.get("dependencies", []),
    }

    frontend = extension.get("frontend")
    if frontend and remote_entry:
        contributions = frontend.get("contributions")
        module_federation = frontend.get("moduleFederation")
        if contributions and module_federation:
            manifest["frontend"] = {
                "contributions": contributions,
                "moduleFederation": module_federation,
                "remoteEntry": remote_entry,
            }

    entry_points = extension.get("backend", {}).get("entryPoints")
    if entry_points:
        manifest["backend"] = {"entryPoints": entry_points}

    return manifest
|
||||
|
||||
|
||||
def write_manifest(cwd: Path, manifest: Manifest) -> None:
    """Serialize *manifest* as pretty-printed, key-sorted JSON to ``dist/manifest.json``."""
    manifest_path = cwd / "dist" / "manifest.json"
    payload = json.dumps(manifest, indent=2, sort_keys=True)
    manifest_path.write_text(payload)
    click.secho("✅ Manifest updated", fg="green")
|
||||
|
||||
|
||||
def run_frontend_build(frontend_dir: Path) -> subprocess.CompletedProcess[str]:
    """Invoke ``npm run build`` inside *frontend_dir* and return the completed process."""
    click.echo()
    click.secho("⚙️ Building frontend assets…", fg="cyan")
    build_cmd = ["npm", "run", "build"]
    return subprocess.run(  # noqa: S603, S607
        build_cmd,
        cwd=frontend_dir,
        text=True,
    )
|
||||
|
||||
|
||||
def copy_frontend_dist(cwd: Path) -> str:
    """Mirror built frontend assets into ``dist/`` and return the remoteEntry filename.

    Aborts the process when no file matching ``remoteEntry.*.js`` was produced.
    """
    out_root = cwd / "dist"
    built_assets = cwd / "frontend" / "dist"
    entry_name: str | None = None

    for src in (p for p in built_assets.rglob("*") if p.is_file()):
        if REMOTE_ENTRY_REGEX.match(src.name):
            entry_name = src.name
        dest = out_root / src.relative_to(cwd)
        dest.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src, dest)

    if not entry_name:
        click.secho("❌ No remote entry file found.", err=True, fg="red")
        sys.exit(1)
    return entry_name
|
||||
|
||||
|
||||
def copy_backend_files(cwd: Path) -> None:
    """Copy backend files matching the glob patterns in extension.json into ``dist/``.

    Aborts the process when extension.json is missing or empty.
    """
    out_root = cwd / "dist"
    extension = read_json(cwd / "extension.json")
    if not extension:
        click.secho("❌ No extension.json file found.", err=True, fg="red")
        sys.exit(1)

    patterns = extension.get("backend", {}).get("files", [])
    for pattern in patterns:
        for src in cwd.glob(pattern):
            if not src.is_file():
                continue
            dest = out_root / src.relative_to(cwd)
            dest.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(src, dest)
|
||||
|
||||
|
||||
def rebuild_frontend(cwd: Path, frontend_dir: Path) -> str | None:
    """Clean and rebuild the frontend, returning the remoteEntry filename.

    Returns ``None`` when the npm build fails; the error is reported here and
    the caller decides whether to continue.
    """
    clean_dist_frontend(cwd)

    build_result = run_frontend_build(frontend_dir)
    if build_result.returncode != 0:
        click.secho("❌ Frontend build failed", fg="red")
        return None

    entry_file = copy_frontend_dist(cwd)
    click.secho("✅ Frontend rebuilt", fg="green")
    return entry_file
|
||||
|
||||
|
||||
def rebuild_backend(cwd: Path) -> None:
    """Sync backend files into ``dist/`` (the manifest is written separately)."""
    copy_backend_files(cwd)
    click.secho("✅ Backend files synced", fg="green")
|
||||
|
||||
|
||||
class FrontendChangeHandler(FileSystemEventHandler):
    """Watchdog handler that invokes a rebuild callback on frontend changes.

    Events whose path matches FRONTEND_DIST_REGEX (build output) are
    ignored so the build's own writes do not retrigger it.
    """

    def __init__(self, trigger_build: Callable[[], None]):
        self.trigger_build = trigger_build

    def on_any_event(self, event: Any) -> None:
        # Only react to source edits, not to generated dist artifacts.
        if not FRONTEND_DIST_REGEX.search(event.src_path):
            click.secho(f"🔁 Frontend change detected: {event.src_path}", fg="yellow")
            self.trigger_build()
|
||||
|
||||
|
||||
@click.group(help="CLI for validating and bundling Superset extensions.")
def app() -> None:
    # Root command group; subcommands register themselves via @app.command().
    pass
|
||||
|
||||
|
||||
@app.command()
def validate() -> None:
    """Validate the local environment (delegates to validate_npm)."""
    validate_npm()

    click.secho("✅ Validation successful", fg="green")
|
||||
|
||||
|
||||
@app.command()
@click.pass_context
def build(ctx: click.Context) -> None:
    """Run a full one-shot build: frontend, backend, then the manifest."""
    ctx.invoke(validate)
    project_root = Path.cwd()
    frontend_dir = project_root / "frontend"
    backend_dir = project_root / "backend"

    clean_dist(project_root)

    remote_entry = None
    # Frontend is optional; build it only when the directory is present.
    if frontend_dir.exists():
        init_frontend_deps(frontend_dir)
        remote_entry = rebuild_frontend(project_root, frontend_dir)

    # Backend is optional too and only synced when a pyproject.toml parses.
    if backend_dir.exists() and read_toml(backend_dir / "pyproject.toml"):
        rebuild_backend(project_root)

    # The manifest references the frontend remote entry (may be None).
    write_manifest(project_root, build_manifest(project_root, remote_entry))

    click.secho("✅ Full build completed in dist/", fg="green")
|
||||
|
||||
|
||||
@app.command()
@click.option(
    "--output",
    "-o",
    type=click.Path(path_type=Path, dir_okay=True, file_okay=True, writable=True),
    help="Optional output path or filename for the bundle.",
)
@click.pass_context
def bundle(ctx: click.Context, output: Path | None) -> None:
    """Build the extension, then zip dist/ into a .supx archive."""
    ctx.invoke(build)

    cwd = Path.cwd()
    dist_dir = cwd / "dist"
    manifest_path = dist_dir / "manifest.json"

    # `build` should have produced the manifest; bail out if it is missing.
    if not manifest_path.exists():
        click.secho(
            "❌ dist/manifest.json not found. Run `build` first.", err=True, fg="red"
        )
        sys.exit(1)

    # Default bundle name is derived from the manifest id and version.
    manifest = json.loads(manifest_path.read_text())
    id_ = manifest["id"]
    version = manifest["version"]
    default_filename = f"{id_}-{version}.supx"

    # --output may be absent, an existing directory, or an explicit filename.
    if output is None:
        zip_path = Path(default_filename)
    elif output.is_dir():
        zip_path = output / default_filename
    else:
        zip_path = output

    try:
        # Zip everything under dist/, storing paths relative to dist/.
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
            for file in dist_dir.rglob("*"):
                if file.is_file():
                    arcname = file.relative_to(dist_dir)
                    zipf.write(file, arcname)
    except Exception as ex:
        click.secho(f"❌ Failed to create bundle: {ex}", err=True, fg="red")
        sys.exit(1)

    click.secho(f"✅ Bundle created: {zip_path}", fg="green")
|
||||
|
||||
|
||||
@app.command()
@click.pass_context
def dev(ctx: click.Context) -> None:
    """Build once, then watch frontend/backend and rebuild on changes."""
    cwd = Path.cwd()
    frontend_dir = cwd / "frontend"
    backend_dir = cwd / "backend"

    clean_dist(cwd)

    # Build frontend if it exists
    remote_entry = None
    if frontend_dir.exists():
        init_frontend_deps(frontend_dir)
        remote_entry = rebuild_frontend(cwd, frontend_dir)

    # Build backend if it exists
    # NOTE(review): unlike `build`, this does not check for a parseable
    # pyproject.toml before syncing backend files — confirm intentional.
    if backend_dir.exists():
        rebuild_backend(cwd)

    manifest = build_manifest(cwd, remote_entry)
    write_manifest(cwd, manifest)

    def frontend_watcher() -> None:
        # Rebuild the frontend and refresh the manifest with the new
        # remoteEntry filename returned by the rebuild.
        if frontend_dir.exists():
            if (remote_entry := rebuild_frontend(cwd, frontend_dir)) is not None:
                manifest = build_manifest(cwd, remote_entry)
                write_manifest(cwd, manifest)

    def backend_watcher() -> None:
        # Re-sync backend files and rewrite the existing manifest, if any.
        if backend_dir.exists():
            rebuild_backend(cwd)
            dist_dir = cwd / "dist"
            manifest_path = dist_dir / "manifest.json"
            if manifest_path.exists():
                manifest = json.loads(manifest_path.read_text())
                write_manifest(cwd, manifest)

    # Build watch message based on existing directories
    watch_dirs = []
    if frontend_dir.exists():
        watch_dirs.append(str(frontend_dir))
    if backend_dir.exists():
        watch_dirs.append(str(backend_dir))

    if watch_dirs:
        click.secho(f"👀 Watching for changes in: {', '.join(watch_dirs)}", fg="green")
    else:
        click.secho("⚠️ No frontend or backend directories found to watch", fg="yellow")

    observer = Observer()

    # Only set up watchers for directories that exist
    if frontend_dir.exists():
        frontend_handler = FrontendChangeHandler(trigger_build=frontend_watcher)
        observer.schedule(frontend_handler, str(frontend_dir), recursive=True)

    if backend_dir.exists():
        # Plain handler with a patched callback; backend has no dist filter.
        backend_handler = FileSystemEventHandler()
        backend_handler.on_any_event = lambda event: backend_watcher()
        observer.schedule(backend_handler, str(backend_dir), recursive=True)

    if watch_dirs:
        observer.start()

        try:
            # Keep the main thread alive while observer threads run.
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            click.secho("\n🛑 Stopping watch mode", fg="blue")
            observer.stop()

        observer.join()
    else:
        click.secho("❌ No directories to watch. Exiting.", fg="red")
|
||||
|
||||
|
||||
@app.command()
def init() -> None:
    """Interactively scaffold a new extension project under ./<id>."""
    id_ = click.prompt("Extension ID (unique identifier, alphanumeric only)", type=str)
    if not re.match(r"^[a-zA-Z0-9_]+$", id_):
        click.secho(
            "❌ ID must be alphanumeric (letters, digits, underscore).", fg="red"
        )
        sys.exit(1)

    name = click.prompt("Extension name (human-readable display name)", type=str)
    version = click.prompt("Initial version", default="0.1.0")
    license_choice = click.prompt("License", default="Apache-2.0")
    include_frontend = click.confirm("Include frontend?", default=True)
    include_backend = click.confirm("Include backend?", default=True)

    target_dir = Path.cwd() / id_
    if target_dir.exists():
        click.secho(f"❌ Directory {target_dir} already exists.", fg="red")
        sys.exit(1)

    # Templates ship alongside this module; render them with Jinja.
    templates_dir = Path(__file__).parent / "templates"
    env = Environment(loader=FileSystemLoader(templates_dir))  # noqa: S701
    render_ctx = {
        "id": id_,
        "name": name,
        "include_frontend": include_frontend,
        "include_backend": include_backend,
        "license": license_choice,
        "version": version,
    }

    # Create the project root with its extension manifest.
    target_dir.mkdir()
    (target_dir / "extension.json").write_text(
        env.get_template("extension.json.j2").render(render_ctx)
    )
    click.secho("✅ Created extension.json", fg="green")

    if include_frontend:
        # Scaffold the frontend with its package.json.
        frontend_dir = target_dir / "frontend"
        frontend_dir.mkdir()
        (frontend_dir / "package.json").write_text(
            env.get_template("frontend/package.json.j2").render(render_ctx)
        )
        click.secho("✅ Created frontend folder structure", fg="green")

    if include_backend:
        # Scaffold the backend with its pyproject.toml.
        backend_dir = target_dir / "backend"
        backend_dir.mkdir()
        (backend_dir / "pyproject.toml").write_text(
            env.get_template("backend/pyproject.toml.j2").render(render_ctx)
        )
        click.secho("✅ Created backend folder structure", fg="green")

    click.secho(
        f"🎉 Extension {name} (ID: {id_}) initialized at {target_dir}", fg="cyan"
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow invoking the CLI by running this module directly.
    app()
|
||||
@@ -0,0 +1,19 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
# Minimum npm version accepted by the CLI's environment validation.
MIN_NPM_VERSION = "10.8.2"
|
||||
@@ -0,0 +1,4 @@
|
||||
[project]
|
||||
name = "{{ id }}"
|
||||
version = "{{ version }}"
|
||||
license = "{{ license }}"
|
||||
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"id": "{{ id }}",
|
||||
"name": "{{ name }}",
|
||||
"version": "{{ version }}",
|
||||
"license": "{{ license }}",
|
||||
{% if include_frontend -%}
|
||||
"frontend": {
|
||||
"contributions": {
|
||||
"commands": [],
|
||||
"views": [],
|
||||
"menus": []
|
||||
},
|
||||
"moduleFederation": {
|
||||
"exposes": ["./index"]
|
||||
}
|
||||
},
|
||||
{% endif -%}
|
||||
{% if include_backend -%}
|
||||
"backend": {
|
||||
"entryPoints": ["{{ id }}.entrypoint"],
|
||||
"files": ["backend/src/{{ id }}/**/*.py"]
|
||||
},
|
||||
{% endif -%}
|
||||
"permissions": []
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"name": "{{ id }}",
|
||||
"version": "{{ version }}",
|
||||
"main": "dist/main.js",
|
||||
"types": "dist/publicAPI.d.ts",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"start": "webpack serve --mode development",
|
||||
"build": "webpack --stats-error-details --mode production"
|
||||
},
|
||||
"keywords": [],
|
||||
"private": true,
|
||||
"author": "",
|
||||
"license": "{{ license }}",
|
||||
"description": "",
|
||||
"peerDependencies": {
|
||||
"@apache-superset/core": "file:../../../superset-frontend/packages/superset-core",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/preset-react": "^7.26.3",
|
||||
"@babel/preset-typescript": "^7.26.0",
|
||||
"@types/react": "^19.0.10",
|
||||
"copy-webpack-plugin": "^13.0.0",
|
||||
"install": "^0.13.0",
|
||||
"npm": "^11.1.0",
|
||||
"ts-loader": "^9.5.2",
|
||||
"typescript": "^5.8.2",
|
||||
"webpack": "^5.98.0",
|
||||
"webpack-cli": "^6.0.1",
|
||||
"webpack-dev-server": "^5.2.0"
|
||||
}
|
||||
}
|
||||
42
superset-extensions-cli/src/superset_extensions_cli/utils.py
Normal file
42
superset-extensions-cli/src/superset_extensions_cli/utils.py
Normal file
@@ -0,0 +1,42 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json # noqa: TID251
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
import tomllib
|
||||
else:
|
||||
import tomli as tomllib
|
||||
|
||||
|
||||
def read_toml(path: Path) -> dict[str, Any] | None:
|
||||
if not path.is_file():
|
||||
return None
|
||||
|
||||
with path.open("rb") as f:
|
||||
return tomllib.load(f)
|
||||
|
||||
|
||||
def read_json(path: Path) -> dict[str, Any] | None:
|
||||
path = Path(path)
|
||||
if not path.is_file():
|
||||
return None
|
||||
|
||||
return json.loads(path.read_text())
|
||||
206
superset-extensions-cli/tests/README.md
Normal file
206
superset-extensions-cli/tests/README.md
Normal file
@@ -0,0 +1,206 @@
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
|
||||
# distributed with this work for additional information
|
||||
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
|
||||
# "License"); you may not use this file except in compliance
|
||||
|
||||
# with the License. You may obtain a copy of the License at
|
||||
|
||||
#
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
#
|
||||
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
|
||||
# software distributed under the License is distributed on an
|
||||
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
|
||||
# KIND, either express or implied. See the License for the
|
||||
|
||||
# specific language governing permissions and limitations
|
||||
|
||||
# under the License.
|
||||
|
||||
# Superset Extensions CLI Tests
|
||||
|
||||
This directory contains tests for the superset-extensions-cli package, focusing on the `init` command and other CLI functionality.
|
||||
|
||||
## Test Structure
|
||||
|
||||
### Core Test Files
|
||||
|
||||
- **`test_cli_init.py`**: Comprehensive tests for the `init` command scaffolder
|
||||
- **`test_templates.py`**: Unit tests for Jinja2 template rendering
|
||||
- **`conftest.py`**: Pytest fixtures and configuration
|
||||
- **`utils.py`**: Reusable testing utilities and helpers
|
||||
|
||||
### Test Categories
|
||||
|
||||
#### Unit Tests (`@pytest.mark.unit`)
|
||||
|
||||
- Template rendering functionality
|
||||
- Individual function testing
|
||||
- Input validation logic
|
||||
|
||||
#### Integration Tests (`@pytest.mark.integration`)
|
||||
|
||||
- Complete CLI command workflows
|
||||
- End-to-end scaffolding processes
|
||||
|
||||
#### CLI Tests (`@pytest.mark.cli`)
|
||||
|
||||
- Click command interface testing
|
||||
- User input simulation
|
||||
- Command output verification
|
||||
|
||||
## Testing Approach for Scaffolders/Generators
|
||||
|
||||
The tests use these patterns for testing code generators:
|
||||
|
||||
### 1. Isolated Environment Testing
|
||||
|
||||
```python
|
||||
@pytest.fixture
|
||||
def isolated_filesystem(tmp_path):
|
||||
"""Provide isolated temporary directory for each test."""
|
||||
```
|
||||
|
||||
### 2. Click CLI Testing Framework
|
||||
|
||||
```python
|
||||
from click.testing import CliRunner
|
||||
runner = CliRunner()
|
||||
result = runner.invoke(app, ["init"], input="...")
|
||||
```
|
||||
|
||||
### 3. File Structure Validation
|
||||
|
||||
```python
|
||||
from tests.utils import assert_file_structure, assert_directory_structure
|
||||
assert_file_structure(extension_path, expected_files)
|
||||
```
|
||||
|
||||
### 4. Template Content Verification
|
||||
|
||||
```python
|
||||
from tests.utils import assert_json_content
|
||||
assert_json_content(json_path, {"name": "expected_value"})
|
||||
```
|
||||
|
||||
### 5. Parametrized Testing
|
||||
|
||||
```python
|
||||
@pytest.mark.parametrize("include_frontend,include_backend", [
|
||||
(True, True), (True, False), (False, True), (False, False)
|
||||
])
|
||||
```
|
||||
|
||||
## Key Test Cases
|
||||
|
||||
### Init Command Tests
|
||||
|
||||
- ✅ Creates extension with both frontend and backend
|
||||
- ✅ Creates frontend-only extensions
|
||||
- ✅ Creates backend-only extensions
|
||||
- ✅ Validates extension naming (alphanumeric + underscore only)
|
||||
- ✅ Handles existing directory conflicts
|
||||
- ✅ Verifies generated file content accuracy
|
||||
- ✅ Tests custom version and license inputs
|
||||
- ✅ Integration test for complete workflow
|
||||
|
||||
### Template Rendering Tests
|
||||
|
||||
- ✅ Extension.json template with various configurations
|
||||
- ✅ Package.json template rendering
|
||||
- ✅ Pyproject.toml template rendering
|
||||
- ✅ Template validation with different names/versions/licenses
|
||||
- ✅ JSON validity verification
|
||||
- ✅ Whitespace and formatting checks
|
||||
|
||||
## Running Tests
|
||||
|
||||
### All tests
|
||||
|
||||
```bash
|
||||
pytest
|
||||
```
|
||||
|
||||
### Specific test categories
|
||||
|
||||
```bash
|
||||
pytest -m unit # Unit tests only
|
||||
pytest -m integration # Integration tests only
|
||||
pytest -m cli # CLI tests only
|
||||
```
|
||||
|
||||
### With coverage
|
||||
|
||||
```bash
|
||||
pytest --cov=superset_extensions_cli --cov-report=html
|
||||
```
|
||||
|
||||
### Specific test files
|
||||
|
||||
```bash
|
||||
pytest tests/test_cli_init.py
|
||||
pytest tests/test_templates.py
|
||||
```
|
||||
|
||||
## Reusable Testing Infrastructure
|
||||
|
||||
The testing infrastructure is designed for reusability:
|
||||
|
||||
### Test Utilities (`tests/utils.py`)
|
||||
|
||||
- `assert_file_exists()` / `assert_directory_exists()`
|
||||
- `assert_file_structure()` / `assert_directory_structure()`
|
||||
- `assert_json_content()` / `load_json_file()`
|
||||
- `create_test_extension_structure()` - Helper for expected structures
|
||||
|
||||
### Fixtures (`tests/conftest.py`)
|
||||
|
||||
- `cli_runner` - Click CLI runner
|
||||
- `isolated_filesystem` - Temporary directory with cleanup
|
||||
- `extension_params` - Default extension parameters
|
||||
- `cli_input_*` - Pre-configured user inputs
|
||||
|
||||
This infrastructure can be easily extended for testing additional CLI commands like `build`, `bundle`, `dev`, and `validate`.
|
||||
|
||||
## Best Practices Implemented
|
||||
|
||||
1. **Isolation**: Each test runs in its own temporary directory
|
||||
2. **Comprehensive Coverage**: Tests cover happy paths, edge cases, and error conditions
|
||||
3. **Realistic Testing**: Uses actual Click CLI runner with realistic user input
|
||||
4. **Content Verification**: Validates both file existence and content accuracy
|
||||
5. **Template Testing**: Separates template rendering logic from CLI integration
|
||||
6. **Reusable Components**: Utilities and fixtures designed for extension
|
||||
7. **Clear Documentation**: Well-documented test cases and helper functions
|
||||
8. **Type Safety**: Uses modern Python type annotations with `from __future__ import annotations`
|
||||
16
superset-extensions-cli/tests/__init__.py
Normal file
16
superset-extensions-cli/tests/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
136
superset-extensions-cli/tests/conftest.py
Normal file
136
superset-extensions-cli/tests/conftest.py
Normal file
@@ -0,0 +1,136 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
|
||||
@pytest.fixture
def cli_runner():
    """Provide a Click CLI runner for testing commands."""
    # Function-scoped: each test gets its own runner instance.
    return CliRunner()
|
||||
|
||||
|
||||
@pytest.fixture
def isolated_filesystem(tmp_path):
    """
    Provide an isolated temporary directory and change to it.

    This ensures tests don't interfere with each other.
    """
    original_cwd = Path.cwd()
    os.chdir(tmp_path)
    try:
        yield tmp_path
    finally:
        # Restore the original cwd even when the test body raises, so a
        # failing test cannot leave later tests running inside tmp_path.
        os.chdir(original_cwd)
|
||||
|
||||
|
||||
@pytest.fixture
def extension_params():
    """Default parameters for extension creation."""
    # Mirrors the values the `init` command prompts for.
    return {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "0.1.0",
        "license": "Apache-2.0",
        "include_frontend": True,
        "include_backend": True,
    }
|
||||
|
||||
|
||||
@pytest.fixture
def cli_input_both():
    """CLI input for creating extension with both frontend and backend."""
    # Prompt order: id, name, version, license, frontend?, backend?.
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n"


@pytest.fixture
def cli_input_frontend_only():
    """CLI input for creating extension with frontend only."""
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\ny\nn\n"


@pytest.fixture
def cli_input_backend_only():
    """CLI input for creating extension with backend only."""
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\nn\ny\n"


@pytest.fixture
def cli_input_neither():
    """CLI input for creating extension with neither frontend nor backend."""
    return "test_extension\nTest Extension\n0.1.0\nApache-2.0\nn\nn\n"
|
||||
|
||||
|
||||
@pytest.fixture
def extension_setup_for_dev():
    """Set up extension structure for dev testing."""

    def _setup(base_path: Path) -> None:
        # Local import keeps the fixture module import-light.
        import json

        # Create extension.json
        extension_json = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        (base_path / "extension.json").write_text(json.dumps(extension_json))

        # Create frontend and backend directories
        (base_path / "frontend").mkdir()
        (base_path / "backend").mkdir()

    # Returned as a factory so the test chooses where to scaffold.
    return _setup
|
||||
|
||||
|
||||
@pytest.fixture
def extension_setup_for_bundling():
    """Set up a complete extension structure ready for bundling."""

    def _setup(base_path: Path) -> None:
        # Local import keeps the fixture module import-light.
        import json

        # Create dist directory with manifest and files
        dist_dir = base_path / "dist"
        dist_dir.mkdir(parents=True)

        # Create manifest.json
        manifest = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }
        (dist_dir / "manifest.json").write_text(json.dumps(manifest))

        # Create some frontend files
        frontend_dir = dist_dir / "frontend" / "dist"
        frontend_dir.mkdir(parents=True)
        (frontend_dir / "remoteEntry.abc123.js").write_text("// remote entry")
        (frontend_dir / "main.js").write_text("// main js")

        # Create some backend files
        backend_dir = dist_dir / "backend" / "src" / "test_extension"
        backend_dir.mkdir(parents=True)
        (backend_dir / "__init__.py").write_text("# init")

    # Returned as a factory so the test chooses where to scaffold.
    return _setup
|
||||
552
superset-extensions-cli/tests/test_cli_build.py
Normal file
552
superset-extensions-cli/tests/test_cli_build.py
Normal file
@@ -0,0 +1,552 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import (
|
||||
app,
|
||||
build_manifest,
|
||||
clean_dist,
|
||||
copy_backend_files,
|
||||
copy_frontend_dist,
|
||||
init_frontend_deps,
|
||||
)
|
||||
|
||||
from tests.utils import (
|
||||
assert_directory_exists,
|
||||
assert_file_exists,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def extension_with_build_structure():
    """Create extension structure suitable for build testing."""

    def _create(base_path, include_frontend=True, include_backend=True):
        # Create required directories
        if include_frontend:
            frontend_dir = base_path / "frontend"
            frontend_dir.mkdir()

        if include_backend:
            backend_dir = base_path / "backend"
            backend_dir.mkdir()

        # Create extension.json
        extension_json = {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "1.0.0",
            "permissions": [],
        }

        if include_frontend:
            extension_json["frontend"] = {
                "contributions": {"commands": []},
                "moduleFederation": {"exposes": ["./index"]},
            }

        if include_backend:
            extension_json["backend"] = {"entryPoints": ["test_extension.entrypoint"]}

        (base_path / "extension.json").write_text(json.dumps(extension_json))

        # The conditionals short-circuit, so the dir names are only read
        # when the corresponding directory was actually created above.
        return {
            "frontend_dir": frontend_dir if include_frontend else None,
            "backend_dir": backend_dir if include_backend else None,
        }

    return _create
|
||||
|
||||
|
||||
# Build Command Tests
@pytest.mark.cli
@patch("superset_extensions_cli.cli.validate_npm")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.read_toml")
def test_build_command_success_flow(
    mock_read_toml,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_validate_npm,
    cli_runner,
    isolated_filesystem,
    extension_with_build_structure,
):
    """Test build command success flow."""
    # Setup mocks
    # Mock parameters are injected bottom-up relative to the @patch stack.
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_read_toml.return_value = {"project": {"name": "test"}}

    # Create extension structure
    dirs = extension_with_build_structure(isolated_filesystem)

    result = cli_runner.invoke(app, ["build"])

    assert result.exit_code == 0
    assert "✅ Full build completed in dist/" in result.output

    # Verify function calls
    mock_validate_npm.assert_called_once()
    mock_init_frontend_deps.assert_called_once_with(dirs["frontend_dir"])
    mock_rebuild_frontend.assert_called_once()
    mock_rebuild_backend.assert_called_once()


@pytest.mark.cli
@patch("superset_extensions_cli.cli.validate_npm")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
def test_build_command_handles_frontend_build_failure(
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_validate_npm,
    cli_runner,
    isolated_filesystem,
    extension_with_build_structure,
):
    """Test build command handles frontend build failure."""
    # Setup mocks
    # rebuild_frontend returns None when the frontend build fails.
    mock_rebuild_frontend.return_value = None  # Indicates failure

    # Create extension structure
    extension_with_build_structure(isolated_filesystem)

    result = cli_runner.invoke(app, ["build"])

    # Command should complete and create manifest even with frontend failure
    assert result.exit_code == 0
    assert "✅ Full build completed in dist/" in result.output
|
||||
|
||||
|
||||
# Clean Dist Tests
@pytest.mark.unit
def test_clean_dist_removes_existing_dist_directory(isolated_filesystem):
    """Test clean_dist removes existing dist directory and recreates it."""
    # Create dist directory with some content
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    (dist_dir / "some_file.txt").write_text("test content")
    (dist_dir / "subdir").mkdir()

    clean_dist(isolated_filesystem)

    # Should exist but be empty
    assert_directory_exists(dist_dir)
    assert list(dist_dir.iterdir()) == []


@pytest.mark.unit
def test_clean_dist_creates_dist_directory_if_missing(isolated_filesystem):
    """Test clean_dist creates dist directory when it doesn't exist."""
    # Precondition: nothing at dist/ before the call.
    dist_dir = isolated_filesystem / "dist"
    assert not dist_dir.exists()

    clean_dist(isolated_filesystem)

    assert_directory_exists(dist_dir)
|
||||
|
||||
|
||||
# Frontend Dependencies Tests
|
||||
@pytest.mark.unit
@patch("subprocess.run")
def test_init_frontend_deps_skips_when_node_modules_exists(
    mock_run, isolated_filesystem
):
    """init_frontend_deps is a no-op when node_modules already exists."""
    frontend = isolated_filesystem / "frontend"
    frontend.mkdir()
    (frontend / "node_modules").mkdir()

    init_frontend_deps(frontend)

    # Dependencies are already installed, so npm must not be invoked.
    mock_run.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("subprocess.run")
@patch("superset_extensions_cli.cli.validate_npm")
def test_init_frontend_deps_runs_npm_i_when_missing(
    mock_validate_npm, mock_run, isolated_filesystem
):
    """init_frontend_deps installs dependencies when node_modules is missing."""
    frontend = isolated_filesystem / "frontend"
    frontend.mkdir()

    # Simulate a successful npm install.
    mock_run.return_value = Mock(returncode=0)

    init_frontend_deps(frontend)

    # npm availability is checked first, then the install runs.
    mock_validate_npm.assert_called_once()
    mock_run.assert_called_once_with(["npm", "i"], cwd=frontend, text=True)
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("subprocess.run")
@patch("superset_extensions_cli.cli.validate_npm")
def test_init_frontend_deps_exits_on_npm_ci_failure(
    mock_validate_npm, mock_run, isolated_filesystem
):
    """init_frontend_deps exits with code 1 when the npm install fails."""
    frontend = isolated_filesystem / "frontend"
    frontend.mkdir()

    # Simulate npm exiting with a non-zero status.
    mock_run.return_value = Mock(returncode=1)

    with pytest.raises(SystemExit) as exc_info:
        init_frontend_deps(frontend)

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
# Build Manifest Tests
|
||||
@pytest.mark.unit
def test_build_manifest_creates_correct_manifest_structure(isolated_filesystem):
    """build_manifest should mirror extension.json and record the remote entry name."""
    spec = {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": ["read_data"],
        "dependencies": ["some_dep"],
        "frontend": {
            "contributions": {"commands": ["test_command"]},
            "moduleFederation": {"exposes": ["./index"]},
        },
        "backend": {"entryPoints": ["test_extension.entrypoint"]},
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    manifest = build_manifest(isolated_filesystem, "remoteEntry.abc123.js")

    # Top-level metadata is carried over verbatim.
    as_dict = dict(manifest)
    assert as_dict["id"] == "test_extension"
    assert as_dict["name"] == "Test Extension"
    assert as_dict["version"] == "1.0.0"
    assert as_dict["permissions"] == ["read_data"]
    assert as_dict["dependencies"] == ["some_dep"]

    # Frontend section keeps contributions/moduleFederation and gains the
    # hashed remoteEntry filename produced by the build.
    assert "frontend" in manifest
    frontend = manifest["frontend"]
    assert frontend["contributions"] == {"commands": ["test_command"]}
    assert frontend["moduleFederation"] == {"exposes": ["./index"]}
    assert frontend["remoteEntry"] == "remoteEntry.abc123.js"

    # Backend section is preserved as-is.
    assert "backend" in manifest
    assert manifest["backend"]["entryPoints"] == ["test_extension.entrypoint"]
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_build_manifest_handles_minimal_extension(isolated_filesystem):
    """build_manifest copes with extension.json lacking frontend/backend keys."""
    spec = {
        "id": "minimal_extension",
        "name": "Minimal Extension",
        "version": "0.1.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    manifest = build_manifest(isolated_filesystem, None)

    as_dict = dict(manifest)
    assert as_dict["id"] == "minimal_extension"
    assert as_dict["name"] == "Minimal Extension"
    assert as_dict["version"] == "0.1.0"
    assert as_dict["permissions"] == []
    # dependencies defaults to an empty list when omitted from the spec.
    assert as_dict["dependencies"] == []
    assert "frontend" not in manifest
    assert "backend" not in manifest
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_build_manifest_exits_when_extension_json_missing(isolated_filesystem):
    """build_manifest exits with code 1 when extension.json is absent."""
    with pytest.raises(SystemExit) as exc_info:
        build_manifest(isolated_filesystem, "remoteEntry.js")

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
# Frontend Build Tests
|
||||
@pytest.mark.unit
def test_clean_dist_frontend_removes_frontend_dist(isolated_filesystem):
    """clean_dist_frontend removes only dist/frontend, keeping dist itself."""
    from superset_extensions_cli.cli import clean_dist_frontend

    # Build dist/frontend with one file inside it.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir(parents=True)
    frontend_out = dist_dir / "frontend"
    frontend_out.mkdir()
    (frontend_out / "some_file.js").write_text("content")

    clean_dist_frontend(isolated_filesystem)

    # dist survives; the frontend subtree is gone.
    assert dist_dir.exists()
    assert not frontend_out.exists()
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_clean_dist_frontend_handles_nonexistent_directory(isolated_filesystem):
    """clean_dist_frontend is a silent no-op when no dist directory exists."""
    from superset_extensions_cli.cli import clean_dist_frontend

    # No dist directory at all — the call must simply not raise.
    clean_dist_frontend(isolated_filesystem)
|
||||
|
||||
# Should not raise error
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_run_frontend_build_with_output_messages(isolated_filesystem):
    """run_frontend_build invokes `npm run build` in the frontend directory."""
    from superset_extensions_cli.cli import run_frontend_build

    frontend = isolated_filesystem / "frontend"
    frontend.mkdir()

    with patch("subprocess.run") as mock_run:
        mock_run.return_value = Mock(returncode=0)

        outcome = run_frontend_build(frontend)

        # The subprocess result is returned untouched.
        assert outcome.returncode == 0
        mock_run.assert_called_once_with(
            ["npm", "run", "build"], cwd=frontend, text=True
        )
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "return_code,expected_result",
    [
        (0, "remoteEntry.abc123.js"),
        (1, None),
    ],
)
def test_rebuild_frontend_handles_build_results(
    isolated_filesystem, return_code, expected_result
):
    """rebuild_frontend returns the remoteEntry name on success, None on failure."""
    from superset_extensions_cli.cli import rebuild_frontend

    frontend_dir = isolated_filesystem / "frontend"
    frontend_dir.mkdir()

    if return_code == 0:
        # The success path copies frontend/dist, so give it a remoteEntry file.
        built = frontend_dir / "dist"
        built.mkdir()
        (built / "remoteEntry.abc123.js").write_text("content")

    # Destination dist directory must already exist.
    (isolated_filesystem / "dist").mkdir()

    with patch("superset_extensions_cli.cli.run_frontend_build") as mock_build:
        mock_build.return_value = Mock(returncode=return_code)

        outcome = rebuild_frontend(isolated_filesystem, frontend_dir)

        assert outcome == expected_result
|
||||
|
||||
|
||||
# Backend Build Tests
|
||||
@pytest.mark.unit
def test_rebuild_backend_calls_copy_and_shows_message(isolated_filesystem):
    """rebuild_backend delegates to copy_backend_files for the project root."""
    from superset_extensions_cli.cli import rebuild_backend

    spec = {
        "id": "test",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    with patch("superset_extensions_cli.cli.copy_backend_files") as mock_copy:
        rebuild_backend(isolated_filesystem)

        mock_copy.assert_called_once_with(isolated_filesystem)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_skips_non_files(isolated_filesystem):
    """copy_backend_files copies regular files only, never directories."""
    # Backend source tree containing one file plus an empty subdirectory.
    src_root = isolated_filesystem / "backend" / "src" / "test_ext"
    src_root.mkdir(parents=True)
    (src_root / "__init__.py").write_text("# init")
    (src_root / "subdir").mkdir()

    # Deliberately use a glob that matches directories as well as files.
    spec = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
        "backend": {
            "files": ["backend/src/test_ext/**/*"]
        },
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    # Prepare an empty dist directory.
    clean_dist(isolated_filesystem)

    copy_backend_files(isolated_filesystem)

    # The real file made it across.
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "__init__.py")

    # The directory match was skipped: if the path exists at all it is empty.
    copied_subdir = dist_dir / "backend" / "src" / "test_ext" / "subdir"
    if copied_subdir.exists():
        assert list(copied_subdir.iterdir()) == []
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_copies_matched_files(isolated_filesystem):
    """copy_backend_files copies every file matching the configured globs."""
    src_root = isolated_filesystem / "backend" / "src" / "test_ext"
    src_root.mkdir(parents=True)
    (src_root / "__init__.py").write_text("# init")
    (src_root / "main.py").write_text("# main")

    spec = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
        "backend": {"files": ["backend/src/test_ext/**/*.py"]},
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    # Prepare an empty dist directory.
    clean_dist(isolated_filesystem)

    copy_backend_files(isolated_filesystem)

    # Both .py files should now live under dist/.
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "__init__.py")
    assert_file_exists(dist_dir / "backend" / "src" / "test_ext" / "main.py")
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_handles_no_backend_config(isolated_filesystem):
    """copy_backend_files tolerates extension.json without a backend section."""
    spec = {
        "id": "frontend_only",
        "name": "Frontend Only Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    clean_dist(isolated_filesystem)

    # Nothing to copy, but the call must not raise.
    copy_backend_files(isolated_filesystem)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_backend_files_exits_when_extension_json_missing(isolated_filesystem):
    """copy_backend_files exits with code 1 when extension.json is absent."""
    clean_dist(isolated_filesystem)

    with pytest.raises(SystemExit) as exc_info:
        copy_backend_files(isolated_filesystem)

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
# Frontend Dist Copy Tests
|
||||
@pytest.mark.unit
def test_copy_frontend_dist_copies_files_correctly(isolated_filesystem):
    """copy_frontend_dist mirrors the frontend build output into dist/."""
    # Frontend build output: a remoteEntry, a main bundle, and a nested asset.
    built = isolated_filesystem / "frontend" / "dist"
    built.mkdir(parents=True)
    (built / "remoteEntry.abc123.js").write_text("remote entry content")
    (built / "main.js").write_text("main js content")
    assets = built / "assets"
    assets.mkdir()
    (assets / "style.css").write_text("css content")

    # Prepare an empty dist directory.
    clean_dist(isolated_filesystem)

    remote_entry = copy_frontend_dist(isolated_filesystem)

    # The hashed remoteEntry filename is detected and returned.
    assert remote_entry == "remoteEntry.abc123.js"

    # Every file, including nested ones, is copied under dist/frontend/dist.
    dist_dir = isolated_filesystem / "dist"
    assert_file_exists(dist_dir / "frontend" / "dist" / "remoteEntry.abc123.js")
    assert_file_exists(dist_dir / "frontend" / "dist" / "main.js")
    assert_file_exists(dist_dir / "frontend" / "dist" / "assets" / "style.css")
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_copy_frontend_dist_exits_when_no_remote_entry(isolated_filesystem):
    """copy_frontend_dist exits with code 1 when no remoteEntry file exists."""
    # Build output without any remoteEntry.* file.
    built = isolated_filesystem / "frontend" / "dist"
    built.mkdir(parents=True)
    (built / "main.js").write_text("main content")

    clean_dist(isolated_filesystem)

    with pytest.raises(SystemExit) as exc_info:
        copy_frontend_dist(isolated_filesystem)

    assert exc_info.value.code == 1
|
||||
255
superset-extensions-cli/tests/test_cli_bundle.py
Normal file
255
superset-extensions-cli/tests/test_cli_bundle.py
Normal file
@@ -0,0 +1,255 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import zipfile
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app
|
||||
|
||||
from tests.utils import assert_file_exists
|
||||
|
||||
|
||||
# Bundle Command Tests
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_creates_zip_with_default_name(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """bundle produces <id>-<version>.supx containing the dist tree."""
    # build is mocked out; the fixture lays down the dist structure directly.
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    result = cli_runner.invoke(app, ["bundle"])

    assert result.exit_code == 0
    assert "✅ Bundle created: test_extension-1.0.0.supx" in result.output

    # The archive exists on disk...
    archive = isolated_filesystem / "test_extension-1.0.0.supx"
    assert_file_exists(archive)

    # ...and holds the manifest plus frontend and backend files.
    with zipfile.ZipFile(archive, "r") as zipf:
        entries = zipf.namelist()
        assert "manifest.json" in entries
        assert "frontend/dist/remoteEntry.abc123.js" in entries
        assert "frontend/dist/main.js" in entries
        assert "backend/src/test_extension/__init__.py" in entries
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_with_custom_output_filename(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """bundle honours --output with an explicit filename."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    custom_name = "my_custom_bundle.supx"
    result = cli_runner.invoke(app, ["bundle", "--output", custom_name])

    assert result.exit_code == 0
    assert f"✅ Bundle created: {custom_name}" in result.output

    # The archive is written under the requested name.
    assert_file_exists(isolated_filesystem / custom_name)
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_with_output_directory(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """bundle writes a default-named archive into a given output directory."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    out_dir = isolated_filesystem / "output"
    out_dir.mkdir()

    result = cli_runner.invoke(app, ["bundle", "--output", str(out_dir)])

    assert result.exit_code == 0

    # Inside the directory the default <id>-<version>.supx name is used.
    expected_path = out_dir / "test_extension-1.0.0.supx"
    assert_file_exists(expected_path)
    assert f"✅ Bundle created: {expected_path}" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_fails_without_manifest(
    mock_build, cli_runner, isolated_filesystem
):
    """bundle fails with exit code 1 when dist/manifest.json is missing."""
    # build "succeeds" but leaves an empty dist with no manifest.
    mock_build.return_value = None
    (isolated_filesystem / "dist").mkdir()

    result = cli_runner.invoke(app, ["bundle"])

    assert result.exit_code == 1
    assert "dist/manifest.json not found" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_handles_zip_creation_error(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """bundle surfaces zip-file creation failures as exit code 1."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    # Point the output at a location that cannot be written.
    bad_target = isolated_filesystem / "nonexistent" / "bundle.supx"

    # Force the archive constructor itself to fail.
    with patch("zipfile.ZipFile", side_effect=OSError("Permission denied")):
        result = cli_runner.invoke(app, ["bundle", "--output", str(bad_target)])

    assert result.exit_code == 1
    assert "Failed to create bundle" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_includes_all_files_recursively(
    mock_build, cli_runner, isolated_filesystem
):
    """bundle walks dist/ recursively, archiving every nested file."""
    mock_build.return_value = None

    # Lay down a multi-level dist tree by hand.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir(parents=True)

    # Manifest at the archive root.
    manifest = {
        "id": "complex_extension",
        "name": "Complex Extension",
        "version": "2.1.0",
        "permissions": [],
    }
    (dist_dir / "manifest.json").write_text(json.dumps(manifest))

    # Frontend output with a nested assets directory.
    frontend_dir = dist_dir / "frontend" / "dist"
    frontend_dir.mkdir(parents=True)
    (frontend_dir / "remoteEntry.xyz789.js").write_text("// entry")
    assets_dir = frontend_dir / "assets"
    assets_dir.mkdir()
    (assets_dir / "style.css").write_text("/* css */")
    (assets_dir / "image.png").write_bytes(b"fake image data")

    # Backend package with a nested utils module.
    backend_dir = dist_dir / "backend" / "src" / "complex_extension"
    backend_dir.mkdir(parents=True)
    (backend_dir / "__init__.py").write_text("# init")
    (backend_dir / "core.py").write_text("# core")
    utils_dir = backend_dir / "utils"
    utils_dir.mkdir()
    (utils_dir / "helpers.py").write_text("# helpers")

    result = cli_runner.invoke(app, ["bundle"])

    assert result.exit_code == 0

    archive = isolated_filesystem / "complex_extension-2.1.0.supx"
    assert_file_exists(archive)

    with zipfile.ZipFile(archive, "r") as zipf:
        entries = set(zipf.namelist())

        # Every file from every level of the tree must be present.
        expected_files = {
            "manifest.json",
            "frontend/dist/remoteEntry.xyz789.js",
            "frontend/dist/assets/style.css",
            "frontend/dist/assets/image.png",
            "backend/src/complex_extension/__init__.py",
            "backend/src/complex_extension/core.py",
            "backend/src/complex_extension/utils/helpers.py",
        }

        assert expected_files.issubset(entries), (
            f"Missing files: {expected_files - entries}"
        )
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_short_option(
    mock_build, cli_runner, isolated_filesystem, extension_setup_for_bundling
):
    """bundle accepts -o as shorthand for --output."""
    mock_build.return_value = None
    extension_setup_for_bundling(isolated_filesystem)

    result = cli_runner.invoke(app, ["bundle", "-o", "short_option.supx"])

    assert result.exit_code == 0
    assert "✅ Bundle created: short_option.supx" in result.output
    assert_file_exists(isolated_filesystem / "short_option.supx")
|
||||
|
||||
|
||||
@pytest.mark.cli
@pytest.mark.parametrize("output_option", ["--output", "-o"])
@patch("superset_extensions_cli.cli.build")
def test_bundle_command_output_options(
    mock_build,
    output_option,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_bundling,
):
    """Test bundle command with both long and short output options."""
    # Mock the build command
    mock_build.return_value = None

    extension_setup_for_bundling(isolated_filesystem)

    filename = f"test_{output_option.replace('-', '')}.supx"
    result = cli_runner.invoke(app, ["bundle", output_option, filename])

    assert result.exit_code == 0
    # FIX: the assertion previously compared against a literal placeholder
    # string ("(unknown)") instead of the parametrized filename, so it could
    # never validate the success message. Interpolate the actual filename.
    assert f"✅ Bundle created: {filename}" in result.output
    assert_file_exists(isolated_filesystem / filename)
|
||||
238
superset-extensions-cli/tests/test_cli_dev.py
Normal file
238
superset-extensions-cli/tests/test_cli_dev.py
Normal file
@@ -0,0 +1,238 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app, FrontendChangeHandler
|
||||
|
||||
|
||||
# Dev Command Tests
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.Observer")
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.build_manifest")
@patch("superset_extensions_cli.cli.write_manifest")
def test_dev_command_starts_watchers(
    mock_write_manifest,
    mock_build_manifest,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    mock_observer_class,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_dev,
):
    """dev should kick off the file-system watchers after the initial build."""
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_build_manifest.return_value = {"name": "test", "version": "1.0.0"}

    observer = Mock()
    mock_observer_class.return_value = observer

    extension_setup_for_dev(isolated_filesystem)

    # `dev` blocks forever, so run it on a daemon thread.
    def run_dev():
        try:
            cli_runner.invoke(app, ["dev"], catch_exceptions=False)
        except KeyboardInterrupt:
            pass

    worker = threading.Thread(target=run_dev)
    worker.daemon = True
    worker.start()

    # Give the command a moment to wire everything up.
    time.sleep(0.1)

    # Watchers were registered and started.
    observer.schedule.assert_called()
    observer.start.assert_called_once()

    # The initial build pipeline ran end to end.
    mock_init_frontend_deps.assert_called_once()
    mock_rebuild_frontend.assert_called()
    mock_rebuild_backend.assert_called()
    mock_build_manifest.assert_called()
    mock_write_manifest.assert_called()
|
||||
|
||||
|
||||
@pytest.mark.cli
@patch("superset_extensions_cli.cli.init_frontend_deps")
@patch("superset_extensions_cli.cli.rebuild_frontend")
@patch("superset_extensions_cli.cli.rebuild_backend")
@patch("superset_extensions_cli.cli.build_manifest")
@patch("superset_extensions_cli.cli.write_manifest")
def test_dev_command_initial_build(
    mock_write_manifest,
    mock_build_manifest,
    mock_rebuild_backend,
    mock_rebuild_frontend,
    mock_init_frontend_deps,
    cli_runner,
    isolated_filesystem,
    extension_setup_for_dev,
):
    """dev performs one full build before entering its watch loop."""
    mock_rebuild_frontend.return_value = "remoteEntry.abc123.js"
    mock_build_manifest.return_value = {"name": "test", "version": "1.0.0"}

    extension_setup_for_dev(isolated_filesystem)

    with patch("superset_extensions_cli.cli.Observer") as mock_observer_class:
        mock_observer_class.return_value = Mock()

        # Break out of the watch loop immediately via KeyboardInterrupt.
        with patch("time.sleep", side_effect=KeyboardInterrupt):
            try:
                cli_runner.invoke(app, ["dev"], catch_exceptions=False)
            except KeyboardInterrupt:
                pass

    # Each stage of the initial build was invoked with the project paths.
    frontend_dir = isolated_filesystem / "frontend"
    mock_init_frontend_deps.assert_called_once_with(frontend_dir)
    mock_rebuild_frontend.assert_called_once_with(isolated_filesystem, frontend_dir)
    mock_rebuild_backend.assert_called_once_with(isolated_filesystem)
|
||||
|
||||
|
||||
# FrontendChangeHandler Tests
|
||||
@pytest.mark.unit
def test_frontend_change_handler_init():
    """The handler stores the build callback it was constructed with."""
    callback = Mock()
    handler = FrontendChangeHandler(trigger_build=callback)

    assert handler.trigger_build == callback
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_frontend_change_handler_ignores_dist_changes():
    """Events under frontend/dist must not re-trigger a build."""
    callback = Mock()
    handler = FrontendChangeHandler(trigger_build=callback)

    # An event coming from the build-output directory.
    event = Mock()
    event.src_path = "/path/to/frontend/dist/file.js"

    handler.on_any_event(event)

    # Output churn is ignored to avoid rebuild loops.
    callback.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "source_path",
    [
        "/path/to/frontend/src/component.tsx",
        "/path/to/frontend/webpack.config.js",
        "/path/to/frontend/package.json",
    ],
)
def test_frontend_change_handler_triggers_on_source_changes(source_path):
    """Events on real source/config files trigger exactly one build."""
    callback = Mock()
    handler = FrontendChangeHandler(trigger_build=callback)

    event = Mock()
    event.src_path = source_path

    handler.on_any_event(event)

    callback.assert_called_once()
|
||||
|
||||
|
||||
# Dev Utility Functions Tests
|
||||
@pytest.mark.unit
def test_frontend_watcher_function_coverage(isolated_filesystem):
    """Exercise the frontend-watcher logic: rebuild, then manifest rewrite."""
    spec = {
        "id": "test_extension",
        "name": "Test Extension",
        "version": "1.0.0",
        "permissions": [],
    }
    (isolated_filesystem / "extension.json").write_text(json.dumps(spec))

    (isolated_filesystem / "dist").mkdir()

    with patch("superset_extensions_cli.cli.rebuild_frontend") as mock_rebuild:
        with patch("superset_extensions_cli.cli.build_manifest") as mock_build:
            with patch("superset_extensions_cli.cli.write_manifest") as mock_write:
                mock_rebuild.return_value = "remoteEntry.abc123.js"
                mock_build.return_value = {"name": "test", "version": "1.0.0"}

                frontend_dir = isolated_filesystem / "frontend"
                frontend_dir.mkdir()

                # Mirror the watcher: rebuild, and on success regenerate
                # and persist the manifest.
                remote_entry = mock_rebuild(isolated_filesystem, frontend_dir)
                if remote_entry is not None:
                    manifest = mock_build(isolated_filesystem, remote_entry)
                    mock_write(isolated_filesystem, manifest)

                mock_rebuild.assert_called_once_with(isolated_filesystem, frontend_dir)
                mock_build.assert_called_once_with(
                    isolated_filesystem, "remoteEntry.abc123.js"
                )
                mock_write.assert_called_once_with(
                    isolated_filesystem, {"name": "test", "version": "1.0.0"}
                )
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_backend_watcher_function_coverage(isolated_filesystem):
    """Test backend watcher function for coverage."""
    # Seed dist/manifest.json so the watcher has something to re-publish.
    dist_dir = isolated_filesystem / "dist"
    dist_dir.mkdir()
    seeded_manifest = {"name": "test", "version": "1.0.0"}
    (dist_dir / "manifest.json").write_text(json.dumps(seeded_manifest))

    with patch("superset_extensions_cli.cli.rebuild_backend") as rebuild_mock, patch(
        "superset_extensions_cli.cli.write_manifest"
    ) as write_mock:
        # Mirror the backend_watcher flow: rebuild, then re-write the manifest
        # if one exists on disk.
        rebuild_mock(isolated_filesystem)

        manifest_file = dist_dir / "manifest.json"
        if manifest_file.exists():
            write_mock(isolated_filesystem, json.loads(manifest_file.read_text()))

        rebuild_mock.assert_called_once_with(isolated_filesystem)
        write_mock.assert_called_once()
|
||||
362
superset-extensions-cli/tests/test_cli_init.py
Normal file
362
superset-extensions-cli/tests/test_cli_init.py
Normal file
@@ -0,0 +1,362 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app
|
||||
|
||||
from tests.utils import (
|
||||
assert_directory_exists,
|
||||
assert_directory_structure,
|
||||
assert_file_exists,
|
||||
assert_file_structure,
|
||||
assert_json_content,
|
||||
create_test_extension_structure,
|
||||
load_json_file,
|
||||
)
|
||||
|
||||
|
||||
# Init Command Tests
|
||||
@pytest.mark.cli
def test_init_creates_extension_with_both_frontend_and_backend(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that init creates a complete extension with both frontend and backend."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)

    assert result.exit_code == 0, f"Command failed with output: {result.output}"
    assert (
        "🎉 Extension Test Extension (ID: test_extension) initialized" in result.output
    )

    # The extension root must have been created.
    root = isolated_filesystem / "test_extension"
    assert_directory_exists(root, "main extension directory")

    # Compare against the canonical full layout (frontend + backend).
    layout = create_test_extension_structure(
        isolated_filesystem,
        "test_extension",
        include_frontend=True,
        include_backend=True,
    )
    assert_directory_structure(root, layout["expected_dirs"])
    assert_file_structure(root, layout["expected_files"])
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_creates_extension_with_frontend_only(
    cli_runner, isolated_filesystem, cli_input_frontend_only
):
    """Test that init creates extension with only frontend components."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_frontend_only)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"

    root = isolated_filesystem / "test_extension"
    assert_directory_exists(root)

    # Frontend scaffolding must exist.
    assert_directory_exists(root / "frontend")
    assert_file_exists(root / "frontend" / "package.json")

    # Backend scaffolding must not have been generated.
    assert not (root / "backend").exists(), (
        "Backend directory should not exist for frontend-only extension"
    )
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_creates_extension_with_backend_only(
    cli_runner, isolated_filesystem, cli_input_backend_only
):
    """Test that init creates extension with only backend components."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_backend_only)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"

    root = isolated_filesystem / "test_extension"
    assert_directory_exists(root)

    # Backend scaffolding must exist.
    assert_directory_exists(root / "backend")
    assert_file_exists(root / "backend" / "pyproject.toml")

    # Frontend scaffolding must not have been generated.
    assert not (root / "frontend").exists(), (
        "Frontend directory should not exist for backend-only extension"
    )
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_creates_extension_with_neither_frontend_nor_backend(
    cli_runner, isolated_filesystem, cli_input_neither
):
    """Test that init creates minimal extension with neither frontend nor backend."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_neither)
    assert result.exit_code == 0, f"Command failed with output: {result.output}"

    root = isolated_filesystem / "test_extension"
    assert_directory_exists(root)

    # Only the metadata file should exist; no scaffolding folders.
    assert_file_exists(root / "extension.json")
    for scaffold in ("frontend", "backend"):
        assert not (root / scaffold).exists()
|
||||
|
||||
|
||||
@pytest.mark.cli
@pytest.mark.parametrize(
    "invalid_name,expected_error",
    [
        ("test-extension", "must be alphanumeric"),
        ("test extension", "must be alphanumeric"),
        ("test.extension", "must be alphanumeric"),
        ("test@extension", "must be alphanumeric"),
        ("", "must be alphanumeric"),
    ],
)
def test_init_validates_extension_name(
    cli_runner, isolated_filesystem, invalid_name, expected_error
):
    """Test that init validates extension names according to regex pattern."""
    # The id prompt is rejected before the remaining prompts are consumed.
    result = cli_runner.invoke(
        app, ["init"], input=f"{invalid_name}\n0.1.0\nApache-2.0\ny\ny\n"
    )

    assert result.exit_code == 1, (
        f"Expected command to fail for invalid name '{invalid_name}'"
    )
    assert expected_error in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_accepts_numeric_extension_name(cli_runner, isolated_filesystem):
    """Test that init accepts numeric extension ids like '123'."""
    result = cli_runner.invoke(
        app, ["init"], input="123\n123\n0.1.0\nApache-2.0\ny\ny\n"
    )

    assert result.exit_code == 0, f"Numeric id '123' should be valid: {result.output}"
    assert Path("123").exists(), "Directory for '123' should be created"
|
||||
|
||||
|
||||
@pytest.mark.cli
@pytest.mark.parametrize(
    "valid_id", ["test123", "TestExtension", "test_extension_123", "MyExt_1"]
)
def test_init_with_valid_alphanumeric_names(cli_runner, valid_id):
    """Test that init accepts various valid alphanumeric names."""
    with cli_runner.isolated_filesystem():
        prompt_answers = f"{valid_id}\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n"
        result = cli_runner.invoke(app, ["init"], input=prompt_answers)

        assert result.exit_code == 0, (
            f"Valid name '{valid_id}' was rejected: {result.output}"
        )
        assert Path(valid_id).exists(), f"Directory for '{valid_id}' was not created"
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_fails_when_directory_already_exists(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that init fails gracefully when target directory already exists."""
    # Pre-create the target so the command hits the collision path.
    (isolated_filesystem / "test_extension").mkdir()

    result = cli_runner.invoke(app, ["init"], input=cli_input_both)

    assert result.exit_code == 1, "Command should fail when directory already exists"
    assert "already exists" in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_extension_json_content_is_correct(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that the generated extension.json has the correct content."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    manifest_path = isolated_filesystem / "test_extension" / "extension.json"

    # Flat, top-level fields.
    assert_json_content(
        manifest_path,
        {
            "id": "test_extension",
            "name": "Test Extension",
            "version": "0.1.0",
            "license": "Apache-2.0",
            "permissions": [],
        },
    )

    manifest = load_json_file(manifest_path)

    # Nested frontend section.
    assert "frontend" in manifest
    fe = manifest["frontend"]
    assert "contributions" in fe
    assert "moduleFederation" in fe
    assert fe["contributions"] == {"commands": [], "views": [], "menus": []}
    assert fe["moduleFederation"] == {"exposes": ["./index"]}

    # Nested backend section.
    assert "backend" in manifest
    be = manifest["backend"]
    assert "entryPoints" in be
    assert "files" in be
    assert be["entryPoints"] == ["test_extension.entrypoint"]
    assert be["files"] == ["backend/src/test_extension/**/*.py"]
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_frontend_package_json_content_is_correct(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that the generated frontend/package.json has the correct content."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    pkg_path = isolated_filesystem / "test_extension" / "frontend" / "package.json"

    # Flat package metadata.
    assert_json_content(
        pkg_path,
        {
            "name": "test_extension",
            "version": "0.1.0",
            "license": "Apache-2.0",
        },
    )

    # Nested sections.
    pkg = load_json_file(pkg_path)
    assert "scripts" in pkg
    assert "build" in pkg["scripts"]
    assert "peerDependencies" in pkg
    assert "@apache-superset/core" in pkg["peerDependencies"]
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_backend_pyproject_toml_is_created(
    cli_runner, isolated_filesystem, cli_input_both
):
    """Test that the generated backend/pyproject.toml file is created."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    pyproject_path = (
        isolated_filesystem / "test_extension" / "backend" / "pyproject.toml"
    )
    assert_file_exists(pyproject_path, "backend pyproject.toml")

    # Spot-check the rendered values without parsing the TOML.
    rendered = pyproject_path.read_text()
    for expected in ("test_extension", "0.1.0", "Apache-2.0"):
        assert expected in rendered
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_command_output_messages(cli_runner, isolated_filesystem, cli_input_both):
    """Test that init command produces expected output messages."""
    result = cli_runner.invoke(app, ["init"], input=cli_input_both)
    assert result.exit_code == 0

    # Every progress/success message must appear in the CLI output.
    expected_messages = (
        "✅ Created extension.json",
        "✅ Created frontend folder structure",
        "✅ Created backend folder structure",
        "🎉 Extension Test Extension (ID: test_extension) initialized",
    )
    for message in expected_messages:
        assert message in result.output
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_init_with_custom_version_and_license(cli_runner, isolated_filesystem):
    """Test init with custom version and license parameters."""
    result = cli_runner.invoke(
        app, ["init"], input="my_extension\nMy Extension\n2.1.0\nMIT\ny\nn\n"
    )
    assert result.exit_code == 0

    # The metadata file must carry the custom version and license through.
    assert_json_content(
        isolated_filesystem / "my_extension" / "extension.json",
        {
            "id": "my_extension",
            "name": "My Extension",
            "version": "2.1.0",
            "license": "MIT",
        },
    )
|
||||
|
||||
|
||||
@pytest.mark.integration
@pytest.mark.cli
def test_full_init_workflow_integration(cli_runner, isolated_filesystem):
    """Integration test for the complete init workflow."""
    # Drive the init command end-to-end with realistic prompt answers.
    result = cli_runner.invoke(
        app, ["init"], input="awesome_charts\nAwesome Charts\n1.0.0\nApache-2.0\ny\ny\n"
    )
    assert result.exit_code == 0

    # Full layout (frontend + backend) must match the canonical structure.
    root = isolated_filesystem / "awesome_charts"
    layout = create_test_extension_structure(
        isolated_filesystem,
        "awesome_charts",
        include_frontend=True,
        include_backend=True,
    )
    assert_directory_structure(root, layout["expected_dirs"])
    assert_file_structure(root, layout["expected_files"])

    # extension.json carries the prompt answers verbatim.
    manifest = load_json_file(root / "extension.json")
    assert manifest["id"] == "awesome_charts"
    assert manifest["name"] == "Awesome Charts"
    assert manifest["version"] == "1.0.0"
    assert manifest["license"] == "Apache-2.0"

    # Frontend and backend artifacts reference the extension id.
    assert load_json_file(root / "frontend" / "package.json")["name"] == "awesome_charts"
    assert "awesome_charts" in (root / "backend" / "pyproject.toml").read_text()
|
||||
195
superset-extensions-cli/tests/test_cli_validate.py
Normal file
195
superset-extensions-cli/tests/test_cli_validate.py
Normal file
@@ -0,0 +1,195 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.cli import app, validate_npm
|
||||
|
||||
|
||||
# Validate Command Tests
|
||||
@pytest.mark.cli
def test_validate_command_success(cli_runner):
    """Test validate command succeeds when npm is available and valid."""
    with patch("superset_extensions_cli.cli.validate_npm") as validate_mock:
        result = cli_runner.invoke(app, ["validate"])

    assert result.exit_code == 0
    assert "✅ Validation successful" in result.output
    validate_mock.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.cli
def test_validate_command_calls_npm_validation(cli_runner):
    """Test that validate command calls the npm validation function."""
    with patch("superset_extensions_cli.cli.validate_npm") as validate_mock:
        cli_runner.invoke(app, ["validate"])

    validate_mock.assert_called_once()
|
||||
|
||||
|
||||
# Validate NPM Function Tests
|
||||
# Validate NPM Function Tests
@pytest.mark.unit
@patch("shutil.which")
def test_validate_npm_fails_when_npm_not_on_path(mock_which):
    """Test validate_npm fails when npm is not on PATH."""
    mock_which.return_value = None  # npm binary absent from PATH

    with pytest.raises(SystemExit) as exc_info:
        validate_npm()

    assert exc_info.value.code == 1
    mock_which.assert_called_once_with("npm")
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_fails_when_npm_command_fails(mock_run, mock_which):
    """Test validate_npm fails when npm -v command fails."""
    mock_which.return_value = "/usr/bin/npm"
    # Non-zero exit from `npm -v` must abort validation.
    mock_run.return_value = Mock(returncode=1, stderr="Command failed")

    with pytest.raises(SystemExit) as exc_info:
        validate_npm()

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_fails_when_version_too_low(mock_run, mock_which):
    """Test validate_npm fails when npm version is below minimum."""
    mock_which.return_value = "/usr/bin/npm"
    # 9.x is below the required minimum npm version.
    mock_run.return_value = Mock(returncode=0, stdout="9.0.0\n", stderr="")

    with pytest.raises(SystemExit) as exc_info:
        validate_npm()

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "npm_version",
    [
        "10.8.2",  # Exact minimum version
        "11.0.0",  # Higher version
        "10.9.0-alpha.1",  # Pre-release version higher than minimum
    ],
)
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_succeeds_with_valid_versions(mock_run, mock_which, npm_version):
    """Test validate_npm succeeds when npm version is valid."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout=f"{npm_version}\n", stderr="")

    # A valid version must not raise SystemExit.
    validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "npm_version,should_pass",
    [
        ("10.8.2", True),  # Exact minimum version
        ("10.8.1", False),  # Slightly lower version
        ("10.9.0-alpha.1", True),  # Pre-release version higher than minimum
        ("9.9.9", False),  # Much lower version
        ("11.0.0", True),  # Much higher version
    ],
)
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_version_comparison_edge_cases(
    mock_run, mock_which, npm_version, should_pass
):
    """Test npm version comparison with edge cases."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout=f"{npm_version}\n", stderr="")

    if not should_pass:
        # Versions below the minimum abort with SystemExit.
        with pytest.raises(SystemExit):
            validate_npm()
    else:
        # Acceptable versions must complete without raising.
        validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_handles_file_not_found_exception(mock_run, mock_which):
    """Test validate_npm handles FileNotFoundError gracefully."""
    mock_which.return_value = "/usr/bin/npm"
    # Simulate the npm binary vanishing between `which` and `run`.
    mock_run.side_effect = FileNotFoundError("Test error")

    with pytest.raises(SystemExit) as exc_info:
        validate_npm()

    assert exc_info.value.code == 1
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "exception_type",
    [
        OSError,
        PermissionError,
    ],
)
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_does_not_catch_other_subprocess_exceptions(
    mock_run, mock_which, exception_type
):
    """Test validate_npm does not catch OSError and PermissionError (they propagate up)."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.side_effect = exception_type("Test error")

    # Unlike FileNotFoundError, these must bubble up to the caller.
    with pytest.raises(exception_type):
        validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_with_malformed_version_output_raises_error(mock_run, mock_which):
    """Test validate_npm raises ValueError with malformed version output."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout="not-a-version\n", stderr="")

    # The semver comparison rejects non-semver strings with ValueError.
    with pytest.raises(ValueError):
        validate_npm()
|
||||
|
||||
|
||||
@pytest.mark.unit
@patch("shutil.which")
@patch("subprocess.run")
def test_validate_npm_with_empty_version_output_raises_error(mock_run, mock_which):
    """Test validate_npm raises ValueError with empty version output."""
    mock_which.return_value = "/usr/bin/npm"
    mock_run.return_value = Mock(returncode=0, stdout="", stderr="")

    # An empty string is not a parseable semver version.
    with pytest.raises(ValueError):
        validate_npm()
|
||||
331
superset-extensions-cli/tests/test_templates.py
Normal file
331
superset-extensions-cli/tests/test_templates.py
Normal file
@@ -0,0 +1,331 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
|
||||
@pytest.fixture
def templates_dir():
    """Get the templates directory path."""
    # Templates ship inside the package source tree, next to the CLI module.
    package_src = Path(__file__).parent.parent / "src" / "superset_extensions_cli"
    return package_src / "templates"
|
||||
|
||||
|
||||
@pytest.fixture
def jinja_env(templates_dir):
    """Create a Jinja2 environment for testing templates."""
    loader = FileSystemLoader(templates_dir)
    return Environment(loader=loader)
|
||||
|
||||
|
||||
@pytest.fixture
def template_context():
    """Default template context for testing."""
    # Baseline context: both frontend and backend enabled.
    return dict(
        id="test_extension",
        name="Test Extension",
        version="0.1.0",
        license="Apache-2.0",
        include_frontend=True,
        include_backend=True,
    )
|
||||
|
||||
|
||||
# Extension JSON Template Tests
|
||||
# Extension JSON Template Tests
@pytest.mark.unit
def test_extension_json_template_renders_with_both_frontend_and_backend(
    jinja_env, template_context
):
    """Test extension.json template renders correctly with both frontend and backend."""
    output = jinja_env.get_template("extension.json.j2").render(template_context)

    # Rendering must yield valid JSON.
    manifest = json.loads(output)

    # Flat metadata fields.
    assert manifest["id"] == "test_extension"
    assert manifest["name"] == "Test Extension"
    assert manifest["version"] == "0.1.0"
    assert manifest["license"] == "Apache-2.0"
    assert manifest["permissions"] == []

    # Frontend section.
    assert "frontend" in manifest
    fe = manifest["frontend"]
    assert "contributions" in fe
    assert "moduleFederation" in fe
    assert fe["contributions"] == {"commands": [], "views": [], "menus": []}
    assert fe["moduleFederation"] == {"exposes": ["./index"]}

    # Backend section.
    assert "backend" in manifest
    be = manifest["backend"]
    assert be["entryPoints"] == ["test_extension.entrypoint"]
    assert be["files"] == ["backend/src/test_extension/**/*.py"]
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "include_frontend,include_backend,expected_sections",
    [
        (True, False, ["frontend"]),
        (False, True, ["backend"]),
        (False, False, []),
    ],
)
def test_extension_json_template_renders_with_different_configurations(
    jinja_env, template_context, include_frontend, include_backend, expected_sections
):
    """Test extension.json template renders correctly with different configurations."""
    template_context["include_frontend"] = include_frontend
    template_context["include_backend"] = include_backend

    rendered = jinja_env.get_template("extension.json.j2").render(template_context)
    manifest = json.loads(rendered)

    # Exactly the expected sections — no more, no fewer — must be present.
    for section in ("frontend", "backend"):
        if section in expected_sections:
            assert section in manifest, f"Expected section '{section}' not found"
        else:
            assert section not in manifest, f"Unexpected section '{section}' found"
|
||||
|
||||
|
||||
# Frontend Package JSON Template Tests
|
||||
@pytest.mark.unit
def test_frontend_package_json_template_renders_correctly(jinja_env, template_context):
    """Test frontend/package.json template renders correctly."""
    rendered = jinja_env.get_template("frontend/package.json.j2").render(
        template_context
    )
    pkg = json.loads(rendered)

    # Package metadata.
    assert pkg["name"] == "test_extension"
    assert pkg["version"] == "0.1.0"
    assert pkg["license"] == "Apache-2.0"
    assert pkg["private"] is True

    # Scripts: start + a webpack-based build.
    assert "scripts" in pkg
    assert "start" in pkg["scripts"]
    assert "build" in pkg["scripts"]
    assert "webpack" in pkg["scripts"]["build"]

    # Peer dependencies expected by the host app.
    assert "peerDependencies" in pkg
    for dep in ("@apache-superset/core", "react", "react-dom"):
        assert dep in pkg["peerDependencies"]

    # Build-time dev dependencies.
    assert "devDependencies" in pkg
    for dep in ("webpack", "typescript"):
        assert dep in pkg["devDependencies"]
|
||||
|
||||
|
||||
# Backend Pyproject TOML Template Tests
|
||||
# Backend Pyproject TOML Template Tests
@pytest.mark.unit
def test_backend_pyproject_toml_template_renders_correctly(jinja_env, template_context):
    """Test backend/pyproject.toml template renders correctly."""
    rendered = jinja_env.get_template("backend/pyproject.toml.j2").render(
        template_context
    )

    # Spot-check the substituted values without a full TOML parse.
    for expected in ("test_extension", "0.1.0", "Apache-2.0"):
        assert expected in rendered
|
||||
|
||||
|
||||
# Template Rendering with Different Parameters Tests
|
||||
# Template Rendering with Different Parameters Tests
@pytest.mark.unit
@pytest.mark.parametrize(
    "id_,name",
    [
        ("simple_extension", "Simple Extension"),
        ("MyExtension123", "My Extension 123"),
        ("complex_extension_name_123", "Complex Extension Name 123"),
        ("ext", "Ext"),
    ],
)
def test_template_rendering_with_different_ids(jinja_env, id_, name):
    """Test templates render correctly with various extension ids/names."""
    ctx = {
        "id": id_,
        "name": name,
        "version": "1.0.0",
        "license": "MIT",
        "include_frontend": True,
        "include_backend": True,
    }

    # extension.json: id/name and id-derived backend paths.
    manifest = json.loads(jinja_env.get_template("extension.json.j2").render(ctx))
    assert manifest["id"] == id_
    assert manifest["name"] == name
    assert manifest["backend"]["entryPoints"] == [f"{id_}.entrypoint"]
    assert manifest["backend"]["files"] == [f"backend/src/{id_}/**/*.py"]

    # frontend/package.json: package name follows the id.
    pkg = json.loads(jinja_env.get_template("frontend/package.json.j2").render(ctx))
    assert pkg["name"] == id_

    # backend/pyproject.toml: id appears in the rendered text.
    pyproject = jinja_env.get_template("backend/pyproject.toml.j2").render(ctx)
    assert id_ in pyproject
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize("version", ["0.1.0", "1.0.0", "2.1.3-alpha", "10.20.30"])
def test_template_rendering_with_different_versions(jinja_env, version):
    """Test templates render correctly with various version formats."""
    ctx = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": version,
        "license": "Apache-2.0",
        "include_frontend": True,
        "include_backend": False,
    }

    manifest = json.loads(jinja_env.get_template("extension.json.j2").render(ctx))
    assert manifest["version"] == version
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "license_type",
    [
        "Apache-2.0",
        "MIT",
        "BSD-3-Clause",
        "GPL-3.0",
        "Custom License",
    ],
)
def test_template_rendering_with_different_licenses(jinja_env, license_type):
    """Test templates render correctly with various license types."""
    ctx = {
        "id": "test_ext",
        "name": "Test Extension",
        "version": "1.0.0",
        "license": license_type,
        "include_frontend": True,
        "include_backend": True,
    }

    # Both JSON templates must propagate the license field verbatim.
    for template_name in ("extension.json.j2", "frontend/package.json.j2"):
        rendered = jinja_env.get_template(template_name).render(ctx)
        assert json.loads(rendered)["license"] == license_type
|
||||
|
||||
|
||||
# Template Validation Tests
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "template_name", ["extension.json.j2", "frontend/package.json.j2"]
)
def test_templates_produce_valid_json(jinja_env, template_context, template_name):
    """Every JSON-producing template must emit parseable JSON."""
    rendered = jinja_env.get_template(template_name).render(template_context)

    # Parsing is the validity check; report a readable failure instead of
    # letting the decode error bubble up raw.
    try:
        json.loads(rendered)
    except json.JSONDecodeError as e:
        pytest.fail(f"Template {template_name} produced invalid JSON: {e}")
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_template_whitespace_handling(jinja_env, template_context):
    """Rendered extension.json should be clean JSON without excessive blank lines."""
    rendered = jinja_env.get_template("extension.json.j2").render(template_context)

    all_lines = rendered.split("\n")
    blank_total = len([ln for ln in all_lines if not ln.strip()])

    # A few blank lines are acceptable formatting; more than half is not.
    assert blank_total < len(all_lines) / 2, (
        "Too many empty lines in rendered template"
    )

    # Round-trip through the json module to prove the structure is well-formed.
    document = json.loads(rendered)
    json.dumps(document, indent=2)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_template_context_edge_cases(jinja_env):
    """A bare-bones context still renders a valid, minimal manifest."""
    bare_context = dict(
        id="minimal",
        name="Minimal",
        version="1.0.0",
        license="MIT",
        include_frontend=False,
        include_backend=False,
    )

    manifest = json.loads(
        jinja_env.get_template("extension.json.j2").render(bare_context)
    )

    # Core fields survive; the optional sections are omitted entirely.
    assert manifest["id"] == "minimal"
    assert manifest["name"] == "Minimal"
    assert "frontend" not in manifest
    assert "backend" not in manifest
|
||||
271
superset-extensions-cli/tests/test_utils.py
Normal file
271
superset-extensions-cli/tests/test_utils.py
Normal file
@@ -0,0 +1,271 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
import pytest
|
||||
from superset_extensions_cli.utils import read_json, read_toml
|
||||
|
||||
|
||||
# Read JSON Tests
|
||||
@pytest.mark.unit
def test_read_json_with_valid_file(isolated_filesystem):
    """read_json returns the parsed payload for a well-formed JSON file."""
    payload = {"name": "test", "version": "1.0.0"}
    target = isolated_filesystem / "test.json"
    target.write_text(json.dumps(payload))

    assert read_json(target) == payload
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_json_with_nonexistent_file(isolated_filesystem):
    """read_json yields None for a path that does not exist."""
    missing = isolated_filesystem / "nonexistent.json"

    assert read_json(missing) is None
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_json_with_invalid_json(isolated_filesystem):
    """Malformed JSON content surfaces as a JSONDecodeError."""
    broken = isolated_filesystem / "invalid.json"
    broken.write_text("{ invalid json content")

    with pytest.raises(json.JSONDecodeError):
        read_json(broken)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_json_with_directory_instead_of_file(isolated_filesystem):
    """read_json yields None when handed a directory path."""
    dir_path = isolated_filesystem / "test_dir"
    dir_path.mkdir()

    assert read_json(dir_path) is None
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "json_content,expected",
    [
        ({"simple": "value"}, {"simple": "value"}),
        ({"nested": {"key": "value"}}, {"nested": {"key": "value"}}),
        ({"array": [1, 2, 3]}, {"array": [1, 2, 3]}),
        ({}, {}),  # Empty JSON object
    ],
)
def test_read_json_with_various_valid_content(
    isolated_filesystem, json_content, expected
):
    """read_json round-trips assorted JSON payload shapes."""
    target = isolated_filesystem / "test.json"
    target.write_text(json.dumps(json_content))

    assert read_json(target) == expected
|
||||
|
||||
|
||||
# Read TOML Tests
|
||||
@pytest.mark.unit
def test_read_toml_with_valid_file(isolated_filesystem):
    """read_toml parses a well-formed pyproject-style file."""
    target = isolated_filesystem / "pyproject.toml"
    target.write_text('[project]\nname = "test"\nversion = "1.0.0"')

    parsed = read_toml(target)

    assert parsed is not None
    assert parsed["project"]["name"] == "test"
    assert parsed["project"]["version"] == "1.0.0"
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_toml_with_nonexistent_file(isolated_filesystem):
    """read_toml yields None for a path that does not exist."""
    missing = isolated_filesystem / "nonexistent.toml"

    assert read_toml(missing) is None
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_toml_with_directory_instead_of_file(isolated_filesystem):
    """read_toml yields None when handed a directory path."""
    dir_path = isolated_filesystem / "test_dir"
    dir_path.mkdir()

    assert read_toml(dir_path) is None
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_toml_with_invalid_toml(isolated_filesystem):
    """Malformed TOML content raises a parsing error."""
    broken = isolated_filesystem / "invalid.toml"
    broken.write_text("[ invalid toml content")

    # tomli raises various exceptions for invalid TOML, so catch broadly.
    with pytest.raises(Exception):
        read_toml(broken)
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "toml_content,expected_keys",
    [
        ('[project]\nname = "test"', ["project"]),
        ('[build-system]\nrequires = ["setuptools"]', ["build-system"]),
        (
            '[project]\nname = "test"\n[build-system]\nrequires = ["setuptools"]',
            ["project", "build-system"],
        ),
    ],
)
def test_read_toml_with_various_valid_content(
    isolated_filesystem, toml_content, expected_keys
):
    """read_toml exposes every top-level table present in the document."""
    target = isolated_filesystem / "test.toml"
    target.write_text(toml_content)

    parsed = read_toml(target)

    assert parsed is not None
    # Every expected top-level table must appear in the parsed mapping.
    assert all(key in parsed for key in expected_keys)
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_toml_with_complex_structure(isolated_filesystem):
    """read_toml handles nested tables, inline tables and arrays."""
    complex_toml = """
[project]
name = "my-package"
version = "1.0.0"
authors = [
    {name = "Author Name", email = "author@example.com"}
]

[project.dependencies]
requests = "^2.25.0"

[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
"""
    target = isolated_filesystem / "complex.toml"
    target.write_text(complex_toml)

    parsed = read_toml(target)

    assert parsed is not None
    project = parsed["project"]
    assert project["name"] == "my-package"
    assert project["version"] == "1.0.0"
    # The inline-table author entry must survive parsing intact.
    assert len(project["authors"]) == 1
    assert project["authors"][0]["name"] == "Author Name"
    assert parsed["build-system"]["requires"] == ["setuptools", "wheel"]
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_toml_with_empty_file(isolated_filesystem):
    """An empty TOML file parses to an empty mapping, not None."""
    target = isolated_filesystem / "empty.toml"
    target.write_text("")

    assert read_toml(target) == {}
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.parametrize(
    "invalid_content",
    [
        "[ invalid section",
        "key = ",
        "key = unquoted string",
        "[section\nkey = value",
    ],
)
def test_read_toml_with_various_invalid_content(isolated_filesystem, invalid_content):
    """Each flavour of broken TOML must raise rather than return garbage."""
    target = isolated_filesystem / "invalid.toml"
    target.write_text(invalid_content)

    # Different breakages raise different TOML parsing exceptions.
    with pytest.raises(Exception):
        read_toml(target)
|
||||
|
||||
|
||||
# File System Edge Cases
|
||||
@pytest.mark.unit
def test_read_json_with_permission_denied(isolated_filesystem):
    """read_json behaves sanely on an unreadable file (system-dependent)."""
    target = isolated_filesystem / "restricted.json"
    target.write_text('{"test": "value"}')

    # chmod semantics vary by platform (root, Windows, CI containers),
    # so both a denial and a successful read are acceptable outcomes.
    try:
        target.chmod(0o000)  # Drop all permissions
        outcome = read_json(target)
        # No exception: the file was still readable on this system.
        assert outcome is None or outcome == {"test": "value"}
    except (OSError, PermissionError):
        # Access denied, as expected on most POSIX systems.
        pass
    finally:
        # Restore permissions so the temp directory can be cleaned up.
        try:
            target.chmod(0o644)
        except (OSError, PermissionError):
            pass
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_read_toml_with_permission_denied(isolated_filesystem):
    """read_toml behaves sanely on an unreadable file (system-dependent)."""
    target = isolated_filesystem / "restricted.toml"
    target.write_text('[test]\nkey = "value"')

    # chmod semantics vary by platform (root, Windows, CI containers),
    # so both a denial and a successful read are acceptable outcomes.
    try:
        target.chmod(0o000)  # Drop all permissions
        outcome = read_toml(target)
        # No exception: the file was still readable on this system.
        assert outcome is None or "test" in outcome
    except (OSError, PermissionError):
        # Access denied, as expected on most POSIX systems.
        pass
    finally:
        # Restore permissions so the temp directory can be cleaned up.
        try:
            target.chmod(0o644)
        except (OSError, PermissionError):
            pass
|
||||
211
superset-extensions-cli/tests/utils.py
Normal file
211
superset-extensions-cli/tests/utils.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def assert_file_exists(path: Path, description: str = "") -> None:
    """
    Fail with a descriptive message unless *path* is an existing regular file.

    Args:
        path: Path to the file that should exist
        description: Optional description for better error messages
    """
    suffix = f" ({description})" if description else ""
    # Check existence first so the missing-file message wins over the
    # wrong-kind message.
    assert path.exists(), f"Expected file {path}{suffix} to exist, but it doesn't"
    assert path.is_file(), f"Expected {path}{suffix} to be a file, but it's not"
|
||||
|
||||
|
||||
def assert_directory_exists(path: Path, description: str = "") -> None:
    """
    Fail with a descriptive message unless *path* is an existing directory.

    Args:
        path: Path to the directory that should exist
        description: Optional description for better error messages
    """
    suffix = f" ({description})" if description else ""
    # Check existence first so the missing-directory message wins over the
    # wrong-kind message.
    assert path.exists(), (
        f"Expected directory {path}{suffix} to exist, but it doesn't"
    )
    assert path.is_dir(), f"Expected {path}{suffix} to be a directory, but it's not"
|
||||
|
||||
|
||||
def assert_file_structure(base_path: Path, expected_files: list[str]) -> None:
    """
    Assert that every expected file exists under *base_path*.

    Args:
        base_path: Base directory path
        expected_files: List of relative file paths that should exist
    """
    for relative in expected_files:
        # Delegate the existence/kind checks so failures carry context.
        assert_file_exists(base_path / relative, "part of expected structure")
|
||||
|
||||
|
||||
def assert_directory_structure(base_path: Path, expected_dirs: list[str]) -> None:
    """
    Assert that every expected directory exists under *base_path*.

    Args:
        base_path: Base directory path
        expected_dirs: List of relative directory paths that should exist
    """
    for relative in expected_dirs:
        # Delegate the existence/kind checks so failures carry context.
        assert_directory_exists(base_path / relative, "part of expected structure")
|
||||
|
||||
|
||||
def get_directory_tree(path: Path, ignore: set[str] | None = None) -> set[str]:
    """
    Collect every file and directory below *path* as relative path strings.

    Args:
        path: Base path to scan
        ignore: Set of file/directory names to ignore

    Returns:
        Set of relative path strings
    """
    # A falsy `ignore` (None or empty set) falls back to the defaults,
    # matching the original `ignore or {...}` behavior.
    skip = ignore or {".DS_Store", "__pycache__", ".pytest_cache"}

    if not path.exists():
        return set()

    return {
        str(entry.relative_to(path))
        for entry in path.rglob("*")
        # Drop any entry whose path contains an ignored component.
        if not any(part in skip for part in entry.parts)
    }
|
||||
|
||||
|
||||
def load_json_file(path: Path) -> dict[str, Any]:
    """
    Load and parse a JSON file.

    Args:
        path: Path to the JSON file

    Returns:
        Parsed JSON content

    Raises:
        AssertionError: If file doesn't exist or isn't valid JSON
    """
    assert_file_exists(path, "JSON file")
    try:
        return json.loads(path.read_text())
    except json.JSONDecodeError as e:
        # Chain the decode error so tracebacks show exactly what failed to
        # parse (the original re-raise dropped the explicit cause link).
        raise AssertionError(f"File {path} contains invalid JSON: {e}") from e
|
||||
|
||||
|
||||
def assert_json_content(path: Path, expected_values: dict[str, Any]) -> None:
    """
    Assert that a JSON file contains the given key-value pairs.

    Args:
        path: Path to the JSON file
        expected_values: Dictionary of expected key-value pairs
    """
    content = load_json_file(path)

    for key, expected_value in expected_values.items():
        # Report a missing key before comparing values for a clearer failure.
        assert key in content, f"Expected key '{key}' not found in {path}"
        actual_value = content[key]
        assert actual_value == expected_value, (
            f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}"
        )
|
||||
|
||||
|
||||
def assert_file_contains(path: Path, text: str) -> None:
    """
    Assert that a file contains the given text.

    Args:
        path: Path to the file
        text: Text that should be present in the file
    """
    # Verify the file exists first for a clearer failure message.
    assert_file_exists(path, "text file")
    body = path.read_text()
    assert text in body, f"Expected text '{text}' not found in {path}"
|
||||
|
||||
|
||||
def assert_file_content_matches(path: Path, expected_content: str) -> None:
    """
    Assert that a file's content is exactly *expected_content*.

    Args:
        path: Path to the file
        expected_content: Expected file content
    """
    # Verify the file exists first for a clearer failure message.
    assert_file_exists(path, "content file")
    actual_content = path.read_text()
    assert actual_content == expected_content, (
        f"File content mismatch in {path}\n"
        f"Expected:\n{expected_content}\n"
        f"Actual:\n{actual_content}"
    )
|
||||
|
||||
|
||||
def create_test_extension_structure(
    base_path: Path,
    id_: str,
    include_frontend: bool = True,
    include_backend: bool = True,
) -> dict[str, Any]:
    """
    Build the expected on-disk layout metadata for a scaffolded extension.

    Args:
        base_path: Base path where extension should be created
        id_: Unique identifier for extension; also used as the directory name
        include_frontend: Whether frontend should be included
        include_backend: Whether backend should be included

    Returns:
        Dictionary with keys ``extension_path``, ``expected_files`` and
        ``expected_dirs`` describing the expected structure.
    """
    # Fixed the docstring: it previously documented a `name` parameter that
    # does not exist in the signature.
    expected_files = ["extension.json"]
    expected_dirs: list[str] = []

    if include_frontend:
        expected_dirs.append("frontend")
        expected_files.append("frontend/package.json")

    if include_backend:
        expected_dirs.append("backend")
        expected_files.append("backend/pyproject.toml")

    return {
        "extension_path": base_path / id_,
        "expected_files": expected_files,
        "expected_dirs": expected_dirs,
    }
|
||||
Reference in New Issue
Block a user