Files
superset2/tests/integration_tests/mcp_service/test_get_dashboard_info.py
Amin Ghadersohi a9d543b6f4 update: unify FastMCP server, modularize tools, and document new DAO-based architecture
- Replace dual Flask/FastAPI setup with a single, unified FastMCP server (`server.py`)
- Introduce `MCPDAOWrapper` for secure, context-aware DAO access (`dao_wrapper.py`)
- Refactor all MCP tools to be modular and domain-organized (`tools/dashboard/`, `tools/chart/`, `tools/dataset/`, `tools/system/`)
- Strongly type all tool contracts with Pydantic v2 models, including full field documentation for LLM/OpenAPI compatibility
- Refactor and extend `BaseDAO` for robust, generic CRUD/list operations
- Add and update documentation:
  - Architecture and flow diagrams (`README_ARCHITECTURE.md`)
  - Tool schema reference and usage instructions (`README.md`, `README_SCHEMAS.md`)
  - Phase 1 status and roadmap (`README_PHASE1_STATUS.md`)
- Implement and test all core list/info tools for dashboards, datasets, and charts, with full search and filter support
- Add chart creation tool (`create_chart_simple`)
- Provide extension points for Preset-specific auth, RBAC, and logging (stubbed in Phase 1)
- Prepare for LLM/agent workflows and future command-based mutations (create/update/delete)
- Expand and update unit/integration test coverage for all tools
2025-07-30 14:20:37 -04:00

58 lines
3.1 KiB
Python

import logging
import sys
import traceback
import json
# Configure root logging once at import time so every message from this script
# carries a timestamp and level; INFO is enough to see each request/response.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# Module-level logger used throughout the test driver below.
logger = logging.getLogger(__name__)
async def main():
    """Integration-smoke-test ``get_dashboard_info`` for dashboard IDs 1-10.

    Connects to the local FastMCP server, calls the tool once per ID, logs the
    raw and pretty-printed result, and collects any error/warning per dashboard.
    A summary of all issues (or a success message) is logged at the end.

    Assumes the MCP server is already running on ``http://localhost:5008/mcp``
    and that ``result.data`` is either a Pydantic model (has ``model_dump``)
    or a plain dict; anything else is recorded as an issue.
    """
    # Local import: fastmcp is only needed when the test actually runs, and
    # keeping it here lets the module be imported without the dependency.
    from fastmcp import Client

    logger.info("Starting get_dashboard_info integration test for dashboard IDs 1 through 10")
    issues = []  # Collect (dashboard_id, message) for any warnings/errors

    async with Client("http://localhost:5008/mcp") as client:
        for dashboard_id in range(1, 11):
            logger.info(f"\n---\nCalling get_dashboard_info with dashboard_id={dashboard_id}")
            try:
                logger.info(f"Sending request: {{'dashboard_id': {dashboard_id}}}")
                result = await client.call_tool("get_dashboard_info", {"dashboard_id": dashboard_id})
                logger.info(f"Raw result object: {result}")
                logger.info(f"Result type: {type(result.data)}")
                logger.info(f"get_dashboard_info output for id={dashboard_id} (repr): {repr(result.data)}")
                # Pretty-print output: Pydantic models expose model_dump();
                # otherwise the tool may return a plain dict directly.
                if hasattr(result.data, "model_dump"):
                    as_dict = result.data.model_dump()
                    logger.info(f"get_dashboard_info output for id={dashboard_id} (dict): {as_dict}")
                    # default=str so datetimes/UUIDs inside the model serialize.
                    pretty = json.dumps(as_dict, indent=2, default=str)
                    logger.info(f"get_dashboard_info output for id={dashboard_id} (pretty):\n{pretty}")
                    # Detect error/warning fields embedded in a "successful" response.
                    if as_dict.get('error') or as_dict.get('error_type'):
                        issues.append((dashboard_id, f"Error: {as_dict.get('error')} | Type: {as_dict.get('error_type')}"))
                elif isinstance(result.data, dict):
                    logger.info(f"get_dashboard_info output for id={dashboard_id} (dict): {result.data}")
                    pretty = json.dumps(result.data, indent=2, default=str)
                    logger.info(f"get_dashboard_info output for id={dashboard_id} (pretty):\n{pretty}")
                    if result.data.get('error') or result.data.get('error_type'):
                        issues.append((dashboard_id, f"Error: {result.data.get('error')} | Type: {result.data.get('error_type')}"))
                else:
                    # Unexpected payload type: record it so the summary flags it.
                    msg = f"Output for id={dashboard_id} is not a dict or Pydantic model. Type: {type(result.data)}. Value: {result.data}"
                    logger.warning(msg)
                    issues.append((dashboard_id, msg))
            except Exception as e:
                # Best-effort survey: keep probing the remaining IDs even if one fails.
                msg = f"Exception calling get_dashboard_info with id={dashboard_id}: {e}"
                logger.error(msg)
                logger.error(traceback.format_exc())
                issues.append((dashboard_id, msg))

    # Summary: one line per problematic dashboard, or an all-clear message.
    logger.info("\n=== SUMMARY ===")
    if issues:
        logger.warning("Found issues with the following dashboards:")
        for dashboard_id, msg in issues:
            logger.warning(f" Dashboard {dashboard_id}: {msg}")
    else:
        logger.info("All dashboards 1-10 returned successfully with no errors or warnings.")
if __name__ == "__main__":
    # Script entry point: drive the async test with a fresh event loop.
    import asyncio

    asyncio.run(main())