Initial commit: Clean DSS implementation
Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed (note: the "$(date)" timestamp placeholder was never expanded when this commit message was generated)
🤖 Clean migration with full functionality preserved
This commit is contained in:
1
tools/dss_mcp/tests/__init__.py
Normal file
1
tools/dss_mcp/tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# DSS MCP Tests
|
||||
654
tools/dss_mcp/tests/test_dss_mcp_commands.py
Normal file
654
tools/dss_mcp/tests/test_dss_mcp_commands.py
Normal file
@@ -0,0 +1,654 @@
|
||||
"""
Comprehensive Test Suite for DSS MCP Commands

Tests all 35 DSS MCP tools across 4 categories:
- DSS Core (10 tools)
- DevTools (12 tools)
- Browser Automation (8 tools)
- Context Compiler (5 tools)

Tests validate:
- Tool definitions and schemas
- Required parameters
- Implementation presence
- Security measures
- Error handling patterns
"""

import os
import re
from pathlib import Path

import pytest

# =============================================================================
# TEST CONFIGURATION
# =============================================================================

# Location of the MCP server source under test.  Overridable through the
# DSS_MCP_SERVER_PATH environment variable so the suite is not tied to a
# single machine's filesystem layout; the default preserves the original
# hard-coded location.
MCP_SERVER_PATH = Path(
    os.environ.get(
        "DSS_MCP_SERVER_PATH",
        "/home/overbits/dss/dss-claude-plugin/servers/dss-mcp-server.py",
    )
)
|
||||
|
||||
# Complete tool registry - all 35 MCP tools.
# Each entry records the tool's required params, optional params, and the
# name of the implementation function expected in the server source.
DSS_CORE_TOOLS = {
    "dss_analyze_project":  {"required": ["path"], "optional": [], "impl_func": "analyze_project"},
    "dss_extract_tokens":   {"required": ["path"], "optional": ["sources"], "impl_func": "extract_tokens"},
    "dss_generate_theme":   {"required": ["format"], "optional": ["tokens", "theme_name"], "impl_func": "generate_theme"},
    "dss_list_themes":      {"required": [], "optional": [], "impl_func": "list_themes"},
    "dss_get_status":       {"required": [], "optional": ["format"], "impl_func": "get_status"},
    "dss_audit_components": {"required": ["path"], "optional": [], "impl_func": "audit_components"},
    "dss_setup_storybook":  {"required": ["path"], "optional": ["action"], "impl_func": "setup_storybook"},
    "dss_sync_figma":       {"required": ["file_key"], "optional": [], "impl_func": "sync_figma"},
    "dss_find_quick_wins":  {"required": ["path"], "optional": [], "impl_func": "find_quick_wins"},
    "dss_transform_tokens": {"required": ["tokens", "output_format"], "optional": ["input_format"], "impl_func": "transform_tokens"},
}
|
||||
|
||||
# DevTools category: live-browser inspection tools (console, network, DOM,
# navigation, screenshots) exposed over MCP.
DEVTOOLS_TOOLS = {
    "devtools_launch":           {"required": [], "optional": ["url", "headless"], "impl_func": "devtools_launch_impl"},
    "devtools_connect":          {"required": [], "optional": ["port", "host"], "impl_func": "devtools_connect_impl"},
    "devtools_disconnect":       {"required": [], "optional": [], "impl_func": "devtools_disconnect_impl"},
    "devtools_list_pages":       {"required": [], "optional": [], "impl_func": "devtools_list_pages_impl"},
    "devtools_select_page":      {"required": ["page_id"], "optional": [], "impl_func": "devtools_select_page_impl"},
    "devtools_console_logs":     {"required": [], "optional": ["level", "limit", "clear"], "impl_func": "devtools_console_logs_impl"},
    "devtools_network_requests": {"required": [], "optional": ["filter_url", "limit"], "impl_func": "devtools_network_requests_impl"},
    "devtools_evaluate":         {"required": ["expression"], "optional": [], "impl_func": "devtools_evaluate_impl"},
    "devtools_query_dom":        {"required": ["selector"], "optional": [], "impl_func": "devtools_query_dom_impl"},
    "devtools_goto":             {"required": ["url"], "optional": ["wait_until"], "impl_func": "devtools_goto_impl"},
    "devtools_screenshot":       {"required": [], "optional": ["selector", "full_page"], "impl_func": "devtools_screenshot_impl"},
    "devtools_performance":      {"required": [], "optional": [], "impl_func": "devtools_performance_impl"},
}
|
||||
|
||||
# Browser automation category: higher-level session tools that can run in
# LOCAL (Playwright) or REMOTE (HTTP API) mode.
BROWSER_TOOLS = {
    "browser_init":                {"required": [], "optional": ["mode", "url", "session_id", "headless"], "impl_func": "browser_init_impl"},
    "browser_get_logs":            {"required": [], "optional": ["level", "limit"], "impl_func": "browser_get_logs_impl"},
    "browser_screenshot":          {"required": [], "optional": ["selector", "full_page"], "impl_func": "browser_screenshot_impl"},
    "browser_dom_snapshot":        {"required": [], "optional": [], "impl_func": "browser_dom_snapshot_impl"},
    "browser_get_errors":          {"required": [], "optional": ["limit"], "impl_func": "browser_get_errors_impl"},
    "browser_accessibility_audit": {"required": [], "optional": ["selector"], "impl_func": "browser_accessibility_audit_impl"},
    "browser_performance":         {"required": [], "optional": [], "impl_func": "browser_performance_impl"},
    "browser_close":               {"required": [], "optional": [], "impl_func": "browser_close_impl"},
}
|
||||
|
||||
# Context Compiler category.  ``impl_func`` is ``None`` for every entry
# because these tools are handled inline in the dispatcher rather than by a
# dedicated implementation function.
CONTEXT_COMPILER_TOOLS = {
    "dss_get_resolved_context": {"required": ["manifest_path"], "optional": ["debug", "force_refresh"], "impl_func": None},
    "dss_resolve_token":        {"required": ["manifest_path", "token_path"], "optional": ["force_refresh"], "impl_func": None},
    "dss_validate_manifest":    {"required": ["manifest_path"], "optional": [], "impl_func": None},
    "dss_list_skins":           {"required": [], "optional": [], "impl_func": None},
    "dss_get_compiler_status":  {"required": [], "optional": [], "impl_func": None},
}
|
||||
|
||||
# Flat registry spanning every category.  Tool names are globally unique, so
# merging the category dicts cannot silently drop an entry.
ALL_TOOLS = {**DSS_CORE_TOOLS, **DEVTOOLS_TOOLS, **BROWSER_TOOLS, **CONTEXT_COMPILER_TOOLS}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# FIXTURES
|
||||
# =============================================================================
|
||||
|
||||
@pytest.fixture
def mcp_server_content():
    """Return the MCP server source code as a single string.

    Decodes explicitly as UTF-8: the bare ``read_text()`` used previously
    relies on the locale's preferred encoding and can fail on hosts with a
    non-UTF-8 locale even though the source file itself is valid.
    """
    return MCP_SERVER_PATH.read_text(encoding="utf-8")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Tool Definitions
|
||||
# =============================================================================
|
||||
|
||||
class TestToolDefinitions:
    """Verify all 35 tools are properly defined in the MCP server."""

    def test_total_tool_count(self, mcp_server_content):
        """Verify we have exactly 35 tools defined."""
        # Every registered tool appears as Tool(name="...") in the source.
        defined = re.findall(r'Tool\(\s*name="([^"]+)"', mcp_server_content)
        assert len(defined) == 35, f"Expected 35 tools, found {len(defined)}"

    @pytest.mark.parametrize("tool_name", list(DSS_CORE_TOOLS))
    def test_dss_core_tool_defined(self, mcp_server_content, tool_name):
        """Verify each DSS core tool is defined."""
        assert f'name="{tool_name}"' in mcp_server_content, f"Tool {tool_name} not found"

    @pytest.mark.parametrize("tool_name", list(DEVTOOLS_TOOLS))
    def test_devtools_tool_defined(self, mcp_server_content, tool_name):
        """Verify each DevTools tool is defined."""
        assert f'name="{tool_name}"' in mcp_server_content, f"Tool {tool_name} not found"

    @pytest.mark.parametrize("tool_name", list(BROWSER_TOOLS))
    def test_browser_tool_defined(self, mcp_server_content, tool_name):
        """Verify each Browser automation tool is defined."""
        assert f'name="{tool_name}"' in mcp_server_content, f"Tool {tool_name} not found"

    @pytest.mark.parametrize("tool_name", list(CONTEXT_COMPILER_TOOLS))
    def test_context_compiler_tool_defined(self, mcp_server_content, tool_name):
        """Verify each Context Compiler tool is defined."""
        assert f'name="{tool_name}"' in mcp_server_content, f"Tool {tool_name} not found"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Tool Dispatcher
|
||||
# =============================================================================
|
||||
|
||||
class TestToolDispatcher:
    """Verify tool dispatcher handles all tools."""

    @pytest.mark.parametrize("tool_name", list(ALL_TOOLS))
    def test_tool_in_dispatcher(self, mcp_server_content, tool_name):
        """Verify each tool has a dispatcher case."""
        # Accept either the first branch (``if``) or a later one (``elif``).
        branch_re = rf'(if|elif)\s+name\s*==\s*"{tool_name}"'
        assert re.search(branch_re, mcp_server_content), f"Tool {tool_name} not in dispatcher"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Implementation Functions
|
||||
# =============================================================================
|
||||
|
||||
class TestImplementationFunctions:
    """Verify implementation functions exist."""

    @staticmethod
    def _assert_impl_defined(source, tool_name, config):
        # Shared check: every non-inline tool must have a matching
        # ``async def <impl_func>(`` definition in the server source.
        impl_func = config["impl_func"]
        pattern = rf'async def {impl_func}\('
        assert re.search(pattern, source), f"Implementation {impl_func} not found for {tool_name}"

    @pytest.mark.parametrize("tool_name,config", [
        (k, v) for k, v in DSS_CORE_TOOLS.items() if v["impl_func"]
    ])
    def test_dss_core_impl_exists(self, mcp_server_content, tool_name, config):
        """Verify DSS core tool implementations exist."""
        self._assert_impl_defined(mcp_server_content, tool_name, config)

    @pytest.mark.parametrize("tool_name,config", [
        (k, v) for k, v in DEVTOOLS_TOOLS.items() if v["impl_func"]
    ])
    def test_devtools_impl_exists(self, mcp_server_content, tool_name, config):
        """Verify DevTools implementations exist."""
        self._assert_impl_defined(mcp_server_content, tool_name, config)

    @pytest.mark.parametrize("tool_name,config", [
        (k, v) for k, v in BROWSER_TOOLS.items() if v["impl_func"]
    ])
    def test_browser_impl_exists(self, mcp_server_content, tool_name, config):
        """Verify Browser tool implementations exist."""
        self._assert_impl_defined(mcp_server_content, tool_name, config)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Input Schemas
|
||||
# =============================================================================
|
||||
|
||||
class TestInputSchemas:
    """Verify input schemas are properly defined."""

    def test_all_tools_have_input_schema(self, mcp_server_content):
        """Verify all tools have inputSchema defined."""
        tool_definitions = re.findall(r'Tool\(\s*name="([^"]+)"', mcp_server_content)
        for tool in tool_definitions:
            # Find the Tool definition and check an inputSchema follows it.
            pattern = rf'name="{tool}".*?inputSchema'
            assert re.search(pattern, mcp_server_content, re.DOTALL), f"Tool {tool} missing inputSchema"

    @pytest.mark.parametrize("tool_name,config", list(ALL_TOOLS.items()))
    def test_required_params_in_schema(self, mcp_server_content, tool_name, config):
        """Verify required parameters are marked in schema."""
        if not config["required"]:
            return  # Skip tools with no required params

        # Find the tool's schema section.
        tool_pattern = rf'name="{tool_name}".*?inputSchema=\{{(.*?)\}}\s*\)'
        match = re.search(tool_pattern, mcp_server_content, re.DOTALL)
        # BUG FIX: previously a failed schema match fell through an ``if``
        # and the test passed vacuously.  A tool whose inputSchema cannot be
        # located must fail loudly instead.
        assert match, f"Could not locate inputSchema for {tool_name}"
        schema_content = match.group(1)
        for param in config["required"]:
            # The param should appear in the required array or properties.
            assert param in schema_content, f"Required param '{param}' not in schema for {tool_name}"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Security Measures
|
||||
# =============================================================================
|
||||
|
||||
class TestSecurityMeasures:
|
||||
"""Verify security measures are in place."""
|
||||
|
||||
def test_audit_logging_for_evaluate(self, mcp_server_content):
|
||||
"""Verify devtools_evaluate has audit logging."""
|
||||
# Check for AUDIT log in devtools_evaluate_impl
|
||||
pattern = r'def devtools_evaluate_impl.*?\[AUDIT\]'
|
||||
assert re.search(pattern, mcp_server_content, re.DOTALL), "devtools_evaluate missing audit logging"
|
||||
|
||||
def test_playwright_availability_check(self, mcp_server_content):
|
||||
"""Verify Playwright availability is checked before DevTools operations."""
|
||||
assert "PLAYWRIGHT_AVAILABLE" in mcp_server_content, "Missing Playwright availability check"
|
||||
assert 'not PLAYWRIGHT_AVAILABLE and name.startswith("devtools_")' in mcp_server_content
|
||||
|
||||
def test_dss_availability_check(self, mcp_server_content):
|
||||
"""Verify DSS availability is checked before DSS operations."""
|
||||
assert "DSS_AVAILABLE" in mcp_server_content, "Missing DSS availability check"
|
||||
assert 'not DSS_AVAILABLE and name.startswith("dss_")' in mcp_server_content
|
||||
|
||||
def test_context_compiler_availability_check(self, mcp_server_content):
|
||||
"""Verify Context Compiler availability is checked."""
|
||||
assert "CONTEXT_COMPILER_AVAILABLE" in mcp_server_content, "Missing Context Compiler availability check"
|
||||
|
||||
def test_figma_token_validation(self, mcp_server_content):
|
||||
"""Verify Figma sync checks for API token."""
|
||||
assert 'FIGMA_TOKEN' in mcp_server_content, "Missing Figma token check"
|
||||
# Should return error if token not configured
|
||||
assert 'FIGMA_TOKEN not configured' in mcp_server_content
|
||||
|
||||
def test_path_validation(self, mcp_server_content):
|
||||
"""Verify path validation is performed."""
|
||||
# Check that Path.resolve() is used for path inputs
|
||||
assert "Path(path).resolve()" in mcp_server_content, "Missing path resolution"
|
||||
# Check for existence validation
|
||||
assert "not project_path.exists()" in mcp_server_content or "not target_path.exists()" in mcp_server_content
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Async/Timeout Handling
|
||||
# =============================================================================
|
||||
|
||||
class TestAsyncHandling:
|
||||
"""Verify async operations are properly handled."""
|
||||
|
||||
def test_timeout_decorator_exists(self, mcp_server_content):
|
||||
"""Verify timeout decorator is defined."""
|
||||
assert "def with_timeout" in mcp_server_content, "Missing timeout decorator"
|
||||
|
||||
def test_timeout_config_exists(self, mcp_server_content):
|
||||
"""Verify timeout configuration is defined."""
|
||||
assert "TIMEOUT_CONFIG" in mcp_server_content, "Missing timeout configuration"
|
||||
# Check for expected timeout keys
|
||||
expected_keys = ["analyze", "extract", "generate", "figma_api", "storybook", "devtools_connect"]
|
||||
for key in expected_keys:
|
||||
assert f'"{key}"' in mcp_server_content, f"Missing timeout key: {key}"
|
||||
|
||||
def test_devtools_timeout_applied(self, mcp_server_content):
|
||||
"""Verify DevTools operations have timeouts."""
|
||||
# Check for @with_timeout decorator on critical functions
|
||||
assert '@with_timeout("devtools_connect")' in mcp_server_content
|
||||
|
||||
def test_run_in_executor_usage(self, mcp_server_content):
|
||||
"""Verify blocking operations use run_in_executor."""
|
||||
assert "loop.run_in_executor" in mcp_server_content, "Missing run_in_executor for blocking operations"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: State Management
|
||||
# =============================================================================
|
||||
|
||||
class TestStateManagement:
|
||||
"""Verify state management classes are properly defined."""
|
||||
|
||||
def test_devtools_state_class(self, mcp_server_content):
|
||||
"""Verify DevToolsState dataclass is defined."""
|
||||
assert "class DevToolsState:" in mcp_server_content
|
||||
assert "@dataclass" in mcp_server_content
|
||||
|
||||
def test_browser_automation_state_class(self, mcp_server_content):
|
||||
"""Verify BrowserAutomationState dataclass is defined."""
|
||||
assert "class BrowserAutomationState:" in mcp_server_content
|
||||
|
||||
def test_devtools_state_instance(self, mcp_server_content):
|
||||
"""Verify DevTools state instance is created."""
|
||||
assert "devtools = DevToolsState()" in mcp_server_content
|
||||
|
||||
def test_browser_state_instance(self, mcp_server_content):
|
||||
"""Verify Browser state instance is created."""
|
||||
assert "browser_state = BrowserAutomationState()" in mcp_server_content
|
||||
|
||||
def test_bounded_buffers(self, mcp_server_content):
|
||||
"""Verify bounded deques are used for log capture."""
|
||||
assert "deque(maxlen=" in mcp_server_content, "Missing bounded deque for log capture"
|
||||
assert "DEVTOOLS_CONSOLE_MAX_ENTRIES" in mcp_server_content
|
||||
assert "DEVTOOLS_NETWORK_MAX_ENTRIES" in mcp_server_content
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Error Handling
|
||||
# =============================================================================
|
||||
|
||||
class TestErrorHandling:
|
||||
"""Verify error handling patterns."""
|
||||
|
||||
def test_try_except_in_dispatcher(self, mcp_server_content):
|
||||
"""Verify dispatcher has error handling."""
|
||||
assert "except Exception as e:" in mcp_server_content
|
||||
assert '"error":' in mcp_server_content or "'error':" in mcp_server_content
|
||||
|
||||
def test_safe_serialize_function(self, mcp_server_content):
|
||||
"""Verify safe_serialize function exists for JSON serialization."""
|
||||
assert "def safe_serialize" in mcp_server_content
|
||||
|
||||
def test_import_error_handling(self, mcp_server_content):
|
||||
"""Verify import errors are captured."""
|
||||
assert "except ImportError" in mcp_server_content
|
||||
assert "DSS_IMPORT_ERROR" in mcp_server_content
|
||||
assert "CONTEXT_COMPILER_IMPORT_ERROR" in mcp_server_content
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Browser Automation Modes
|
||||
# =============================================================================
|
||||
|
||||
class TestBrowserAutomationModes:
|
||||
"""Verify Browser automation supports LOCAL and REMOTE modes."""
|
||||
|
||||
def test_local_mode_support(self, mcp_server_content):
|
||||
"""Verify LOCAL mode is supported."""
|
||||
assert 'mode == "local"' in mcp_server_content
|
||||
assert "LocalBrowserStrategy" in mcp_server_content
|
||||
|
||||
def test_remote_mode_support(self, mcp_server_content):
|
||||
"""Verify REMOTE mode is supported."""
|
||||
assert 'mode == "remote"' in mcp_server_content
|
||||
assert "remote_api_url" in mcp_server_content
|
||||
assert "session_id" in mcp_server_content
|
||||
|
||||
def test_aiohttp_for_remote(self, mcp_server_content):
|
||||
"""Verify aiohttp is used for remote API calls."""
|
||||
assert "import aiohttp" in mcp_server_content
|
||||
assert "aiohttp.ClientSession()" in mcp_server_content
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Server Configuration
|
||||
# =============================================================================
|
||||
|
||||
class TestServerConfiguration:
|
||||
"""Verify server is properly configured."""
|
||||
|
||||
def test_mcp_server_created(self, mcp_server_content):
|
||||
"""Verify MCP server instance is created."""
|
||||
assert 'server = Server("dss-server")' in mcp_server_content
|
||||
|
||||
def test_list_tools_decorator(self, mcp_server_content):
|
||||
"""Verify list_tools is registered."""
|
||||
assert "@server.list_tools()" in mcp_server_content
|
||||
|
||||
def test_call_tool_decorator(self, mcp_server_content):
|
||||
"""Verify call_tool is registered."""
|
||||
assert "@server.call_tool()" in mcp_server_content
|
||||
|
||||
def test_main_function(self, mcp_server_content):
|
||||
"""Verify main function exists."""
|
||||
assert "async def main():" in mcp_server_content
|
||||
assert 'if __name__ == "__main__":' in mcp_server_content
|
||||
|
||||
def test_stdio_server_usage(self, mcp_server_content):
|
||||
"""Verify stdio_server is used for transport."""
|
||||
assert "stdio_server" in mcp_server_content
|
||||
assert "async with stdio_server()" in mcp_server_content
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Cleanup Handling
|
||||
# =============================================================================
|
||||
|
||||
class TestCleanupHandling:
|
||||
"""Verify cleanup is properly handled."""
|
||||
|
||||
def test_disconnect_cleanup(self, mcp_server_content):
|
||||
"""Verify DevTools disconnect cleans up properly."""
|
||||
# Should reset state
|
||||
assert "devtools = DevToolsState()" in mcp_server_content
|
||||
# Should remove event listeners
|
||||
assert "remove_listener" in mcp_server_content
|
||||
|
||||
def test_browser_close_cleanup(self, mcp_server_content):
|
||||
"""Verify browser close cleans up properly."""
|
||||
assert "browser_state = BrowserAutomationState()" in mcp_server_content
|
||||
|
||||
def test_main_finally_cleanup(self, mcp_server_content):
|
||||
"""Verify main function has cleanup in finally block."""
|
||||
# Check for cleanup on server shutdown
|
||||
assert "finally:" in mcp_server_content
|
||||
assert "devtools_disconnect_impl()" in mcp_server_content
|
||||
assert "browser_close_impl()" in mcp_server_content
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: Category Counts
|
||||
# =============================================================================
|
||||
|
||||
class TestCategoryCounts:
    """Verify tool counts per category."""

    def test_dss_core_count(self):
        """Verify DSS core has 10 tools."""
        count = len(DSS_CORE_TOOLS)
        assert count == 10, f"Expected 10 DSS core tools, got {count}"

    def test_devtools_count(self):
        """Verify DevTools has 12 tools."""
        count = len(DEVTOOLS_TOOLS)
        assert count == 12, f"Expected 12 DevTools tools, got {count}"

    def test_browser_count(self):
        """Verify Browser automation has 8 tools."""
        count = len(BROWSER_TOOLS)
        assert count == 8, f"Expected 8 Browser tools, got {count}"

    def test_context_compiler_count(self):
        """Verify Context Compiler has 5 tools."""
        count = len(CONTEXT_COMPILER_TOOLS)
        assert count == 5, f"Expected 5 Context Compiler tools, got {count}"

    def test_total_count(self):
        """Verify total is 35 tools."""
        total = sum(map(len, (DSS_CORE_TOOLS, DEVTOOLS_TOOLS, BROWSER_TOOLS, CONTEXT_COMPILER_TOOLS)))
        assert total == 35, f"Expected 35 total tools, got {total}"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: DSS Core Functionality
|
||||
# =============================================================================
|
||||
|
||||
class TestDSSCoreFunctionality:
|
||||
"""Test DSS core tool specific requirements."""
|
||||
|
||||
def test_project_scanner_usage(self, mcp_server_content):
|
||||
"""Verify ProjectScanner is used for analysis."""
|
||||
assert "ProjectScanner" in mcp_server_content
|
||||
|
||||
def test_react_analyzer_usage(self, mcp_server_content):
|
||||
"""Verify ReactAnalyzer is used for component analysis."""
|
||||
assert "ReactAnalyzer" in mcp_server_content
|
||||
|
||||
def test_style_analyzer_usage(self, mcp_server_content):
|
||||
"""Verify StyleAnalyzer is used for style analysis."""
|
||||
assert "StyleAnalyzer" in mcp_server_content
|
||||
|
||||
def test_token_sources(self, mcp_server_content):
|
||||
"""Verify all token sources are available."""
|
||||
sources = ["CSSTokenSource", "SCSSTokenSource", "TailwindTokenSource", "JSONTokenSource"]
|
||||
for source in sources:
|
||||
assert source in mcp_server_content, f"Missing token source: {source}"
|
||||
|
||||
def test_token_merger_usage(self, mcp_server_content):
|
||||
"""Verify TokenMerger is used for combining tokens."""
|
||||
assert "TokenMerger" in mcp_server_content
|
||||
assert "MergeStrategy" in mcp_server_content
|
||||
|
||||
def test_storybook_support(self, mcp_server_content):
|
||||
"""Verify Storybook classes are used."""
|
||||
classes = ["StorybookScanner", "StoryGenerator", "ThemeGenerator"]
|
||||
for cls in classes:
|
||||
assert cls in mcp_server_content, f"Missing Storybook class: {cls}"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TEST CLASS: DevTools Functionality
|
||||
# =============================================================================
|
||||
|
||||
class TestDevToolsFunctionality:
|
||||
"""Test DevTools-specific requirements."""
|
||||
|
||||
def test_console_handler(self, mcp_server_content):
|
||||
"""Verify console message handler exists."""
|
||||
assert "async def _on_console" in mcp_server_content
|
||||
|
||||
def test_request_handler(self, mcp_server_content):
|
||||
"""Verify network request handler exists."""
|
||||
assert "async def _on_request" in mcp_server_content
|
||||
|
||||
def test_get_active_page_helper(self, mcp_server_content):
|
||||
"""Verify _get_active_page helper exists."""
|
||||
assert "def _get_active_page" in mcp_server_content
|
||||
|
||||
def test_cdp_connection(self, mcp_server_content):
|
||||
"""Verify CDP connection method is used."""
|
||||
assert "connect_over_cdp" in mcp_server_content
|
||||
|
||||
def test_playwright_launch(self, mcp_server_content):
|
||||
"""Verify Playwright launch for headless mode."""
|
||||
assert "chromium.launch" in mcp_server_content
|
||||
assert "--no-sandbox" in mcp_server_content # Required for Docker
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# RUN TESTS
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this suite directly, without invoking pytest by hand.
    _pytest_args = [__file__, "-v", "--tb=short"]
    pytest.main(_pytest_args)
|
||||
506
tools/dss_mcp/tests/test_mcp_integration.py
Normal file
506
tools/dss_mcp/tests/test_mcp_integration.py
Normal file
@@ -0,0 +1,506 @@
|
||||
"""
DSS MCP Plugin - Comprehensive Integration Tests

Tests all 17 MCP tools (5 Storybook + 12 Translation) across 4 layers:
- Layer 1: Import Tests
- Layer 2: Schema Validation Tests
- Layer 3: Unit Tests
- Layer 4: Security Tests

Run with: pytest test_mcp_integration.py -v
Or directly: python3 test_mcp_integration.py
"""

import asyncio
import sys
from pathlib import Path

import pytest

# Make the project root, the dss-mvp1 package, and the tools tree importable
# regardless of the working directory this file is launched from.
_THIS_FILE = Path(__file__)
TOOLS_ROOT = _THIS_FILE.parent.parent.parent
PROJECT_ROOT = TOOLS_ROOT.parent

for _extra in (PROJECT_ROOT, PROJECT_ROOT / "dss-mvp1", TOOLS_ROOT):
    sys.path.insert(0, str(_extra))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# LAYER 1: IMPORT TESTS (Isolated - no storage dependency)
|
||||
# =============================================================================
|
||||
|
||||
class TestImportsIsolated:
    """Test imports that don't depend on storage module."""

    def test_import_dss_translations_core(self):
        """Test DSS translations core modules import."""
        from dss.translations import (
            TranslationDictionary,
            TranslationDictionaryLoader,
            TranslationDictionaryWriter,
            TokenResolver,
            ThemeMerger
        )
        for exported in (TranslationDictionary, TranslationDictionaryLoader,
                         TranslationDictionaryWriter, TokenResolver, ThemeMerger):
            assert exported is not None
        print("✅ dss.translations core imports successfully")

    def test_import_canonical_tokens(self):
        """Test canonical tokens module imports."""
        from dss.translations.canonical import (
            DSS_CANONICAL_TOKENS,
            DSS_CANONICAL_COMPONENTS
        )
        for exported in (DSS_CANONICAL_TOKENS, DSS_CANONICAL_COMPONENTS):
            assert exported is not None
        print("✅ canonical.py imports successfully")

    def test_import_translation_models(self):
        """Test translation models import."""
        from dss.translations.models import (
            TranslationDictionary,
            TranslationSource,
            TranslationMappings
        )
        for exported in (TranslationDictionary, TranslationSource, TranslationMappings):
            assert exported is not None
        print("✅ translation models import successfully")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# LAYER 2: SCHEMA VALIDATION TESTS (Read file directly)
|
||||
# =============================================================================
|
||||
|
||||
class TestSchemasFromFile:
    """Validate tool definitions by reading the source files directly.

    These tests never import the modules under inspection; they only scan
    the raw source text, so they stay independent of runtime dependencies.
    """

    @staticmethod
    def _read_plugin_file(*parts: str) -> str:
        """Return the text of a file located relative to the plugin package root.

        Centralizes the ``Path(__file__).parent.parent`` traversal that was
        previously duplicated in every test of this class.
        """
        return Path(__file__).parent.parent.joinpath(*parts).read_text()

    def test_translation_tools_defined_in_file(self):
        """Verify translation tools are defined in the file."""
        content = self._read_plugin_file("integrations", "translations.py")

        expected_tools = [
            "translation_list_dictionaries",
            "translation_get_dictionary",
            "translation_create_dictionary",
            "translation_update_dictionary",
            "translation_validate_dictionary",
            "theme_get_config",
            "theme_resolve",
            "theme_add_custom_prop",
            "theme_get_canonical_tokens",
            "codegen_export_css",
            "codegen_export_scss",
            "codegen_export_json"
        ]

        for tool_name in expected_tools:
            assert f'name="{tool_name}"' in content, f"Tool {tool_name} not found"

        # Count derived from the list itself so the message can't drift
        # out of sync when tools are added or removed.
        print(f"✅ All {len(expected_tools)} translation tool definitions verified")

    def test_storybook_tools_defined_in_file(self):
        """Verify storybook tools are defined in the file."""
        content = self._read_plugin_file("integrations", "storybook.py")

        expected_tools = [
            "storybook_scan",
            "storybook_generate_stories",
            "storybook_generate_theme",
            "storybook_get_status",
            "storybook_configure"
        ]

        for tool_name in expected_tools:
            assert f'name="{tool_name}"' in content, f"Tool {tool_name} not found"

        print(f"✅ All {len(expected_tools)} storybook tool definitions verified")

    def test_handler_imports_translation_tools(self):
        """Verify handler.py imports translation tools."""
        content = self._read_plugin_file("handler.py")

        assert "from .integrations.translations import" in content, "Translation tools not imported in handler"
        assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found in handler"
        print("✅ handler.py imports translation tools")

    def test_server_imports_translation_tools(self):
        """Verify server.py imports translation tools."""
        content = self._read_plugin_file("server.py")

        assert "from .integrations.translations import" in content, "Translation tools not imported in server"
        assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found in server"
        print("✅ server.py imports translation tools")
||||
|
||||
|
||||
# =============================================================================
|
||||
# LAYER 3: UNIT TESTS (DSS Core - no MCP dependency)
|
||||
# =============================================================================
|
||||
|
||||
class TestDSSCore:
    """Test DSS translations core functionality."""

    def test_canonical_tokens_count(self):
        """Verify canonical token count."""
        from dss.translations.canonical import DSS_CANONICAL_TOKENS

        total = len(DSS_CANONICAL_TOKENS)
        assert total > 100, f"Expected >100 tokens, got {total}"
        print(f"✅ Canonical tokens count: {total}")

    def test_canonical_components_count(self):
        """Verify canonical component count."""
        from dss.translations.canonical import DSS_CANONICAL_COMPONENTS

        total = len(DSS_CANONICAL_COMPONENTS)
        assert total > 50, f"Expected >50 components, got {total}"
        print(f"✅ Canonical components count: {total}")

    def test_translation_dictionary_model(self):
        """Test TranslationDictionary model can be created."""
        from dss.translations import TranslationDictionary
        from dss.translations.models import TranslationSource

        created = TranslationDictionary(
            project="test-project",
            source=TranslationSource.CSS,
        )

        assert created.project == "test-project"
        assert created.source == TranslationSource.CSS
        assert created.uuid is not None
        print("✅ TranslationDictionary model created")

    def test_token_resolver_instantiation(self):
        """Test TokenResolver can be instantiated."""
        from dss.translations import TokenResolver
        from dss.translations.loader import TranslationRegistry

        # TokenResolver expects a TranslationRegistry, not a list.
        assert TokenResolver(TranslationRegistry()) is not None
        print("✅ TokenResolver instantiated")

    def test_translation_source_enum(self):
        """Test TranslationSource enum values."""
        from dss.translations.models import TranslationSource

        required = ("figma", "css", "scss", "heroui", "shadcn", "tailwind", "json", "custom")
        for source in required:
            assert hasattr(TranslationSource, source.upper()), f"Missing source: {source}"

        print("✅ TranslationSource enum has all values")

    def test_token_aliases(self):
        """Test token aliases exist."""
        from dss.translations.canonical import DSS_TOKEN_ALIASES

        alias_count = len(DSS_TOKEN_ALIASES)
        assert alias_count > 0, "No aliases defined"
        assert "color.primary" in DSS_TOKEN_ALIASES
        print(f"✅ Token aliases count: {alias_count}")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# LAYER 4: SECURITY TESTS (File inspection)
|
||||
# =============================================================================
|
||||
|
||||
class TestSecurity:
    """Test security measures are properly implemented."""

    def test_asyncio_import_present(self):
        """Verify asyncio is imported for non-blocking I/O."""
        source = (Path(__file__).parent.parent / "integrations" / "translations.py").read_text()

        assert "import asyncio" in source, "asyncio not imported"
        print("✅ asyncio import present in translations.py")

    def test_path_traversal_protection_in_code(self):
        """Verify path traversal protection code exists."""
        source = (Path(__file__).parent.parent / "integrations" / "translations.py").read_text()

        # Both the validation call and its user-facing error message must be present.
        assert "relative_to" in source, "Path traversal validation not found"
        assert "Output path must be within project directory" in source, "Security error message not found"
        print("✅ Path traversal protection code present")

    def test_asyncio_to_thread_usage(self):
        """Verify asyncio.to_thread is used for file I/O."""
        source = (Path(__file__).parent.parent / "integrations" / "translations.py").read_text()

        assert "asyncio.to_thread" in source, "asyncio.to_thread not found"
        # CSS, SCSS, and JSON exports should each off-load their writes.
        occurrences = source.count("asyncio.to_thread")
        assert occurrences >= 3, f"Expected at least 3 asyncio.to_thread calls, found {occurrences}"
        print(f"✅ asyncio.to_thread used {occurrences} times for non-blocking I/O")

    def test_scss_map_syntax_fixed(self):
        """Verify SCSS map syntax doesn't have spacing issue."""
        source = (Path(__file__).parent.parent / "integrations" / "translations.py").read_text()

        # The buggy spaced interpolation must be gone, the fixed form present.
        assert "${ prefix }" not in source, "SCSS spacing bug still present"
        assert "${prefix}" in source, "Fixed SCSS pattern not found"
        print("✅ SCSS map syntax is correct (no spacing issue)")

    def test_path_validation_in_dss_core(self):
        """Verify path validation in DSS core loader/writer."""
        # NOTE(review): PROJECT_ROOT is assumed to be defined earlier in this
        # module and to point at the repository root — confirm.
        translations_dir = PROJECT_ROOT / "dss-mvp1" / "dss" / "translations"

        for module_name in ("loader", "writer"):
            module_path = translations_dir / f"{module_name}.py"
            if module_path.exists():
                module_source = module_path.read_text()
                assert "_validate_safe_path" in module_source, f"Path validation missing in {module_name}"
                print(f"✅ Path validation present in {module_name}.py")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# LAYER 5: INTEGRATION CLASS STRUCTURE TESTS
|
||||
# =============================================================================
|
||||
|
||||
class TestIntegrationStructure:
    """Test integration class structure without instantiation."""

    def test_translation_integration_class_methods(self):
        """Verify TranslationIntegration has expected methods."""
        source = (Path(__file__).parent.parent / "integrations" / "translations.py").read_text()

        # These are the actual method names in the implementation.
        expected_methods = (
            "async def list_dictionaries",
            "async def get_dictionary",
            "async def create_dictionary",
            "async def update_dictionary",
            "async def validate_dictionary",
            "async def resolve_theme",
            "async def add_custom_prop",
            "async def get_canonical_tokens",
            "async def export_css",
            "async def export_scss",
            "async def export_json",
        )

        for method in expected_methods:
            assert method in source, f"Method missing: {method}"

        print(f"✅ All {len(expected_methods)} TranslationIntegration methods found")

    def test_translation_tools_executor_class(self):
        """Verify TranslationTools executor class exists."""
        source = (Path(__file__).parent.parent / "integrations" / "translations.py").read_text()

        requirements = {
            "class TranslationTools:": "TranslationTools class not found",
            "async def execute_tool": "execute_tool method not found",
        }
        for snippet, failure_message in requirements.items():
            assert snippet in source, failure_message
        print("✅ TranslationTools executor class found")

    def test_storybook_integration_class_methods(self):
        """Verify StorybookIntegration has expected methods."""
        source = (Path(__file__).parent.parent / "integrations" / "storybook.py").read_text()

        for method in ("async def scan_storybook",
                       "async def generate_stories",
                       "async def generate_theme"):
            assert method in source, f"Method missing: {method}"

        print("✅ StorybookIntegration methods found")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# QUICK SMOKE TEST (run without pytest)
|
||||
# =============================================================================
|
||||
|
||||
def _smoke_core_imports():
    """Smoke test 1: DSS core modules import cleanly.

    Returns None on success, or an error-description string on failure.
    (All ``_smoke_*`` helpers follow this contract.)
    """
    print("▶ Test 1: DSS Core Imports...")
    try:
        # The imports themselves are the assertion — any failure raises.
        from dss.translations import (
            TranslationDictionary,
            TranslationDictionaryLoader,
            TranslationDictionaryWriter,
            TokenResolver,
            ThemeMerger
        )
        from dss.translations.canonical import DSS_CANONICAL_TOKENS, DSS_CANONICAL_COMPONENTS
        from dss.translations.models import TranslationSource
        print(" ✅ All DSS core imports successful")
        return None
    except Exception as e:
        print(f" ❌ DSS core import failed: {e}")
        return f"DSS Core Import Error: {e}"


def _smoke_canonical_counts():
    """Smoke test 2: canonical token/component registries are populated."""
    print("\n▶ Test 2: Canonical Token Counts...")
    try:
        from dss.translations.canonical import DSS_CANONICAL_TOKENS, DSS_CANONICAL_COMPONENTS

        token_count = len(DSS_CANONICAL_TOKENS)
        component_count = len(DSS_CANONICAL_COMPONENTS)

        assert token_count > 100, f"Expected >100 tokens, got {token_count}"
        assert component_count > 50, f"Expected >50 components, got {component_count}"

        print(f" ✅ Canonical tokens: {token_count}")
        print(f" ✅ Canonical components: {component_count}")
        return None
    except Exception as e:
        print(f" ❌ Canonical token check failed: {e}")
        return f"Canonical Token Error: {e}"


def _smoke_dictionary_model():
    """Smoke test 3: TranslationDictionary can be instantiated with a UUID."""
    print("\n▶ Test 3: TranslationDictionary Model...")
    try:
        from dss.translations import TranslationDictionary
        from dss.translations.models import TranslationSource

        dictionary = TranslationDictionary(
            project="test-project",
            source=TranslationSource.CSS
        )
        assert dictionary.uuid is not None
        assert dictionary.project == "test-project"

        print(f" ✅ Created dictionary with UUID: {dictionary.uuid[:8]}...")
        return None
    except Exception as e:
        print(f" ❌ TranslationDictionary creation failed: {e}")
        return f"TranslationDictionary Error: {e}"


def _smoke_tool_definitions():
    """Smoke test 4: expected number of MCP tool definitions per file."""
    print("\n▶ Test 4: Tool Definitions in Files...")
    try:
        integrations_dir = Path(__file__).parent.parent / "integrations"
        trans_content = (integrations_dir / "translations.py").read_text()
        story_content = (integrations_dir / "storybook.py").read_text()

        # Count tool definitions
        trans_tools = trans_content.count('types.Tool(')
        story_tools = story_content.count('types.Tool(')

        assert trans_tools == 12, f"Expected 12 translation tools, found {trans_tools}"
        assert story_tools == 5, f"Expected 5 storybook tools, found {story_tools}"

        print(f" ✅ Translation tools: {trans_tools}")
        print(f" ✅ Storybook tools: {story_tools}")
        print(f" ✅ Total: {trans_tools + story_tools}")
        return None
    except Exception as e:
        print(f" ❌ Tool definition check failed: {e}")
        return f"Tool Definition Error: {e}"


def _smoke_security_measures():
    """Smoke test 5: security patterns are present in translations.py."""
    print("\n▶ Test 5: Security Measures...")
    try:
        translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
        content = translations_file.read_text()

        checks = {
            "asyncio import": "import asyncio" in content,
            "asyncio.to_thread": content.count("asyncio.to_thread") >= 3,
            "path traversal protection": "relative_to" in content,
            "SCSS syntax fixed": "${ prefix }" not in content
        }

        all_passed = True
        for check, result in checks.items():
            if result:
                print(f" ✅ {check}")
            else:
                print(f" ❌ {check}")
                all_passed = False

        return None if all_passed else "Security check failed"
    except Exception as e:
        print(f" ❌ Security check failed: {e}")
        return f"Security Check Error: {e}"


def _smoke_handler_integration():
    """Smoke test 6: handler.py wires in the translation tools."""
    print("\n▶ Test 6: Handler Integration...")
    try:
        content = (Path(__file__).parent.parent / "handler.py").read_text()

        assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found"
        assert "from .integrations.translations import" in content

        print(" ✅ Handler imports translation tools")
        return None
    except Exception as e:
        print(f" ❌ Handler integration check failed: {e}")
        return f"Handler Integration Error: {e}"


def _smoke_server_integration():
    """Smoke test 7: server.py wires in the translation tools."""
    print("\n▶ Test 7: Server Integration...")
    try:
        content = (Path(__file__).parent.parent / "server.py").read_text()

        assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found"
        assert "from .integrations.translations import" in content

        print(" ✅ Server imports translation tools")
        return None
    except Exception as e:
        print(f" ❌ Server integration check failed: {e}")
        return f"Server Integration Error: {e}"


def run_smoke_tests():
    """Quick smoke test that can run without pytest.

    Runs each registered smoke check in order, prints a summary, and
    returns True when every check passes, False otherwise.
    """
    print("\n" + "="*60)
    print("DSS MCP PLUGIN - SMOKE TESTS")
    print("="*60 + "\n")

    # Registry of smoke checks. `total` is derived from it instead of the
    # previous hard-coded 7, so adding a check can't desynchronize the summary.
    checks = [
        _smoke_core_imports,
        _smoke_canonical_counts,
        _smoke_dictionary_model,
        _smoke_tool_definitions,
        _smoke_security_measures,
        _smoke_handler_integration,
        _smoke_server_integration,
    ]
    total = len(checks)
    errors = []
    passed = 0

    for check in checks:
        error = check()
        if error is None:
            passed += 1
        else:
            errors.append(error)

    # Summary
    print("\n" + "="*60)
    print(f"RESULTS: {passed}/{total} tests passed")
    print("="*60)

    if errors:
        print("\n❌ ERRORS:")
        for err in errors:
            print(f" • {err}")
        return False
    else:
        print("\n🎉 ALL SMOKE TESTS PASSED!")
        print("\n📋 Summary:")
        # NOTE(review): the counts below are hard-coded claims, not values
        # measured above — confirm they still match the canonical registries.
        print(" • DSS Core translations module: WORKING")
        print(" • 127 canonical tokens defined")
        print(" • 68 canonical components defined")
        print(" • 17 MCP tools defined (12 translation + 5 storybook)")
        print(" • Security measures: ALL PRESENT")
        print(" • Handler/Server integration: COMPLETE")
        return True
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run smoke tests when executed directly.
    # Local import: `sys` is not among the module's top-of-file imports,
    # so `sys.exit` would otherwise raise NameError; importing here is
    # harmless even if a top-level import exists.
    import sys

    success = run_smoke_tests()
    sys.exit(0 if success else 1)
|
||||
Reference in New Issue
Block a user