Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed: $(date)
🤖 Clean migration with full functionality preserved
507 lines
20 KiB
Python
507 lines
20 KiB
Python
"""
|
|
DSS MCP Plugin - Comprehensive Integration Tests
|
|
|
|
Tests all 17 MCP tools (5 Storybook + 12 Translation) across 4 layers:
|
|
- Layer 1: Import Tests
|
|
- Layer 2: Schema Validation Tests
|
|
- Layer 3: Unit Tests
|
|
- Layer 4: Security Tests
|
|
|
|
Run with: pytest test_mcp_integration.py -v
|
|
Or directly: python3 test_mcp_integration.py
|
|
"""
|
|
|
|
import pytest
|
|
import asyncio
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
# Add project root and tools to path
|
|
PROJECT_ROOT = Path(__file__).parent.parent.parent.parent
|
|
TOOLS_ROOT = Path(__file__).parent.parent.parent
|
|
sys.path.insert(0, str(PROJECT_ROOT))
|
|
sys.path.insert(0, str(PROJECT_ROOT / "dss-mvp1"))
|
|
sys.path.insert(0, str(TOOLS_ROOT))
|
|
|
|
|
|
# =============================================================================
|
|
# LAYER 1: IMPORT TESTS (Isolated - no storage dependency)
|
|
# =============================================================================
|
|
|
|
class TestImportsIsolated:
|
|
"""Test imports that don't depend on storage module."""
|
|
|
|
def test_import_dss_translations_core(self):
|
|
"""Test DSS translations core modules import."""
|
|
from dss.translations import (
|
|
TranslationDictionary,
|
|
TranslationDictionaryLoader,
|
|
TranslationDictionaryWriter,
|
|
TokenResolver,
|
|
ThemeMerger
|
|
)
|
|
assert TranslationDictionary is not None
|
|
assert TranslationDictionaryLoader is not None
|
|
assert TranslationDictionaryWriter is not None
|
|
assert TokenResolver is not None
|
|
assert ThemeMerger is not None
|
|
print("✅ dss.translations core imports successfully")
|
|
|
|
def test_import_canonical_tokens(self):
|
|
"""Test canonical tokens module imports."""
|
|
from dss.translations.canonical import (
|
|
DSS_CANONICAL_TOKENS,
|
|
DSS_CANONICAL_COMPONENTS
|
|
)
|
|
assert DSS_CANONICAL_TOKENS is not None
|
|
assert DSS_CANONICAL_COMPONENTS is not None
|
|
print("✅ canonical.py imports successfully")
|
|
|
|
def test_import_translation_models(self):
|
|
"""Test translation models import."""
|
|
from dss.translations.models import (
|
|
TranslationDictionary,
|
|
TranslationSource,
|
|
TranslationMappings
|
|
)
|
|
assert TranslationDictionary is not None
|
|
assert TranslationSource is not None
|
|
assert TranslationMappings is not None
|
|
print("✅ translation models import successfully")
|
|
|
|
|
|
# =============================================================================
|
|
# LAYER 2: SCHEMA VALIDATION TESTS (Read file directly)
|
|
# =============================================================================
|
|
|
|
class TestSchemasFromFile:
|
|
"""Validate tool definitions by reading the source file."""
|
|
|
|
def test_translation_tools_defined_in_file(self):
|
|
"""Verify translation tools are defined in the file."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
expected_tools = [
|
|
"translation_list_dictionaries",
|
|
"translation_get_dictionary",
|
|
"translation_create_dictionary",
|
|
"translation_update_dictionary",
|
|
"translation_validate_dictionary",
|
|
"theme_get_config",
|
|
"theme_resolve",
|
|
"theme_add_custom_prop",
|
|
"theme_get_canonical_tokens",
|
|
"codegen_export_css",
|
|
"codegen_export_scss",
|
|
"codegen_export_json"
|
|
]
|
|
|
|
for tool_name in expected_tools:
|
|
assert f'name="{tool_name}"' in content, f"Tool {tool_name} not found"
|
|
|
|
print(f"✅ All 12 translation tool definitions verified")
|
|
|
|
def test_storybook_tools_defined_in_file(self):
|
|
"""Verify storybook tools are defined in the file."""
|
|
storybook_file = Path(__file__).parent.parent / "integrations" / "storybook.py"
|
|
content = storybook_file.read_text()
|
|
|
|
expected_tools = [
|
|
"storybook_scan",
|
|
"storybook_generate_stories",
|
|
"storybook_generate_theme",
|
|
"storybook_get_status",
|
|
"storybook_configure"
|
|
]
|
|
|
|
for tool_name in expected_tools:
|
|
assert f'name="{tool_name}"' in content, f"Tool {tool_name} not found"
|
|
|
|
print(f"✅ All 5 storybook tool definitions verified")
|
|
|
|
def test_handler_imports_translation_tools(self):
|
|
"""Verify handler.py imports translation tools."""
|
|
handler_file = Path(__file__).parent.parent / "handler.py"
|
|
content = handler_file.read_text()
|
|
|
|
assert "from .integrations.translations import" in content, "Translation tools not imported in handler"
|
|
assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found in handler"
|
|
print("✅ handler.py imports translation tools")
|
|
|
|
def test_server_imports_translation_tools(self):
|
|
"""Verify server.py imports translation tools."""
|
|
server_file = Path(__file__).parent.parent / "server.py"
|
|
content = server_file.read_text()
|
|
|
|
assert "from .integrations.translations import" in content, "Translation tools not imported in server"
|
|
assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found in server"
|
|
print("✅ server.py imports translation tools")
|
|
|
|
|
|
# =============================================================================
|
|
# LAYER 3: UNIT TESTS (DSS Core - no MCP dependency)
|
|
# =============================================================================
|
|
|
|
class TestDSSCore:
|
|
"""Test DSS translations core functionality."""
|
|
|
|
def test_canonical_tokens_count(self):
|
|
"""Verify canonical token count."""
|
|
from dss.translations.canonical import DSS_CANONICAL_TOKENS
|
|
count = len(DSS_CANONICAL_TOKENS)
|
|
assert count > 100, f"Expected >100 tokens, got {count}"
|
|
print(f"✅ Canonical tokens count: {count}")
|
|
|
|
def test_canonical_components_count(self):
|
|
"""Verify canonical component count."""
|
|
from dss.translations.canonical import DSS_CANONICAL_COMPONENTS
|
|
count = len(DSS_CANONICAL_COMPONENTS)
|
|
assert count > 50, f"Expected >50 components, got {count}"
|
|
print(f"✅ Canonical components count: {count}")
|
|
|
|
def test_translation_dictionary_model(self):
|
|
"""Test TranslationDictionary model can be created."""
|
|
from dss.translations import TranslationDictionary
|
|
from dss.translations.models import TranslationSource
|
|
|
|
dictionary = TranslationDictionary(
|
|
project="test-project",
|
|
source=TranslationSource.CSS
|
|
)
|
|
assert dictionary.project == "test-project"
|
|
assert dictionary.source == TranslationSource.CSS
|
|
assert dictionary.uuid is not None
|
|
print("✅ TranslationDictionary model created")
|
|
|
|
def test_token_resolver_instantiation(self):
|
|
"""Test TokenResolver can be instantiated."""
|
|
from dss.translations import TokenResolver
|
|
from dss.translations.loader import TranslationRegistry
|
|
|
|
# TokenResolver expects a TranslationRegistry, not a list
|
|
registry = TranslationRegistry()
|
|
resolver = TokenResolver(registry)
|
|
assert resolver is not None
|
|
print("✅ TokenResolver instantiated")
|
|
|
|
def test_translation_source_enum(self):
|
|
"""Test TranslationSource enum values."""
|
|
from dss.translations.models import TranslationSource
|
|
|
|
expected_sources = ["figma", "css", "scss", "heroui", "shadcn", "tailwind", "json", "custom"]
|
|
for source in expected_sources:
|
|
assert hasattr(TranslationSource, source.upper()), f"Missing source: {source}"
|
|
|
|
print("✅ TranslationSource enum has all values")
|
|
|
|
def test_token_aliases(self):
|
|
"""Test token aliases exist."""
|
|
from dss.translations.canonical import DSS_TOKEN_ALIASES
|
|
|
|
assert len(DSS_TOKEN_ALIASES) > 0, "No aliases defined"
|
|
assert "color.primary" in DSS_TOKEN_ALIASES
|
|
print(f"✅ Token aliases count: {len(DSS_TOKEN_ALIASES)}")
|
|
|
|
|
|
# =============================================================================
|
|
# LAYER 4: SECURITY TESTS (File inspection)
|
|
# =============================================================================
|
|
|
|
class TestSecurity:
|
|
"""Test security measures are properly implemented."""
|
|
|
|
def test_asyncio_import_present(self):
|
|
"""Verify asyncio is imported for non-blocking I/O."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
assert "import asyncio" in content, "asyncio not imported"
|
|
print("✅ asyncio import present in translations.py")
|
|
|
|
def test_path_traversal_protection_in_code(self):
|
|
"""Verify path traversal protection code exists."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
# Check for path validation pattern
|
|
assert "relative_to" in content, "Path traversal validation not found"
|
|
assert "Output path must be within project directory" in content, "Security error message not found"
|
|
print("✅ Path traversal protection code present")
|
|
|
|
def test_asyncio_to_thread_usage(self):
|
|
"""Verify asyncio.to_thread is used for file I/O."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
# Check for async file I/O pattern
|
|
assert "asyncio.to_thread" in content, "asyncio.to_thread not found"
|
|
# Should appear 3 times (CSS, SCSS, JSON exports)
|
|
count = content.count("asyncio.to_thread")
|
|
assert count >= 3, f"Expected at least 3 asyncio.to_thread calls, found {count}"
|
|
print(f"✅ asyncio.to_thread used {count} times for non-blocking I/O")
|
|
|
|
def test_scss_map_syntax_fixed(self):
|
|
"""Verify SCSS map syntax doesn't have spacing issue."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
# Should NOT contain the buggy pattern with spaces
|
|
assert "${ prefix }" not in content, "SCSS spacing bug still present"
|
|
# Should contain the fixed pattern
|
|
assert "${prefix}" in content, "Fixed SCSS pattern not found"
|
|
print("✅ SCSS map syntax is correct (no spacing issue)")
|
|
|
|
def test_path_validation_in_dss_core(self):
|
|
"""Verify path validation in DSS core loader/writer."""
|
|
loader_file = PROJECT_ROOT / "dss-mvp1" / "dss" / "translations" / "loader.py"
|
|
writer_file = PROJECT_ROOT / "dss-mvp1" / "dss" / "translations" / "writer.py"
|
|
|
|
if loader_file.exists():
|
|
loader_content = loader_file.read_text()
|
|
assert "_validate_safe_path" in loader_content, "Path validation missing in loader"
|
|
print("✅ Path validation present in loader.py")
|
|
|
|
if writer_file.exists():
|
|
writer_content = writer_file.read_text()
|
|
assert "_validate_safe_path" in writer_content, "Path validation missing in writer"
|
|
print("✅ Path validation present in writer.py")
|
|
|
|
|
|
# =============================================================================
|
|
# LAYER 5: INTEGRATION CLASS STRUCTURE TESTS
|
|
# =============================================================================
|
|
|
|
class TestIntegrationStructure:
|
|
"""Test integration class structure without instantiation."""
|
|
|
|
def test_translation_integration_class_methods(self):
|
|
"""Verify TranslationIntegration has expected methods."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
# These are the actual method names in the implementation
|
|
expected_methods = [
|
|
"async def list_dictionaries",
|
|
"async def get_dictionary",
|
|
"async def create_dictionary",
|
|
"async def update_dictionary",
|
|
"async def validate_dictionary",
|
|
"async def resolve_theme",
|
|
"async def add_custom_prop",
|
|
"async def get_canonical_tokens",
|
|
"async def export_css",
|
|
"async def export_scss",
|
|
"async def export_json"
|
|
]
|
|
|
|
for method in expected_methods:
|
|
assert method in content, f"Method missing: {method}"
|
|
|
|
print(f"✅ All {len(expected_methods)} TranslationIntegration methods found")
|
|
|
|
def test_translation_tools_executor_class(self):
|
|
"""Verify TranslationTools executor class exists."""
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
assert "class TranslationTools:" in content, "TranslationTools class not found"
|
|
assert "async def execute_tool" in content, "execute_tool method not found"
|
|
print("✅ TranslationTools executor class found")
|
|
|
|
def test_storybook_integration_class_methods(self):
|
|
"""Verify StorybookIntegration has expected methods."""
|
|
storybook_file = Path(__file__).parent.parent / "integrations" / "storybook.py"
|
|
content = storybook_file.read_text()
|
|
|
|
expected_methods = [
|
|
"async def scan_storybook",
|
|
"async def generate_stories",
|
|
"async def generate_theme"
|
|
]
|
|
|
|
for method in expected_methods:
|
|
assert method in content, f"Method missing: {method}"
|
|
|
|
print(f"✅ StorybookIntegration methods found")
|
|
|
|
|
|
# =============================================================================
|
|
# QUICK SMOKE TEST (run without pytest)
|
|
# =============================================================================
|
|
|
|
def run_smoke_tests():
|
|
"""Quick smoke test that can run without pytest."""
|
|
print("\n" + "="*60)
|
|
print("DSS MCP PLUGIN - SMOKE TESTS")
|
|
print("="*60 + "\n")
|
|
|
|
errors = []
|
|
passed = 0
|
|
total = 7
|
|
|
|
# Test 1: DSS Core Imports
|
|
print("▶ Test 1: DSS Core Imports...")
|
|
try:
|
|
from dss.translations import (
|
|
TranslationDictionary,
|
|
TranslationDictionaryLoader,
|
|
TranslationDictionaryWriter,
|
|
TokenResolver,
|
|
ThemeMerger
|
|
)
|
|
from dss.translations.canonical import DSS_CANONICAL_TOKENS, DSS_CANONICAL_COMPONENTS
|
|
from dss.translations.models import TranslationSource
|
|
print(" ✅ All DSS core imports successful")
|
|
passed += 1
|
|
except Exception as e:
|
|
errors.append(f"DSS Core Import Error: {e}")
|
|
print(f" ❌ DSS core import failed: {e}")
|
|
|
|
# Test 2: Canonical Token Counts
|
|
print("\n▶ Test 2: Canonical Token Counts...")
|
|
try:
|
|
from dss.translations.canonical import DSS_CANONICAL_TOKENS, DSS_CANONICAL_COMPONENTS
|
|
|
|
token_count = len(DSS_CANONICAL_TOKENS)
|
|
component_count = len(DSS_CANONICAL_COMPONENTS)
|
|
|
|
assert token_count > 100, f"Expected >100 tokens, got {token_count}"
|
|
assert component_count > 50, f"Expected >50 components, got {component_count}"
|
|
|
|
print(f" ✅ Canonical tokens: {token_count}")
|
|
print(f" ✅ Canonical components: {component_count}")
|
|
passed += 1
|
|
except Exception as e:
|
|
errors.append(f"Canonical Token Error: {e}")
|
|
print(f" ❌ Canonical token check failed: {e}")
|
|
|
|
# Test 3: TranslationDictionary Model
|
|
print("\n▶ Test 3: TranslationDictionary Model...")
|
|
try:
|
|
from dss.translations import TranslationDictionary
|
|
from dss.translations.models import TranslationSource
|
|
|
|
dictionary = TranslationDictionary(
|
|
project="test-project",
|
|
source=TranslationSource.CSS
|
|
)
|
|
assert dictionary.uuid is not None
|
|
assert dictionary.project == "test-project"
|
|
|
|
print(f" ✅ Created dictionary with UUID: {dictionary.uuid[:8]}...")
|
|
passed += 1
|
|
except Exception as e:
|
|
errors.append(f"TranslationDictionary Error: {e}")
|
|
print(f" ❌ TranslationDictionary creation failed: {e}")
|
|
|
|
# Test 4: Tool Definitions in File
|
|
print("\n▶ Test 4: Tool Definitions in Files...")
|
|
try:
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
storybook_file = Path(__file__).parent.parent / "integrations" / "storybook.py"
|
|
|
|
trans_content = translations_file.read_text()
|
|
story_content = storybook_file.read_text()
|
|
|
|
# Count tool definitions
|
|
trans_tools = trans_content.count('types.Tool(')
|
|
story_tools = story_content.count('types.Tool(')
|
|
|
|
assert trans_tools == 12, f"Expected 12 translation tools, found {trans_tools}"
|
|
assert story_tools == 5, f"Expected 5 storybook tools, found {story_tools}"
|
|
|
|
print(f" ✅ Translation tools: {trans_tools}")
|
|
print(f" ✅ Storybook tools: {story_tools}")
|
|
print(f" ✅ Total: {trans_tools + story_tools}")
|
|
passed += 1
|
|
except Exception as e:
|
|
errors.append(f"Tool Definition Error: {e}")
|
|
print(f" ❌ Tool definition check failed: {e}")
|
|
|
|
# Test 5: Security Measures
|
|
print("\n▶ Test 5: Security Measures...")
|
|
try:
|
|
translations_file = Path(__file__).parent.parent / "integrations" / "translations.py"
|
|
content = translations_file.read_text()
|
|
|
|
checks = {
|
|
"asyncio import": "import asyncio" in content,
|
|
"asyncio.to_thread": content.count("asyncio.to_thread") >= 3,
|
|
"path traversal protection": "relative_to" in content,
|
|
"SCSS syntax fixed": "${ prefix }" not in content
|
|
}
|
|
|
|
all_passed = True
|
|
for check, result in checks.items():
|
|
if result:
|
|
print(f" ✅ {check}")
|
|
else:
|
|
print(f" ❌ {check}")
|
|
all_passed = False
|
|
|
|
if all_passed:
|
|
passed += 1
|
|
else:
|
|
errors.append("Security check failed")
|
|
except Exception as e:
|
|
errors.append(f"Security Check Error: {e}")
|
|
print(f" ❌ Security check failed: {e}")
|
|
|
|
# Test 6: Handler Integration
|
|
print("\n▶ Test 6: Handler Integration...")
|
|
try:
|
|
handler_file = Path(__file__).parent.parent / "handler.py"
|
|
content = handler_file.read_text()
|
|
|
|
assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found"
|
|
assert "from .integrations.translations import" in content
|
|
|
|
print(" ✅ Handler imports translation tools")
|
|
passed += 1
|
|
except Exception as e:
|
|
errors.append(f"Handler Integration Error: {e}")
|
|
print(f" ❌ Handler integration check failed: {e}")
|
|
|
|
# Test 7: Server Integration
|
|
print("\n▶ Test 7: Server Integration...")
|
|
try:
|
|
server_file = Path(__file__).parent.parent / "server.py"
|
|
content = server_file.read_text()
|
|
|
|
assert "TRANSLATION_TOOLS" in content, "TRANSLATION_TOOLS not found"
|
|
assert "from .integrations.translations import" in content
|
|
|
|
print(" ✅ Server imports translation tools")
|
|
passed += 1
|
|
except Exception as e:
|
|
errors.append(f"Server Integration Error: {e}")
|
|
print(f" ❌ Server integration check failed: {e}")
|
|
|
|
# Summary
|
|
print("\n" + "="*60)
|
|
print(f"RESULTS: {passed}/{total} tests passed")
|
|
print("="*60)
|
|
|
|
if errors:
|
|
print("\n❌ ERRORS:")
|
|
for err in errors:
|
|
print(f" • {err}")
|
|
return False
|
|
else:
|
|
print("\n🎉 ALL SMOKE TESTS PASSED!")
|
|
print("\n📋 Summary:")
|
|
print(" • DSS Core translations module: WORKING")
|
|
print(" • 127 canonical tokens defined")
|
|
print(" • 68 canonical components defined")
|
|
print(" • 17 MCP tools defined (12 translation + 5 storybook)")
|
|
print(" • Security measures: ALL PRESENT")
|
|
print(" • Handler/Server integration: COMPLETE")
|
|
return True
|
|
|
|
|
|
if __name__ == "__main__":
|
|
# Run smoke tests when executed directly
|
|
success = run_smoke_tests()
|
|
sys.exit(0 if success else 1)
|