Initial commit: Clean DSS implementation
Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed (note: the `$(date)` placeholder was never expanded when this message was written).
🤖 Clean migration with full functionality preserved
This commit is contained in:
8
demo/tools/dss_mcp/__init__.py
Normal file
8
demo/tools/dss_mcp/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
DSS MCP Server
|
||||
|
||||
Model Context Protocol server for Design System Swarm.
|
||||
Provides project-isolated context and tools to Claude chat instances.
|
||||
"""
|
||||
|
||||
__version__ = "0.8.0"
|
||||
145
demo/tools/dss_mcp/config.py
Normal file
145
demo/tools/dss_mcp/config.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
MCP Server Configuration
|
||||
|
||||
Loads configuration from environment variables and provides settings
|
||||
for the MCP server, integrations, and security.
|
||||
"""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from dotenv import load_dotenv
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Base paths
|
||||
PROJECT_ROOT = Path(__file__).parent.parent.parent
|
||||
TOOLS_DIR = PROJECT_ROOT / "tools"
|
||||
STORAGE_DIR = PROJECT_ROOT / "tools" / "storage"
|
||||
CACHE_DIR = PROJECT_ROOT / os.getenv("DSS_CACHE_DIR", ".dss/cache")
|
||||
|
||||
|
||||
class MCPConfig:
    """MCP Server Configuration.

    All values are read once at import time from environment variables,
    with sensible defaults for local development.
    """

    # Server Settings: bind address and port for the MCP server.
    HOST: str = os.getenv("DSS_MCP_HOST", "127.0.0.1")
    PORT: int = int(os.getenv("DSS_MCP_PORT", "3457"))

    # Database: SQLite file path; defaults to tools/storage/dss.db.
    DATABASE_PATH: str = os.getenv(
        "DATABASE_PATH",
        str(STORAGE_DIR / "dss.db")
    )

    # Context Caching
    CONTEXT_CACHE_TTL: int = int(os.getenv("DSS_CONTEXT_CACHE_TTL", "300"))  # 5 minutes

    # Encryption: Fernet key used for integration credentials.
    # When unset, get_cipher() returns None and credentials are handled as plain JSON.
    ENCRYPTION_KEY: Optional[str] = os.getenv("DSS_ENCRYPTION_KEY")

    @classmethod
    def get_cipher(cls) -> Optional[Fernet]:
        """Get Fernet cipher for encryption/decryption.

        Returns:
            A Fernet instance built from ENCRYPTION_KEY, or None when no
            key is configured.
        """
        if not cls.ENCRYPTION_KEY:
            return None
        return Fernet(cls.ENCRYPTION_KEY.encode())

    @classmethod
    def generate_encryption_key(cls) -> str:
        """Generate a new (url-safe base64) encryption key."""
        return Fernet.generate_key().decode()

    # Redis/Celery for worker pool
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://localhost:6379/0")
    CELERY_BROKER_URL: str = os.getenv("CELERY_BROKER_URL", "redis://localhost:6379/0")
    CELERY_RESULT_BACKEND: str = os.getenv("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")

    # Circuit Breaker: consecutive failures before opening, and cool-down seconds.
    CIRCUIT_BREAKER_FAILURE_THRESHOLD: int = int(
        os.getenv("CIRCUIT_BREAKER_FAILURE_THRESHOLD", "5")
    )
    CIRCUIT_BREAKER_TIMEOUT_SECONDS: int = int(
        os.getenv("CIRCUIT_BREAKER_TIMEOUT_SECONDS", "60")
    )

    # Logging
    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO").upper()
|
||||
|
||||
|
||||
class IntegrationConfig:
    """External Integration Configuration.

    Server-wide defaults for third-party services; Jira/Confluence values
    can be overridden per-user (see project_integrations handling elsewhere).
    """

    # Figma
    FIGMA_TOKEN: Optional[str] = os.getenv("FIGMA_TOKEN")
    FIGMA_CACHE_TTL: int = int(os.getenv("FIGMA_CACHE_TTL", "300"))

    # Anthropic (for Sequential Thinking)
    ANTHROPIC_API_KEY: Optional[str] = os.getenv("ANTHROPIC_API_KEY")

    # Jira (defaults, can be overridden per-user)
    JIRA_URL: Optional[str] = os.getenv("JIRA_URL")
    JIRA_USERNAME: Optional[str] = os.getenv("JIRA_USERNAME")
    JIRA_API_TOKEN: Optional[str] = os.getenv("JIRA_API_TOKEN")

    # Confluence (defaults, can be overridden per-user)
    CONFLUENCE_URL: Optional[str] = os.getenv("CONFLUENCE_URL")
    CONFLUENCE_USERNAME: Optional[str] = os.getenv("CONFLUENCE_USERNAME")
    CONFLUENCE_API_TOKEN: Optional[str] = os.getenv("CONFLUENCE_API_TOKEN")
|
||||
|
||||
|
||||
# Singleton instances shared by the rest of the MCP server modules.
mcp_config = MCPConfig()
integration_config = IntegrationConfig()
|
||||
|
||||
|
||||
def validate_config() -> list[str]:
    """
    Validate configuration and return list of warnings.

    Returns:
        List of warning messages for missing optional config
    """
    # Each entry pairs a config value with the warning emitted when it is unset.
    checks = [
        (
            mcp_config.ENCRYPTION_KEY,
            "DSS_ENCRYPTION_KEY not set. Integration credentials will not be encrypted. "
            "Generate one with: python -c \"from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())\"",
        ),
        (
            integration_config.ANTHROPIC_API_KEY,
            "ANTHROPIC_API_KEY not set. Sequential Thinking tools will not be available.",
        ),
        (
            integration_config.FIGMA_TOKEN,
            "FIGMA_TOKEN not set. Figma tools will not be available.",
        ),
    ]
    return [message for value, message in checks if not value]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("=== DSS MCP Configuration ===\n")
|
||||
print(f"MCP Server: {mcp_config.HOST}:{mcp_config.PORT}")
|
||||
print(f"Database: {mcp_config.DATABASE_PATH}")
|
||||
print(f"Context Cache TTL: {mcp_config.CONTEXT_CACHE_TTL}s")
|
||||
print(f"Encryption Key: {'✓ Set' if mcp_config.ENCRYPTION_KEY else '✗ Not Set'}")
|
||||
print(f"Redis URL: {mcp_config.REDIS_URL}")
|
||||
print(f"\nCircuit Breaker:")
|
||||
print(f" Failure Threshold: {mcp_config.CIRCUIT_BREAKER_FAILURE_THRESHOLD}")
|
||||
print(f" Timeout: {mcp_config.CIRCUIT_BREAKER_TIMEOUT_SECONDS}s")
|
||||
|
||||
print(f"\n=== Integration Configuration ===\n")
|
||||
print(f"Figma Token: {'✓ Set' if integration_config.FIGMA_TOKEN else '✗ Not Set'}")
|
||||
print(f"Anthropic API Key: {'✓ Set' if integration_config.ANTHROPIC_API_KEY else '✗ Not Set'}")
|
||||
print(f"Jira URL: {integration_config.JIRA_URL or '✗ Not Set'}")
|
||||
print(f"Confluence URL: {integration_config.CONFLUENCE_URL or '✗ Not Set'}")
|
||||
|
||||
warnings = validate_config()
|
||||
if warnings:
|
||||
print(f"\n⚠️ Warnings:")
|
||||
for warning in warnings:
|
||||
print(f" - {warning}")
|
||||
else:
|
||||
print(f"\n✓ Configuration is valid")
|
||||
0
demo/tools/dss_mcp/context/__init__.py
Normal file
0
demo/tools/dss_mcp/context/__init__.py
Normal file
443
demo/tools/dss_mcp/context/project_context.py
Normal file
443
demo/tools/dss_mcp/context/project_context.py
Normal file
@@ -0,0 +1,443 @@
|
||||
"""
|
||||
Project Context Manager
|
||||
|
||||
Provides cached, project-isolated context for Claude MCP sessions.
|
||||
Loads all relevant project data (components, tokens, config, health, etc.)
|
||||
and caches it for performance.
|
||||
"""
|
||||
|
||||
import json
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from dataclasses import dataclass, asdict
|
||||
from typing import Dict, Any, Optional, List
|
||||
from pathlib import Path
|
||||
|
||||
# Import from existing DSS modules
|
||||
import sys
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
from storage.database import get_connection, Projects
|
||||
from analyze.scanner import ProjectScanner
|
||||
from ..config import mcp_config
|
||||
|
||||
|
||||
@dataclass
class ProjectContext:
    """Complete, immutable-by-convention snapshot of a project for one MCP session."""

    project_id: str
    name: str
    description: Optional[str]
    path: Optional[Path]

    # Component data
    components: List[Dict[str, Any]]
    component_count: int

    # Token/Style data
    tokens: Dict[str, Any]
    styles: List[Dict[str, Any]]

    # Project configuration
    config: Dict[str, Any]

    # User's enabled integrations (user-scoped)
    integrations: Dict[str, Any]

    # Project health & metrics
    health: Dict[str, Any]
    stats: Dict[str, Any]

    # Discovery/scan results
    discovery: Dict[str, Any]

    # Metadata
    loaded_at: datetime
    cache_expires_at: datetime

    def to_dict(self) -> Dict[str, Any]:
        """Serialize into a JSON-friendly dict (datetimes -> ISO strings, path -> str)."""
        serialized = asdict(self)
        serialized.update(
            loaded_at=self.loaded_at.isoformat(),
            cache_expires_at=self.cache_expires_at.isoformat(),
        )
        if self.path:
            serialized['path'] = str(self.path)
        return serialized

    def is_expired(self) -> bool:
        """Report whether this snapshot has reached its expiry timestamp."""
        return not datetime.now() < self.cache_expires_at
|
||||
|
||||
|
||||
class ProjectContextManager:
    """
    Manages project contexts with TTL-based caching.

    Provides fast access to project data for MCP tools while ensuring
    data freshness and project isolation. All blocking database and
    filesystem work runs on the default executor so the event loop is
    never blocked.
    """

    def __init__(self):
        # Keyed by "{project_id}:{user_id or 'anonymous'}" so user-scoped
        # integration data is never shared between users.
        self._cache: Dict[str, ProjectContext] = {}
        self._cache_ttl = timedelta(seconds=mcp_config.CONTEXT_CACHE_TTL)

    async def get_context(
        self,
        project_id: str,
        user_id: Optional[int] = None,
        force_refresh: bool = False
    ) -> Optional[ProjectContext]:
        """
        Get project context, using cache if available.

        Args:
            project_id: Project ID
            user_id: User ID for loading user-scoped integrations
            force_refresh: Force cache refresh

        Returns:
            ProjectContext or None if project not found
        """
        # Serve from cache unless a refresh was requested or the TTL passed.
        cache_key = f"{project_id}:{user_id or 'anonymous'}"
        if not force_refresh and cache_key in self._cache:
            ctx = self._cache[cache_key]
            if not ctx.is_expired():
                return ctx

        # Cache miss, forced refresh, or expired entry: load fresh data.
        context = await self._load_context(project_id, user_id)
        if context:
            self._cache[cache_key] = context

        return context

    async def _load_context(
        self,
        project_id: str,
        user_id: Optional[int] = None
    ) -> Optional[ProjectContext]:
        """Load complete project context from database and filesystem."""

        # get_running_loop() replaces the deprecated get_event_loop(); it is
        # safe here because this coroutine only executes inside a running loop.
        loop = asyncio.get_running_loop()

        # Load project metadata first; bail out early if the project is gone.
        project = await loop.run_in_executor(None, self._load_project, project_id)
        if not project:
            return None

        # Load components, styles, stats, and integrations concurrently.
        components_task = loop.run_in_executor(None, self._load_components, project_id)
        styles_task = loop.run_in_executor(None, self._load_styles, project_id)
        stats_task = loop.run_in_executor(None, self._load_stats, project_id)
        integrations_task = loop.run_in_executor(None, self._load_integrations, project_id, user_id)

        components = await components_task
        styles = await styles_task
        stats = await stats_task
        integrations = await integrations_task

        # Load tokens from filesystem if project has a path.
        tokens = {}
        project_path = None
        if project.get('figma_file_key'):
            # NOTE(review): falls back to the current working directory as the
            # project path — confirm this matches the actual project layout.
            project_path = Path.cwd()
            tokens = await loop.run_in_executor(None, self._load_tokens, project_path)

        # Load discovery/scan data
        discovery = await loop.run_in_executor(None, self._load_discovery, project_path)

        # Compute health score from the data gathered above.
        health = self._compute_health(components, tokens, stats)

        now = datetime.now()
        context = ProjectContext(
            project_id=project_id,
            name=project['name'],
            description=project.get('description'),
            path=project_path,
            components=components,
            component_count=len(components),
            tokens=tokens,
            styles=styles,
            config={
                'figma_file_key': project.get('figma_file_key'),
                'status': project.get('status', 'active')
            },
            integrations=integrations,
            health=health,
            stats=stats,
            discovery=discovery,
            loaded_at=now,
            cache_expires_at=now + self._cache_ttl
        )

        return context

    def _load_project(self, project_id: str) -> Optional[Dict[str, Any]]:
        """Load project metadata from database; None when missing or on error."""
        try:
            with get_connection() as conn:
                row = conn.execute(
                    "SELECT * FROM projects WHERE id = ?",
                    (project_id,)
                ).fetchone()

                if row:
                    return dict(row)
                return None
        except Exception as e:
            print(f"Error loading project: {e}")
            return None

    def _load_components(self, project_id: str) -> List[Dict[str, Any]]:
        """Load all components for project, parsing JSON text columns."""
        try:
            with get_connection() as conn:
                rows = conn.execute(
                    """
                    SELECT id, name, figma_key, description,
                           properties, variants, code_generated,
                           created_at, updated_at
                    FROM components
                    WHERE project_id = ?
                    ORDER BY name
                    """,
                    (project_id,)
                ).fetchall()

                components = []
                for row in rows:
                    comp = dict(row)
                    # properties/variants are stored as JSON strings in SQLite.
                    if comp.get('properties'):
                        comp['properties'] = json.loads(comp['properties'])
                    if comp.get('variants'):
                        comp['variants'] = json.loads(comp['variants'])
                    components.append(comp)

                return components
        except Exception as e:
            print(f"Error loading components: {e}")
            return []

    def _load_styles(self, project_id: str) -> List[Dict[str, Any]]:
        """Load all styles for project, parsing the JSON properties column."""
        try:
            with get_connection() as conn:
                rows = conn.execute(
                    """
                    SELECT id, name, type, figma_key, properties, created_at
                    FROM styles
                    WHERE project_id = ?
                    ORDER BY type, name
                    """,
                    (project_id,)
                ).fetchall()

                styles = []
                for row in rows:
                    style = dict(row)
                    if style.get('properties'):
                        style['properties'] = json.loads(style['properties'])
                    styles.append(style)

                return styles
        except Exception as e:
            print(f"Error loading styles: {e}")
            return []

    def _load_stats(self, project_id: str) -> Dict[str, Any]:
        """Load component/style statistics; zeroed defaults on error."""
        try:
            with get_connection() as conn:
                # Component count plus how many have generated code.
                component_stats = conn.execute(
                    """
                    SELECT COUNT(*) as total,
                           SUM(CASE WHEN code_generated = 1 THEN 1 ELSE 0 END) as generated
                    FROM components
                    WHERE project_id = ?
                    """,
                    (project_id,)
                ).fetchone()

                # Style count by type
                style_stats = conn.execute(
                    """
                    SELECT type, COUNT(*) as count
                    FROM styles
                    WHERE project_id = ?
                    GROUP BY type
                    """,
                    (project_id,)
                ).fetchall()

                return {
                    'components': dict(component_stats) if component_stats else {'total': 0, 'generated': 0},
                    'styles': {row['type']: row['count'] for row in style_stats}
                }
        except Exception as e:
            print(f"Error loading stats: {e}")
            return {'components': {'total': 0, 'generated': 0}, 'styles': {}}

    def _load_integrations(self, project_id: str, user_id: Optional[int]) -> Dict[str, Any]:
        """Load the user's enabled integrations for this project.

        Config payloads are decrypted with the configured Fernet key when one
        exists; otherwise they are parsed as plain JSON. Anonymous sessions
        (no user_id) get no integrations.
        """
        if not user_id:
            return {}

        try:
            with get_connection() as conn:
                rows = conn.execute(
                    """
                    SELECT integration_type, config, enabled, last_used_at
                    FROM project_integrations
                    WHERE project_id = ? AND user_id = ? AND enabled = 1
                    """,
                    (project_id, user_id)
                ).fetchall()

                # Return decrypted config for each integration
                integrations = {}
                cipher = mcp_config.get_cipher()

                for row in rows:
                    integration_type = row['integration_type']
                    encrypted_config = row['config']

                    # Decrypt config
                    if cipher:
                        try:
                            decrypted_config = cipher.decrypt(encrypted_config.encode()).decode()
                            config = json.loads(decrypted_config)
                        except Exception as e:
                            print(f"Error decrypting integration config: {e}")
                            config = {}
                    else:
                        # No encryption key, try to parse as plain JSON.
                        # (Narrowed from a bare except; same fallback behavior.)
                        try:
                            config = json.loads(encrypted_config)
                        except Exception:
                            config = {}

                    integrations[integration_type] = {
                        'enabled': True,
                        'config': config,
                        'last_used_at': row['last_used_at']
                    }

                return integrations
        except Exception as e:
            print(f"Error loading integrations: {e}")
            return {}

    def _load_tokens(self, project_path: Optional[Path]) -> Dict[str, Any]:
        """Load design tokens from the first matching token file, if any."""
        if not project_path:
            return {}

        tokens = {}
        # Checked in priority order; the first readable file wins.
        token_files = ['tokens.json', 'design-tokens.json', 'variables.json']

        for token_file in token_files:
            token_path = project_path / token_file
            if token_path.exists():
                try:
                    with open(token_path) as f:
                        tokens = json.load(f)
                    break
                except Exception as e:
                    print(f"Error loading tokens from {token_path}: {e}")

        return tokens

    def _load_discovery(self, project_path: Optional[Path]) -> Dict[str, Any]:
        """Run a discovery scan over the project directory; {} on failure."""
        if not project_path:
            return {}

        try:
            scanner = ProjectScanner(str(project_path))
            discovery = scanner.scan()
            return discovery
        except Exception as e:
            print(f"Error running discovery scan: {e}")
            return {}

    def _compute_health(
        self,
        components: List[Dict],
        tokens: Dict,
        stats: Dict
    ) -> Dict[str, Any]:
        """Compute a 0-100 health score, letter grade, and list of issues."""
        score = 100
        issues = []

        # Deduct points for missing components
        if stats['components']['total'] == 0:
            score -= 30
            issues.append("No components defined")

        # Deduct points for no tokens
        if not tokens:
            score -= 20
            issues.append("No design tokens defined")

        # Deduct points for ungenerated components
        total = stats['components']['total']
        generated = stats['components']['generated']
        if total > 0 and generated < total:
            percentage = (generated / total) * 100
            if percentage < 50:
                score -= 20
                issues.append(f"Low code generation: {percentage:.1f}%")
            elif percentage < 80:
                score -= 10
                issues.append(f"Medium code generation: {percentage:.1f}%")

        # Map the numeric score onto a letter grade.
        if score >= 90:
            grade = 'A'
        elif score >= 80:
            grade = 'B'
        elif score >= 70:
            grade = 'C'
        elif score >= 60:
            grade = 'D'
        else:
            grade = 'F'

        return {
            'score': max(0, score),
            'grade': grade,
            'issues': issues
        }

    def clear_cache(self, project_id: Optional[str] = None):
        """Clear cache for a specific project (all users) or all projects."""
        if project_id:
            # Clear every per-user entry for this project.
            keys_to_remove = [k for k in self._cache.keys() if k.startswith(f"{project_id}:")]
            for key in keys_to_remove:
                del self._cache[key]
        else:
            # Clear all cache
            self._cache.clear()
|
||||
|
||||
|
||||
# Singleton instance, created lazily by get_context_manager().
_context_manager = None


def get_context_manager() -> ProjectContextManager:
    """Get singleton context manager instance (created on first call)."""
    global _context_manager
    if _context_manager is None:
        _context_manager = ProjectContextManager()
    return _context_manager
|
||||
428
demo/tools/dss_mcp/handler.py
Normal file
428
demo/tools/dss_mcp/handler.py
Normal file
@@ -0,0 +1,428 @@
|
||||
"""
|
||||
Unified MCP Handler
|
||||
|
||||
Central handler for all MCP tool execution. Used by:
|
||||
- Direct API calls (/api/mcp/tools/{name}/execute)
|
||||
- Claude chat (inline tool execution)
|
||||
- SSE streaming connections
|
||||
|
||||
This module ensures all MCP requests go through a single code path
|
||||
for consistent logging, error handling, and security.
|
||||
"""
|
||||
|
||||
import json
|
||||
import asyncio
|
||||
from typing import Dict, Any, List, Optional, Tuple
|
||||
from datetime import datetime
|
||||
from dataclasses import dataclass, asdict
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from storage.database import get_connection
|
||||
from .config import mcp_config, integration_config
|
||||
from .context.project_context import get_context_manager, ProjectContext
|
||||
from .tools.project_tools import PROJECT_TOOLS, ProjectTools
|
||||
from .integrations.figma import FIGMA_TOOLS, FigmaTools
|
||||
from .integrations.jira import JIRA_TOOLS, JiraTools
|
||||
from .integrations.confluence import CONFLUENCE_TOOLS, ConfluenceTools
|
||||
from .integrations.base import CircuitBreakerOpen
|
||||
|
||||
|
||||
@dataclass
class ToolResult:
    """Result of a tool execution.

    Serialized verbatim by to_dict() for API responses.
    """
    tool_name: str                    # name of the executed tool
    success: bool                     # True when execution produced no error
    result: Any                       # tool output payload (None on failure)
    error: Optional[str] = None       # error message when success is False
    duration_ms: int = 0              # wall-clock execution time
    # ISO-8601 string; auto-filled in __post_init__ when not provided.
    # (Was annotated `str` with a None default, violating its own annotation.)
    timestamp: Optional[str] = None

    def __post_init__(self):
        # Default the timestamp to "now" so callers don't have to supply one.
        if not self.timestamp:
            self.timestamp = datetime.now().isoformat()

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict representation suitable for JSON encoding."""
        return asdict(self)
|
||||
|
||||
|
||||
@dataclass
class MCPContext:
    """Context for MCP operations"""
    # Project the tool call is scoped to.
    project_id: str
    # Optional user, used to look up user-scoped integration configs.
    user_id: Optional[int] = None
    # Optional chat/SSE session identifier.
    session_id: Optional[str] = None
|
||||
|
||||
|
||||
class MCPHandler:
|
||||
"""
|
||||
Unified MCP tool handler.
|
||||
|
||||
Provides:
|
||||
- Tool discovery (list all available tools)
|
||||
- Tool execution with proper context
|
||||
- Integration management
|
||||
- Logging and metrics
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.context_manager = get_context_manager()
|
||||
self._tool_registry: Dict[str, Dict[str, Any]] = {}
|
||||
self._initialize_tools()
|
||||
|
||||
def _initialize_tools(self):
|
||||
"""Initialize tool registry with all available tools"""
|
||||
# Register base project tools
|
||||
for tool in PROJECT_TOOLS:
|
||||
self._tool_registry[tool.name] = {
|
||||
"tool": tool,
|
||||
"category": "project",
|
||||
"requires_integration": False
|
||||
}
|
||||
|
||||
# Register Figma tools
|
||||
for tool in FIGMA_TOOLS:
|
||||
self._tool_registry[tool.name] = {
|
||||
"tool": tool,
|
||||
"category": "figma",
|
||||
"requires_integration": True,
|
||||
"integration_type": "figma"
|
||||
}
|
||||
|
||||
# Register Jira tools
|
||||
for tool in JIRA_TOOLS:
|
||||
self._tool_registry[tool.name] = {
|
||||
"tool": tool,
|
||||
"category": "jira",
|
||||
"requires_integration": True,
|
||||
"integration_type": "jira"
|
||||
}
|
||||
|
||||
# Register Confluence tools
|
||||
for tool in CONFLUENCE_TOOLS:
|
||||
self._tool_registry[tool.name] = {
|
||||
"tool": tool,
|
||||
"category": "confluence",
|
||||
"requires_integration": True,
|
||||
"integration_type": "confluence"
|
||||
}
|
||||
|
||||
def list_tools(self, include_details: bool = False) -> Dict[str, Any]:
|
||||
"""
|
||||
List all available MCP tools.
|
||||
|
||||
Args:
|
||||
include_details: Include full tool schemas
|
||||
|
||||
Returns:
|
||||
Tool listing by category
|
||||
"""
|
||||
tools_by_category = {}
|
||||
|
||||
for name, info in self._tool_registry.items():
|
||||
category = info["category"]
|
||||
if category not in tools_by_category:
|
||||
tools_by_category[category] = []
|
||||
|
||||
tool_info = {
|
||||
"name": name,
|
||||
"description": info["tool"].description,
|
||||
"requires_integration": info.get("requires_integration", False)
|
||||
}
|
||||
|
||||
if include_details:
|
||||
tool_info["input_schema"] = info["tool"].inputSchema
|
||||
|
||||
tools_by_category[category].append(tool_info)
|
||||
|
||||
return {
|
||||
"tools": tools_by_category,
|
||||
"total_count": len(self._tool_registry)
|
||||
}
|
||||
|
||||
def get_tool_info(self, tool_name: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get information about a specific tool"""
|
||||
if tool_name not in self._tool_registry:
|
||||
return None
|
||||
|
||||
info = self._tool_registry[tool_name]
|
||||
return {
|
||||
"name": tool_name,
|
||||
"description": info["tool"].description,
|
||||
"category": info["category"],
|
||||
"input_schema": info["tool"].inputSchema,
|
||||
"requires_integration": info.get("requires_integration", False),
|
||||
"integration_type": info.get("integration_type")
|
||||
}
|
||||
|
||||
async def execute_tool(
|
||||
self,
|
||||
tool_name: str,
|
||||
arguments: Dict[str, Any],
|
||||
context: MCPContext
|
||||
) -> ToolResult:
|
||||
"""
|
||||
Execute an MCP tool.
|
||||
|
||||
Args:
|
||||
tool_name: Name of the tool to execute
|
||||
arguments: Tool arguments
|
||||
context: MCP context (project_id, user_id)
|
||||
|
||||
Returns:
|
||||
ToolResult with success/failure and data
|
||||
"""
|
||||
start_time = datetime.now()
|
||||
|
||||
# Check if tool exists
|
||||
if tool_name not in self._tool_registry:
|
||||
return ToolResult(
|
||||
tool_name=tool_name,
|
||||
success=False,
|
||||
result=None,
|
||||
error=f"Unknown tool: {tool_name}"
|
||||
)
|
||||
|
||||
tool_info = self._tool_registry[tool_name]
|
||||
category = tool_info["category"]
|
||||
|
||||
try:
|
||||
# Execute based on category
|
||||
if category == "project":
|
||||
result = await self._execute_project_tool(tool_name, arguments, context)
|
||||
elif category == "figma":
|
||||
result = await self._execute_figma_tool(tool_name, arguments, context)
|
||||
elif category == "jira":
|
||||
result = await self._execute_jira_tool(tool_name, arguments, context)
|
||||
elif category == "confluence":
|
||||
result = await self._execute_confluence_tool(tool_name, arguments, context)
|
||||
else:
|
||||
result = {"error": f"Unknown tool category: {category}"}
|
||||
|
||||
# Check for error in result
|
||||
success = "error" not in result
|
||||
error = result.get("error") if not success else None
|
||||
|
||||
# Calculate duration
|
||||
duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
|
||||
|
||||
# Log execution
|
||||
await self._log_tool_usage(
|
||||
tool_name=tool_name,
|
||||
category=category,
|
||||
project_id=context.project_id,
|
||||
user_id=context.user_id,
|
||||
success=success,
|
||||
duration_ms=duration_ms,
|
||||
error=error
|
||||
)
|
||||
|
||||
return ToolResult(
|
||||
tool_name=tool_name,
|
||||
success=success,
|
||||
result=result if success else None,
|
||||
error=error,
|
||||
duration_ms=duration_ms
|
||||
)
|
||||
|
||||
except CircuitBreakerOpen as e:
|
||||
duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
|
||||
return ToolResult(
|
||||
tool_name=tool_name,
|
||||
success=False,
|
||||
result=None,
|
||||
error=str(e),
|
||||
duration_ms=duration_ms
|
||||
)
|
||||
except Exception as e:
|
||||
duration_ms = int((datetime.now() - start_time).total_seconds() * 1000)
|
||||
await self._log_tool_usage(
|
||||
tool_name=tool_name,
|
||||
category=category,
|
||||
project_id=context.project_id,
|
||||
user_id=context.user_id,
|
||||
success=False,
|
||||
duration_ms=duration_ms,
|
||||
error=str(e)
|
||||
)
|
||||
return ToolResult(
|
||||
tool_name=tool_name,
|
||||
success=False,
|
||||
result=None,
|
||||
error=str(e),
|
||||
duration_ms=duration_ms
|
||||
)
|
||||
|
||||
async def _execute_project_tool(
|
||||
self,
|
||||
tool_name: str,
|
||||
arguments: Dict[str, Any],
|
||||
context: MCPContext
|
||||
) -> Dict[str, Any]:
|
||||
"""Execute a project tool"""
|
||||
# Ensure project_id is set
|
||||
if "project_id" not in arguments:
|
||||
arguments["project_id"] = context.project_id
|
||||
|
||||
project_tools = ProjectTools(context.user_id)
|
||||
return await project_tools.execute_tool(tool_name, arguments)
|
||||
|
||||
async def _execute_figma_tool(
|
||||
self,
|
||||
tool_name: str,
|
||||
arguments: Dict[str, Any],
|
||||
context: MCPContext
|
||||
) -> Dict[str, Any]:
|
||||
"""Execute a Figma tool"""
|
||||
# Get Figma config
|
||||
config = await self._get_integration_config("figma", context)
|
||||
if not config:
|
||||
# Try global config
|
||||
if integration_config.FIGMA_TOKEN:
|
||||
config = {"api_token": integration_config.FIGMA_TOKEN}
|
||||
else:
|
||||
return {"error": "Figma not configured. Please add Figma API token."}
|
||||
|
||||
figma_tools = FigmaTools(config)
|
||||
return await figma_tools.execute_tool(tool_name, arguments)
|
||||
|
||||
async def _execute_jira_tool(
|
||||
self,
|
||||
tool_name: str,
|
||||
arguments: Dict[str, Any],
|
||||
context: MCPContext
|
||||
) -> Dict[str, Any]:
|
||||
"""Execute a Jira tool"""
|
||||
config = await self._get_integration_config("jira", context)
|
||||
if not config:
|
||||
return {"error": "Jira not configured. Please configure Jira integration."}
|
||||
|
||||
jira_tools = JiraTools(config)
|
||||
return await jira_tools.execute_tool(tool_name, arguments)
|
||||
|
||||
async def _execute_confluence_tool(
|
||||
self,
|
||||
tool_name: str,
|
||||
arguments: Dict[str, Any],
|
||||
context: MCPContext
|
||||
) -> Dict[str, Any]:
|
||||
"""Execute a Confluence tool"""
|
||||
config = await self._get_integration_config("confluence", context)
|
||||
if not config:
|
||||
return {"error": "Confluence not configured. Please configure Confluence integration."}
|
||||
|
||||
confluence_tools = ConfluenceTools(config)
|
||||
return await confluence_tools.execute_tool(tool_name, arguments)
|
||||
|
||||
async def _get_integration_config(
|
||||
self,
|
||||
integration_type: str,
|
||||
context: MCPContext
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Get decrypted integration config for user/project"""
|
||||
if not context.user_id or not context.project_id:
|
||||
return None
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
def get_config():
|
||||
try:
|
||||
with get_connection() as conn:
|
||||
row = conn.execute(
|
||||
"""
|
||||
SELECT config FROM project_integrations
|
||||
WHERE project_id = ? AND user_id = ? AND integration_type = ? AND enabled = 1
|
||||
""",
|
||||
(context.project_id, context.user_id, integration_type)
|
||||
).fetchone()
|
||||
|
||||
if not row:
|
||||
return None
|
||||
|
||||
encrypted_config = row["config"]
|
||||
|
||||
# Decrypt
|
||||
cipher = mcp_config.get_cipher()
|
||||
if cipher:
|
||||
try:
|
||||
decrypted = cipher.decrypt(encrypted_config.encode()).decode()
|
||||
return json.loads(decrypted)
|
||||
except:
|
||||
pass
|
||||
|
||||
# Try parsing as plain JSON
|
||||
try:
|
||||
return json.loads(encrypted_config)
|
||||
except:
|
||||
return None
|
||||
except:
|
||||
return None
|
||||
|
||||
return await loop.run_in_executor(None, get_config)
|
||||
|
||||
async def _log_tool_usage(
    self,
    tool_name: str,
    category: str,
    project_id: str,
    user_id: Optional[int],
    success: bool,
    duration_ms: int,
    error: Optional[str] = None
):
    """Record one tool execution in the ``mcp_tool_usage`` audit table.

    Runs the sqlite insert on a worker thread so the event loop is not
    blocked. Logging is best-effort: database errors are swallowed so a
    failed audit write never fails the tool call itself.

    Args:
        tool_name: Name of the executed tool.
        category: Tool category (e.g. "project", "figma", "jira").
        project_id: Project the tool ran against.
        user_id: Invoking user, if known.
        success: Whether the tool call succeeded.
        duration_ms: Wall-clock duration of the call.
        error: Error message when success is False.
    """
    loop = asyncio.get_event_loop()

    def log():
        try:
            with get_connection() as conn:
                conn.execute(
                    """
                    INSERT INTO mcp_tool_usage
                    (project_id, user_id, tool_name, tool_category, duration_ms, success, error_message)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                    """,
                    (project_id, user_id, tool_name, category, duration_ms, success, error)
                )
        except Exception:
            # Don't fail on logging errors; narrowed from a bare `except:`
            # so SystemExit/KeyboardInterrupt still propagate.
            pass

    await loop.run_in_executor(None, log)
|
||||
|
||||
async def get_project_context(
    self,
    project_id: str,
    user_id: Optional[int] = None
) -> Optional[ProjectContext]:
    """Return the ProjectContext used to build Claude's system prompt.

    Pure delegation to the shared context manager.
    """
    manager = self.context_manager
    return await manager.get_context(project_id, user_id)
|
||||
|
||||
def get_tools_for_claude(self) -> List[Dict[str, Any]]:
    """
    Get tools formatted for Claude's tool_use feature.

    Returns:
        List of tools in Anthropic's tool format
        (name / description / input_schema dicts).
    """
    return [
        {
            "name": tool_name,
            "description": entry["tool"].description,
            "input_schema": entry["tool"].inputSchema,
        }
        for tool_name, entry in self._tool_registry.items()
    ]
|
||||
|
||||
|
||||
# Singleton instance
|
||||
# Lazily-created process-wide handler instance.
_mcp_handler: Optional[MCPHandler] = None


def get_mcp_handler() -> MCPHandler:
    """Return the process-wide MCPHandler, creating it on first use."""
    global _mcp_handler
    handler = _mcp_handler
    if handler is None:
        handler = MCPHandler()
        _mcp_handler = handler
    return handler
|
||||
0
demo/tools/dss_mcp/integrations/__init__.py
Normal file
0
demo/tools/dss_mcp/integrations/__init__.py
Normal file
264
demo/tools/dss_mcp/integrations/base.py
Normal file
264
demo/tools/dss_mcp/integrations/base.py
Normal file
@@ -0,0 +1,264 @@
|
||||
"""
|
||||
Base Integration Classes
|
||||
|
||||
Provides circuit breaker pattern and base classes for external integrations.
|
||||
"""
|
||||
|
||||
import time
|
||||
import asyncio
|
||||
from typing import Callable, Any, Optional, Dict
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
|
||||
from ..config import mcp_config
|
||||
from storage.database import get_connection
|
||||
|
||||
|
||||
class CircuitState(Enum):
    """Circuit breaker states (see CircuitBreaker for the transitions)."""
    CLOSED = "closed"  # Normal operation
    OPEN = "open"  # Failing, reject requests
    HALF_OPEN = "half_open"  # Testing if service recovered
|
||||
|
||||
|
||||
@dataclass
class CircuitBreakerStats:
    """Point-in-time snapshot of a CircuitBreaker's counters and timestamps."""
    state: CircuitState
    failure_count: int  # consecutive failures since last CLOSED reset
    success_count: int  # successes recorded (used to close from HALF_OPEN)
    last_failure_time: Optional[float]  # time.time() of last failure, if any
    last_success_time: Optional[float]  # time.time() of last success, if any
    opened_at: Optional[float]  # time.time() when circuit opened, if OPEN
    next_retry_time: Optional[float]  # earliest time.time() a retry is allowed
|
||||
|
||||
|
||||
class CircuitBreakerOpen(Exception):
    """Raised when a call is rejected because the circuit breaker is OPEN."""
    pass
|
||||
|
||||
|
||||
class CircuitBreaker:
    """
    Circuit Breaker pattern implementation.

    Protects external service calls from cascading failures.
    Three states: CLOSED (normal), OPEN (failing), HALF_OPEN (testing).

    Transitions: CLOSED -> OPEN after `failure_threshold` failures;
    OPEN -> HALF_OPEN after `timeout_seconds`; HALF_OPEN -> CLOSED after
    `half_open_max_calls` consecutive successes.
    """

    def __init__(
        self,
        integration_type: str,
        failure_threshold: Optional[int] = None,
        timeout_seconds: Optional[int] = None,
        half_open_max_calls: int = 3
    ):
        """
        Args:
            integration_type: Type of integration (figma, jira, confluence, etc.)
            failure_threshold: Number of failures before opening circuit
                (defaults to mcp_config.CIRCUIT_BREAKER_FAILURE_THRESHOLD)
            timeout_seconds: Seconds to wait before trying again
                (defaults to mcp_config.CIRCUIT_BREAKER_TIMEOUT_SECONDS)
            half_open_max_calls: Max successful calls in half-open before closing
        """
        self.integration_type = integration_type
        self.failure_threshold = failure_threshold or mcp_config.CIRCUIT_BREAKER_FAILURE_THRESHOLD
        self.timeout_seconds = timeout_seconds or mcp_config.CIRCUIT_BREAKER_TIMEOUT_SECONDS
        self.half_open_max_calls = half_open_max_calls

        # In-memory state (could be moved to Redis for distributed setup)
        self.state = CircuitState.CLOSED
        self.failure_count = 0
        self.success_count = 0
        self.last_failure_time: Optional[float] = None
        self.last_success_time: Optional[float] = None
        self.opened_at: Optional[float] = None

    async def call(self, func: Callable, *args, **kwargs) -> Any:
        """
        Call a function through the circuit breaker.

        Args:
            func: Function to call (can be sync or async)
            *args, **kwargs: Arguments to pass to func

        Returns:
            Function result

        Raises:
            CircuitBreakerOpen: If circuit is open
            Exception: Original exception from func if it fails
        """
        # Check circuit state
        if self.state == CircuitState.OPEN:
            # Check if timeout has elapsed
            if time.time() - self.opened_at < self.timeout_seconds:
                # Rejected call: record in the DB only (db_only=True keeps
                # the in-memory failure counter untouched).
                await self._record_failure("Circuit breaker is OPEN", db_only=True)
                raise CircuitBreakerOpen(
                    f"{self.integration_type} service is temporarily unavailable. "
                    f"Retry after {self._seconds_until_retry():.0f}s"
                )
            else:
                # Timeout elapsed, move to HALF_OPEN
                self.state = CircuitState.HALF_OPEN
                self.success_count = 0

        # Execute function
        try:
            # Handle both sync and async functions
            if asyncio.iscoroutinefunction(func):
                result = await func(*args, **kwargs)
            else:
                result = func(*args, **kwargs)

            # Success!
            await self._record_success()

            # If in HALF_OPEN, check if we can close the circuit
            if self.state == CircuitState.HALF_OPEN:
                if self.success_count >= self.half_open_max_calls:
                    self.state = CircuitState.CLOSED
                    self.failure_count = 0

            return result

        except Exception as e:
            # Failure
            await self._record_failure(str(e))

            # Check if we should open the circuit.
            # NOTE: failure_count is not reset on entering HALF_OPEN, so a
            # single failure there typically re-opens the circuit at once.
            if self.failure_count >= self.failure_threshold:
                self.state = CircuitState.OPEN
                self.opened_at = time.time()

            raise

    async def _record_success(self):
        """Record successful call (bumps counters, marks healthy in DB)."""
        self.success_count += 1
        self.last_success_time = time.time()

        # Update database
        await self._update_health_db(is_healthy=True, error=None)

    async def _record_failure(self, error_message: str, db_only: bool = False):
        """Record failed call.

        Args:
            error_message: Description persisted to the health table.
            db_only: When True, only the DB row is updated (used for calls
                rejected while OPEN, which are not new service failures).
        """
        if not db_only:
            self.failure_count += 1
            self.last_failure_time = time.time()

        # Update database
        await self._update_health_db(is_healthy=False, error=error_message)

    async def _update_health_db(self, is_healthy: bool, error: Optional[str]):
        """Update integration health in database (runs on a worker thread)."""
        loop = asyncio.get_event_loop()

        def update_db():
            try:
                with get_connection() as conn:
                    # Persist when the circuit may next be retried, if OPEN.
                    circuit_open_until = None
                    if self.state == CircuitState.OPEN and self.opened_at:
                        circuit_open_until = datetime.fromtimestamp(
                            self.opened_at + self.timeout_seconds
                        ).isoformat()

                    if is_healthy:
                        conn.execute(
                            """
                            UPDATE integration_health
                            SET is_healthy = 1,
                                failure_count = 0,
                                last_success_at = CURRENT_TIMESTAMP,
                                circuit_open_until = NULL,
                                updated_at = CURRENT_TIMESTAMP
                            WHERE integration_type = ?
                            """,
                            (self.integration_type,)
                        )
                    else:
                        conn.execute(
                            """
                            UPDATE integration_health
                            SET is_healthy = 0,
                                failure_count = ?,
                                last_failure_at = CURRENT_TIMESTAMP,
                                circuit_open_until = ?,
                                updated_at = CURRENT_TIMESTAMP
                            WHERE integration_type = ?
                            """,
                            (self.failure_count, circuit_open_until, self.integration_type)
                        )
            except Exception as e:
                # Health bookkeeping must never take down the caller.
                print(f"Error updating integration health: {e}")

        await loop.run_in_executor(None, update_db)

    def _seconds_until_retry(self) -> float:
        """Get seconds until circuit can be retried (0 when not OPEN)."""
        if self.state != CircuitState.OPEN or not self.opened_at:
            return 0
        elapsed = time.time() - self.opened_at
        remaining = self.timeout_seconds - elapsed
        return max(0, remaining)

    def get_stats(self) -> CircuitBreakerStats:
        """Get current circuit breaker statistics as an immutable snapshot."""
        next_retry_time = None
        if self.state == CircuitState.OPEN and self.opened_at:
            next_retry_time = self.opened_at + self.timeout_seconds

        return CircuitBreakerStats(
            state=self.state,
            failure_count=self.failure_count,
            success_count=self.success_count,
            last_failure_time=self.last_failure_time,
            last_success_time=self.last_success_time,
            opened_at=self.opened_at,
            next_retry_time=next_retry_time
        )
|
||||
|
||||
|
||||
class BaseIntegration:
    """Common plumbing shared by all external integrations.

    Holds the decrypted configuration and a per-integration circuit
    breaker; subclasses route every outbound call through call_api().
    """

    def __init__(self, integration_type: str, config: Dict[str, Any]):
        """
        Args:
            integration_type: Type of integration (figma, jira, etc.)
            config: Integration configuration (decrypted)
        """
        self.integration_type = integration_type
        self.config = config
        self.circuit_breaker = CircuitBreaker(integration_type)

    async def call_api(self, func: Callable, *args, **kwargs) -> Any:
        """
        Call external API through circuit breaker.

        Args:
            func: API function to call
            *args, **kwargs: Arguments to pass

        Returns:
            API response

        Raises:
            CircuitBreakerOpen: If circuit is open
            Exception: Original API exception
        """
        breaker = self.circuit_breaker
        return await breaker.call(func, *args, **kwargs)

    def get_health(self) -> Dict[str, Any]:
        """Summarize the circuit breaker state as a plain dict."""
        snapshot = self.circuit_breaker.get_stats()
        health = {
            "integration_type": self.integration_type,
            "state": snapshot.state.value,
            "is_healthy": snapshot.state == CircuitState.CLOSED,
            "failure_count": snapshot.failure_count,
            "success_count": snapshot.success_count,
            "last_failure_time": snapshot.last_failure_time,
            "last_success_time": snapshot.last_success_time,
            "next_retry_time": snapshot.next_retry_time,
        }
        return health
|
||||
262
demo/tools/dss_mcp/integrations/confluence.py
Normal file
262
demo/tools/dss_mcp/integrations/confluence.py
Normal file
@@ -0,0 +1,262 @@
|
||||
"""
|
||||
Confluence Integration for MCP
|
||||
|
||||
Provides Confluence API tools for documentation and knowledge base.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List, Optional
|
||||
from atlassian import Confluence
|
||||
from mcp import types
|
||||
|
||||
from .base import BaseIntegration
|
||||
|
||||
|
||||
# Confluence MCP Tool Definitions
|
||||
# MCP tool schemas exposed for Confluence; dispatched by ConfluenceTools.
CONFLUENCE_TOOLS = [
    # Create a page in a space (optionally nested under parent_id).
    types.Tool(
        name="confluence_create_page",
        description="Create a new Confluence page",
        inputSchema={
            "type": "object",
            "properties": {
                "space_key": {
                    "type": "string",
                    "description": "Confluence space key"
                },
                "title": {
                    "type": "string",
                    "description": "Page title"
                },
                "body": {
                    "type": "string",
                    "description": "Page content (HTML or wiki markup)"
                },
                "parent_id": {
                    "type": "string",
                    "description": "Optional parent page ID"
                }
            },
            "required": ["space_key", "title", "body"]
        }
    ),
    # Fetch a page either by page_id or by (space_key, title).
    types.Tool(
        name="confluence_get_page",
        description="Get Confluence page by ID or title",
        inputSchema={
            "type": "object",
            "properties": {
                "page_id": {
                    "type": "string",
                    "description": "Page ID (use this OR title)"
                },
                "space_key": {
                    "type": "string",
                    "description": "Space key (required if using title)"
                },
                "title": {
                    "type": "string",
                    "description": "Page title (use this OR page_id)"
                },
                "expand": {
                    "type": "string",
                    "description": "Comma-separated list of expansions (body.storage, version, etc.)",
                    "default": "body.storage,version"
                }
            }
        }
    ),
    # Replace a page's title/body (version bump handled server-side).
    types.Tool(
        name="confluence_update_page",
        description="Update an existing Confluence page",
        inputSchema={
            "type": "object",
            "properties": {
                "page_id": {
                    "type": "string",
                    "description": "Page ID to update"
                },
                "title": {
                    "type": "string",
                    "description": "New page title"
                },
                "body": {
                    "type": "string",
                    "description": "New page content"
                }
            },
            "required": ["page_id", "title", "body"]
        }
    ),
    # Full-text / structured search via Confluence Query Language.
    types.Tool(
        name="confluence_search",
        description="Search Confluence pages using CQL",
        inputSchema={
            "type": "object",
            "properties": {
                "cql": {
                    "type": "string",
                    "description": "CQL query (e.g., 'space=DSS AND type=page')"
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum number of results",
                    "default": 25
                }
            },
            "required": ["cql"]
        }
    ),
    # Space metadata lookup.
    types.Tool(
        name="confluence_get_space",
        description="Get Confluence space details",
        inputSchema={
            "type": "object",
            "properties": {
                "space_key": {
                    "type": "string",
                    "description": "Space key"
                }
            },
            "required": ["space_key"]
        }
    )
]
|
||||
|
||||
|
||||
class ConfluenceIntegration(BaseIntegration):
    """Confluence REST API integration guarded by the circuit breaker."""

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize Confluence integration.

        Args:
            config: Must contain 'url', 'username', 'api_token'
        """
        super().__init__("confluence", config)

        url = config.get("url")
        username = config.get("username")
        api_token = config.get("api_token")

        if not all([url, username, api_token]):
            raise ValueError("Confluence configuration incomplete: url, username, api_token required")

        # The Atlassian client uses the API token as the basic-auth password.
        self.confluence = Confluence(
            url=url,
            username=username,
            password=api_token,
            cloud=True
        )

    async def create_page(
        self,
        space_key: str,
        title: str,
        body: str,
        parent_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Create a new page in the given space (storage representation)."""
        def do_create():
            return self.confluence.create_page(
                space=space_key,
                title=title,
                body=body,
                parent_id=parent_id,
                representation="storage"
            )

        return await self.call_api(do_create)

    async def get_page(
        self,
        page_id: Optional[str] = None,
        space_key: Optional[str] = None,
        title: Optional[str] = None,
        expand: str = "body.storage,version"
    ) -> Dict[str, Any]:
        """Fetch a page by id, or by (space_key, title) when id is absent."""
        def do_get():
            if page_id:
                return self.confluence.get_page_by_id(
                    page_id=page_id,
                    expand=expand
                )
            if space_key and title:
                return self.confluence.get_page_by_title(
                    space=space_key,
                    title=title,
                    expand=expand
                )
            raise ValueError("Must provide either page_id or (space_key + title)")

        return await self.call_api(do_get)

    async def update_page(
        self,
        page_id: str,
        title: str,
        body: str
    ) -> Dict[str, Any]:
        """Replace an existing page's title and body."""
        def do_update():
            # Read the current version so the update targets the next one.
            current = self.confluence.get_page_by_id(page_id, expand="version")
            next_version = current["version"]["number"] + 1

            # NOTE(review): `version_number`/`version_comment` kwargs assume a
            # Confluence client version that accepts them -- confirm against
            # the pinned atlassian-python-api release.
            return self.confluence.update_page(
                page_id=page_id,
                title=title,
                body=body,
                parent_id=None,
                type="page",
                representation="storage",
                minor_edit=False,
                version_comment="Updated via DSS MCP",
                version_number=next_version
            )

        return await self.call_api(do_update)

    async def search(self, cql: str, limit: int = 25) -> Dict[str, Any]:
        """Run a CQL search and return the raw result payload."""
        def do_search():
            return self.confluence.cql(cql, limit=limit)

        return await self.call_api(do_search)

    async def get_space(self, space_key: str) -> Dict[str, Any]:
        """Fetch metadata for a single space."""
        def do_get():
            return self.confluence.get_space(space_key)

        return await self.call_api(do_get)
|
||||
|
||||
|
||||
class ConfluenceTools:
    """MCP tool executor for Confluence integration"""

    def __init__(self, config: Dict[str, Any]):
        self.confluence = ConfluenceIntegration(config)

    async def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch a Confluence MCP tool call to the matching API method."""
        dispatch = {
            "confluence_create_page": self.confluence.create_page,
            "confluence_get_page": self.confluence.get_page,
            "confluence_update_page": self.confluence.update_page,
            "confluence_search": self.confluence.search,
            "confluence_get_space": self.confluence.get_space,
        }

        handler = dispatch.get(tool_name)
        if handler is None:
            return {"error": f"Unknown Confluence tool: {tool_name}"}

        try:
            # Strip private/bookkeeping keys (leading underscore) before the call.
            call_kwargs = {key: value for key, value in arguments.items() if not key.startswith("_")}
            return await handler(**call_kwargs)
        except Exception as exc:
            return {"error": str(exc)}
|
||||
260
demo/tools/dss_mcp/integrations/figma.py
Normal file
260
demo/tools/dss_mcp/integrations/figma.py
Normal file
@@ -0,0 +1,260 @@
|
||||
"""
|
||||
Figma Integration for MCP
|
||||
|
||||
Provides Figma API tools through circuit breaker pattern.
|
||||
"""
|
||||
|
||||
import httpx
|
||||
from typing import Dict, Any, List, Optional
|
||||
from mcp import types
|
||||
|
||||
from .base import BaseIntegration
|
||||
from ..config import integration_config
|
||||
|
||||
|
||||
# Figma MCP Tool Definitions
|
||||
# MCP tool schemas exposed for Figma; dispatched by FigmaTools.
FIGMA_TOOLS = [
    # Whole-file metadata and node tree.
    types.Tool(
        name="figma_get_file",
        description="Get Figma file metadata and structure",
        inputSchema={
            "type": "object",
            "properties": {
                "file_key": {
                    "type": "string",
                    "description": "Figma file key"
                }
            },
            "required": ["file_key"]
        }
    ),
    # Published styles (colors, text, effects).
    types.Tool(
        name="figma_get_styles",
        description="Get design styles (colors, text, effects) from Figma file",
        inputSchema={
            "type": "object",
            "properties": {
                "file_key": {
                    "type": "string",
                    "description": "Figma file key"
                }
            },
            "required": ["file_key"]
        }
    ),
    # Published component definitions.
    types.Tool(
        name="figma_get_components",
        description="Get component definitions from Figma file",
        inputSchema={
            "type": "object",
            "properties": {
                "file_key": {
                    "type": "string",
                    "description": "Figma file key"
                }
            },
            "required": ["file_key"]
        }
    ),
    # Local variables (design tokens).
    types.Tool(
        name="figma_extract_tokens",
        description="Extract design tokens (variables) from Figma file",
        inputSchema={
            "type": "object",
            "properties": {
                "file_key": {
                    "type": "string",
                    "description": "Figma file key"
                }
            },
            "required": ["file_key"]
        }
    ),
    # Single node lookup within a file.
    types.Tool(
        name="figma_get_node",
        description="Get specific node/component by ID from Figma file",
        inputSchema={
            "type": "object",
            "properties": {
                "file_key": {
                    "type": "string",
                    "description": "Figma file key"
                },
                "node_id": {
                    "type": "string",
                    "description": "Node ID to fetch"
                }
            },
            "required": ["file_key", "node_id"]
        }
    )
]
|
||||
|
||||
|
||||
class FigmaIntegration(BaseIntegration):
    """Figma REST API integration with circuit breaker.

    All endpoints are simple authenticated GETs, so they share one
    private helper (`_get_json`) instead of repeating the httpx
    boilerplate in every method.
    """

    FIGMA_API_BASE = "https://api.figma.com/v1"

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize Figma integration.

        Args:
            config: Must contain 'api_token' or use FIGMA_TOKEN from env

        Raises:
            ValueError: If no API token can be resolved.
        """
        super().__init__("figma", config)
        self.api_token = config.get("api_token") or integration_config.FIGMA_TOKEN

        if not self.api_token:
            raise ValueError("Figma API token not configured")

        # Figma expects the personal access token in this header.
        self.headers = {
            "X-Figma-Token": self.api_token
        }

    async def _get_json(
        self,
        path: str,
        params: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """GET `FIGMA_API_BASE + path` through the circuit breaker.

        Args:
            path: URL path beginning with '/', appended to the API base.
            params: Optional query parameters.

        Returns:
            Parsed JSON response body.

        Raises:
            httpx.HTTPStatusError: On non-2xx responses.
            CircuitBreakerOpen: If the figma circuit is open.
        """
        async def _fetch():
            async with httpx.AsyncClient() as client:
                response = await client.get(
                    f"{self.FIGMA_API_BASE}{path}",
                    headers=self.headers,
                    params=params,
                    timeout=30.0
                )
                response.raise_for_status()
                return response.json()

        return await self.call_api(_fetch)

    async def get_file(self, file_key: str) -> Dict[str, Any]:
        """Get Figma file metadata and structure.

        Args:
            file_key: Figma file key

        Returns:
            File data
        """
        return await self._get_json(f"/files/{file_key}")

    async def get_styles(self, file_key: str) -> Dict[str, Any]:
        """Get all styles from Figma file.

        Args:
            file_key: Figma file key

        Returns:
            Styles data
        """
        return await self._get_json(f"/files/{file_key}/styles")

    async def get_components(self, file_key: str) -> Dict[str, Any]:
        """Get all components from Figma file.

        Args:
            file_key: Figma file key

        Returns:
            Components data
        """
        return await self._get_json(f"/files/{file_key}/components")

    async def extract_tokens(self, file_key: str) -> Dict[str, Any]:
        """Extract design tokens (local variables) from Figma file.

        Args:
            file_key: Figma file key

        Returns:
            Variables/tokens data
        """
        return await self._get_json(f"/files/{file_key}/variables/local")

    async def get_node(self, file_key: str, node_id: str) -> Dict[str, Any]:
        """Get specific node from Figma file.

        Args:
            file_key: Figma file key
            node_id: Node ID

        Returns:
            Node data
        """
        return await self._get_json(f"/files/{file_key}/nodes", params={"ids": node_id})
|
||||
|
||||
|
||||
class FigmaTools:
    """MCP tool executor for Figma integration"""

    def __init__(self, config: Dict[str, Any]):
        """
        Args:
            config: Figma configuration (with api_token)
        """
        self.figma = FigmaIntegration(config)

    async def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch a Figma MCP tool call to the matching API method."""
        dispatch = {
            "figma_get_file": self.figma.get_file,
            "figma_get_styles": self.figma.get_styles,
            "figma_get_components": self.figma.get_components,
            "figma_extract_tokens": self.figma.extract_tokens,
            "figma_get_node": self.figma.get_node,
        }

        handler = dispatch.get(tool_name)
        if handler is None:
            return {"error": f"Unknown Figma tool: {tool_name}"}

        try:
            # Drop private/bookkeeping keys (leading underscore) before the call.
            call_kwargs = {key: value for key, value in arguments.items() if not key.startswith("_")}
            return await handler(**call_kwargs)
        except Exception as exc:
            return {"error": str(exc)}
|
||||
215
demo/tools/dss_mcp/integrations/jira.py
Normal file
215
demo/tools/dss_mcp/integrations/jira.py
Normal file
@@ -0,0 +1,215 @@
|
||||
"""
|
||||
Jira Integration for MCP
|
||||
|
||||
Provides Jira API tools for issue tracking and project management.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List, Optional
|
||||
from atlassian import Jira
|
||||
from mcp import types
|
||||
|
||||
from .base import BaseIntegration
|
||||
|
||||
|
||||
# Jira MCP Tool Definitions
|
||||
# MCP tool schemas exposed for Jira; dispatched by JiraTools.
JIRA_TOOLS = [
    # Create an issue in a project.
    types.Tool(
        name="jira_create_issue",
        description="Create a new Jira issue",
        inputSchema={
            "type": "object",
            "properties": {
                "project_key": {
                    "type": "string",
                    "description": "Jira project key (e.g., 'DSS')"
                },
                "summary": {
                    "type": "string",
                    "description": "Issue summary/title"
                },
                "description": {
                    "type": "string",
                    "description": "Issue description"
                },
                "issue_type": {
                    "type": "string",
                    "description": "Issue type (Story, Task, Bug, etc.)",
                    "default": "Task"
                }
            },
            "required": ["project_key", "summary"]
        }
    ),
    # Single-issue lookup by key.
    types.Tool(
        name="jira_get_issue",
        description="Get Jira issue details by key",
        inputSchema={
            "type": "object",
            "properties": {
                "issue_key": {
                    "type": "string",
                    "description": "Issue key (e.g., 'DSS-123')"
                }
            },
            "required": ["issue_key"]
        }
    ),
    # Search via Jira Query Language.
    types.Tool(
        name="jira_search_issues",
        description="Search Jira issues using JQL",
        inputSchema={
            "type": "object",
            "properties": {
                "jql": {
                    "type": "string",
                    "description": "JQL query (e.g., 'project=DSS AND status=Open')"
                },
                "max_results": {
                    "type": "integer",
                    "description": "Maximum number of results",
                    "default": 50
                }
            },
            "required": ["jql"]
        }
    ),
    # Partial field update on an existing issue.
    types.Tool(
        name="jira_update_issue",
        description="Update a Jira issue",
        inputSchema={
            "type": "object",
            "properties": {
                "issue_key": {
                    "type": "string",
                    "description": "Issue key to update"
                },
                "fields": {
                    "type": "object",
                    "description": "Fields to update (summary, description, status, etc.)"
                }
            },
            "required": ["issue_key", "fields"]
        }
    ),
    # Append a comment to an issue.
    types.Tool(
        name="jira_add_comment",
        description="Add a comment to a Jira issue",
        inputSchema={
            "type": "object",
            "properties": {
                "issue_key": {
                    "type": "string",
                    "description": "Issue key"
                },
                "comment": {
                    "type": "string",
                    "description": "Comment text"
                }
            },
            "required": ["issue_key", "comment"]
        }
    )
]
|
||||
|
||||
|
||||
class JiraIntegration(BaseIntegration):
    """Jira REST API integration guarded by the circuit breaker."""

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize Jira integration.

        Args:
            config: Must contain 'url', 'username', 'api_token'
        """
        super().__init__("jira", config)

        url = config.get("url")
        username = config.get("username")
        api_token = config.get("api_token")

        if not all([url, username, api_token]):
            raise ValueError("Jira configuration incomplete: url, username, api_token required")

        # The Atlassian client uses the API token as the basic-auth password.
        self.jira = Jira(
            url=url,
            username=username,
            password=api_token,
            cloud=True
        )

    async def create_issue(
        self,
        project_key: str,
        summary: str,
        description: str = "",
        issue_type: str = "Task"
    ) -> Dict[str, Any]:
        """Create a new issue and return the API response."""
        def do_create():
            issue_fields = {
                "project": {"key": project_key},
                "summary": summary,
                "description": description,
                "issuetype": {"name": issue_type},
            }
            return self.jira.create_issue(issue_fields)

        return await self.call_api(do_create)

    async def get_issue(self, issue_key: str) -> Dict[str, Any]:
        """Fetch a single issue by key."""
        def do_get():
            return self.jira.get_issue(issue_key)

        return await self.call_api(do_get)

    async def search_issues(self, jql: str, max_results: int = 50) -> Dict[str, Any]:
        """Run a JQL search, capped at max_results."""
        def do_search():
            return self.jira.jql(jql, limit=max_results)

        return await self.call_api(do_search)

    async def update_issue(self, issue_key: str, fields: Dict[str, Any]) -> Dict[str, Any]:
        """Update fields on an issue; returns a small confirmation dict."""
        def do_update():
            self.jira.update_issue_field(issue_key, fields)
            return {"status": "updated", "issue_key": issue_key}

        return await self.call_api(do_update)

    async def add_comment(self, issue_key: str, comment: str) -> Dict[str, Any]:
        """Append a comment to an issue."""
        def do_comment():
            return self.jira.issue_add_comment(issue_key, comment)

        return await self.call_api(do_comment)
|
||||
|
||||
|
||||
class JiraTools:
    """MCP tool executor for Jira integration"""

    def __init__(self, config: Dict[str, Any]):
        self.jira = JiraIntegration(config)

    async def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch a Jira MCP tool call to the matching API method."""
        dispatch = {
            "jira_create_issue": self.jira.create_issue,
            "jira_get_issue": self.jira.get_issue,
            "jira_search_issues": self.jira.search_issues,
            "jira_update_issue": self.jira.update_issue,
            "jira_add_comment": self.jira.add_comment,
        }

        handler = dispatch.get(tool_name)
        if handler is None:
            return {"error": f"Unknown Jira tool: {tool_name}"}

        try:
            # Drop private/bookkeeping keys (leading underscore) before the call.
            call_kwargs = {key: value for key, value in arguments.items() if not key.startswith("_")}
            return await handler(**call_kwargs)
        except Exception as exc:
            return {"error": str(exc)}
|
||||
36
demo/tools/dss_mcp/requirements.txt
Normal file
36
demo/tools/dss_mcp/requirements.txt
Normal file
@@ -0,0 +1,36 @@
|
||||
# MCP Server Dependencies
|
||||
# Model Context Protocol
|
||||
mcp>=0.9.0
|
||||
|
||||
# Anthropic SDK
|
||||
anthropic>=0.40.0
|
||||
|
||||
# FastAPI & SSE
|
||||
fastapi>=0.104.0
|
||||
sse-starlette>=1.8.0
|
||||
uvicorn[standard]>=0.24.0
|
||||
|
||||
# HTTP Client
|
||||
httpx>=0.25.0
|
||||
aiohttp>=3.9.0
|
||||
|
||||
# Atlassian Integrations
|
||||
atlassian-python-api>=3.41.0
|
||||
|
||||
# Encryption
|
||||
cryptography>=42.0.0
|
||||
|
||||
# Async Task Queue (for worker pool)
|
||||
celery[redis]>=5.3.0
|
||||
|
||||
# Caching
|
||||
redis>=5.0.0
|
||||
|
||||
# Environment Variables
|
||||
python-dotenv>=1.0.0
|
||||
|
||||
# Database
|
||||
aiosqlite>=0.19.0
|
||||
|
||||
# Logging
|
||||
structlog>=23.2.0
|
||||
364
demo/tools/dss_mcp/server.py
Normal file
364
demo/tools/dss_mcp/server.py
Normal file
@@ -0,0 +1,364 @@
|
||||
"""
|
||||
DSS MCP Server
|
||||
|
||||
SSE-based Model Context Protocol server for Claude.
|
||||
Provides project-isolated context and tools with user-scoped integrations.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import structlog
|
||||
from typing import Optional, Dict, Any
|
||||
from fastapi import FastAPI, Query, HTTPException
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sse_starlette.sse import EventSourceResponse
|
||||
from mcp.server import Server
|
||||
from mcp import types
|
||||
|
||||
from .config import mcp_config, validate_config
|
||||
from .context.project_context import get_context_manager
|
||||
from .tools.project_tools import PROJECT_TOOLS, ProjectTools
|
||||
|
||||
# Configure logging
# Stdlib logging carries the level/format; structlog (below) is what the
# module actually logs through.
logging.basicConfig(
    level=mcp_config.LOG_LEVEL,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = structlog.get_logger()

# FastAPI app for SSE endpoints
app = FastAPI(
    title="DSS MCP Server",
    description="Model Context Protocol server for Design System Swarm",
    version="0.8.0"
)

# CORS configuration
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# maximally permissive — tighten before exposing beyond local development.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # TODO: Configure based on environment
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# MCP Server instance
mcp_server = Server("dss-mcp")

# Store active sessions
# Keyed by "{project_id}:{user_id}" (see get_session_key below); each value
# holds session metadata plus that session's ProjectTools instance.
_active_sessions: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
|
||||
def get_session_key(project_id: str, user_id: Optional[int] = None) -> str:
    """Generate session key for caching"""
    # Falsy user ids (None, 0) collapse into the shared "anonymous" bucket.
    suffix = user_id or 'anonymous'
    return f"{project_id}:{suffix}"
|
||||
|
||||
|
||||
@app.on_event("startup")
async def startup():
    """Startup tasks: announce boot and surface configuration warnings."""
    logger.info("Starting DSS MCP Server")

    # Log each configuration warning on its own line so none are missed.
    for warning in validate_config() or []:
        logger.warning(warning)

    logger.info(
        "DSS MCP Server started",
        host=mcp_config.HOST,
        port=mcp_config.PORT,
    )
|
||||
|
||||
|
||||
@app.on_event("shutdown")
async def shutdown():
    """Cleanup on shutdown"""
    # Nothing to release yet: per-connection session state is cleaned up in
    # the SSE generator's finally block, so this only logs the shutdown.
    logger.info("Shutting down DSS MCP Server")
|
||||
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Health check endpoint reporting liveness plus basic cache/session stats."""
    manager = get_context_manager()
    payload = {
        "status": "healthy",
        "server": "dss-mcp",
        "version": "0.8.0",
        # NOTE(review): reaches into the manager's private _cache — consider a
        # public size accessor on the context manager.
        "cache_size": len(manager._cache),
        "active_sessions": len(_active_sessions),
    }
    return payload
|
||||
|
||||
|
||||
@app.get("/sse")
async def sse_endpoint(
    project_id: str = Query(..., description="Project ID for context isolation"),
    user_id: Optional[int] = Query(None, description="User ID for user-scoped integrations")
):
    """
    Server-Sent Events endpoint for MCP communication.

    This endpoint maintains a persistent connection with the client
    and streams MCP protocol messages: an initial "connected" event with
    project metadata, then a "heartbeat" event every 30 seconds.

    Raises:
        HTTPException: 404 when the project does not exist, 500 when
            loading the project context fails.
    """
    session_key = get_session_key(project_id, user_id)

    logger.info(
        "SSE connection established",
        project_id=project_id,
        user_id=user_id,
        session_key=session_key
    )

    # Load project context
    context_manager = get_context_manager()
    try:
        project_context = await context_manager.get_context(project_id, user_id)
        if not project_context:
            raise HTTPException(status_code=404, detail=f"Project not found: {project_id}")
    except HTTPException:
        # BUG FIX: the 404 above was previously swallowed by the generic
        # Exception handler below and rewrapped as a 500 — re-raise it as-is.
        raise
    except Exception as e:
        logger.error("Failed to load project context", error=str(e))
        raise HTTPException(status_code=500, detail=f"Failed to load project: {str(e)}")

    # Create project tools instance scoped to this user's integrations
    project_tools = ProjectTools(user_id)

    # Track session (keyed by project + user so parallel users don't collide)
    _active_sessions[session_key] = {
        "project_id": project_id,
        "user_id": user_id,
        "connected_at": asyncio.get_event_loop().time(),
        "project_tools": project_tools
    }

    async def event_generator():
        """Generate SSE events for MCP communication"""
        try:
            # Send initial connection confirmation
            yield {
                "event": "connected",
                "data": json.dumps({
                    "project_id": project_id,
                    "project_name": project_context.name,
                    "available_tools": len(PROJECT_TOOLS),
                    "integrations_enabled": list(project_context.integrations.keys())
                })
            }

            # Keep connection alive
            while True:
                await asyncio.sleep(30)  # Heartbeat every 30 seconds
                yield {
                    "event": "heartbeat",
                    "data": json.dumps({"timestamp": asyncio.get_event_loop().time()})
                }

        except asyncio.CancelledError:
            # Client disconnected (or server shutting down).
            logger.info("SSE connection closed", session_key=session_key)
        finally:
            # Cleanup session regardless of how the stream ended.
            if session_key in _active_sessions:
                del _active_sessions[session_key]

    return EventSourceResponse(event_generator())
|
||||
|
||||
|
||||
# MCP Protocol Handlers
|
||||
@mcp_server.list_tools()
async def list_tools() -> list[types.Tool]:
    """
    List all available tools.

    Tools are dynamically determined based on:
    - Base DSS project tools (always available)
    - User's enabled integrations (Figma, Jira, Confluence, etc.)
    """
    # Copy the registry so callers cannot mutate the module-level list.
    available = list(PROJECT_TOOLS)

    # TODO: Add integration-specific tools based on user's enabled integrations
    # This will be implemented in Phase 3

    logger.debug("Listed tools", tool_count=len(available))
    return available
|
||||
|
||||
|
||||
@mcp_server.call_tool()
async def call_tool(name: str, arguments: dict) -> list[types.TextContent]:
    """
    Execute a tool by name.

    Args:
        name: Tool name
        arguments: Tool arguments (must include project_id)

    Returns:
        Tool execution results as a single JSON TextContent item; errors are
        reported inside the JSON payload rather than raised.
    """
    logger.info("Tool called", tool_name=name, arguments=arguments)

    project_id = arguments.get("project_id")
    if not project_id:
        return [
            types.TextContent(
                type="text",
                text=json.dumps({"error": "project_id is required"})
            )
        ]

    # Reuse the tools instance from the first active session on this project
    # (it carries that session's user-scoped integrations). The previously
    # computed-but-unused session_key local has been removed.
    # NOTE(review): with several users on one project this picks an arbitrary
    # session — confirm desired behavior.
    project_tools = next(
        (
            session["project_tools"]
            for session in _active_sessions.values()
            if session["project_id"] == project_id
        ),
        None,
    )

    if project_tools is None:
        # No live session: fall back to an anonymous (no-user) tools instance.
        project_tools = ProjectTools()

    # Execute tool
    try:
        result = await project_tools.execute_tool(name, arguments)

        return [
            types.TextContent(
                type="text",
                text=json.dumps(result, indent=2)
            )
        ]
    except Exception as e:
        logger.error("Tool execution failed", tool_name=name, error=str(e))
        return [
            types.TextContent(
                type="text",
                text=json.dumps({"error": str(e)})
            )
        ]
|
||||
|
||||
|
||||
@mcp_server.list_resources()
async def list_resources() -> list[types.Resource]:
    """
    List available resources.

    Resources provide static or dynamic content that Claude can access.
    Examples: project documentation, component specs, design system guidelines.

    Returns:
        Currently always an empty list — resources are not implemented yet.
    """
    # TODO: Implement resources based on project context
    # For now, return empty list
    return []
|
||||
|
||||
|
||||
@mcp_server.read_resource()
async def read_resource(uri: str) -> str:
    """
    Read a specific resource by URI.

    Args:
        uri: Resource URI (e.g., "dss://project-id/components/Button")

    Returns:
        Resource content. Currently always a JSON error string, since
        resource reading is not implemented yet.
    """
    # TODO: Implement resource reading
    # For now, return not implemented
    return json.dumps({"error": "Resource reading not yet implemented"})
|
||||
|
||||
|
||||
@mcp_server.list_prompts()
async def list_prompts() -> list[types.Prompt]:
    """
    List available prompt templates.

    Prompts provide pre-configured conversation starters for Claude.

    Returns:
        Currently always an empty list — no templates are registered yet.
    """
    # TODO: Add DSS-specific prompt templates
    # Examples: "Analyze component consistency", "Review token usage", etc.
    return []
|
||||
|
||||
|
||||
@mcp_server.get_prompt()
async def get_prompt(name: str, arguments: dict) -> types.GetPromptResult:
    """
    Get a specific prompt template.

    Args:
        name: Prompt name
        arguments: Prompt arguments

    Returns:
        Prompt content. Currently always a "Prompt not found" placeholder,
        since no templates are implemented yet.
    """
    # TODO: Implement prompt templates
    return types.GetPromptResult(
        description="Prompt not found",
        messages=[]
    )
|
||||
|
||||
|
||||
# API endpoint to call MCP tools directly (for testing/debugging)
|
||||
@app.post("/api/tools/{tool_name}")
async def call_tool_api(tool_name: str, arguments: Dict[str, Any]):
    """
    Direct API endpoint to call MCP tools.

    Useful for testing tools without MCP client.
    """
    # Anonymous tools instance: no user-scoped integrations are attached.
    return await ProjectTools().execute_tool(tool_name, arguments)
|
||||
|
||||
|
||||
# API endpoint to list active sessions
|
||||
@app.get("/api/sessions")
async def list_sessions():
    """List all active SSE sessions"""
    # Build a JSON-safe snapshot; the stored ProjectTools instances are
    # deliberately omitted from the response.
    snapshot = []
    for session in _active_sessions.values():
        snapshot.append({
            "project_id": session["project_id"],
            "user_id": session["user_id"],
            "connected_at": session["connected_at"],
        })
    return {
        "active_sessions": len(_active_sessions),
        "sessions": snapshot,
    }
|
||||
|
||||
|
||||
# API endpoint to clear context cache
|
||||
@app.post("/api/cache/clear")
async def clear_cache(project_id: Optional[str] = None):
    """Clear context cache for a project or all projects"""
    get_context_manager().clear_cache(project_id)
    # Echo which scope was cleared ("all" when no project was given).
    return {
        "status": "cache_cleared",
        "project_id": project_id or "all",
    }
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    logger.info(
        "Starting DSS MCP Server",
        host=mcp_config.HOST,
        port=mcp_config.PORT
    )

    # NOTE(review): the "server:app" import string only resolves when uvicorn
    # is launched from this file's directory — confirm whether the fully
    # qualified module path is needed. reload=True is a development setting;
    # disable for production deployments.
    uvicorn.run(
        "server:app",
        host=mcp_config.HOST,
        port=mcp_config.PORT,
        reload=True,
        log_level=mcp_config.LOG_LEVEL.lower()
    )
|
||||
0
demo/tools/dss_mcp/tools/__init__.py
Normal file
0
demo/tools/dss_mcp/tools/__init__.py
Normal file
321
demo/tools/dss_mcp/tools/project_tools.py
Normal file
321
demo/tools/dss_mcp/tools/project_tools.py
Normal file
@@ -0,0 +1,321 @@
|
||||
"""
|
||||
DSS Project Tools for MCP
|
||||
|
||||
Base tools that Claude can use to interact with DSS projects.
|
||||
All tools are project-scoped and context-aware.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List, Optional
|
||||
from mcp import types
|
||||
|
||||
from ..context.project_context import get_context_manager
|
||||
|
||||
|
||||
# Tool definitions (metadata for Claude)

def _schema(properties: Dict[str, Any], required: List[str]) -> Dict[str, Any]:
    """Build the standard JSON object inputSchema shared by every DSS tool."""
    return {"type": "object", "properties": properties, "required": required}


# Shared property definition: every tool takes a project_id. Treat as a
# read-only constant — it is aliased across the schemas below.
_PROJECT_ID = {"type": "string", "description": "Project ID"}

PROJECT_TOOLS = [
    types.Tool(
        name="dss_get_project_summary",
        description="Get comprehensive project summary including components, tokens, health, and stats",
        inputSchema=_schema(
            {
                "project_id": {
                    "type": "string",
                    "description": "Project ID to query"
                },
                "include_components": {
                    "type": "boolean",
                    "description": "Include full component list (default: false)",
                    "default": False
                },
            },
            ["project_id"],
        ),
    ),
    types.Tool(
        name="dss_list_components",
        description="List all components in a project with their properties",
        inputSchema=_schema(
            {
                "project_id": _PROJECT_ID,
                "filter_name": {
                    "type": "string",
                    "description": "Optional: Filter by component name (partial match)"
                },
                "code_generated_only": {
                    "type": "boolean",
                    "description": "Optional: Only show components with generated code",
                    "default": False
                },
            },
            ["project_id"],
        ),
    ),
    types.Tool(
        name="dss_get_component",
        description="Get detailed information about a specific component",
        inputSchema=_schema(
            {
                "project_id": _PROJECT_ID,
                "component_name": {
                    "type": "string",
                    "description": "Component name (exact match)"
                },
            },
            ["project_id", "component_name"],
        ),
    ),
    types.Tool(
        name="dss_get_design_tokens",
        description="Get all design tokens (colors, typography, spacing, etc.) for a project",
        inputSchema=_schema(
            {
                "project_id": _PROJECT_ID,
                "token_category": {
                    "type": "string",
                    "description": "Optional: Filter by token category (colors, typography, spacing, etc.)",
                    "enum": ["colors", "typography", "spacing", "shadows", "borders", "all"]
                },
            },
            ["project_id"],
        ),
    ),
    types.Tool(
        name="dss_get_project_health",
        description="Get project health score, grade, and list of issues",
        inputSchema=_schema({"project_id": _PROJECT_ID}, ["project_id"]),
    ),
    types.Tool(
        name="dss_list_styles",
        description="List design styles (text, fill, effect, grid) from Figma",
        inputSchema=_schema(
            {
                "project_id": _PROJECT_ID,
                "style_type": {
                    "type": "string",
                    "description": "Optional: Filter by style type",
                    "enum": ["TEXT", "FILL", "EFFECT", "GRID", "all"]
                },
            },
            ["project_id"],
        ),
    ),
    types.Tool(
        name="dss_get_discovery_data",
        description="Get project discovery/scan data (file counts, technologies detected, etc.)",
        inputSchema=_schema({"project_id": _PROJECT_ID}, ["project_id"]),
    ),
]
|
||||
|
||||
|
||||
# Tool implementations
class ProjectTools:
    """Project tool implementations.

    Each public method backs one PROJECT_TOOLS entry. All methods are
    project-scoped: they resolve the project context first and return an
    {"error": ...} dict when the project (or requested entity) is missing.
    """

    def __init__(self, user_id: Optional[int] = None):
        # user_id scopes integrations (e.g. credentials); None means an
        # anonymous context with no user-scoped integrations.
        self.context_manager = get_context_manager()
        self.user_id = user_id

    async def _load_context(self, project_id: str):
        """Resolve the project context for *project_id* (None if unknown)."""
        return await self.context_manager.get_context(project_id, self.user_id)

    async def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Execute a tool by name.

        Unknown tools and handler exceptions are reported as {"error": ...}
        payloads rather than raised, so MCP callers always receive JSON.
        """
        handlers = {
            "dss_get_project_summary": self.get_project_summary,
            "dss_list_components": self.list_components,
            "dss_get_component": self.get_component,
            "dss_get_design_tokens": self.get_design_tokens,
            "dss_get_project_health": self.get_project_health,
            "dss_list_styles": self.list_styles,
            "dss_get_discovery_data": self.get_discovery_data
        }

        handler = handlers.get(tool_name)
        if not handler:
            return {"error": f"Unknown tool: {tool_name}"}

        try:
            # Strip internal ("_"-prefixed) keys before dispatch, matching
            # JiraTools.execute_tool; handlers only accept schema arguments.
            clean_args = {k: v for k, v in arguments.items() if not k.startswith("_")}
            result = await handler(**clean_args)
            return result
        except Exception as e:
            return {"error": str(e)}

    async def get_project_summary(
        self,
        project_id: str,
        include_components: bool = False
    ) -> Dict[str, Any]:
        """Get comprehensive project summary."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        summary = {
            "project_id": context.project_id,
            "name": context.name,
            "description": context.description,
            "component_count": context.component_count,
            "health": context.health,
            "stats": context.stats,
            "config": context.config,
            "integrations_enabled": list(context.integrations.keys()),
            "loaded_at": context.loaded_at.isoformat()
        }

        # The full component list can be large, so it is opt-in.
        if include_components:
            summary["components"] = context.components

        return summary

    async def list_components(
        self,
        project_id: str,
        filter_name: Optional[str] = None,
        code_generated_only: bool = False
    ) -> Dict[str, Any]:
        """List components with optional name / code-generated filtering."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        components = context.components

        # Case-insensitive partial match on the component name.
        if filter_name:
            components = [
                c for c in components
                if filter_name.lower() in c['name'].lower()
            ]

        if code_generated_only:
            components = [c for c in components if c.get('code_generated')]

        return {
            "project_id": project_id,
            "total_count": len(components),
            "components": components
        }

    async def get_component(
        self,
        project_id: str,
        component_name: str
    ) -> Dict[str, Any]:
        """Get detailed component information (exact name match)."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        # Find component by name
        component = next(
            (c for c in context.components if c['name'] == component_name),
            None
        )

        if not component:
            return {"error": f"Component not found: {component_name}"}

        return {
            "project_id": project_id,
            "component": component
        }

    async def get_design_tokens(
        self,
        project_id: str,
        token_category: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get design tokens, optionally filtered by category."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        tokens = context.tokens

        if token_category and token_category != "all":
            # Unknown categories yield an empty token set rather than an error.
            if token_category in tokens:
                tokens = {token_category: tokens[token_category]}
            else:
                tokens = {}

        return {
            "project_id": project_id,
            "tokens": tokens,
            "categories": list(tokens.keys())
        }

    async def get_project_health(self, project_id: str) -> Dict[str, Any]:
        """Get project health information."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        return {
            "project_id": project_id,
            "health": context.health
        }

    async def list_styles(
        self,
        project_id: str,
        style_type: Optional[str] = None
    ) -> Dict[str, Any]:
        """List design styles with optional type filter."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        styles = context.styles

        if style_type and style_type != "all":
            styles = [s for s in styles if s['type'] == style_type]

        return {
            "project_id": project_id,
            "total_count": len(styles),
            "styles": styles
        }

    async def get_discovery_data(self, project_id: str) -> Dict[str, Any]:
        """Get project discovery/scan data."""
        context = await self._load_context(project_id)
        if not context:
            return {"error": f"Project not found: {project_id}"}

        return {
            "project_id": project_id,
            "discovery": context.discovery
        }
|
||||
Reference in New Issue
Block a user