Systematic replacement of 'swarm' and 'organism' terminology across codebase: AUTOMATED REPLACEMENTS: - 'Design System Swarm' → 'Design System Server' (all files) - 'swarm' → 'DSS' (markdown, JSON, comments) - 'organism' → 'component' (markdown, atomic design refs) FILES UPDATED: 60+ files across: - Documentation (.md files) - Configuration (.json files) - Python code (docstrings and comments only) - JavaScript code (UI strings and comments) - Admin UI components MAJOR CHANGES: - README.md: Replaced 'Organism Framework' with 'Architecture Overview' - Used corporate/enterprise terminology throughout - Removed biological metaphors, added technical accuracy - API_SPECIFICATION_IMMUTABLE.md: Terminology updates - dss-claude-plugin/.mcp.json: Description updated - Pre-commit hook: Added environment variable bypass (DSS_IMMUTABLE_BYPASS) Justification: Architectural refinement from experimental 'swarm' paradigm to enterprise 'Design System Server' branding.
2093 lines
66 KiB
Python
2093 lines
66 KiB
Python
"""
|
|
Design System Server (DSS) - FastAPI Server
|
|
|
|
Portable API server providing:
|
|
- Project management (CRUD)
|
|
- Figma integration endpoints
|
|
- Discovery & health endpoints
|
|
- Activity tracking
|
|
- Runtime configuration management
|
|
- Service discovery (Storybook, etc.)
|
|
|
|
Modes:
|
|
- Server: Deployed remotely, serves design systems to teams
|
|
- Local: Dev companion, UI advisor, local services
|
|
|
|
Uses SQLite for persistence, integrates with Figma tools.
|
|
"""
|
|
|
|
import asyncio
|
|
import subprocess
|
|
import json
|
|
import os
|
|
from pathlib import Path
|
|
from typing import Optional, List, Dict, Any
|
|
from datetime import datetime
|
|
|
|
from fastapi import FastAPI, HTTPException, Query, BackgroundTasks, Depends, Header
|
|
from fastapi.middleware.cors import CORSMiddleware
|
|
from fastapi.responses import JSONResponse
|
|
from fastapi.staticfiles import StaticFiles
|
|
from pydantic import BaseModel
|
|
from typing import Optional
|
|
|
|
import sys
|
|
# Add tools directory to path (legacy imports)
|
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
# Add dss-mvp1 directory to path (consolidated dss package)
|
|
sys.path.insert(0, str(Path(__file__).parent.parent.parent / "dss-mvp1"))
|
|
|
|
# Legacy imports (will gradually migrate these)
|
|
from config import config
|
|
from storage.database import (
|
|
Projects, Components, SyncHistory, ActivityLog, Teams, Cache, get_stats,
|
|
FigmaFiles, ESREDefinitions, TokenDriftDetector, CodeMetrics, TestResults
|
|
)
|
|
from figma.figma_tools import FigmaToolSuite
|
|
|
|
# New consolidated dss imports - now available!
|
|
# from dss import DesignToken, TokenSource, ProjectScanner, etc.
|
|
# from dss.ingest import CSSTokenSource, SCSSTokenSource, TailwindTokenSource
|
|
# from dss.analyze import ReactAnalyzer, StyleAnalyzer, QuickWinFinder
|
|
# from dss.storybook import StorybookScanner, StoryGenerator
|
|
|
|
|
|
# === Runtime Configuration ===
|
|
|
|
class RuntimeConfig:
|
|
"""
|
|
Runtime configuration that can be modified from the dashboard.
|
|
Persists to .dss/runtime-config.json for portability.
|
|
"""
|
|
def __init__(self):
|
|
self.config_path = Path(__file__).parent.parent.parent / ".dss" / "runtime-config.json"
|
|
self.config_path.parent.mkdir(parents=True, exist_ok=True)
|
|
self._data = self._load()
|
|
|
|
def _load(self) -> dict:
|
|
if self.config_path.exists():
|
|
try:
|
|
return json.loads(self.config_path.read_text())
|
|
except (json.JSONDecodeError, IOError) as e:
|
|
# Config file corrupted or unreadable, use defaults
|
|
pass
|
|
return {
|
|
"mode": "local", # "local" or "server"
|
|
"figma": {"token": "", "configured": False},
|
|
"services": {
|
|
"storybook": {"enabled": False, "port": 6006, "url": ""},
|
|
"chromatic": {"enabled": False, "project_token": ""},
|
|
"github": {"enabled": False, "repo": ""},
|
|
},
|
|
"features": {
|
|
"visual_qa": True,
|
|
"token_sync": True,
|
|
"code_gen": True,
|
|
"ai_advisor": False,
|
|
}
|
|
}
|
|
|
|
def _save(self):
|
|
self.config_path.write_text(json.dumps(self._data, indent=2))
|
|
|
|
def get(self, key: str = None):
|
|
if key is None:
|
|
# Return safe copy without secrets
|
|
safe = self._data.copy()
|
|
if safe.get("figma", {}).get("token"):
|
|
safe["figma"]["token"] = "***configured***"
|
|
return safe
|
|
return self._data.get(key)
|
|
|
|
def set(self, key: str, value: Any):
|
|
self._data[key] = value
|
|
self._save()
|
|
return self._data[key]
|
|
|
|
def update(self, updates: dict):
|
|
for key, value in updates.items():
|
|
if isinstance(value, dict) and isinstance(self._data.get(key), dict):
|
|
self._data[key].update(value)
|
|
else:
|
|
self._data[key] = value
|
|
self._save()
|
|
return self.get()
|
|
|
|
def set_figma_token(self, token: str):
|
|
self._data["figma"]["token"] = token
|
|
self._data["figma"]["configured"] = bool(token)
|
|
self._save()
|
|
# Also update the global config
|
|
os.environ["FIGMA_TOKEN"] = token
|
|
return {"configured": bool(token)}
|
|
|
|
|
|
runtime_config = RuntimeConfig()
|
|
|
|
|
|
# === Service Discovery ===
|
|
|
|
class ServiceDiscovery:
|
|
"""Discovers and manages companion services."""
|
|
|
|
KNOWN_SERVICES = {
|
|
"storybook": {"ports": [6006, 6007], "health": "/"},
|
|
"chromatic": {"ports": [], "health": None},
|
|
"vite": {"ports": [5173, 5174, 3000], "health": "/"},
|
|
"webpack": {"ports": [8080, 8081], "health": "/"},
|
|
"nextjs": {"ports": [3000, 3001], "health": "/"},
|
|
}
|
|
|
|
@classmethod
|
|
async def discover(cls) -> dict:
|
|
"""Discover running services by checking known ports."""
|
|
import socket
|
|
|
|
discovered = {}
|
|
for service, info in cls.KNOWN_SERVICES.items():
|
|
for port in info["ports"]:
|
|
try:
|
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
|
sock.settimeout(0.5)
|
|
result = sock.connect_ex(('127.0.0.1', port))
|
|
sock.close()
|
|
if result == 0:
|
|
discovered[service] = {
|
|
"running": True,
|
|
"port": port,
|
|
"url": f"http://localhost:{port}"
|
|
}
|
|
break
|
|
except (OSError, socket.error):
|
|
# Service not running on this port
|
|
pass
|
|
if service not in discovered:
|
|
discovered[service] = {"running": False, "port": None, "url": None}
|
|
|
|
return discovered
|
|
|
|
@classmethod
|
|
async def check_storybook(cls) -> dict:
|
|
"""Check Storybook status specifically."""
|
|
import httpx
|
|
|
|
configured = runtime_config.get("services").get("storybook", {})
|
|
port = configured.get("port", 6006)
|
|
url = configured.get("url") or f"http://localhost:{port}"
|
|
|
|
try:
|
|
async with httpx.AsyncClient(timeout=2.0) as client:
|
|
resp = await client.get(url)
|
|
return {
|
|
"running": resp.status_code == 200,
|
|
"url": url,
|
|
"port": port
|
|
}
|
|
except (httpx.ConnectError, httpx.TimeoutException, httpx.HTTPError):
|
|
# Storybook not running or unreachable
|
|
return {"running": False, "url": url, "port": port}
|
|
|
|
|
|
# === App Setup ===
|
|
|
|
app = FastAPI(
|
|
title="Design System Server (DSS)",
|
|
description="API for design system management and Figma integration",
|
|
version="1.0.0"
|
|
)
|
|
|
|
app.add_middleware(
|
|
CORSMiddleware,
|
|
allow_origins=["*"],
|
|
allow_credentials=True,
|
|
allow_methods=["*"],
|
|
allow_headers=["*"],
|
|
)
|
|
|
|
# Mount Admin UI static files
|
|
UI_DIR = Path(__file__).parent.parent.parent / "admin-ui"
|
|
if UI_DIR.exists():
|
|
app.mount("/admin-ui", StaticFiles(directory=str(UI_DIR), html=True), name="admin-ui")
|
|
|
|
# Initialize Figma tools with token from runtime config
|
|
figma_config = runtime_config.get("figma")
|
|
figma_token_at_startup = figma_config.get("token") if figma_config else None
|
|
figma_suite = FigmaToolSuite(
|
|
token=figma_token_at_startup,
|
|
output_dir=str(Path(__file__).parent.parent.parent / ".dss" / "output")
|
|
)
|
|
|
|
|
|
# === Request/Response Models ===
|
|
|
|
class ProjectCreate(BaseModel):
|
|
name: str
|
|
description: str = ""
|
|
figma_file_key: str = ""
|
|
|
|
class ProjectUpdate(BaseModel):
|
|
name: Optional[str] = None
|
|
description: Optional[str] = None
|
|
figma_file_key: Optional[str] = None
|
|
status: Optional[str] = None
|
|
|
|
class FigmaExtractRequest(BaseModel):
|
|
file_key: str
|
|
format: str = "css"
|
|
|
|
class FigmaSyncRequest(BaseModel):
|
|
file_key: str
|
|
target_path: str
|
|
format: str = "css"
|
|
|
|
class TeamCreate(BaseModel):
|
|
name: str
|
|
description: str = ""
|
|
|
|
class FigmaFileCreate(BaseModel):
|
|
figma_url: str
|
|
file_name: str
|
|
file_key: str
|
|
|
|
class ESRECreate(BaseModel):
|
|
name: str
|
|
definition_text: str
|
|
expected_value: Optional[str] = None
|
|
component_name: Optional[str] = None
|
|
|
|
class TokenDriftCreate(BaseModel):
|
|
component_id: str
|
|
property_name: str
|
|
hardcoded_value: str
|
|
file_path: str
|
|
line_number: int
|
|
severity: str = "warning"
|
|
suggested_token: Optional[str] = None
|
|
|
|
|
|
# === Authentication ===
|
|
|
|
from auth.atlassian_auth import get_auth
|
|
|
|
async def get_current_user(authorization: Optional[str] = Header(None)) -> Dict[str, Any]:
|
|
"""
|
|
Dependency to get current authenticated user from JWT token.
|
|
Usage: user = Depends(get_current_user)
|
|
"""
|
|
if not authorization or not authorization.startswith("Bearer "):
|
|
raise HTTPException(status_code=401, detail="Not authenticated")
|
|
|
|
token = authorization.replace("Bearer ", "")
|
|
auth = get_auth()
|
|
user_data = auth.verify_token(token)
|
|
|
|
if not user_data:
|
|
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
|
|
|
return user_data
|
|
|
|
class LoginRequest(BaseModel):
|
|
url: str # Atlassian URL
|
|
email: str
|
|
api_token: str
|
|
service: str = "jira" # "jira" or "confluence"
|
|
|
|
@app.post("/api/auth/login")
|
|
async def login(request: LoginRequest):
|
|
"""
|
|
Authenticate with Atlassian credentials.
|
|
|
|
Validates credentials against Jira or Confluence API,
|
|
creates/updates user in database, returns JWT token.
|
|
"""
|
|
try:
|
|
auth = get_auth()
|
|
result = await auth.login(
|
|
url=request.url,
|
|
email=request.email,
|
|
api_token=request.api_token,
|
|
service=request.service
|
|
)
|
|
return result
|
|
except ValueError as e:
|
|
raise HTTPException(status_code=401, detail=str(e))
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Login failed: {str(e)}")
|
|
|
|
@app.get("/api/auth/me")
|
|
async def get_me(user: Dict[str, Any] = Depends(get_current_user)):
|
|
"""Get current authenticated user info"""
|
|
auth = get_auth()
|
|
user_data = await auth.get_user_by_id(user["user_id"])
|
|
if not user_data:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
return user_data
|
|
|
|
# === Root & Health ===
|
|
|
|
@app.get("/")
|
|
async def root():
|
|
"""Redirect to Admin UI dashboard."""
|
|
from fastapi.responses import RedirectResponse
|
|
return RedirectResponse(url="/admin-ui/index.html")
|
|
|
|
|
|
@app.get("/health")
|
|
async def health():
|
|
"""
|
|
Health check endpoint for monitoring.
|
|
Returns 200 OK if service is healthy.
|
|
"""
|
|
import os
|
|
import psutil
|
|
from pathlib import Path
|
|
|
|
# Check database connectivity
|
|
db_ok = False
|
|
try:
|
|
with get_connection() as conn:
|
|
conn.execute("SELECT 1").fetchone()
|
|
db_ok = True
|
|
except:
|
|
pass
|
|
|
|
# Check MCP handler
|
|
mcp_ok = False
|
|
try:
|
|
from dss_mcp.handler import get_mcp_handler
|
|
handler = get_mcp_handler()
|
|
mcp_ok = handler is not None
|
|
except:
|
|
pass
|
|
|
|
# Get uptime from process
|
|
try:
|
|
process = psutil.Process(os.getpid())
|
|
uptime_seconds = int((datetime.now() - datetime.fromtimestamp(process.create_time())).total_seconds())
|
|
except:
|
|
uptime_seconds = 0
|
|
|
|
# Overall status
|
|
status = "healthy" if (db_ok and mcp_ok) else "degraded"
|
|
|
|
return {
|
|
"status": status,
|
|
"version": "0.8.0",
|
|
"timestamp": datetime.utcnow().isoformat() + "Z",
|
|
"uptime_seconds": uptime_seconds,
|
|
"components": {
|
|
"database": "ok" if db_ok else "error",
|
|
"mcp": "ok" if mcp_ok else "error",
|
|
"figma": "ok" if config.figma.is_configured else "not_configured"
|
|
}
|
|
}
|
|
|
|
@app.get("/api/stats")
|
|
async def get_statistics():
|
|
"""Get database and system statistics."""
|
|
db_stats = get_stats()
|
|
return {
|
|
"database": db_stats,
|
|
"figma": {
|
|
"mode": figma_suite.mode,
|
|
"configured": config.figma.is_configured
|
|
}
|
|
}
|
|
|
|
|
|
# === Projects ===
|
|
|
|
@app.get("/api/projects")
|
|
async def list_projects(status: Optional[str] = None):
|
|
"""List all projects."""
|
|
projects = Projects.list(status=status)
|
|
return projects
|
|
|
|
@app.get("/api/projects/{project_id}")
|
|
async def get_project(project_id: str):
|
|
"""Get a specific project."""
|
|
project = Projects.get(project_id)
|
|
if not project:
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
return project
|
|
|
|
@app.post("/api/projects")
|
|
async def create_project(project: ProjectCreate):
|
|
"""Create a new project."""
|
|
project_id = f"proj-{int(datetime.utcnow().timestamp() * 1000)}"
|
|
created = Projects.create(
|
|
id=project_id,
|
|
name=project.name,
|
|
description=project.description,
|
|
figma_file_key=project.figma_file_key
|
|
)
|
|
ActivityLog.log(
|
|
action="project_created",
|
|
entity_type="project",
|
|
entity_id=project_id,
|
|
project_id=project_id,
|
|
details={"name": project.name}
|
|
)
|
|
return created
|
|
|
|
@app.put("/api/projects/{project_id}")
|
|
async def update_project(project_id: str, update: ProjectUpdate):
|
|
"""Update a project."""
|
|
existing = Projects.get(project_id)
|
|
if not existing:
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
update_data = {k: v for k, v in update.dict().items() if v is not None}
|
|
if not update_data:
|
|
return existing
|
|
|
|
updated = Projects.update(project_id, **update_data)
|
|
ActivityLog.log(
|
|
action="project_updated",
|
|
entity_type="project",
|
|
entity_id=project_id,
|
|
project_id=project_id,
|
|
details=update_data
|
|
)
|
|
return updated
|
|
|
|
@app.delete("/api/projects/{project_id}")
|
|
async def delete_project(project_id: str):
|
|
"""Delete a project."""
|
|
if not Projects.delete(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
ActivityLog.log(
|
|
action="project_deleted",
|
|
entity_type="project",
|
|
entity_id=project_id
|
|
)
|
|
return {"success": True}
|
|
|
|
|
|
# === Components ===
|
|
|
|
@app.get("/api/projects/{project_id}/components")
|
|
async def list_components(project_id: str):
|
|
"""List components for a project."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
return Components.list(project_id)
|
|
|
|
|
|
# === Figma Integration ===
|
|
|
|
@app.post("/api/figma/extract-variables")
|
|
async def extract_variables(request: FigmaExtractRequest, background_tasks: BackgroundTasks):
|
|
"""Extract design tokens from Figma file."""
|
|
try:
|
|
result = await figma_suite.extract_variables(request.file_key, request.format)
|
|
ActivityLog.log(
|
|
action="figma_extract_variables",
|
|
entity_type="figma",
|
|
details={"file_key": request.file_key, "format": request.format, "count": result.get("tokens_count")}
|
|
)
|
|
return result
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.post("/api/figma/extract-components")
|
|
async def extract_components(request: FigmaExtractRequest):
|
|
"""Extract components from Figma file."""
|
|
try:
|
|
result = await figma_suite.extract_components(request.file_key)
|
|
ActivityLog.log(
|
|
action="figma_extract_components",
|
|
entity_type="figma",
|
|
details={"file_key": request.file_key, "count": result.get("components_count")}
|
|
)
|
|
return result
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.post("/api/figma/extract-styles")
|
|
async def extract_styles(request: FigmaExtractRequest):
|
|
"""Extract styles from Figma file."""
|
|
try:
|
|
result = await figma_suite.extract_styles(request.file_key)
|
|
return result
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.post("/api/figma/sync-tokens")
|
|
async def sync_tokens(request: FigmaSyncRequest):
|
|
"""Sync tokens from Figma to target path."""
|
|
try:
|
|
result = await figma_suite.sync_tokens(request.file_key, request.target_path, request.format)
|
|
ActivityLog.log(
|
|
action="figma_sync_tokens",
|
|
entity_type="figma",
|
|
details={"file_key": request.file_key, "target": request.target_path, "synced": result.get("tokens_synced")}
|
|
)
|
|
return result
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.post("/api/figma/validate")
|
|
async def validate_components(request: FigmaExtractRequest):
|
|
"""Validate components against design system rules."""
|
|
try:
|
|
result = await figma_suite.validate_components(request.file_key)
|
|
return result
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.post("/api/figma/generate-code")
|
|
async def generate_code(file_key: str, component_name: str, framework: str = "webcomponent"):
|
|
"""Generate component code from Figma."""
|
|
try:
|
|
result = await figma_suite.generate_code(file_key, component_name, framework)
|
|
return result
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.get("/api/figma/health")
|
|
async def figma_health():
|
|
"""Health check for Figma integration."""
|
|
is_live = figma_suite.mode == 'live'
|
|
return {
|
|
"status": "ok" if is_live else "degraded",
|
|
"mode": figma_suite.mode,
|
|
"message": "Figma is connected." if is_live else "Figma is running in mock mode. Please configure a token."
|
|
}
|
|
|
|
|
|
# === Discovery ===
|
|
|
|
@app.get("/api/discovery")
|
|
async def run_discovery(path: str = "."):
|
|
"""Run project discovery."""
|
|
script_path = Path(__file__).parent.parent / "discovery" / "discover.sh"
|
|
|
|
try:
|
|
result = subprocess.run(
|
|
[str(script_path), path],
|
|
capture_output=True,
|
|
text=True,
|
|
timeout=30
|
|
)
|
|
if result.returncode == 0:
|
|
return json.loads(result.stdout)
|
|
else:
|
|
return {"error": result.stderr}
|
|
except subprocess.TimeoutExpired:
|
|
raise HTTPException(status_code=504, detail="Discovery timed out")
|
|
except json.JSONDecodeError:
|
|
return {"raw_output": result.stdout}
|
|
|
|
class DiscoveryScanRequest(BaseModel):
|
|
path: str = "."
|
|
full_scan: bool = False
|
|
|
|
@app.post("/api/discovery/scan")
|
|
async def scan_project(request: DiscoveryScanRequest):
|
|
"""Run project discovery scan."""
|
|
script_path = Path(__file__).parent.parent / "discovery" / "discover.sh"
|
|
|
|
try:
|
|
result = subprocess.run(
|
|
[str(script_path), request.path],
|
|
capture_output=True,
|
|
text=True,
|
|
timeout=30
|
|
)
|
|
if result.returncode == 0:
|
|
data = json.loads(result.stdout)
|
|
ActivityLog.log(
|
|
action="discovery_scan",
|
|
entity_type="project",
|
|
details={"path": request.path, "full_scan": request.full_scan}
|
|
)
|
|
return data
|
|
else:
|
|
return {"error": result.stderr}
|
|
except subprocess.TimeoutExpired:
|
|
raise HTTPException(status_code=504, detail="Discovery timed out")
|
|
except json.JSONDecodeError:
|
|
return {"raw_output": result.stdout}
|
|
|
|
@app.get("/api/discovery/stats")
|
|
async def get_discovery_stats():
|
|
"""Get project statistics."""
|
|
db_stats = get_stats()
|
|
return {
|
|
"projects": db_stats.get("projects", {}),
|
|
"tokens": db_stats.get("tokens", {"total": 0}),
|
|
"components": db_stats.get("components", {"total": 0}),
|
|
"syncs": {
|
|
"today": 0,
|
|
"this_week": 0,
|
|
"total": db_stats.get("syncs", {}).get("total", 0),
|
|
"last_sync": None
|
|
},
|
|
"stories": {
|
|
"total": 0
|
|
}
|
|
}
|
|
|
|
@app.get("/api/discovery/activity")
|
|
async def get_discovery_activity(limit: int = Query(default=10, le=50)):
|
|
"""Get recent discovery activity."""
|
|
return ActivityLog.recent(limit=limit)
|
|
|
|
@app.get("/api/discovery/ports")
|
|
async def discover_ports():
|
|
"""Discover listening ports and services."""
|
|
script_path = Path(__file__).parent.parent / "discovery" / "discover-ports.sh"
|
|
|
|
try:
|
|
result = subprocess.run(
|
|
[str(script_path)],
|
|
capture_output=True,
|
|
text=True,
|
|
timeout=10
|
|
)
|
|
return json.loads(result.stdout)
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@app.get("/api/discovery/env")
|
|
async def discover_env(path: str = "."):
|
|
"""Analyze environment configuration."""
|
|
script_path = Path(__file__).parent.parent / "discovery" / "discover-env.sh"
|
|
|
|
try:
|
|
result = subprocess.run(
|
|
[str(script_path), path],
|
|
capture_output=True,
|
|
text=True,
|
|
timeout=10
|
|
)
|
|
return json.loads(result.stdout)
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
# === Activity & Sync History ===
|
|
|
|
@app.get("/api/activity")
|
|
async def get_activity(limit: int = Query(default=50, le=100)):
|
|
"""Get recent activity log."""
|
|
return ActivityLog.recent(limit=limit)
|
|
|
|
@app.get("/api/sync-history")
|
|
async def get_sync_history(project_id: Optional[str] = None, limit: int = Query(default=20, le=100)):
|
|
"""Get sync history."""
|
|
return SyncHistory.recent(project_id=project_id, limit=limit)
|
|
|
|
|
|
# === Audit Log (Enhanced) ===
|
|
|
|
@app.get("/api/audit")
|
|
async def get_audit_log(
|
|
project_id: Optional[str] = None,
|
|
user_id: Optional[str] = None,
|
|
action: Optional[str] = None,
|
|
category: Optional[str] = None,
|
|
entity_type: Optional[str] = None,
|
|
severity: Optional[str] = None,
|
|
start_date: Optional[str] = None,
|
|
end_date: Optional[str] = None,
|
|
limit: int = Query(default=50, le=200),
|
|
offset: int = Query(default=0, ge=0)
|
|
):
|
|
"""
|
|
Get audit log with advanced filtering.
|
|
|
|
Query parameters:
|
|
- project_id: Filter by project
|
|
- user_id: Filter by user
|
|
- action: Filter by specific action
|
|
- category: Filter by category (design_system, code, configuration, etc.)
|
|
- entity_type: Filter by entity type (project, component, token, etc.)
|
|
- severity: Filter by severity (info, warning, critical)
|
|
- start_date: Filter from date (ISO format)
|
|
- end_date: Filter to date (ISO format)
|
|
- limit: Number of results (max 200)
|
|
- offset: Pagination offset
|
|
"""
|
|
activities = ActivityLog.search(
|
|
project_id=project_id,
|
|
user_id=user_id,
|
|
action=action,
|
|
category=category,
|
|
entity_type=entity_type,
|
|
severity=severity,
|
|
start_date=start_date,
|
|
end_date=end_date,
|
|
limit=limit,
|
|
offset=offset
|
|
)
|
|
|
|
total = ActivityLog.count(
|
|
project_id=project_id,
|
|
user_id=user_id,
|
|
action=action,
|
|
category=category
|
|
)
|
|
|
|
return {
|
|
"activities": activities,
|
|
"total": total,
|
|
"limit": limit,
|
|
"offset": offset,
|
|
"has_more": (offset + limit) < total
|
|
}
|
|
|
|
@app.get("/api/audit/stats")
|
|
async def get_audit_stats():
|
|
"""Get audit log statistics."""
|
|
return {
|
|
"by_category": ActivityLog.get_stats_by_category(),
|
|
"by_user": ActivityLog.get_stats_by_user(),
|
|
"total_count": ActivityLog.count()
|
|
}
|
|
|
|
@app.get("/api/audit/categories")
|
|
async def get_audit_categories():
|
|
"""Get list of all activity categories."""
|
|
return ActivityLog.get_categories()
|
|
|
|
@app.get("/api/audit/actions")
|
|
async def get_audit_actions():
|
|
"""Get list of all activity actions."""
|
|
return ActivityLog.get_actions()
|
|
|
|
class AuditLogRequest(BaseModel):
|
|
action: str
|
|
entity_type: Optional[str] = None
|
|
entity_id: Optional[str] = None
|
|
entity_name: Optional[str] = None
|
|
project_id: Optional[str] = None
|
|
user_id: Optional[str] = None
|
|
user_name: Optional[str] = None
|
|
team_context: Optional[str] = None
|
|
description: Optional[str] = None
|
|
category: Optional[str] = None
|
|
severity: str = 'info'
|
|
details: Optional[Dict[str, Any]] = None
|
|
|
|
@app.post("/api/audit")
|
|
async def create_audit_entry(entry: AuditLogRequest, request: Any):
|
|
"""
|
|
Create a new audit log entry.
|
|
Automatically captures IP and user agent from request.
|
|
"""
|
|
# Extract IP and user agent from request
|
|
ip_address = request.client.host if hasattr(request, 'client') else None
|
|
user_agent = request.headers.get('user-agent') if hasattr(request, 'headers') else None
|
|
|
|
ActivityLog.log(
|
|
action=entry.action,
|
|
entity_type=entry.entity_type,
|
|
entity_id=entry.entity_id,
|
|
entity_name=entry.entity_name,
|
|
project_id=entry.project_id,
|
|
user_id=entry.user_id,
|
|
user_name=entry.user_name,
|
|
team_context=entry.team_context,
|
|
description=entry.description,
|
|
category=entry.category,
|
|
severity=entry.severity,
|
|
details=entry.details,
|
|
ip_address=ip_address,
|
|
user_agent=user_agent
|
|
)
|
|
|
|
return {"success": True, "message": "Audit entry created"}
|
|
|
|
@app.get("/api/audit/export")
|
|
async def export_audit_log(
|
|
project_id: Optional[str] = None,
|
|
category: Optional[str] = None,
|
|
start_date: Optional[str] = None,
|
|
end_date: Optional[str] = None,
|
|
format: str = Query(default="json", regex="^(json|csv)$")
|
|
):
|
|
"""
|
|
Export audit log in JSON or CSV format.
|
|
"""
|
|
activities = ActivityLog.search(
|
|
project_id=project_id,
|
|
category=category,
|
|
start_date=start_date,
|
|
end_date=end_date,
|
|
limit=10000 # Max export limit
|
|
)
|
|
|
|
if format == "csv":
|
|
import csv
|
|
import io
|
|
from fastapi.responses import StreamingResponse
|
|
|
|
output = io.StringIO()
|
|
if activities:
|
|
fieldnames = ['created_at', 'user_name', 'action', 'category', 'description', 'project_id', 'entity_type', 'entity_name', 'severity']
|
|
writer = csv.DictWriter(output, fieldnames=fieldnames, extrasaction='ignore')
|
|
writer.writeheader()
|
|
writer.writerows(activities)
|
|
|
|
output.seek(0)
|
|
return StreamingResponse(
|
|
iter([output.getvalue()]),
|
|
media_type="text/csv",
|
|
headers={"Content-Disposition": f"attachment; filename=audit_log_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}.csv"}
|
|
)
|
|
else:
|
|
# JSON format
|
|
return {
|
|
"activities": activities,
|
|
"total": len(activities),
|
|
"exported_at": datetime.utcnow().isoformat() + "Z"
|
|
}
|
|
|
|
|
|
# === Teams ===
|
|
|
|
@app.get("/api/teams")
|
|
async def list_teams():
|
|
"""List all teams."""
|
|
return Teams.list()
|
|
|
|
@app.post("/api/teams")
|
|
async def create_team(team: TeamCreate):
|
|
"""Create a new team."""
|
|
team_id = f"team-{int(datetime.utcnow().timestamp() * 1000)}"
|
|
created = Teams.create(team_id, team.name, team.description)
|
|
return created
|
|
|
|
@app.get("/api/teams/{team_id}")
|
|
async def get_team(team_id: str):
|
|
"""Get a specific team."""
|
|
team = Teams.get(team_id)
|
|
if not team:
|
|
raise HTTPException(status_code=404, detail="Team not found")
|
|
return team
|
|
|
|
|
|
# === Cache Management ===
|
|
|
|
@app.post("/api/cache/clear")
|
|
async def clear_cache():
|
|
"""Clear expired cache entries."""
|
|
count = Cache.clear_expired()
|
|
return {"cleared": count}
|
|
|
|
@app.delete("/api/cache")
|
|
async def purge_cache():
|
|
"""Purge all cache entries."""
|
|
Cache.clear_all()
|
|
return {"success": True}
|
|
|
|
|
|
# === Configuration Management ===
|
|
|
|
class ConfigUpdate(BaseModel):
|
|
mode: Optional[str] = None
|
|
figma_token: Optional[str] = None
|
|
services: Optional[Dict[str, Any]] = None
|
|
features: Optional[Dict[str, bool]] = None
|
|
|
|
|
|
@app.get("/api/config")
|
|
async def get_config():
|
|
"""Get current runtime configuration (secrets masked)."""
|
|
return {
|
|
"config": runtime_config.get(),
|
|
"env": config.summary(),
|
|
"mode": runtime_config.get("mode")
|
|
}
|
|
|
|
|
|
@app.put("/api/config")
|
|
async def update_config(update: ConfigUpdate):
|
|
"""Update runtime configuration."""
|
|
updates = {}
|
|
|
|
if update.mode:
|
|
updates["mode"] = update.mode
|
|
|
|
if update.figma_token is not None:
|
|
runtime_config.set_figma_token(update.figma_token)
|
|
# Reinitialize Figma tools with new token
|
|
global figma_suite
|
|
figma_suite = FigmaToolSuite(
|
|
token=update.figma_token,
|
|
output_dir=str(Path(__file__).parent.parent.parent / ".dss" / "output")
|
|
)
|
|
ActivityLog.log(
|
|
action="figma_token_updated",
|
|
entity_type="config",
|
|
details={"configured": bool(update.figma_token)}
|
|
)
|
|
|
|
if update.services:
|
|
updates["services"] = update.services
|
|
|
|
if update.features:
|
|
updates["features"] = update.features
|
|
|
|
if updates:
|
|
runtime_config.update(updates)
|
|
ActivityLog.log(
|
|
action="config_updated",
|
|
entity_type="config",
|
|
details={"keys": list(updates.keys())}
|
|
)
|
|
|
|
return runtime_config.get()
|
|
|
|
|
|
@app.get("/api/config/figma")
|
|
async def get_figma_config():
|
|
"""Get Figma configuration status."""
|
|
figma_cfg = runtime_config.get("figma")
|
|
return {
|
|
"configured": figma_cfg.get("configured", False),
|
|
"mode": figma_suite.mode,
|
|
"features": {
|
|
"extract_variables": True,
|
|
"extract_components": True,
|
|
"extract_styles": True,
|
|
"sync_tokens": True,
|
|
"validate": True,
|
|
"generate_code": True,
|
|
}
|
|
}
|
|
|
|
|
|
@app.post("/api/config/figma/test")
|
|
async def test_figma_connection():
|
|
"""Test Figma API connection."""
|
|
try:
|
|
# Try to make a simple API call
|
|
if not runtime_config.get("figma").get("configured"):
|
|
return {"success": False, "error": "Figma token not configured"}
|
|
|
|
# Test with a minimal API call
|
|
import httpx
|
|
token = runtime_config._data["figma"]["token"]
|
|
async with httpx.AsyncClient() as client:
|
|
resp = await client.get(
|
|
"https://api.figma.com/v1/me",
|
|
headers={"X-Figma-Token": token}
|
|
)
|
|
if resp.status_code == 200:
|
|
user = resp.json()
|
|
return {
|
|
"success": True,
|
|
"user": user.get("email", "connected"),
|
|
"handle": user.get("handle")
|
|
}
|
|
else:
|
|
return {"success": False, "error": f"API returned {resp.status_code}"}
|
|
except Exception as e:
|
|
return {"success": False, "error": str(e)}
|
|
|
|
|
|
# === Service Discovery ===
|
|
|
|
@app.get("/api/services")
|
|
async def list_services():
|
|
"""List configured and discovered services."""
|
|
configured = runtime_config.get("services")
|
|
discovered = await ServiceDiscovery.discover()
|
|
|
|
return {
|
|
"configured": configured,
|
|
"discovered": discovered,
|
|
"storybook": await ServiceDiscovery.check_storybook()
|
|
}
|
|
|
|
|
|
@app.put("/api/services/{service_name}")
|
|
async def configure_service(service_name: str, config_data: Dict[str, Any]):
|
|
"""Configure a service."""
|
|
services = runtime_config.get("services") or {}
|
|
services[service_name] = {**services.get(service_name, {}), **config_data}
|
|
runtime_config.set("services", services)
|
|
|
|
ActivityLog.log(
|
|
action="service_configured",
|
|
entity_type="service",
|
|
entity_id=service_name,
|
|
details={"keys": list(config_data.keys())}
|
|
)
|
|
|
|
return services[service_name]
|
|
|
|
|
|
@app.get("/api/services/storybook")
|
|
async def get_storybook_status():
|
|
"""Get Storybook service status."""
|
|
return await ServiceDiscovery.check_storybook()
|
|
|
|
|
|
# === DSS Mode ===
|
|
|
|
@app.get("/api/mode")
|
|
async def get_mode():
|
|
"""Get current DSS mode."""
|
|
mode = runtime_config.get("mode")
|
|
return {
|
|
"mode": mode,
|
|
"description": "Local dev companion" if mode == "local" else "Remote design system server",
|
|
"features": runtime_config.get("features")
|
|
}
|
|
|
|
|
|
@app.put("/api/mode")
|
|
async def set_mode(mode: str):
|
|
"""Set DSS mode (local or server)."""
|
|
if mode not in ["local", "server"]:
|
|
raise HTTPException(status_code=400, detail="Mode must be 'local' or 'server'")
|
|
|
|
runtime_config.set("mode", mode)
|
|
ActivityLog.log(
|
|
action="mode_changed",
|
|
entity_type="config",
|
|
details={"mode": mode}
|
|
)
|
|
|
|
return {"mode": mode, "success": True}
|
|
|
|
|
|
# === Run Server ===
|
|
|
|
# === Static Files (Admin UI) ===
|
|
# Mount at the end so API routes take precedence
|
|
# This enables portable mode: ./dss start serves everything on one port
|
|
|
|
# === System Administration ===
|
|
|
|
@app.post("/api/system/reset")
|
|
async def reset_dss(request_data: Dict[str, Any]):
|
|
"""
|
|
Reset DSS to fresh state by calling the reset command in dss-mvp1.
|
|
Requires confirmation.
|
|
"""
|
|
confirm = request_data.get("confirm", "")
|
|
|
|
if confirm != "RESET":
|
|
raise HTTPException(status_code=400, detail="Must confirm with 'RESET'")
|
|
|
|
try:
|
|
# Path to dss-mvp1 directory
|
|
dss_mvp1_path = Path(__file__).parent.parent.parent / "dss-mvp1"
|
|
|
|
# Run the reset command
|
|
result = subprocess.run(
|
|
["python3", "-m", "dss.settings", "reset", "--no-confirm"],
|
|
cwd=str(dss_mvp1_path),
|
|
capture_output=True,
|
|
text=True,
|
|
timeout=60
|
|
)
|
|
|
|
if result.returncode != 0:
|
|
raise Exception(f"Reset failed: {result.stderr}")
|
|
|
|
ActivityLog.log(
|
|
action="dss_reset",
|
|
entity_type="system",
|
|
details={"status": "success"}
|
|
)
|
|
|
|
return {
|
|
"success": True,
|
|
"message": "DSS has been reset to fresh state",
|
|
"output": result.stdout
|
|
}
|
|
|
|
except subprocess.TimeoutExpired:
|
|
raise HTTPException(status_code=504, detail="Reset operation timed out")
|
|
except Exception as e:
|
|
ActivityLog.log(
|
|
action="dss_reset_failed",
|
|
entity_type="system",
|
|
details={"error": str(e)}
|
|
)
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
# === Team Dashboards ===
|
|
|
|
@app.get("/api/projects/{project_id}/dashboard/summary")
|
|
async def get_dashboard_summary(project_id: str):
|
|
"""
|
|
Get dashboard summary for all teams (thin slice).
|
|
Provides overview of UX, UI, and QA metrics.
|
|
"""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
# UX Dashboard data
|
|
figma_files = FigmaFiles.list(project_id)
|
|
|
|
# UI Dashboard data
|
|
drift_stats = TokenDriftDetector.get_stats(project_id)
|
|
code_summary = CodeMetrics.get_project_summary(project_id)
|
|
|
|
# QA Dashboard data
|
|
esre_list = ESREDefinitions.list(project_id)
|
|
test_summary = TestResults.get_project_summary(project_id)
|
|
|
|
return {
|
|
"project_id": project_id,
|
|
"ux": {
|
|
"figma_files_count": len(figma_files),
|
|
"figma_files": figma_files[:5] # Show first 5
|
|
},
|
|
"ui": {
|
|
"token_drift": drift_stats,
|
|
"code_metrics": code_summary
|
|
},
|
|
"qa": {
|
|
"esre_count": len(esre_list),
|
|
"test_summary": test_summary
|
|
}
|
|
}
|
|
|
|
|
|
# === UX Dashboard: Figma File Management ===
|
|
|
|
@app.get("/api/projects/{project_id}/figma-files")
|
|
async def list_figma_files(project_id: str):
|
|
"""List all Figma files for a project (UX Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
return FigmaFiles.list(project_id)
|
|
|
|
|
|
@app.post("/api/projects/{project_id}/figma-files")
|
|
async def create_figma_file(project_id: str, figma_file: FigmaFileCreate):
|
|
"""Add a Figma file to a project (UX Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
created = FigmaFiles.create(
|
|
project_id=project_id,
|
|
figma_url=figma_file.figma_url,
|
|
file_name=figma_file.file_name,
|
|
file_key=figma_file.file_key
|
|
)
|
|
|
|
ActivityLog.log(
|
|
action="figma_file_added",
|
|
entity_type="figma_file",
|
|
entity_id=str(created['id']),
|
|
entity_name=figma_file.file_name,
|
|
project_id=project_id,
|
|
team_context="ux",
|
|
details={"file_key": figma_file.file_key}
|
|
)
|
|
|
|
return created
|
|
|
|
|
|
@app.put("/api/projects/{project_id}/figma-files/{file_id}/sync")
|
|
async def update_figma_file_sync(project_id: str, file_id: int, status: str = "synced"):
|
|
"""Update Figma file sync status (UX Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
updated = FigmaFiles.update_sync_status(
|
|
file_id=file_id,
|
|
status=status,
|
|
last_synced=datetime.utcnow().isoformat()
|
|
)
|
|
|
|
if not updated:
|
|
raise HTTPException(status_code=404, detail="Figma file not found")
|
|
|
|
ActivityLog.log(
|
|
action="figma_file_synced",
|
|
entity_type="figma_file",
|
|
entity_id=str(file_id),
|
|
project_id=project_id,
|
|
team_context="ux"
|
|
)
|
|
|
|
return updated
|
|
|
|
|
|
@app.delete("/api/projects/{project_id}/figma-files/{file_id}")
|
|
async def delete_figma_file(project_id: str, file_id: int):
|
|
"""Delete a Figma file (UX Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
if not FigmaFiles.delete(file_id):
|
|
raise HTTPException(status_code=404, detail="Figma file not found")
|
|
|
|
ActivityLog.log(
|
|
action="figma_file_deleted",
|
|
entity_type="figma_file",
|
|
entity_id=str(file_id),
|
|
project_id=project_id,
|
|
team_context="ux"
|
|
)
|
|
|
|
return {"success": True}
|
|
|
|
|
|
# === UI Dashboard: Token Drift Detection ===
|
|
|
|
@app.get("/api/projects/{project_id}/token-drift")
|
|
async def list_token_drift(project_id: str, severity: Optional[str] = None):
|
|
"""List token drift issues for a project (UI Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
drifts = TokenDriftDetector.list_by_project(project_id, severity)
|
|
stats = TokenDriftDetector.get_stats(project_id)
|
|
|
|
return {
|
|
"drifts": drifts,
|
|
"stats": stats
|
|
}
|
|
|
|
|
|
@app.post("/api/projects/{project_id}/token-drift")
|
|
async def record_token_drift(project_id: str, drift: TokenDriftCreate):
|
|
"""Record a token drift issue (UI Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
created = TokenDriftDetector.record_drift(
|
|
component_id=drift.component_id,
|
|
property_name=drift.property_name,
|
|
hardcoded_value=drift.hardcoded_value,
|
|
file_path=drift.file_path,
|
|
line_number=drift.line_number,
|
|
severity=drift.severity,
|
|
suggested_token=drift.suggested_token
|
|
)
|
|
|
|
ActivityLog.log(
|
|
action="token_drift_detected",
|
|
entity_type="token_drift",
|
|
entity_id=str(created['id']),
|
|
project_id=project_id,
|
|
team_context="ui",
|
|
details={
|
|
"severity": drift.severity,
|
|
"component_id": drift.component_id
|
|
}
|
|
)
|
|
|
|
return created
|
|
|
|
|
|
@app.put("/api/projects/{project_id}/token-drift/{drift_id}/status")
|
|
async def update_drift_status(project_id: str, drift_id: int, status: str):
|
|
"""Update token drift status: pending, fixed, ignored (UI Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
if status not in ["pending", "fixed", "ignored"]:
|
|
raise HTTPException(status_code=400, detail="Invalid status")
|
|
|
|
updated = TokenDriftDetector.update_status(drift_id, status)
|
|
|
|
if not updated:
|
|
raise HTTPException(status_code=404, detail="Drift issue not found")
|
|
|
|
ActivityLog.log(
|
|
action="token_drift_status_updated",
|
|
entity_type="token_drift",
|
|
entity_id=str(drift_id),
|
|
project_id=project_id,
|
|
team_context="ui",
|
|
details={"status": status}
|
|
)
|
|
|
|
return updated
|
|
|
|
|
|
# === QA Dashboard: ESRE Definitions ===
|
|
|
|
@app.get("/api/projects/{project_id}/esre")
|
|
async def list_esre_definitions(project_id: str):
|
|
"""List all ESRE definitions for a project (QA Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
return ESREDefinitions.list(project_id)
|
|
|
|
|
|
@app.post("/api/projects/{project_id}/esre")
|
|
async def create_esre_definition(project_id: str, esre: ESRECreate):
|
|
"""Create a new ESRE definition (QA Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
created = ESREDefinitions.create(
|
|
project_id=project_id,
|
|
name=esre.name,
|
|
definition_text=esre.definition_text,
|
|
expected_value=esre.expected_value,
|
|
component_name=esre.component_name
|
|
)
|
|
|
|
ActivityLog.log(
|
|
action="esre_created",
|
|
entity_type="esre",
|
|
entity_id=str(created['id']),
|
|
entity_name=esre.name,
|
|
project_id=project_id,
|
|
team_context="qa"
|
|
)
|
|
|
|
return created
|
|
|
|
|
|
@app.put("/api/projects/{project_id}/esre/{esre_id}")
|
|
async def update_esre_definition(project_id: str, esre_id: int, updates: ESRECreate):
|
|
"""Update an ESRE definition (QA Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
updated = ESREDefinitions.update(
|
|
esre_id=esre_id,
|
|
name=updates.name,
|
|
definition_text=updates.definition_text,
|
|
expected_value=updates.expected_value,
|
|
component_name=updates.component_name
|
|
)
|
|
|
|
if not updated:
|
|
raise HTTPException(status_code=404, detail="ESRE definition not found")
|
|
|
|
ActivityLog.log(
|
|
action="esre_updated",
|
|
entity_type="esre",
|
|
entity_id=str(esre_id),
|
|
entity_name=updates.name,
|
|
project_id=project_id,
|
|
team_context="qa"
|
|
)
|
|
|
|
return updated
|
|
|
|
|
|
@app.delete("/api/projects/{project_id}/esre/{esre_id}")
|
|
async def delete_esre_definition(project_id: str, esre_id: int):
|
|
"""Delete an ESRE definition (QA Dashboard)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
if not ESREDefinitions.delete(esre_id):
|
|
raise HTTPException(status_code=404, detail="ESRE definition not found")
|
|
|
|
ActivityLog.log(
|
|
action="esre_deleted",
|
|
entity_type="esre",
|
|
entity_id=str(esre_id),
|
|
project_id=project_id,
|
|
team_context="qa"
|
|
)
|
|
|
|
return {"success": True}
|
|
|
|
|
|
# === Claude Chat API with MCP Tool Integration ===
|
|
|
|
class ClaudeChatRequest(BaseModel):
|
|
"""Claude chat request model"""
|
|
message: str
|
|
context: Optional[Dict[str, Any]] = {}
|
|
history: Optional[List[Dict[str, Any]]] = []
|
|
project_id: Optional[str] = None
|
|
user_id: Optional[int] = 1
|
|
enable_tools: Optional[bool] = True
|
|
|
|
|
|
@app.post("/api/claude/chat")
|
|
async def claude_chat(request_data: ClaudeChatRequest):
|
|
"""
|
|
Chat with Claude AI via Anthropic API with MCP tool integration.
|
|
|
|
Claude can now execute DSS tools to:
|
|
- Get project information
|
|
- List/search components
|
|
- Get design tokens
|
|
- Interact with Figma, Jira, Confluence
|
|
|
|
Requires ANTHROPIC_API_KEY environment variable.
|
|
"""
|
|
message = request_data.message
|
|
context = request_data.context or {}
|
|
history = request_data.history or []
|
|
project_id = request_data.project_id or context.get("projectId")
|
|
user_id = request_data.user_id or 1
|
|
enable_tools = request_data.enable_tools
|
|
|
|
# Log the chat request
|
|
ActivityLog.log(
|
|
action="claude_chat",
|
|
entity_type="chat",
|
|
entity_id="claude",
|
|
details={"message_length": len(message), "tools_enabled": enable_tools}
|
|
)
|
|
|
|
try:
|
|
# Try to import Anthropic SDK
|
|
try:
|
|
from anthropic import Anthropic
|
|
except ImportError:
|
|
return {
|
|
"success": False,
|
|
"response": "Anthropic SDK not installed. Install it with: pip install anthropic",
|
|
"model": "error"
|
|
}
|
|
|
|
# Check for API key
|
|
api_key = os.getenv("ANTHROPIC_API_KEY")
|
|
if not api_key:
|
|
return {
|
|
"success": False,
|
|
"response": "Claude API key not configured. Set ANTHROPIC_API_KEY environment variable or add to .env file.",
|
|
"model": "error"
|
|
}
|
|
|
|
# Import MCP handler
|
|
from dss_mcp.handler import get_mcp_handler, MCPContext
|
|
|
|
mcp_handler = get_mcp_handler()
|
|
|
|
# Build system prompt with design system context
|
|
system_prompt = """You are a design system assistant with access to DSS (Design System Server) tools.
|
|
|
|
You can use tools to:
|
|
- Get project summaries, health scores, and statistics
|
|
- List and search components in the design system
|
|
- Get design tokens (colors, typography, spacing)
|
|
- Interact with Figma to extract designs
|
|
- Create/search Jira issues for tracking
|
|
- Access Confluence documentation
|
|
|
|
RULES:
|
|
- Use tools when the user asks about project data, components, or tokens
|
|
- Be concise: 2-3 sentences for simple questions
|
|
- When showing tool results, summarize key information
|
|
- If a tool fails, explain what went wrong
|
|
- Always provide actionable insights from tool data"""
|
|
|
|
# Add project context if available
|
|
if project_id:
|
|
try:
|
|
project_context = await mcp_handler.get_project_context(project_id, user_id)
|
|
if project_context:
|
|
system_prompt += f"""
|
|
|
|
CURRENT PROJECT CONTEXT:
|
|
- Project: {project_context.name} (ID: {project_id})
|
|
- Components: {project_context.component_count}
|
|
- Health Score: {project_context.health.get('score', 'N/A')}/100 (Grade: {project_context.health.get('grade', 'N/A')})
|
|
- Integrations: {', '.join(project_context.integrations.keys()) if project_context.integrations else 'None configured'}"""
|
|
except:
|
|
system_prompt += f"\n\nProject ID: {project_id} (context not loaded)"
|
|
|
|
# Add user context
|
|
if context:
|
|
context_parts = []
|
|
if "project" in context:
|
|
context_parts.append(f"Project: {context['project']}")
|
|
if "file" in context:
|
|
context_parts.append(f"Current file: {context['file']}")
|
|
if "component" in context:
|
|
context_parts.append(f"Component: {context['component']}")
|
|
if context_parts:
|
|
system_prompt += f"\n\nUser context:\n" + "\n".join(context_parts)
|
|
|
|
# Build conversation messages
|
|
messages = []
|
|
|
|
# Add recent history (handle tool_use in history)
|
|
for msg in history[-6:]:
|
|
role = msg.get("role", "user")
|
|
content = msg.get("content", "")
|
|
if content and role in ["user", "assistant"]:
|
|
messages.append({
|
|
"role": role,
|
|
"content": content
|
|
})
|
|
|
|
# Add current message
|
|
messages.append({
|
|
"role": "user",
|
|
"content": message
|
|
})
|
|
|
|
# Get tools for Claude if enabled
|
|
tools = None
|
|
if enable_tools and project_id:
|
|
tools = mcp_handler.get_tools_for_claude()
|
|
|
|
# Call Claude API
|
|
client = Anthropic(api_key=api_key)
|
|
|
|
# Initial API call
|
|
api_params = {
|
|
"model": "claude-sonnet-4-5-20250929",
|
|
"max_tokens": 4096,
|
|
"temperature": 0.7,
|
|
"system": system_prompt,
|
|
"messages": messages
|
|
}
|
|
|
|
if tools:
|
|
api_params["tools"] = tools
|
|
|
|
response = await asyncio.to_thread(
|
|
client.messages.create,
|
|
**api_params
|
|
)
|
|
|
|
# Handle tool use loop
|
|
tools_used = []
|
|
max_iterations = 5
|
|
iteration = 0
|
|
|
|
while response.stop_reason == "tool_use" and iteration < max_iterations:
|
|
iteration += 1
|
|
|
|
# Process tool calls
|
|
tool_results = []
|
|
for content_block in response.content:
|
|
if content_block.type == "tool_use":
|
|
tool_name = content_block.name
|
|
tool_input = content_block.input
|
|
tool_use_id = content_block.id
|
|
|
|
# Execute the tool
|
|
mcp_context = MCPContext(
|
|
project_id=project_id,
|
|
user_id=user_id
|
|
)
|
|
|
|
result = await mcp_handler.execute_tool(
|
|
tool_name=tool_name,
|
|
arguments=tool_input,
|
|
context=mcp_context
|
|
)
|
|
|
|
tools_used.append({
|
|
"tool": tool_name,
|
|
"success": result.success,
|
|
"duration_ms": result.duration_ms
|
|
})
|
|
|
|
# Format result for Claude
|
|
if result.success:
|
|
tool_result_content = json.dumps(result.result, indent=2)
|
|
else:
|
|
tool_result_content = json.dumps({"error": result.error})
|
|
|
|
tool_results.append({
|
|
"type": "tool_result",
|
|
"tool_use_id": tool_use_id,
|
|
"content": tool_result_content
|
|
})
|
|
|
|
# Add assistant response and tool results to messages
|
|
messages.append({"role": "assistant", "content": response.content})
|
|
messages.append({"role": "user", "content": tool_results})
|
|
|
|
# Call Claude again with tool results
|
|
response = await asyncio.to_thread(
|
|
client.messages.create,
|
|
**{**api_params, "messages": messages}
|
|
)
|
|
|
|
# Extract final response text
|
|
response_text = ""
|
|
for content_block in response.content:
|
|
if hasattr(content_block, "text"):
|
|
response_text += content_block.text
|
|
|
|
# Log tool usage
|
|
if tools_used:
|
|
ActivityLog.log(
|
|
action="claude_tools_used",
|
|
entity_type="chat",
|
|
entity_id="claude",
|
|
project_id=project_id,
|
|
details={"tools": tools_used}
|
|
)
|
|
|
|
return {
|
|
"success": True,
|
|
"response": response_text,
|
|
"model": response.model,
|
|
"tools_used": tools_used,
|
|
"stop_reason": response.stop_reason
|
|
}
|
|
|
|
except Exception as e:
|
|
error_msg = str(e)
|
|
return {
|
|
"success": False,
|
|
"response": f"Error connecting to Claude: {error_msg}\n\nMake sure your ANTHROPIC_API_KEY is valid and you have API access.",
|
|
"model": "error"
|
|
}
|
|
|
|
|
|
# === MCP Tools Proxy ===
|
|
|
|
@app.post("/api/mcp/{tool_name}")
|
|
async def execute_mcp_tool(tool_name: str, params: Dict[str, Any] = {}):
|
|
"""
|
|
Proxy MCP tool execution.
|
|
Calls the MCP server running on port 3457.
|
|
"""
|
|
try:
|
|
# Import MCP server functions
|
|
from mcp_server import (
|
|
get_status, list_projects, create_project, get_project,
|
|
extract_tokens, extract_components, generate_component_code,
|
|
sync_tokens_to_file, get_sync_history, get_activity,
|
|
ingest_css_tokens, ingest_scss_tokens, ingest_tailwind_tokens,
|
|
ingest_json_tokens, merge_tokens, export_tokens, validate_tokens,
|
|
discover_project, analyze_react_components, find_inline_styles,
|
|
find_style_patterns, analyze_style_values, find_unused_styles,
|
|
build_source_graph, get_quick_wins, get_quick_wins_report,
|
|
check_naming_consistency, scan_storybook, generate_story,
|
|
generate_stories_batch, generate_storybook_theme, get_story_coverage
|
|
)
|
|
|
|
# Map tool names to functions
|
|
tool_map = {
|
|
'get_status': get_status,
|
|
'list_projects': list_projects,
|
|
'create_project': create_project,
|
|
'get_project': get_project,
|
|
'extract_tokens': extract_tokens,
|
|
'extract_components': extract_components,
|
|
'generate_component_code': generate_component_code,
|
|
'sync_tokens_to_file': sync_tokens_to_file,
|
|
'get_sync_history': get_sync_history,
|
|
'get_activity': get_activity,
|
|
'ingest_css_tokens': ingest_css_tokens,
|
|
'ingest_scss_tokens': ingest_scss_tokens,
|
|
'ingest_tailwind_tokens': ingest_tailwind_tokens,
|
|
'ingest_json_tokens': ingest_json_tokens,
|
|
'merge_tokens': merge_tokens,
|
|
'export_tokens': export_tokens,
|
|
'validate_tokens': validate_tokens,
|
|
'discover_project': discover_project,
|
|
'analyze_react_components': analyze_react_components,
|
|
'find_inline_styles': find_inline_styles,
|
|
'find_style_patterns': find_style_patterns,
|
|
'analyze_style_values': analyze_style_values,
|
|
'find_unused_styles': find_unused_styles,
|
|
'build_source_graph': build_source_graph,
|
|
'get_quick_wins': get_quick_wins,
|
|
'get_quick_wins_report': get_quick_wins_report,
|
|
'check_naming_consistency': check_naming_consistency,
|
|
'scan_storybook': scan_storybook,
|
|
'generate_story': generate_story,
|
|
'generate_stories_batch': generate_stories_batch,
|
|
'generate_storybook_theme': generate_storybook_theme,
|
|
'get_story_coverage': get_story_coverage,
|
|
}
|
|
|
|
# Get the tool function
|
|
tool_func = tool_map.get(tool_name)
|
|
if not tool_func:
|
|
raise HTTPException(status_code=404, detail=f"Tool '{tool_name}' not found")
|
|
|
|
# Execute tool
|
|
result = await tool_func(**params)
|
|
|
|
# Log execution
|
|
ActivityLog.log(
|
|
action="mcp_tool_executed",
|
|
entity_type="tool",
|
|
entity_id=tool_name,
|
|
details={"params": list(params.keys())}
|
|
)
|
|
|
|
return JSONResponse(content={"success": True, "result": result})
|
|
|
|
except Exception as e:
|
|
ActivityLog.log(
|
|
action="mcp_tool_failed",
|
|
entity_type="tool",
|
|
entity_id=tool_name,
|
|
details={"error": str(e)}
|
|
)
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
# === MCP Integration Endpoints ===
|
|
|
|
class IntegrationCreate(BaseModel):
|
|
"""Create/Update integration configuration"""
|
|
integration_type: str # figma, jira, confluence, sequential-thinking
|
|
config: Dict[str, Any] # Encrypted in database
|
|
enabled: bool = True
|
|
|
|
|
|
class IntegrationUpdate(BaseModel):
|
|
"""Update integration"""
|
|
config: Optional[Dict[str, Any]] = None
|
|
enabled: Optional[bool] = None
|
|
|
|
|
|
@app.get("/api/mcp/integrations")
|
|
async def list_all_integrations():
|
|
"""List all available integration types and their health status."""
|
|
from storage.database import get_connection
|
|
|
|
try:
|
|
with get_connection() as conn:
|
|
health_rows = conn.execute(
|
|
"SELECT * FROM integration_health ORDER BY integration_type"
|
|
).fetchall()
|
|
|
|
integrations = []
|
|
for row in health_rows:
|
|
integrations.append({
|
|
"integration_type": row["integration_type"],
|
|
"is_healthy": bool(row["is_healthy"]),
|
|
"failure_count": row["failure_count"],
|
|
"last_failure_at": row["last_failure_at"],
|
|
"last_success_at": row["last_success_at"],
|
|
"circuit_open_until": row["circuit_open_until"]
|
|
})
|
|
|
|
return {"integrations": integrations}
|
|
except Exception as e:
|
|
# Table may not exist yet
|
|
return {
|
|
"integrations": [
|
|
{"integration_type": "figma", "is_healthy": True, "failure_count": 0},
|
|
{"integration_type": "jira", "is_healthy": True, "failure_count": 0},
|
|
{"integration_type": "confluence", "is_healthy": True, "failure_count": 0},
|
|
{"integration_type": "sequential-thinking", "is_healthy": True, "failure_count": 0}
|
|
],
|
|
"note": "Integration tables not yet initialized"
|
|
}
|
|
|
|
|
|
@app.get("/api/projects/{project_id}/integrations")
|
|
async def list_project_integrations(
|
|
project_id: str,
|
|
user_id: Optional[int] = Query(None, description="Filter by user ID")
|
|
):
|
|
"""List integrations configured for a project."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
from storage.database import get_connection
|
|
|
|
try:
|
|
with get_connection() as conn:
|
|
if user_id:
|
|
rows = conn.execute(
|
|
"""
|
|
SELECT id, integration_type, enabled, created_at, updated_at, last_used_at
|
|
FROM project_integrations
|
|
WHERE project_id = ? AND user_id = ?
|
|
ORDER BY integration_type
|
|
""",
|
|
(project_id, user_id)
|
|
).fetchall()
|
|
else:
|
|
rows = conn.execute(
|
|
"""
|
|
SELECT id, user_id, integration_type, enabled, created_at, updated_at, last_used_at
|
|
FROM project_integrations
|
|
WHERE project_id = ?
|
|
ORDER BY integration_type
|
|
""",
|
|
(project_id,)
|
|
).fetchall()
|
|
|
|
return {"integrations": [dict(row) for row in rows]}
|
|
except Exception as e:
|
|
return {"integrations": [], "error": str(e)}
|
|
|
|
|
|
@app.post("/api/projects/{project_id}/integrations")
|
|
async def create_integration(
|
|
project_id: str,
|
|
integration: IntegrationCreate,
|
|
user_id: int = Query(..., description="User ID for user-scoped integration")
|
|
):
|
|
"""Create or update integration for a project (user-scoped)."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
from storage.database import get_connection
|
|
from dss_mcp.config import mcp_config
|
|
|
|
# Encrypt config
|
|
config_json = json.dumps(integration.config)
|
|
cipher = mcp_config.get_cipher()
|
|
if cipher:
|
|
encrypted_config = cipher.encrypt(config_json.encode()).decode()
|
|
else:
|
|
encrypted_config = config_json # Store unencrypted if no key
|
|
|
|
try:
|
|
with get_connection() as conn:
|
|
# Upsert
|
|
conn.execute(
|
|
"""
|
|
INSERT INTO project_integrations (project_id, user_id, integration_type, config, enabled, updated_at)
|
|
VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
|
|
ON CONFLICT(project_id, user_id, integration_type)
|
|
DO UPDATE SET config = excluded.config, enabled = excluded.enabled, updated_at = CURRENT_TIMESTAMP
|
|
""",
|
|
(project_id, user_id, integration.integration_type, encrypted_config, integration.enabled)
|
|
)
|
|
|
|
ActivityLog.log(
|
|
action="integration_configured",
|
|
entity_type="integration",
|
|
entity_id=integration.integration_type,
|
|
project_id=project_id,
|
|
details={"user_id": user_id, "enabled": integration.enabled}
|
|
)
|
|
|
|
return {
|
|
"success": True,
|
|
"integration_type": integration.integration_type,
|
|
"enabled": integration.enabled
|
|
}
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@app.put("/api/projects/{project_id}/integrations/{integration_type}")
|
|
async def update_integration(
|
|
project_id: str,
|
|
integration_type: str,
|
|
update: IntegrationUpdate,
|
|
user_id: int = Query(..., description="User ID")
|
|
):
|
|
"""Update an existing integration."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
from storage.database import get_connection
|
|
from dss_mcp.config import mcp_config
|
|
|
|
try:
|
|
with get_connection() as conn:
|
|
updates = []
|
|
params = []
|
|
|
|
if update.config is not None:
|
|
config_json = json.dumps(update.config)
|
|
cipher = mcp_config.get_cipher()
|
|
if cipher:
|
|
encrypted_config = cipher.encrypt(config_json.encode()).decode()
|
|
else:
|
|
encrypted_config = config_json
|
|
updates.append("config = ?")
|
|
params.append(encrypted_config)
|
|
|
|
if update.enabled is not None:
|
|
updates.append("enabled = ?")
|
|
params.append(update.enabled)
|
|
|
|
if not updates:
|
|
return {"success": False, "message": "No updates provided"}
|
|
|
|
updates.append("updated_at = CURRENT_TIMESTAMP")
|
|
params.extend([project_id, user_id, integration_type])
|
|
|
|
conn.execute(
|
|
f"""
|
|
UPDATE project_integrations
|
|
SET {', '.join(updates)}
|
|
WHERE project_id = ? AND user_id = ? AND integration_type = ?
|
|
""",
|
|
params
|
|
)
|
|
|
|
return {"success": True, "integration_type": integration_type}
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@app.delete("/api/projects/{project_id}/integrations/{integration_type}")
|
|
async def delete_integration(
|
|
project_id: str,
|
|
integration_type: str,
|
|
user_id: int = Query(..., description="User ID")
|
|
):
|
|
"""Delete an integration configuration."""
|
|
if not Projects.get(project_id):
|
|
raise HTTPException(status_code=404, detail="Project not found")
|
|
|
|
from storage.database import get_connection
|
|
|
|
try:
|
|
with get_connection() as conn:
|
|
result = conn.execute(
|
|
"""
|
|
DELETE FROM project_integrations
|
|
WHERE project_id = ? AND user_id = ? AND integration_type = ?
|
|
""",
|
|
(project_id, user_id, integration_type)
|
|
)
|
|
|
|
if result.rowcount == 0:
|
|
raise HTTPException(status_code=404, detail="Integration not found")
|
|
|
|
ActivityLog.log(
|
|
action="integration_deleted",
|
|
entity_type="integration",
|
|
entity_id=integration_type,
|
|
project_id=project_id,
|
|
details={"user_id": user_id}
|
|
)
|
|
|
|
return {"success": True}
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@app.get("/api/mcp/tools")
|
|
async def list_mcp_tools(include_details: bool = Query(False, description="Include full tool schemas")):
|
|
"""List all available MCP tools via unified handler."""
|
|
from dss_mcp.handler import get_mcp_handler
|
|
|
|
handler = get_mcp_handler()
|
|
return handler.list_tools(include_details=include_details)
|
|
|
|
|
|
@app.get("/api/mcp/tools/{tool_name}")
|
|
async def get_mcp_tool_info(tool_name: str):
|
|
"""Get detailed information about a specific MCP tool."""
|
|
from dss_mcp.handler import get_mcp_handler
|
|
|
|
handler = get_mcp_handler()
|
|
info = handler.get_tool_info(tool_name)
|
|
|
|
if not info:
|
|
raise HTTPException(status_code=404, detail=f"Tool not found: {tool_name}")
|
|
|
|
return info
|
|
|
|
|
|
class MCPToolExecuteRequest(BaseModel):
|
|
"""Request to execute an MCP tool"""
|
|
arguments: Dict[str, Any]
|
|
project_id: str
|
|
user_id: Optional[int] = 1
|
|
|
|
|
|
@app.post("/api/mcp/tools/{tool_name}/execute")
|
|
async def execute_mcp_tool(tool_name: str, request: MCPToolExecuteRequest):
|
|
"""
|
|
Execute an MCP tool via unified handler.
|
|
|
|
All tool executions go through the central MCPHandler which:
|
|
- Validates tool existence
|
|
- Checks integration configurations
|
|
- Applies circuit breaker protection
|
|
- Logs execution metrics
|
|
"""
|
|
from dss_mcp.handler import get_mcp_handler, MCPContext
|
|
|
|
handler = get_mcp_handler()
|
|
|
|
# Create execution context
|
|
context = MCPContext(
|
|
project_id=request.project_id,
|
|
user_id=request.user_id
|
|
)
|
|
|
|
# Execute tool
|
|
result = await handler.execute_tool(
|
|
tool_name=tool_name,
|
|
arguments=request.arguments,
|
|
context=context
|
|
)
|
|
|
|
# Log to activity
|
|
ActivityLog.log(
|
|
action="mcp_tool_executed",
|
|
entity_type="tool",
|
|
entity_id=tool_name,
|
|
project_id=request.project_id,
|
|
details={
|
|
"success": result.success,
|
|
"duration_ms": result.duration_ms,
|
|
"error": result.error
|
|
}
|
|
)
|
|
|
|
return result.to_dict()
|
|
|
|
|
|
@app.get("/api/mcp/status")
|
|
async def get_mcp_status():
|
|
"""Get MCP server status and configuration."""
|
|
from dss_mcp.config import mcp_config, integration_config, validate_config
|
|
|
|
warnings = validate_config()
|
|
|
|
return {
|
|
"server": {
|
|
"host": mcp_config.HOST,
|
|
"port": mcp_config.PORT,
|
|
"encryption_enabled": bool(mcp_config.ENCRYPTION_KEY),
|
|
"context_cache_ttl": mcp_config.CONTEXT_CACHE_TTL
|
|
},
|
|
"integrations": {
|
|
"figma": bool(integration_config.FIGMA_TOKEN),
|
|
"anthropic": bool(integration_config.ANTHROPIC_API_KEY),
|
|
"jira_default": bool(integration_config.JIRA_URL),
|
|
"confluence_default": bool(integration_config.CONFLUENCE_URL)
|
|
},
|
|
"circuit_breaker": {
|
|
"failure_threshold": mcp_config.CIRCUIT_BREAKER_FAILURE_THRESHOLD,
|
|
"timeout_seconds": mcp_config.CIRCUIT_BREAKER_TIMEOUT_SECONDS
|
|
},
|
|
"warnings": warnings
|
|
}
|
|
|
|
|
|
UI_DIR = Path(__file__).parent.parent.parent / "admin-ui"
|
|
if UI_DIR.exists():
|
|
app.mount("/", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
import uvicorn
|
|
|
|
port = int(os.getenv("PORT", "3456"))
|
|
host = os.getenv("HOST", "0.0.0.0")
|
|
|
|
url = f"http://{host}:{port}"
|
|
print(f"""
|
|
╔═══════════════════════════════════════════════════════════════╗
|
|
║ Design System Server (DSS) - Portable Server ║
|
|
╠═══════════════════════════════════════════════════════════════╣
|
|
║ Dashboard: {url + '/':^47}║
|
|
║ API: {url + '/api':^47}║
|
|
║ Docs: {url + '/docs':^47}║
|
|
║ Environment: {config.server.env:^47}║
|
|
║ Figma Mode: {figma_suite.mode:^47}║
|
|
╚═══════════════════════════════════════════════════════════════╝
|
|
""")
|
|
|
|
uvicorn.run(
|
|
"server:app",
|
|
host=host,
|
|
port=port,
|
|
reload=config.server.env == "development"
|
|
)
|