Unify MCP across clients; remove legacy plugin server
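The legacy plugin MCP server path is removed; all clients (Admin UI, chat, and the legacy /api/mcp/tool endpoint) now route through the unified handler in dss.mcp.handler. As a quick sanity check, both request shapes should hit the same handler (a sketch; the base URL and tool name are illustrative, not part of this commit):

    import httpx

    BASE = "http://localhost:8000"  # assumed local dev address

    # New-style payload: explicit arguments plus execution context.
    new_style = {"arguments": {}, "project_id": "demo", "user_id": 1}
    # Old Admin UI payload: bare tool arguments at the top level.
    old_style = {}

    for payload in (new_style, old_style):
        r = httpx.post(f"{BASE}/api/mcp/tools/get_status/execute", json=payload)
        print(r.json())

See the backwards-compatible request parsing further down in this diff.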
@@ -25,7 +25,7 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional
 
 from dotenv import load_dotenv
-from fastapi import BackgroundTasks, Depends, FastAPI, Header, HTTPException, Query
+from fastapi import BackgroundTasks, Body, Depends, FastAPI, Header, HTTPException, Query
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
 from fastapi.staticfiles import StaticFiles
@@ -45,6 +45,7 @@ from dss.storage.json_store import (
     Cache,
     CodeMetrics,
     Components,
     ESREDefinitions,
     FigmaFiles,
     IntegrationHealth,
     Integrations,
@@ -52,6 +53,7 @@ from dss.storage.json_store import (
     SyncHistory,
     Teams,
     TestResults,
     TokenDriftDetector,
     get_stats,
 )
 
@@ -59,6 +61,12 @@ from dss.storage.json_store import (
 _server_file = Path(__file__).resolve()
 _project_root = _server_file.parent.parent.parent  # /home/.../dss
 
+# Admin UI static serving (production-like)
+# - In dev, run `admin-ui` via Vite (`npm run dev`) and use its `/api` proxy.
+# - In headless/server mode, serve the built UI bundle from `admin-ui/dist/`.
+_admin_ui_dist = _project_root / "admin-ui" / "dist"
+_admin_ui_dist_index = _admin_ui_dist / "index.html"
+
 # Try loading from multiple possible .env locations
 env_paths = [
     _project_root / ".env",  # root .env (primary)
@@ -134,7 +142,11 @@ class _ConfigCompat:
                 "env": settings.SERVER_ENV,
                 "log_level": settings.LOG_LEVEL,
             },
-            "database": {"path": str(settings.DATABASE_PATH)},
+            "storage": {
+                "type": "json",
+                "dss_home": str(settings.DSS_HOME),
+                "data_dir": str(settings.DATA_DIR),
+            },
         }
 
 
@@ -226,7 +238,7 @@ runtime_config = RuntimeConfig()
 config_service = ConfigService()
 project_manager = ProjectManager(Projects, config_service)
 
-# Ensure database schema is up to date (adds root_path column if missing)
+# Legacy compatibility hook: JSON storage needs no migrations.
 ProjectManager.ensure_schema()
 
 
@@ -419,7 +431,7 @@ async def login(request: LoginRequest):
     Authenticate with Atlassian credentials.
 
     Validates credentials against Jira or Confluence API,
-    creates/updates user in database, returns JWT token.
+    creates/updates user in JSON storage, returns JWT token.
     """
     try:
         auth = get_auth()
@@ -451,10 +463,19 @@ async def get_me(user: Dict[str, Any] = Depends(get_current_user)):
 
 @app.get("/")
 async def root():
-    """Redirect to Admin UI dashboard."""
-    from fastapi.responses import RedirectResponse
-
-    return RedirectResponse(url="/index.html")
+    """Serve the Admin UI (when built) or show setup guidance."""
+    if _admin_ui_dist_index.exists():
+        from fastapi.responses import RedirectResponse
+
+        return RedirectResponse(url="/admin-ui/index.html")
+
+    return JSONResponse(
+        status_code=200,
+        content={
+            "status": "ok",
+            "message": "Admin UI is not built. Run `cd admin-ui && npm run build` (or `npm run dev` for development).",
+        },
+    )
 
 
 @app.get("/health")
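A minimal way to observe the new root behavior (the port is an assumption for a local run):

    import httpx

    r = httpx.get("http://localhost:8000/", follow_redirects=False)
    # 30x redirect to /admin-ui/index.html when the bundle is built,
    # otherwise a 200 JSON body with build instructions.
    print(r.status_code)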
@@ -494,7 +515,7 @@ async def health():
         if str(project_root) not in sys.path:
             sys.path.insert(0, str(project_root))
 
-        from dss.mcp_server.handler import get_mcp_handler
+        from dss.mcp.handler import get_mcp_handler
         handler = get_mcp_handler()
         mcp_ok = handler is not None
     except Exception as e:
@@ -558,25 +579,18 @@ async def receive_browser_logs(logs: dict):
     log_file = browser_logs_dir / f"{session_id}.json"
     log_file.write_text(json.dumps(logs, indent=2))
 
-    # Log to activity (skip if ActivityLog not available)
+    # Log to activity (JSON store)
     try:
-        with get_connection() as conn:
-            conn.execute(
-                """
-                INSERT INTO activity_log (category, action, details, metadata, created_at)
-                VALUES (?, ?, ?, ?, ?)
-                """,
-                (
-                    "debug",
-                    "browser_logs_received",
-                    f"Received browser logs for session {session_id}",
-                    json.dumps({"session_id": session_id, "log_count": len(logs.get("logs", []))}),
-                    datetime.utcnow().isoformat(),
-                ),
-            )
-            conn.commit()
-    except:
-        pass  # Activity logging is optional
+        ActivityLog.log(
+            action="browser_logs_received",
+            entity_type="browser_logs",
+            entity_id=session_id,
+            description=f"Received browser logs for session {session_id}",
+            category="debug",
+            details={"session_id": session_id, "log_count": len(logs.get("logs", []))},
+        )
+    except Exception:
+        pass  # Activity logging is best-effort
 
     # Check for errors and create notification task
     error_count = logs.get("diagnostic", {}).get("errorCount", 0)
@@ -651,7 +665,7 @@ async def get_debug_diagnostic():
     - Health status (from /health endpoint)
     - Browser log session count
     - API uptime
-    - Database size and stats
+    - Storage size and stats
     - Memory usage
    - Recent errors
    """
@@ -668,31 +682,34 @@ async def get_debug_diagnostic():
         browser_logs_dir.mkdir(parents=True, exist_ok=True)
         browser_sessions = len(list(browser_logs_dir.glob("*.json")))
 
-        # Get database size
-        db_path = Path(__file__).parent.parent.parent / ".dss" / "dss.db"
-        db_size_bytes = db_path.stat().st_size if db_path.exists() else 0
+        from dss.storage.json_store import DATA_DIR, ActivityLog, get_stats
+
+        storage_stats = get_stats()
+
         # Get process stats
         process = psutil.Process(os.getpid())
         memory_info = process.memory_info()
 
-        # Get recent errors from activity log
+        # Get recent errors from activity log (JSON)
         recent_errors: List[Dict[str, Any]] = []
         try:
-            with get_connection() as conn:
-                recent_errors = conn.execute(
-                    """
-                    SELECT category, action, details, created_at
-                    FROM activity_log
-                    WHERE category = 'error' OR action LIKE '%error%' OR action LIKE '%fail%'
-                    ORDER BY created_at DESC
-                    LIMIT 10
-                    """
-                ).fetchall()
-                recent_errors = [
-                    {"category": row[0], "action": row[1], "details": row[2], "timestamp": row[3]}
-                    for row in recent_errors
-                ]
-        except:
+            candidates = ActivityLog.search(days=7, limit=200)
+            for r in candidates:
+                action = (r.get("action") or "").lower()
+                severity = (r.get("severity") or "").lower()
+                if severity in {"error", "critical"} or "error" in action or "fail" in action:
+                    recent_errors.append(
+                        {
+                            "category": r.get("category"),
+                            "action": r.get("action"),
+                            "details": r.get("details"),
+                            "timestamp": r.get("timestamp"),
+                            "severity": r.get("severity"),
+                        }
+                    )
+                    if len(recent_errors) >= 10:
+                        break
+        except Exception:
             recent_errors = []
 
         return {
@@ -700,10 +717,10 @@ async def get_debug_diagnostic():
         "timestamp": datetime.utcnow().isoformat() + "Z",
         "health": health_status,
         "browser": {"session_count": browser_sessions, "logs_directory": str(browser_logs_dir)},
-        "database": {
-            "size_bytes": db_size_bytes,
-            "size_mb": round(db_size_bytes / 1024 / 1024, 2),
-            "path": str(db_path),
+        "storage": {
+            "type": "json",
+            "path": str(DATA_DIR),
+            "stats": storage_stats,
         },
         "process": {
             "pid": os.getpid(),
@@ -793,10 +810,12 @@ async def get_config():
 
 @app.get("/api/stats")
 async def get_statistics():
-    """Get database and system statistics."""
-    db_stats = get_stats()
+    """Get storage and system statistics."""
+    storage_stats = get_stats()
     return {
-        "database": db_stats,
+        "storage": storage_stats,
+        # Backwards-compatible alias (historical naming; underlying storage is JSON files).
+        "database": storage_stats,
         "figma": {"mode": figma_suite.mode, "configured": config.figma.is_configured},
     }
 
@@ -1469,97 +1488,80 @@ async def get_storybook_status():
 
 
 @app.post("/api/storybook/init")
-async def init_storybook(request_data: Dict[str, Any] = None):
+async def init_storybook(request_data: Dict[str, Any] = Body(default_factory=dict)):
     """
-    Initialize Storybook with design system components.
+    Initialize Storybook stories for a project.
 
-    Clears existing generated stories and generates new ones from
-    the specified component source path.
+    Clears previously auto-generated stories and regenerates them using the
+    shared DSS StoryGenerator.
 
     Request body (optional):
-        source_path: Path to components directory (defaults to configured path)
+        - project_id: DSS project id (recommended for headless server mode)
+        - path: absolute path to the project directory (local/dev mode)
 
     Returns:
         JSON with generation status and count
     """
-    import shutil
-    import sys
 
     try:
-        # Get paths
-        dss_mvp1_path = Path(__file__).parent.parent.parent / "dss-mvp1"
-        generated_dir = dss_mvp1_path / "stories" / "generated"
+        from dss.storage.json_store import Projects
+        from dss.storybook.generator import StoryGenerator, StoryTemplate
 
-        # Default source path - can be overridden in request
-        source_path = dss_mvp1_path / "dss" / "components"
-        if request_data and request_data.get("source_path"):
-            # Validate path is within allowed directories
-            requested_path = Path(request_data["source_path"]).resolve()
-            if not str(requested_path).startswith(str(dss_mvp1_path.resolve())):
-                raise HTTPException(status_code=400, detail="Source path must be within dss-mvp1")
-            source_path = requested_path
-
-        # Step 1: Clear existing generated stories
-        if generated_dir.exists():
-            for item in generated_dir.iterdir():
-                if item.name != ".gitkeep":
-                    if item.is_dir():
-                        shutil.rmtree(item)
-                    else:
-                        item.unlink()
+        # Resolve project root (prefer project_id in headless mode)
+        project_root = None
+        if request_data.get("project_id"):
+            project = Projects.get(request_data["project_id"])
+            if not project:
+                raise HTTPException(status_code=404, detail="Project not found")
+            project_root = project.get("root_path")
+        elif request_data.get("path"):
+            project_root = request_data["path"]
         else:
-            generated_dir.mkdir(parents=True, exist_ok=True)
+            # Default: first registered project with a root_path, else repo admin-ui.
+            for project in Projects.list():
+                if project.get("root_path"):
+                    project_root = project["root_path"]
+                    break
+            project_root = project_root or str(_project_root / "admin-ui")
 
-        # Step 2: Generate stories using StoryGenerator
-        stories_generated = 0
-        errors = []
+        root = Path(project_root).resolve()
+        if not root.exists():
+            raise HTTPException(status_code=400, detail=f"Project path not found: {root}")
 
-        # Add dss-mvp1 to path for imports
-        sys.path.insert(0, str(dss_mvp1_path))
+        # Clear previously auto-generated stories (do NOT touch hand-written stories)
+        marker = "Auto-generated by DSS Storybook Generator"
+        cleared = 0
+        for pattern in ["**/*.stories.tsx", "**/*.stories.jsx", "**/*.stories.js"]:
+            for story_path in root.rglob(pattern):
+                if any(skip in story_path.parts for skip in {"node_modules", ".git", "dist", "build"}):
+                    continue
+                try:
+                    if marker in story_path.read_text(encoding="utf-8", errors="ignore"):
+                        story_path.unlink()
+                        cleared += 1
+                except Exception:
+                    continue
 
-        try:
-            from dss.storybook.generator import StoryGenerator, StoryTemplate
+        generator = StoryGenerator(str(root))
+        results = generator.generate(template=StoryTemplate.CSF3, dry_run=False)
 
-            generator = StoryGenerator(str(dss_mvp1_path))
-
-            # Check if source path exists and has components
-            if source_path.exists():
-                results = await generator.generate_stories_for_directory(
-                    str(source_path.relative_to(dss_mvp1_path)),
-                    template=StoryTemplate.CSF3,
-                    dry_run=False,
-                )
-
-                # Move generated stories to stories/generated/
-                for result in results:
-                    if "story" in result and "error" not in result:
-                        story_filename = Path(result["component"]).stem + ".stories.js"
-                        output_path = generated_dir / story_filename
-                        output_path.write_text(result["story"])
-                        stories_generated += 1
-                    elif "error" in result:
-                        errors.append(result)
-            else:
-                # No components yet - that's okay, Storybook will show welcome
-                pass
-
-        except ImportError as e:
-            # StoryGenerator not available - log but don't fail
-            errors.append({"error": f"StoryGenerator import failed: {str(e)}"})
-        finally:
-            # Clean up path
-            if str(dss_mvp1_path) in sys.path:
-                sys.path.remove(str(dss_mvp1_path))
+        stories_generated = len([r for r in results if r.get("written")])
+        errors = [r for r in results if r.get("error")]
 
         ActivityLog.log(
             action="storybook_initialized",
             entity_type="storybook",
-            details={"stories_generated": stories_generated, "errors_count": len(errors)},
+            details={
+                "project_path": str(root),
+                "stories_generated": stories_generated,
+                "cleared": cleared,
+                "errors_count": len(errors),
+            },
        )
 
        return {
            "success": True,
            "stories_generated": stories_generated,
+            "cleared": cleared,
            "message": f"Generated {stories_generated} stories"
            if stories_generated > 0
            else "Storybook initialized (no components found)",
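Illustrative calls to the reworked endpoint (the base URL, project id, and path are placeholders):

    import httpx

    # Headless server mode: target a registered DSS project by id.
    httpx.post("http://localhost:8000/api/storybook/init", json={"project_id": "my-project"})

    # Local/dev mode: point at a project directory instead.
    httpx.post("http://localhost:8000/api/storybook/init", json={"path": "/abs/path/to/project"})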
@@ -1576,32 +1578,51 @@ async def init_storybook(request_data: Dict[str, Any] = None):
 
 
 @app.delete("/api/storybook/stories")
-async def clear_storybook_stories():
+async def clear_storybook_stories(request_data: Dict[str, Any] = Body(default_factory=dict)):
     """
     Clear all generated stories from Storybook.
 
     Returns Storybook to blank state (only Welcome page).
     """
-    import shutil
 
     try:
-        dss_mvp1_path = Path(__file__).parent.parent.parent / "dss-mvp1"
-        generated_dir = dss_mvp1_path / "stories" / "generated"
+        from dss.storage.json_store import Projects
 
+        project_root = None
+        if request_data.get("project_id"):
+            project = Projects.get(request_data["project_id"])
+            if not project:
+                raise HTTPException(status_code=404, detail="Project not found")
+            project_root = project.get("root_path")
+        elif request_data.get("path"):
+            project_root = request_data["path"]
+        else:
+            for project in Projects.list():
+                if project.get("root_path"):
+                    project_root = project["root_path"]
+                    break
+            project_root = project_root or str(_project_root / "admin-ui")
+
+        root = Path(project_root).resolve()
+        if not root.exists():
+            raise HTTPException(status_code=400, detail=f"Project path not found: {root}")
+
+        marker = "Auto-generated by DSS Storybook Generator"
         cleared_count = 0
-        if generated_dir.exists():
-            for item in generated_dir.iterdir():
-                if item.name != ".gitkeep":
-                    if item.is_dir():
-                        shutil.rmtree(item)
-                    else:
-                        item.unlink()
-                    cleared_count += 1
+        for pattern in ["**/*.stories.tsx", "**/*.stories.jsx", "**/*.stories.js"]:
+            for story_path in root.rglob(pattern):
+                if any(skip in story_path.parts for skip in {"node_modules", ".git", "dist", "build"}):
+                    continue
+                try:
+                    if marker in story_path.read_text(encoding="utf-8", errors="ignore"):
+                        story_path.unlink()
+                        cleared_count += 1
+                except Exception:
+                    continue
 
         ActivityLog.log(
             action="storybook_cleared",
             entity_type="storybook",
-            details={"cleared_count": cleared_count},
+            details={"cleared_count": cleared_count, "project_path": str(root)},
         )
 
         return {
@@ -1900,44 +1921,10 @@ async def execute_ingestion(
     tokens_extracted = 0
 
     if method == "npm" and system:
-        # Import existing token ingestion tools
-        sys.path.insert(0, str(Path(__file__).parent.parent.parent / "dss-mvp1"))
-
-        try:
-            from dss.ingest import TokenCollection
-
-            # Create a token collection for this design system
-            collection = TokenCollection(name=system.name)
-
-            # Based on primary ingestion method, use appropriate source
-            if system.primary_ingestion.value == "css_variables":
-                if system.css_cdn_url:
-                    # Fetch CSS from CDN and parse
-                    import httpx
-
-                    async with httpx.AsyncClient() as client:
-                        resp = await client.get(system.css_cdn_url)
-                        if resp.status_code == 200:
-                            from dss.ingest.css import CSSTokenSource
-
-                            # Write temp file and parse
-                            temp_css = Path("/tmp") / f"{system.id}_tokens.css"
-                            temp_css.write_text(resp.text)
-                            source = CSSTokenSource(str(temp_css))
-                            source.parse()
-                            collection.merge(source.tokens)
-                            tokens_extracted = len(collection.tokens)
-
-            elif system.primary_ingestion.value == "tailwind_config":
-                # For Tailwind-based systems, we'll need their config
-                tokens_extracted = 0  # Placeholder for Tailwind parsing
-
-        except ImportError:
-            # Token ingestion module not available
-            pass
-        finally:
-            if str(Path(__file__).parent.parent.parent / "dss-mvp1") in sys.path:
-                sys.path.remove(str(Path(__file__).parent.parent.parent / "dss-mvp1"))
+        # MVP: npm ingestion is not implemented yet.
+        # Prefer using the dedicated ingest endpoints (/api/ingest/npm/*) to
+        # discover packages, then add a concrete extraction strategy per system.
+        tokens_extracted = 0
 
     elif method == "figma" and source_url:
         # Use existing Figma extraction
@@ -1947,23 +1934,13 @@ async def execute_ingestion(
     elif method == "css" and source_url:
         # Fetch and parse CSS
         import httpx
+        from dss.ingest.css import CSSTokenSource
 
-        sys.path.insert(0, str(Path(__file__).parent.parent.parent / "dss-mvp1"))
-
-        try:
-            async with httpx.AsyncClient() as client:
-                resp = await client.get(source_url)
-                if resp.status_code == 200:
-                    from dss.ingest.css import CSSTokenSource
-
-                    temp_css = Path("/tmp") / "ingested_tokens.css"
-                    temp_css.write_text(resp.text)
-                    source = CSSTokenSource(str(temp_css))
-                    source.parse()
-                    tokens_extracted = len(source.tokens.tokens)
-        finally:
-            if str(Path(__file__).parent.parent.parent / "dss-mvp1") in sys.path:
-                sys.path.remove(str(Path(__file__).parent.parent.parent / "dss-mvp1"))
+        async with httpx.AsyncClient(timeout=30.0) as client:
+            resp = await client.get(source_url)
+            resp.raise_for_status()
+            collection = await CSSTokenSource().extract(resp.text)
+            tokens_extracted = len(collection.tokens)
 
     ActivityLog.log(
         action="ingestion_executed",
@@ -2051,7 +2028,7 @@ async def set_mode(request_data: Dict[str, Any]):
 @app.post("/api/system/reset")
 async def reset_dss(request_data: Dict[str, Any]):
     """
-    Reset DSS to fresh state by calling the reset command in dss-mvp1.
+    Reset DSS to fresh state by calling the built-in reset command.
 
     Requires confirmation.
     """
@@ -2061,13 +2038,12 @@ async def reset_dss(request_data: Dict[str, Any]):
         raise HTTPException(status_code=400, detail="Must confirm with 'RESET'")
 
     try:
-        # Path to dss-mvp1 directory
-        dss_mvp1_path = Path(__file__).parent.parent.parent / "dss-mvp1"
+        repo_root = Path(__file__).resolve().parent.parent.parent
 
         # Run the reset command
         result = subprocess.run(
             ["python3", "-m", "dss.settings", "reset", "--no-confirm"],
-            cwd=str(dss_mvp1_path),
+            cwd=str(repo_root),
             capture_output=True,
             text=True,
             timeout=60,
@@ -2229,6 +2205,7 @@ async def record_token_drift(project_id: str, drift: TokenDriftCreate):
         raise HTTPException(status_code=404, detail="Project not found")
 
     created = TokenDriftDetector.record_drift(
+        project_id=project_id,
         component_id=drift.component_id,
         property_name=drift.property_name,
         hardcoded_value=drift.hardcoded_value,
@@ -2251,15 +2228,18 @@ async def record_token_drift(project_id: str, drift: TokenDriftCreate):
 
 
 @app.put("/api/projects/{project_id}/token-drift/{drift_id}/status")
-async def update_drift_status(project_id: str, drift_id: int, status: str):
+async def update_drift_status(
+    project_id: str, drift_id: str, payload: Dict[str, Any] = Body(default_factory=dict)
+):
     """Update token drift status: pending, fixed, ignored (UI Dashboard)."""
     if not Projects.get(project_id):
         raise HTTPException(status_code=404, detail="Project not found")
 
+    status = payload.get("status") or payload.get("status", "")
     if status not in ["pending", "fixed", "ignored"]:
         raise HTTPException(status_code=400, detail="Invalid status")
 
-    updated = TokenDriftDetector.update_status(drift_id, status)
+    updated = TokenDriftDetector.update_status(project_id=project_id, drift_id=drift_id, status=status)
 
     if not updated:
         raise HTTPException(status_code=404, detail="Drift issue not found")
@@ -2314,12 +2294,13 @@ async def create_esre_definition(project_id: str, esre: ESRECreate):
 
 
 @app.put("/api/projects/{project_id}/esre/{esre_id}")
-async def update_esre_definition(project_id: str, esre_id: int, updates: ESRECreate):
+async def update_esre_definition(project_id: str, esre_id: str, updates: ESRECreate):
     """Update an ESRE definition (QA Dashboard)."""
     if not Projects.get(project_id):
         raise HTTPException(status_code=404, detail="Project not found")
 
     updated = ESREDefinitions.update(
+        project_id=project_id,
         esre_id=esre_id,
         name=updates.name,
         definition_text=updates.definition_text,
@@ -2343,12 +2324,12 @@ async def update_esre_definition(project_id: str, esre_id: int, updates: ESRECreate):
 
 
 @app.delete("/api/projects/{project_id}/esre/{esre_id}")
-async def delete_esre_definition(project_id: str, esre_id: int):
+async def delete_esre_definition(project_id: str, esre_id: str):
     """Delete an ESRE definition (QA Dashboard)."""
     if not Projects.get(project_id):
         raise HTTPException(status_code=404, detail="Project not found")
 
-    if not ESREDefinitions.delete(esre_id):
+    if not ESREDefinitions.delete(project_id=project_id, esre_id=esre_id):
         raise HTTPException(status_code=404, detail="ESRE definition not found")
 
     ActivityLog.log(
@@ -2424,11 +2405,13 @@ async def claude_chat(request_data: ClaudeChatRequest):
             "model": "error",
         }
 
-    # Import MCP handler (may fail if database not migrated)
+    # Import MCP handler (optional; tools disabled if unavailable)
     mcp_handler = None
     MCPContext = None
     try:
-        from dss_mcp.handler import get_mcp_handler, MCPContext as _MCPContext
+        from dss.mcp.handler import MCPContext as _MCPContext
+        from dss.mcp.handler import get_mcp_handler
+
         MCPContext = _MCPContext
         mcp_handler = get_mcp_handler()
     except Exception as e:
@@ -2490,10 +2473,7 @@ CURRENT PROJECT CONTEXT:
     # Create MCP context (or None if MCP not available)
     mcp_context = None
     if MCPContext is not None:
-        mcp_context = MCPContext(
-            project_id=project_id,
-            user_id=user_id
-        )
+        mcp_context = MCPContext(project_id=project_id, user_id=user_id)
 
     # Call AI provider with all context
     result = await provider.chat(
@@ -2538,85 +2518,12 @@ async def execute_mcp_tool(tool_name: str, params: Dict[str, Any] = {}):
     Calls the MCP server running on port 3457.
     """
     try:
-        # Import MCP server functions
-        from mcp_server import (
-            analyze_react_components,
-            analyze_style_values,
-            build_source_graph,
-            check_naming_consistency,
-            create_project,
-            discover_project,
-            export_tokens,
-            extract_components,
-            extract_tokens,
-            find_inline_styles,
-            find_style_patterns,
-            find_unused_styles,
-            generate_component_code,
-            generate_stories_batch,
-            generate_story,
-            generate_storybook_theme,
-            get_activity,
-            get_project,
-            get_quick_wins,
-            get_quick_wins_report,
-            get_status,
-            get_story_coverage,
-            get_sync_history,
-            ingest_css_tokens,
-            ingest_json_tokens,
-            ingest_scss_tokens,
-            ingest_tailwind_tokens,
-            list_projects,
-            merge_tokens,
-            scan_storybook,
-            sync_tokens_to_file,
-            validate_tokens,
-        )
+        # Legacy endpoint: forward to unified MCP handler.
+        from dss.mcp.handler import MCPContext, get_mcp_handler
 
-        # Map tool names to functions
-        tool_map = {
-            "get_status": get_status,
-            "list_projects": list_projects,
-            "create_project": create_project,
-            "get_project": get_project,
-            "extract_tokens": extract_tokens,
-            "extract_components": extract_components,
-            "generate_component_code": generate_component_code,
-            "sync_tokens_to_file": sync_tokens_to_file,
-            "get_sync_history": get_sync_history,
-            "get_activity": get_activity,
-            "ingest_css_tokens": ingest_css_tokens,
-            "ingest_scss_tokens": ingest_scss_tokens,
-            "ingest_tailwind_tokens": ingest_tailwind_tokens,
-            "ingest_json_tokens": ingest_json_tokens,
-            "merge_tokens": merge_tokens,
-            "export_tokens": export_tokens,
-            "validate_tokens": validate_tokens,
-            "discover_project": discover_project,
-            "analyze_react_components": analyze_react_components,
-            "find_inline_styles": find_inline_styles,
-            "find_style_patterns": find_style_patterns,
-            "analyze_style_values": analyze_style_values,
-            "find_unused_styles": find_unused_styles,
-            "build_source_graph": build_source_graph,
-            "get_quick_wins": get_quick_wins,
-            "get_quick_wins_report": get_quick_wins_report,
-            "check_naming_consistency": check_naming_consistency,
-            "scan_storybook": scan_storybook,
-            "generate_story": generate_story,
-            "generate_stories_batch": generate_stories_batch,
-            "generate_storybook_theme": generate_storybook_theme,
-            "get_story_coverage": get_story_coverage,
-        }
-
-        # Get the tool function
-        tool_func = tool_map.get(tool_name)
-        if not tool_func:
-            raise HTTPException(status_code=404, detail=f"Tool '{tool_name}' not found")
-
-        # Execute tool
-        result = await tool_func(**params)
+        handler = get_mcp_handler()
+        context = MCPContext(project_id=params.get("project_id"), user_id=params.get("user_id"))
+        result = await handler.execute_tool(tool_name=tool_name, arguments=params or {}, context=context)
 
         # Log execution
         ActivityLog.log(
@@ -2626,7 +2533,7 @@ async def execute_mcp_tool(tool_name: str, params: Dict[str, Any] = {}):
             details={"params": list(params.keys())},
         )
 
-        return JSONResponse(content={"success": True, "result": result})
+        return JSONResponse(content=result.to_dict())
 
     except Exception as e:
         ActivityLog.log(
@@ -2645,7 +2552,7 @@ class IntegrationCreate(BaseModel):
     """Create/Update integration configuration."""
 
     integration_type: str  # figma, jira, confluence, sequential-thinking
-    config: Dict[str, Any]  # Encrypted in database
+    config: Dict[str, Any]  # Encrypted at rest when DSS_MCP_ENCRYPTION_KEY is configured
     enabled: bool = True
 
 
@@ -2697,7 +2604,7 @@ async def create_integration(
     if not Projects.get(project_id):
         raise HTTPException(status_code=404, detail="Project not found")
 
-    from dss_mcp.config import mcp_config
+    from dss.mcp.config import mcp_config
 
     # Encrypt config
     config_json = json.dumps(integration.config)
@@ -2744,7 +2651,7 @@ async def update_integration(
     if not Projects.get(project_id):
         raise HTTPException(status_code=404, detail="Project not found")
 
-    from dss_mcp.config import mcp_config
+    from dss.mcp.config import mcp_config
 
     try:
         encrypted_config = None
@@ -2811,7 +2718,7 @@ async def list_mcp_tools(
     include_details: bool = Query(False, description="Include full tool schemas"),
 ):
     """List all available MCP tools via unified handler."""
-    from dss_mcp.handler import get_mcp_handler
+    from dss.mcp.handler import get_mcp_handler
 
     handler = get_mcp_handler()
     return handler.list_tools(include_details=include_details)
@@ -2820,7 +2727,7 @@ async def list_mcp_tools(
 @app.get("/api/mcp/tools/{tool_name}")
 async def get_mcp_tool_info(tool_name: str):
     """Get detailed information about a specific MCP tool."""
-    from dss_mcp.handler import get_mcp_handler
+    from dss.mcp.handler import get_mcp_handler
 
     handler = get_mcp_handler()
     info = handler.get_tool_info(tool_name)
@@ -2831,16 +2738,8 @@ async def get_mcp_tool_info(tool_name: str):
     return info
 
 
-class MCPToolExecuteRequest(BaseModel):
-    """Request to execute an MCP tool."""
-
-    arguments: Dict[str, Any]
-    project_id: str
-    user_id: Optional[int] = 1
-
-
 @app.post("/api/mcp/tools/{tool_name}/execute")
-async def execute_mcp_tool(tool_name: str, request: MCPToolExecuteRequest):
+async def execute_mcp_tool(tool_name: str, payload: Dict[str, Any] = Body(default_factory=dict)):
     """
     Execute an MCP tool via unified handler.
 
@@ -2850,16 +2749,23 @@ async def execute_mcp_tool(tool_name: str, request: MCPToolExecuteRequest):
     - Applies circuit breaker protection
     - Logs execution metrics
     """
-    from dss_mcp.handler import MCPContext, get_mcp_handler
+    from dss.mcp.handler import MCPContext, get_mcp_handler
 
     handler = get_mcp_handler()
 
+    # Backwards-compatible request parsing:
+    # - New: { arguments: {...}, project_id: "...", user_id: 1 }
+    # - Old (Admin UI): { ...toolArgs }
+    arguments = payload.get("arguments") if isinstance(payload.get("arguments"), dict) else payload
+    project_id = payload.get("project_id") or payload.get("projectId")
+    user_id = payload.get("user_id") or payload.get("userId") or 1
+
     # Create execution context
-    context = MCPContext(project_id=request.project_id, user_id=request.user_id)
+    context = MCPContext(project_id=project_id, user_id=user_id)
 
     # Execute tool
     result = await handler.execute_tool(
-        tool_name=tool_name, arguments=request.arguments, context=context
+        tool_name=tool_name, arguments=arguments or {}, context=context
    )
 
    # Log to activity
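The parsing rule above in isolation (pure Python; the values are illustrative):

    payload = {"arguments": {"file_key": "abc"}, "project_id": "p1"}
    arguments = payload.get("arguments") if isinstance(payload.get("arguments"), dict) else payload
    assert arguments == {"file_key": "abc"}  # new shape: unwrap "arguments"

    legacy = {"file_key": "abc"}
    arguments = legacy.get("arguments") if isinstance(legacy.get("arguments"), dict) else legacy
    assert arguments == {"file_key": "abc"}  # old shape: the whole payload is the arguments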
@@ -2867,7 +2773,7 @@ async def execute_mcp_tool(tool_name: str, request: MCPToolExecuteRequest):
         action="mcp_tool_executed",
         entity_type="tool",
         entity_id=tool_name,
-        project_id=request.project_id,
+        project_id=project_id,
         details={
             "success": result.success,
             "duration_ms": result.duration_ms,
@@ -2881,28 +2787,43 @@ async def execute_mcp_tool(tool_name: str, request: MCPToolExecuteRequest):
 @app.get("/api/mcp/status")
 async def get_mcp_status():
     """Get MCP server status and configuration."""
-    from dss_mcp.config import integration_config, mcp_config, validate_config
+    from dss.mcp.config import integration_config, mcp_config, validate_config
 
     warnings = validate_config()
 
+    # Admin UI expects a minimal `{ connected, tools }` shape.
+    # Keep detailed config under `details` for debugging.
+    try:
+        from dss.mcp.handler import get_mcp_handler
+
+        tools_count = len(get_mcp_handler().list_tools(include_details=False))
+        connected = True
+    except Exception:
+        tools_count = 0
+        connected = False
+
     return {
-        "server": {
-            "host": mcp_config.HOST,
-            "port": mcp_config.PORT,
-            "encryption_enabled": bool(mcp_config.ENCRYPTION_KEY),
-            "context_cache_ttl": mcp_config.CONTEXT_CACHE_TTL,
-        },
-        "integrations": {
-            "figma": bool(integration_config.FIGMA_TOKEN),
-            "anthropic": bool(integration_config.ANTHROPIC_API_KEY),
-            "jira_default": bool(integration_config.JIRA_URL),
-            "confluence_default": bool(integration_config.CONFLUENCE_URL),
-        },
-        "circuit_breaker": {
-            "failure_threshold": mcp_config.CIRCUIT_BREAKER_FAILURE_THRESHOLD,
-            "timeout_seconds": mcp_config.CIRCUIT_BREAKER_TIMEOUT_SECONDS,
-        },
-        "warnings": warnings,
+        "connected": connected,
+        "tools": tools_count,
+        "details": {
+            "server": {
+                "host": mcp_config.HOST,
+                "port": mcp_config.PORT,
+                "encryption_enabled": bool(mcp_config.ENCRYPTION_KEY),
+                "context_cache_ttl": mcp_config.CONTEXT_CACHE_TTL,
+            },
+            "integrations": {
+                "figma": bool(integration_config.FIGMA_TOKEN),
+                "anthropic": bool(integration_config.ANTHROPIC_API_KEY),
+                "jira_default": bool(integration_config.JIRA_URL),
+                "confluence_default": bool(integration_config.CONFLUENCE_URL),
+            },
+            "circuit_breaker": {
+                "failure_threshold": mcp_config.CIRCUIT_BREAKER_FAILURE_THRESHOLD,
+                "timeout_seconds": mcp_config.CIRCUIT_BREAKER_TIMEOUT_SECONDS,
+            },
+            "warnings": warnings,
+        },
    }
 
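A typical response under the new shape (values are illustrative; 3457 is the MCP server port mentioned elsewhere in this file):

    {
        "connected": True,
        "tools": 33,
        "details": {
            "server": {"host": "127.0.0.1", "port": 3457, "encryption_enabled": False, "context_cache_ttl": 300},
            "integrations": {"figma": True, "anthropic": False, "jira_default": False, "confluence_default": False},
            "circuit_breaker": {"failure_threshold": 5, "timeout_seconds": 60},
            "warnings": [],
        },
    }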
@@ -3070,9 +2991,8 @@ async def write_project_file(project_id: str, request: FileWriteRequest):
         raise HTTPException(status_code=403, detail=str(e))
 
 
-UI_DIR = Path(__file__).parent.parent.parent / "admin-ui"
-if UI_DIR.exists():
-    app.mount("/", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
+if _admin_ui_dist_index.exists():
+    app.mount("/", StaticFiles(directory=str(_admin_ui_dist), html=True), name="ui")
 
 
 def kill_port(port: int, wait: float = 0.5) -> None: