Migrated from design-system-swarm with a fresh git history.
Old project history is preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths are relative or derived from DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed: $(date)
🤖 Clean migration with full functionality preserved
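A hypothetical .env sketch (the PYTHONPATH entries are assumed from the component layout above; adjust to the actual checkout):

    DSS_BASE_PATH=/home/overbits/dss
    PYTHONPATH=/home/overbits/dss/dss-mvp1:/home/overbits/dss/dss-claude-plugin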
"""
|
|
FastAPI routes for DSS Export/Import system
|
|
|
|
Provides REST API endpoints for project export, import, merge, and analysis.
|
|
All operations support both synchronous and asynchronous (background job) modes.
|
|
"""
|
|
|
|
import logging
import uuid
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

from fastapi import APIRouter, BackgroundTasks, File, HTTPException, Query, UploadFile
from fastapi.responses import FileResponse
from pydantic import BaseModel

from dss.export_import.service import DSSProjectService, ExportSummary, ImportSummary, MergeSummary
from dss.models.project import Project

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/projects", tags=["export_import"])

# Initialize service layer
service = DSSProjectService(busy_timeout_ms=5000)

# In-memory job tracking (replace with Redis/database in production)
_jobs: Dict[str, Dict[str, Any]] = {}
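
# A minimal sketch of a Redis-backed replacement for the _jobs dict above
# (assumes the redis-py package; the key prefix and TTL are arbitrary choices,
# and this store is not wired into the endpoints below):
#
#   import json
#   import redis
#
#   class RedisJobStore:
#       def __init__(self, url: str = "redis://localhost:6379/0") -> None:
#           self._redis = redis.Redis.from_url(url)
#
#       def set(self, job_id: str, data: Dict[str, Any]) -> None:
#           # Expire entries after 24h so abandoned jobs do not accumulate
#           self._redis.set(f"dss:job:{job_id}", json.dumps(data), ex=86400)
#
#       def get(self, job_id: str) -> Optional[Dict[str, Any]]:
#           raw = self._redis.get(f"dss:job:{job_id}")
#           return json.loads(raw) if raw else None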


# ============================================================================
# Pydantic Models for API Responses
# ============================================================================

class ExportResponse(BaseModel):
    """Response from export endpoint"""
    success: bool
    file_size_bytes: Optional[int] = None
    token_count: Optional[int] = None
    component_count: Optional[int] = None
    error: Optional[str] = None
    duration_seconds: Optional[float] = None


class ImportResponse(BaseModel):
    """Response from import endpoint"""
    success: bool
    project_name: Optional[str] = None
    token_count: Optional[int] = None
    component_count: Optional[int] = None
    migration_performed: Optional[bool] = None
    warnings: Optional[List[str]] = None
    error: Optional[str] = None
    duration_seconds: Optional[float] = None
    job_id: Optional[str] = None


class MergeResponse(BaseModel):
    """Response from merge endpoint"""
    success: bool
    new_items: Optional[int] = None
    updated_items: Optional[int] = None
    conflicts: Optional[int] = None
    resolution_strategy: Optional[str] = None
    error: Optional[str] = None
    duration_seconds: Optional[float] = None
    job_id: Optional[str] = None


class AnalysisResponse(BaseModel):
    """Response from analysis endpoints"""
    is_valid: bool
    project_name: Optional[str] = None
    schema_version: Optional[str] = None
    token_count: Optional[int] = None
    component_count: Optional[int] = None
    migration_needed: Optional[bool] = None
    # Populated by the analyze-merge endpoint; without these fields the
    # counts it passes would be silently dropped by Pydantic.
    new_items: Optional[int] = None
    conflicts: Optional[int] = None
    errors: Optional[List[str]] = None
    warnings: Optional[List[str]] = None

class JobStatus(BaseModel):
    """Status of a background job"""
    job_id: str
    status: str  # pending, running, completed, failed
    result: Optional[Dict[str, Any]] = None
    error: Optional[str] = None
    created_at: str
    completed_at: Optional[str] = None


# ============================================================================
# Export Endpoints
# ============================================================================

@router.post("/{project_id}/export", response_class=FileResponse)
|
|
async def export_project(
|
|
project_id: str,
|
|
background_tasks: Optional[BackgroundTasks] = None,
|
|
background: bool = Query(False, description="Run as background job")
|
|
) -> FileResponse:
|
|
"""
|
|
Export a project to a .dss archive file
|
|
|
|
Args:
|
|
project_id: ID of project to export
|
|
background: If true, schedule as background job (for large projects)
|
|
|
|
Returns:
|
|
.dss archive file download
|
|
|
|
Examples:
|
|
```bash
|
|
# Export synchronously
|
|
curl -X POST http://localhost:8000/api/projects/my-project/export \
|
|
-o my-project.dss
|
|
|
|
# Export as background job
|
|
curl -X POST "http://localhost:8000/api/projects/my-project/export?background=true"
|
|
```
|
|
"""
|
|
try:
|
|
# Load project (adapt to your data source)
|
|
project = _load_project(project_id)
|
|
if not project:
|
|
raise HTTPException(status_code=404, detail=f"Project not found: {project_id}")
|
|
|
|
# Export
|
|
output_path = Path("/tmp") / f"{project_id}_export.dss"
|
|
result: ExportSummary = service.export_project(project, output_path)
|
|
|
|
if not result.success:
|
|
raise HTTPException(status_code=400, detail=result.error)
|
|
|
|
# Return file for download
|
|
return FileResponse(
|
|
result.archive_path,
|
|
media_type="application/zip",
|
|
filename=f"{project.name}.dss"
|
|
)
|
|
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
logger.error(f"Export failed for {project_id}: {e}")
|
|
raise HTTPException(status_code=500, detail=str(e))
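
# A hypothetical Python client for the export endpoint above (assumes the
# requests package; base URL and chunk size are arbitrary choices):
#
#   import requests
#
#   def download_export(project_id: str, base: str = "http://localhost:8000") -> str:
#       resp = requests.post(f"{base}/api/projects/{project_id}/export", stream=True)
#       resp.raise_for_status()
#       out = f"{project_id}.dss"
#       with open(out, "wb") as fh:
#           for chunk in resp.iter_content(chunk_size=65536):
#               fh.write(chunk)
#       return out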


# ============================================================================
# Import Endpoints
# ============================================================================

@router.post("/import", response_model=ImportResponse)
|
|
async def import_project(
|
|
file: UploadFile = File(...),
|
|
strategy: str = Query("replace", description="Import strategy: replace or merge"),
|
|
background: bool = Query(False, description="Run as background job")
|
|
) -> ImportResponse:
|
|
"""
|
|
Import a project from a .dss archive file
|
|
|
|
Args:
|
|
file: .dss archive file to import
|
|
strategy: Import strategy (replace=full restoration, merge=smart update)
|
|
background: If true, schedule as background job (for large archives)
|
|
|
|
Returns:
|
|
Import result summary
|
|
|
|
Examples:
|
|
```bash
|
|
# Import synchronously
|
|
curl -X POST http://localhost:8000/api/projects/import \
|
|
-F "file=@my-project.dss"
|
|
|
|
# Import with merge strategy
|
|
curl -X POST "http://localhost:8000/api/projects/import?strategy=merge" \
|
|
-F "file=@updates.dss"
|
|
|
|
# Import as background job
|
|
curl -X POST "http://localhost:8000/api/projects/import?background=true" \
|
|
-F "file=@large-project.dss"
|
|
```
|
|
"""
|
|
archive_path = None
|
|
job_id = None
|
|
|
|
try:
|
|
# Save uploaded file
|
|
archive_path = Path("/tmp") / f"import_{datetime.now().timestamp()}.dss"
|
|
contents = await file.read()
|
|
archive_path.write_bytes(contents)
|
|
|
|
# Check if should run as background job
|
|
if service._should_schedule_background(archive_path):
|
|
# Schedule background job
|
|
job_id = _create_job_id()
|
|
_jobs[job_id] = {
|
|
"status": "pending",
|
|
"created_at": datetime.now().isoformat(),
|
|
"type": "import",
|
|
"archive_path": str(archive_path),
|
|
"strategy": strategy
|
|
}
|
|
|
|
# In production: queue with Celery, RQ, or similar
|
|
# For now: return job ID for polling
|
|
return ImportResponse(
|
|
success=True,
|
|
job_id=job_id,
|
|
duration_seconds=0
|
|
)
|
|
|
|
# Run synchronously
|
|
result: ImportSummary = service.import_project(archive_path, strategy)
|
|
|
|
if not result.success:
|
|
raise HTTPException(status_code=400, detail=result.error)
|
|
|
|
return ImportResponse(
|
|
success=True,
|
|
project_name=result.project_name,
|
|
token_count=result.item_counts.get("tokens") if result.item_counts else None,
|
|
component_count=result.item_counts.get("components") if result.item_counts else None,
|
|
migration_performed=result.migration_performed,
|
|
warnings=result.warnings or [],
|
|
duration_seconds=result.duration_seconds
|
|
)
|
|
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
logger.error(f"Import failed: {e}")
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
finally:
|
|
# Cleanup uploaded file after async processing
|
|
if archive_path and archive_path.exists() and job_id is None:
|
|
try:
|
|
archive_path.unlink()
|
|
except Exception:
|
|
pass
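
# A minimal sketch of how a pending job could actually be executed, using
# FastAPI's own BackgroundTasks rather than Celery/RQ (hypothetical helper;
# the endpoint above would need a BackgroundTasks parameter to schedule it):
#
#   def _run_import_job(job_id: str, archive_path: Path, strategy: str) -> None:
#       _jobs[job_id]["status"] = "running"
#       try:
#           summary = service.import_project(archive_path, strategy)
#           _jobs[job_id]["status"] = "completed" if summary.success else "failed"
#           _jobs[job_id]["error"] = summary.error
#       except Exception as exc:
#           _jobs[job_id]["status"] = "failed"
#           _jobs[job_id]["error"] = str(exc)
#       finally:
#           _jobs[job_id]["completed_at"] = datetime.now().isoformat()
#           archive_path.unlink(missing_ok=True)
#
#   # Inside import_project, after creating the job entry:
#   #   background_tasks.add_task(_run_import_job, job_id, archive_path, strategy)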


# ============================================================================
# Merge Endpoints
# ============================================================================

@router.post("/{project_id}/merge", response_model=MergeResponse)
|
|
async def merge_project(
|
|
project_id: str,
|
|
file: UploadFile = File(...),
|
|
strategy: str = Query("keep_local", description="Conflict resolution: overwrite, keep_local, or fork"),
|
|
background: bool = Query(False, description="Run as background job")
|
|
) -> MergeResponse:
|
|
"""
|
|
Merge updates from a .dss archive into a project
|
|
|
|
Args:
|
|
project_id: ID of project to merge into
|
|
file: .dss archive with updates
|
|
strategy: Conflict resolution strategy
|
|
background: If true, schedule as background job
|
|
|
|
Returns:
|
|
Merge result summary
|
|
|
|
Examples:
|
|
```bash
|
|
# Merge with keep_local strategy (preserve local changes)
|
|
curl -X POST "http://localhost:8000/api/projects/my-project/merge?strategy=keep_local" \
|
|
-F "file=@updates.dss"
|
|
|
|
# Merge with overwrite strategy (accept remote changes)
|
|
curl -X POST "http://localhost:8000/api/projects/my-project/merge?strategy=overwrite" \
|
|
-F "file=@updates.dss"
|
|
|
|
# Merge as background job (for large archives)
|
|
curl -X POST "http://localhost:8000/api/projects/my-project/merge?background=true" \
|
|
-F "file=@large-update.dss"
|
|
```
|
|
"""
|
|
archive_path = None
|
|
job_id = None
|
|
|
|
try:
|
|
# Load project
|
|
project = _load_project(project_id)
|
|
if not project:
|
|
raise HTTPException(status_code=404, detail=f"Project not found: {project_id}")
|
|
|
|
# Save uploaded file
|
|
archive_path = Path("/tmp") / f"merge_{datetime.now().timestamp()}.dss"
|
|
contents = await file.read()
|
|
archive_path.write_bytes(contents)
|
|
|
|
# Check if should run as background job
|
|
if service._should_schedule_background(archive_path):
|
|
job_id = _create_job_id()
|
|
_jobs[job_id] = {
|
|
"status": "pending",
|
|
"created_at": datetime.now().isoformat(),
|
|
"type": "merge",
|
|
"project_id": project_id,
|
|
"archive_path": str(archive_path),
|
|
"strategy": strategy
|
|
}
|
|
return MergeResponse(
|
|
success=True,
|
|
job_id=job_id,
|
|
duration_seconds=0
|
|
)
|
|
|
|
# Run synchronously
|
|
result: MergeSummary = service.merge_project(project, archive_path, strategy)
|
|
|
|
if not result.success:
|
|
raise HTTPException(status_code=400, detail=result.error)
|
|
|
|
return MergeResponse(
|
|
success=True,
|
|
new_items=result.new_items_count,
|
|
updated_items=result.updated_items_count,
|
|
conflicts=result.conflicts_count,
|
|
resolution_strategy=result.resolution_strategy,
|
|
duration_seconds=result.duration_seconds
|
|
)
|
|
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
logger.error(f"Merge failed for {project_id}: {e}")
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
finally:
|
|
if archive_path and archive_path.exists() and job_id is None:
|
|
try:
|
|
archive_path.unlink()
|
|
except Exception:
|
|
pass
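
# Sketch of what the three conflict-resolution strategies mean for a single
# conflicting item (illustrative only; the real logic lives in
# DSSProjectService.merge_project):
#
#   def _resolve(local: Any, remote: Any, strategy: str) -> List[Any]:
#       if strategy == "overwrite":
#           return [remote]           # accept the incoming value
#       if strategy == "keep_local":
#           return [local]            # preserve the local value
#       if strategy == "fork":
#           return [local, remote]    # keep both, remote under a forked name
#       raise ValueError(f"Unknown strategy: {strategy}")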


# ============================================================================
# Analysis Endpoints
# ============================================================================

@router.post("/{project_id}/analyze-merge")
|
|
async def analyze_merge(
|
|
project_id: str,
|
|
file: UploadFile = File(...)
|
|
) -> AnalysisResponse:
|
|
"""
|
|
Analyze merge without applying it (safe preview)
|
|
|
|
Args:
|
|
project_id: ID of project to analyze merge into
|
|
file: .dss archive to analyze
|
|
|
|
Returns:
|
|
Merge analysis (what changes would happen)
|
|
|
|
Examples:
|
|
```bash
|
|
curl -X POST http://localhost:8000/api/projects/my-project/analyze-merge \
|
|
-F "file=@updates.dss"
|
|
```
|
|
"""
|
|
archive_path = None
|
|
|
|
try:
|
|
# Load project
|
|
project = _load_project(project_id)
|
|
if not project:
|
|
raise HTTPException(status_code=404, detail=f"Project not found: {project_id}")
|
|
|
|
# Save uploaded file
|
|
archive_path = Path("/tmp") / f"analyze_{datetime.now().timestamp()}.dss"
|
|
contents = await file.read()
|
|
archive_path.write_bytes(contents)
|
|
|
|
# Analyze
|
|
analysis = service.analyze_merge(project, archive_path)
|
|
|
|
return AnalysisResponse(
|
|
is_valid=analysis.is_valid,
|
|
new_items=len(analysis.new_items.get("tokens", [])),
|
|
conflicts=len(analysis.conflicted_items)
|
|
)
|
|
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
logger.error(f"Merge analysis failed for {project_id}: {e}")
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
finally:
|
|
if archive_path and archive_path.exists():
|
|
try:
|
|
archive_path.unlink()
|
|
except Exception:
|
|
pass
|
|
|
|
|
|
@router.post("/analyze-archive")
|
|
async def analyze_archive(
|
|
file: UploadFile = File(...)
|
|
) -> AnalysisResponse:
|
|
"""
|
|
Analyze a .dss archive without importing it (safe preview)
|
|
|
|
Args:
|
|
file: .dss archive to analyze
|
|
|
|
Returns:
|
|
Archive analysis details
|
|
|
|
Examples:
|
|
```bash
|
|
curl -X POST http://localhost:8000/api/projects/analyze-archive \
|
|
-F "file=@project.dss"
|
|
```
|
|
"""
|
|
archive_path = None
|
|
|
|
try:
|
|
# Save uploaded file
|
|
archive_path = Path("/tmp") / f"analyze_archive_{datetime.now().timestamp()}.dss"
|
|
contents = await file.read()
|
|
archive_path.write_bytes(contents)
|
|
|
|
# Analyze
|
|
analysis = service.analyze_import(archive_path)
|
|
|
|
return AnalysisResponse(
|
|
is_valid=analysis.is_valid,
|
|
project_name=analysis.project_name,
|
|
schema_version=analysis.schema_version,
|
|
token_count=analysis.content_summary.get("tokens", {}).get("count"),
|
|
component_count=analysis.content_summary.get("components", {}).get("count"),
|
|
migration_needed=analysis.migration_needed,
|
|
errors=[e.message for e in analysis.errors],
|
|
warnings=analysis.warnings
|
|
)
|
|
|
|
except Exception as e:
|
|
logger.error(f"Archive analysis failed: {e}")
|
|
raise HTTPException(status_code=500, detail=str(e))
|
|
finally:
|
|
if archive_path and archive_path.exists():
|
|
try:
|
|
archive_path.unlink()
|
|
except Exception:
|
|
pass
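
# Illustrative response shape from the analyze-archive endpoint above
# (values are made up; field names come from AnalysisResponse):
#
#   {
#     "is_valid": true,
#     "project_name": "my-project",
#     "schema_version": "1.0",
#     "token_count": 120,
#     "component_count": 18,
#     "migration_needed": false,
#     "errors": [],
#     "warnings": []
#   }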


# ============================================================================
# Job Status Endpoint
# ============================================================================

@router.get("/jobs/{job_id}", response_model=JobStatus)
|
|
async def get_job_status(job_id: str) -> JobStatus:
|
|
"""
|
|
Get status of a background job
|
|
|
|
Args:
|
|
job_id: ID of the job (returned from async endpoint)
|
|
|
|
Returns:
|
|
Current job status and result (if completed)
|
|
|
|
Examples:
|
|
```bash
|
|
curl http://localhost:8000/api/projects/jobs/job-123
|
|
```
|
|
"""
|
|
job = _jobs.get(job_id)
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail=f"Job not found: {job_id}")
|
|
|
|
return JobStatus(
|
|
job_id=job_id,
|
|
status=job.get("status", "unknown"),
|
|
result=job.get("result"),
|
|
error=job.get("error"),
|
|
created_at=job.get("created_at", ""),
|
|
completed_at=job.get("completed_at")
|
|
)
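
# A hypothetical polling loop against the job endpoint above (assumes the
# requests package; the 2-second interval is an arbitrary choice):
#
#   import time
#   import requests
#
#   def wait_for_job(job_id: str, base: str = "http://localhost:8000") -> dict:
#       while True:
#           job = requests.get(f"{base}/api/projects/jobs/{job_id}").json()
#           if job["status"] in ("completed", "failed"):
#               return job
#           time.sleep(2)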


# ============================================================================
# Helper Functions
# ============================================================================

def _load_project(project_id: str) -> Optional[Project]:
    """
    Load a project by ID

    ADAPT THIS to your actual data source (database, API, etc.)
    """
    try:
        # Example: Load from database
        # return db.query(Project).filter(Project.id == project_id).first()

        # For now: return a dummy project
        # In production: implement actual loading
        logger.warning(f"Using dummy project for {project_id} - implement _load_project()")
        return Project(
            name=project_id,
            description="Auto-loaded project",
            author="system",
        )
    except Exception as e:
        logger.error(f"Failed to load project {project_id}: {e}")
        return None
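
# A hypothetical database-backed implementation (assumes a SQLAlchemy session
# factory named SessionLocal and that Project is mapped as an ORM model;
# illustrative only):
#
#   def _load_project(project_id: str) -> Optional[Project]:
#       with SessionLocal() as session:
#           return session.get(Project, project_id)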


def _create_job_id() -> str:
    """Generate a short unique job ID"""
    # Eight hex characters are plenty for the in-memory job table above
    return str(uuid.uuid4())[:8]

# ============================================================================
# Export router for inclusion in FastAPI app
# ============================================================================

__all__ = ["router"]