Initial commit: Clean DSS implementation
Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed (note: the `$(date)` shell substitution in the original commit message was never expanded, so no actual date was recorded)
🤖 Clean migration with full functionality preserved
This commit adds tools/dss_mcp/operations.py (new file, 324 lines).
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
DSS MCP Operations Module
|
||||
|
||||
Handles long-running operations with status tracking, result storage, and cancellation support.
|
||||
Operations are queued and executed asynchronously with persistent state.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import uuid
|
||||
from typing import Optional, Dict, Any, Callable
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
from .config import mcp_config
|
||||
from storage.database import get_connection # Use absolute import (tools/ is in sys.path)
|
||||
|
||||
|
||||
class OperationStatus(Enum):
    """Lifecycle states of an operation.

    PENDING -> RUNNING -> (COMPLETED | FAILED); a PENDING operation may
    instead be moved straight to CANCELLED.
    """
    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class Operation:
    """A single queued operation plus its execution state.

    Instances serialize to a flat dict via to_dict(); the dict's key order
    must match the column order of the `operations` table, because rows are
    inserted with `tuple(data.values())` in OperationQueue._save_operation.
    """

    def __init__(
        self,
        operation_type: str,
        args: Dict[str, Any],
        user_id: Optional[str] = None
    ):
        """
        Args:
            operation_type: Kind of operation (e.g. 'sync_tokens').
            args: JSON-serializable operation arguments.
            user_id: Optional user ID for tracking/filtering.
        """
        self.id: str = str(uuid.uuid4())
        self.operation_type = operation_type
        self.args = args
        self.user_id = user_id
        self.status = OperationStatus.PENDING
        self.result: Optional[Any] = None          # set on success
        self.error: Optional[str] = None           # set on failure
        self.progress: int = 0                     # 0..100 percent
        self.created_at: datetime = datetime.utcnow()
        self.started_at: Optional[datetime] = None
        self.completed_at: Optional[datetime] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a flat dict for storage (args/result JSON-encoded).

        Key order is part of the contract — see class docstring.
        """
        return {
            "id": self.id,
            "operation_type": self.operation_type,
            "args": json.dumps(self.args),
            "user_id": self.user_id,
            "status": self.status.value,
            # `is not None` (not truthiness) so falsy-but-valid results
            # such as {}, 0, "" or False survive serialization.
            "result": json.dumps(self.result) if self.result is not None else None,
            "error": self.error,
            "progress": self.progress,
            "created_at": self.created_at.isoformat(),
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "completed_at": self.completed_at.isoformat() if self.completed_at else None
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Operation":
        """Reconstruct an Operation from a stored row dict (inverse of to_dict)."""
        op = cls(
            operation_type=data["operation_type"],
            args=json.loads(data["args"]),
            user_id=data.get("user_id")
        )
        op.id = data["id"]
        op.status = OperationStatus(data["status"])
        # Mirror to_dict: only a stored NULL means "no result"; an encoded
        # falsy result (e.g. "{}") must be decoded, not dropped.
        op.result = json.loads(data["result"]) if data.get("result") is not None else None
        op.error = data.get("error")
        op.progress = data.get("progress", 0)
        op.created_at = datetime.fromisoformat(data["created_at"])
        if data.get("started_at"):
            op.started_at = datetime.fromisoformat(data["started_at"])
        if data.get("completed_at"):
            op.completed_at = datetime.fromisoformat(data["completed_at"])
        return op
|
||||
|
||||
|
||||
class OperationQueue:
    """
    Manages async operations with status tracking.

    Operations are stored in database for persistence and recovery.
    Multiple workers can process operations in parallel while respecting
    per-resource locks to prevent concurrent modifications.
    """

    # Class-level (process-wide) shared state: one queue and one worker pool.
    # In-memory queue for active operations.
    # NOTE(review): entries are never removed from _active_operations after
    # completion in this class — confirm a cleanup path exists elsewhere,
    # otherwise this grows without bound.
    _active_operations: Dict[str, Operation] = {}
    _queue: asyncio.Queue = None  # created in initialize(); None until then
    _workers: list = []           # asyncio Tasks running _worker()

    @classmethod
    async def initialize(cls, num_workers: int = 4):
        """Initialize operation queue with worker pool.

        Must run inside an event loop (create_task requires one) and before
        any enqueue() call, since enqueue() awaits on cls._queue.
        """
        cls._queue = asyncio.Queue()
        cls._workers = []

        for i in range(num_workers):
            worker = asyncio.create_task(cls._worker(i))
            cls._workers.append(worker)

    @classmethod
    async def enqueue(
        cls,
        operation_type: str,
        args: Dict[str, Any],
        user_id: Optional[str] = None
    ) -> str:
        """
        Enqueue a new operation.

        Args:
            operation_type: Type of operation (e.g., 'sync_tokens')
            args: Operation arguments
            user_id: Optional user ID for tracking

        Returns:
            Operation ID for status checking
        """
        operation = Operation(operation_type, args, user_id)

        # Save to database (status 'pending') so it survives restarts
        cls._save_operation(operation)

        # Add to in-memory tracking
        cls._active_operations[operation.id] = operation

        # Queue for processing
        await cls._queue.put(operation)

        return operation.id

    @classmethod
    def get_status(cls, operation_id: str) -> Optional[Dict[str, Any]]:
        """Get operation status and result.

        Returns None when the id is unknown both in memory and in the DB.
        """
        # Check in-memory first (cheapest path; covers active operations)
        if operation_id in cls._active_operations:
            op = cls._active_operations[operation_id]
            return {
                "id": op.id,
                "status": op.status.value,
                "progress": op.progress,
                "result": op.result,
                "error": op.error
            }

        # Check database for completed operations
        with get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM operations WHERE id = ?", (operation_id,))
            row = cursor.fetchone()

            if not row:
                return None

            op = Operation.from_dict(dict(row))
            return {
                "id": op.id,
                "status": op.status.value,
                "progress": op.progress,
                "result": op.result,
                "error": op.error
            }

    @classmethod
    def get_result(cls, operation_id: str) -> Optional[Any]:
        """Return the result of a COMPLETED operation.

        NOTE(review): despite the original "blocks if still running" wording,
        this method does not block — it raises immediately:
            ValueError   if the operation id is unknown
            RuntimeError if the operation FAILED (with its error message)
            RuntimeError if the operation is in any other non-terminal state
        """
        status = cls.get_status(operation_id)
        if not status:
            raise ValueError(f"Operation not found: {operation_id}")

        if status["status"] == OperationStatus.COMPLETED.value:
            return status["result"]
        elif status["status"] == OperationStatus.FAILED.value:
            raise RuntimeError(f"Operation failed: {status['error']}")
        else:
            raise RuntimeError(
                f"Operation still {status['status']}: {operation_id}"
            )

    @classmethod
    def cancel(cls, operation_id: str) -> bool:
        """Cancel a pending operation.

        Only operations still PENDING can be cancelled; RUNNING or terminal
        operations return False. Returns True when the cancel took effect.
        """
        if operation_id not in cls._active_operations:
            return False

        op = cls._active_operations[operation_id]

        if op.status == OperationStatus.PENDING:
            op.status = OperationStatus.CANCELLED
            op.completed_at = datetime.utcnow()
            cls._save_operation(op)
            # NOTE(review): the Operation object is still sitting in
            # cls._queue; _worker() pulls it later and unconditionally sets
            # it RUNNING, overwriting the CANCELLED state — workers should
            # skip cancelled operations. TODO confirm / fix upstream.
            return True

        return False

    @classmethod
    def list_operations(
        cls,
        operation_type: Optional[str] = None,
        status: Optional[str] = None,
        user_id: Optional[str] = None,
        limit: int = 100
    ) -> list:
        """List operations with optional filtering.

        Filters are ANDed together; results are newest-first, capped at
        `limit`. Each entry is the storage-shaped dict from to_dict().
        """
        with get_connection() as conn:
            cursor = conn.cursor()

            # Build the WHERE clause incrementally; all values go through
            # `?` placeholders, never string interpolation (SQL injection).
            query = "SELECT * FROM operations WHERE 1=1"
            params = []

            if operation_type:
                query += " AND operation_type = ?"
                params.append(operation_type)

            if status:
                query += " AND status = ?"
                params.append(status)

            if user_id:
                query += " AND user_id = ?"
                params.append(user_id)

            query += " ORDER BY created_at DESC LIMIT ?"
            params.append(limit)

            cursor.execute(query, params)
            return [Operation.from_dict(dict(row)).to_dict() for row in cursor.fetchall()]

    # Private helper methods

    @classmethod
    def _save_operation(cls, operation: Operation):
        """Insert or overwrite the operation's row (upsert by primary key).

        Relies on Operation.to_dict() returning keys in exactly the column
        order of the INSERT below — keep the two in sync.
        """
        data = operation.to_dict()

        with get_connection() as conn:
            conn.execute("""
                INSERT OR REPLACE INTO operations (
                    id, operation_type, args, user_id, status, result,
                    error, progress, created_at, started_at, completed_at
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, tuple(data.values()))

    @classmethod
    async def _worker(cls, worker_id: int):
        """Worker coroutine that processes operations from queue.

        Loops forever until its Task is cancelled; each iteration pulls one
        operation, marks it RUNNING, "executes" it (currently a placeholder),
        and persists the terminal state.
        """
        while True:
            try:
                operation = await cls._queue.get()

                # Mark as running (persisted so status survives a restart)
                operation.status = OperationStatus.RUNNING
                operation.started_at = datetime.utcnow()
                cls._save_operation(operation)

                # Execute operation (placeholder - would call actual handlers)
                try:
                    # TODO: Implement actual operation execution
                    # based on operation_type

                    operation.result = {
                        "message": f"Operation {operation.operation_type} completed"
                    }
                    operation.status = OperationStatus.COMPLETED
                    operation.progress = 100

                except Exception as e:
                    # Handler failure is recorded on the operation, not raised
                    operation.error = str(e)
                    operation.status = OperationStatus.FAILED

                # Mark as completed (terminal state persisted either way)
                operation.completed_at = datetime.utcnow()
                cls._save_operation(operation)

                cls._queue.task_done()

            except asyncio.CancelledError:
                # Normal shutdown path: worker Task was cancelled
                break
            except Exception as e:
                # Log error and continue so one bad operation doesn't kill
                # the worker; the sleep prevents a tight failure loop.
                print(f"Worker {worker_id} error: {str(e)}")
                await asyncio.sleep(1)

    @classmethod
    def ensure_operations_table(cls):
        """Create the operations table and its indexes if missing (idempotent)."""
        with get_connection() as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS operations (
                    id TEXT PRIMARY KEY,
                    operation_type TEXT NOT NULL,
                    args TEXT NOT NULL,
                    user_id TEXT,
                    status TEXT DEFAULT 'pending',
                    result TEXT,
                    error TEXT,
                    progress INTEGER DEFAULT 0,
                    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                    started_at TEXT,
                    completed_at TEXT
                )
            """)
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_operations_type ON operations(operation_type)"
            )
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_operations_status ON operations(status)"
            )
            conn.execute(
                "CREATE INDEX IF NOT EXISTS idx_operations_user ON operations(user_id)"
            )
|
||||
|
||||
|
||||
# Initialize table on import
# NOTE: import-time side effect — this opens a DB connection and runs DDL
# the moment the module is imported (importing this module requires a
# reachable, writable database).
OperationQueue.ensure_operations_table()
|
||||
Reference in New Issue
Block a user