- Remove database.py (SQLite) from tools/storage/ and dss-mvp1/ - Add json_store.py with full JSON-based storage layer - Update 16 files to use new json_store imports - Storage now mirrors DSS canonical structure: .dss/data/ ├── _system/ (config, cache, activity) ├── projects/ (per-project: tokens, components, styles) └── teams/ (team definitions) - Remove Docker files (not needed) - Update DSS_CORE.json to v1.1.0 Philosophy: "Eat our own food" - storage structure matches DSS design 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
325 lines
10 KiB
Python
325 lines
10 KiB
Python
"""
DSS MCP Operations Module

Handles long-running operations with status tracking, result storage, and
cancellation support. Operations are queued and executed asynchronously with
persistent state.
"""
|
|
|
|
import asyncio
import json
import uuid
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Dict, Optional

from .config import mcp_config
from storage.json_store import ActivityLog, read_json, write_json, DATA_DIR  # JSON storage
|
|
|
|
|
|
class OperationStatus(Enum):
    """Operation execution status."""

    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class Operation:
    """A single tracked operation: type, arguments, status, result, and timing."""

    def __init__(
        self,
        operation_type: str,
        args: Dict[str, Any],
        user_id: Optional[str] = None
    ):
        """
        Args:
            operation_type: Type of operation (e.g. 'sync_tokens').
            args: JSON-serializable operation arguments.
            user_id: Optional user ID for tracking.
        """
        self.id = str(uuid.uuid4())
        self.operation_type = operation_type
        self.args = args
        self.user_id = user_id
        self.status = OperationStatus.PENDING
        self.result: Optional[Any] = None
        self.error: Optional[str] = None
        self.progress = 0  # 0-100
        # NOTE(review): utcnow() is naive UTC (and deprecated in 3.12). Kept so
        # the stored isoformat strings keep their existing shape — confirm
        # before migrating to timezone-aware datetimes.
        self.created_at = datetime.utcnow()
        self.started_at: Optional[datetime] = None
        self.completed_at: Optional[datetime] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a flat, JSON-friendly dictionary for storage."""
        return {
            "id": self.id,
            "operation_type": self.operation_type,
            "args": json.dumps(self.args),
            "user_id": self.user_id,
            "status": self.status.value,
            # 'is not None' so falsy-but-valid results ({}, 0, "", False) are
            # preserved instead of being collapsed to null.
            "result": json.dumps(self.result) if self.result is not None else None,
            "error": self.error,
            "progress": self.progress,
            "created_at": self.created_at.isoformat(),
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "completed_at": self.completed_at.isoformat() if self.completed_at else None
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Operation":
        """Reconstruct an Operation from a dictionary produced by to_dict()."""
        op = cls(
            operation_type=data["operation_type"],
            args=json.loads(data["args"]),
            user_id=data.get("user_id")
        )
        op.id = data["id"]
        op.status = OperationStatus(data["status"])
        # Mirror to_dict(): only a stored null means "no result yet".
        op.result = json.loads(data["result"]) if data.get("result") is not None else None
        op.error = data.get("error")
        op.progress = data.get("progress", 0)
        op.created_at = datetime.fromisoformat(data["created_at"])
        if data.get("started_at"):
            op.started_at = datetime.fromisoformat(data["started_at"])
        if data.get("completed_at"):
            op.completed_at = datetime.fromisoformat(data["completed_at"])
        return op
|
|
|
|
|
|
class OperationQueue:
    """
    Manages async operations with status tracking.

    Operations are persisted to a JSON store for recovery (the SQLite layer
    this module previously used was removed; the old get_connection() calls
    referenced an undefined name and raised NameError). Multiple workers can
    process operations in parallel.
    """

    # In-memory registry of queued/running operations (fast status checks).
    _active_operations: Dict[str, Operation] = {}
    _queue: asyncio.Queue = None
    _workers: list = []

    # --- JSON persistence helpers -------------------------------------------
    # NOTE(review): plain json+pathlib is used here because the exact
    # signatures of storage.json_store.read_json/write_json are not visible
    # from this module — consider delegating to them once confirmed.

    @classmethod
    def _operations_path(cls) -> Path:
        """Location of the persisted operations map (<DATA_DIR>/_system/operations.json)."""
        # Path() wrapper in case DATA_DIR is exported as a plain string.
        return Path(DATA_DIR) / "_system" / "operations.json"

    @classmethod
    def _read_store(cls) -> Dict[str, Dict[str, Any]]:
        """Load the {operation_id: record} map; tolerate a missing/corrupt file."""
        try:
            return json.loads(cls._operations_path().read_text(encoding="utf-8"))
        except (FileNotFoundError, json.JSONDecodeError):
            return {}

    @classmethod
    def _write_store(cls, store: Dict[str, Dict[str, Any]]) -> None:
        """Write the full operations map, creating parent directories as needed."""
        path = cls._operations_path()
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(store, ensure_ascii=False, indent=2), encoding="utf-8")

    # --- Public API ---------------------------------------------------------

    @classmethod
    async def initialize(cls, num_workers: int = 4):
        """Initialize the operation queue with a pool of worker tasks."""
        cls._queue = asyncio.Queue()
        cls._workers = [asyncio.create_task(cls._worker(i)) for i in range(num_workers)]

    @classmethod
    async def enqueue(
        cls,
        operation_type: str,
        args: Dict[str, Any],
        user_id: Optional[str] = None
    ) -> str:
        """
        Enqueue a new operation.

        Args:
            operation_type: Type of operation (e.g., 'sync_tokens')
            args: Operation arguments
            user_id: Optional user ID for tracking

        Returns:
            Operation ID for status checking
        """
        operation = Operation(operation_type, args, user_id)

        # Persist immediately so the operation survives a restart.
        cls._save_operation(operation)

        # Track in memory and hand off to the worker pool.
        cls._active_operations[operation.id] = operation
        await cls._queue.put(operation)

        return operation.id

    @classmethod
    def get_status(cls, operation_id: str) -> Optional[Dict[str, Any]]:
        """Get operation status and result; None if the ID is unknown."""
        # Queued/running operations are answered from memory; finished ones
        # fall through to the persistent store.
        op = cls._active_operations.get(operation_id)
        if op is None:
            record = cls._read_store().get(operation_id)
            if not record:
                return None
            op = Operation.from_dict(record)

        return {
            "id": op.id,
            "status": op.status.value,
            "progress": op.progress,
            "result": op.result,
            "error": op.error
        }

    @classmethod
    def get_result(cls, operation_id: str) -> Optional[Any]:
        """Return the result of a completed operation.

        Raises:
            ValueError: unknown operation ID.
            RuntimeError: the operation failed or has not finished yet.
        """
        status = cls.get_status(operation_id)
        if not status:
            raise ValueError(f"Operation not found: {operation_id}")

        if status["status"] == OperationStatus.COMPLETED.value:
            return status["result"]
        elif status["status"] == OperationStatus.FAILED.value:
            raise RuntimeError(f"Operation failed: {status['error']}")
        else:
            raise RuntimeError(
                f"Operation still {status['status']}: {operation_id}"
            )

    @classmethod
    def cancel(cls, operation_id: str) -> bool:
        """Cancel a pending operation; True only if it had not started yet."""
        op = cls._active_operations.get(operation_id)
        if op is None:
            return False

        if op.status == OperationStatus.PENDING:
            op.status = OperationStatus.CANCELLED
            op.completed_at = datetime.utcnow()
            cls._save_operation(op)
            return True

        return False

    @classmethod
    def list_operations(
        cls,
        operation_type: Optional[str] = None,
        status: Optional[str] = None,
        user_id: Optional[str] = None,
        limit: int = 100
    ) -> list:
        """List persisted operations, newest first, with optional filters."""
        records = list(cls._read_store().values())

        if operation_type:
            records = [r for r in records if r.get("operation_type") == operation_type]
        if status:
            records = [r for r in records if r.get("status") == status]
        if user_id:
            records = [r for r in records if r.get("user_id") == user_id]

        # created_at is ISO-8601, so lexicographic order is chronological.
        records.sort(key=lambda r: r.get("created_at") or "", reverse=True)
        return [Operation.from_dict(r).to_dict() for r in records[:limit]]

    # Private helper methods

    @classmethod
    def _save_operation(cls, operation: Operation):
        """Upsert a single operation record into the JSON store."""
        store = cls._read_store()
        store[operation.id] = operation.to_dict()
        cls._write_store(store)

    @classmethod
    async def _worker(cls, worker_id: int):
        """Worker coroutine that processes operations from the queue."""
        while True:
            try:
                operation = await cls._queue.get()

                # Skip work that was cancelled while still queued; the worker
                # previously ran cancelled operations anyway.
                if operation.status == OperationStatus.CANCELLED:
                    cls._active_operations.pop(operation.id, None)
                    cls._queue.task_done()
                    continue

                # Mark as running
                operation.status = OperationStatus.RUNNING
                operation.started_at = datetime.utcnow()
                cls._save_operation(operation)

                # Execute operation (placeholder - would call actual handlers)
                try:
                    # TODO: Implement actual operation execution
                    # based on operation_type
                    operation.result = {
                        "message": f"Operation {operation.operation_type} completed"
                    }
                    operation.status = OperationStatus.COMPLETED
                    operation.progress = 100

                except Exception as e:
                    operation.error = str(e)
                    operation.status = OperationStatus.FAILED

                # Mark as completed and persist the final state.
                operation.completed_at = datetime.utcnow()
                cls._save_operation(operation)

                # Evict finished work from memory; get_status() falls back to
                # the persistent store (fixes unbounded _active_operations growth).
                cls._active_operations.pop(operation.id, None)

                cls._queue.task_done()

            except asyncio.CancelledError:
                break
            except Exception as e:
                # Log error and continue
                print(f"Worker {worker_id} error: {str(e)}")
                await asyncio.sleep(1)

    @classmethod
    def ensure_operations_table(cls):
        """Ensure the JSON operations store exists.

        Name retained from the SQLite-backed implementation for backward
        compatibility with existing callers.
        """
        if not cls._operations_path().exists():
            cls._write_store({})
|
|
|
|
|
|
# Ensure the operations persistence layer exists as soon as the module loads,
# so enqueue/status calls never race against first-time setup.
OperationQueue.ensure_operations_table()
|