Initial commit: Clean DSS implementation

Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm

Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)

Self-contained configuration:
- All paths are relative or resolve against DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
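
Illustrative sketch of how a component might resolve paths under the scheme above (the helper name and default are assumptions for this sketch, not part of the commit):

    import os
    from pathlib import Path

    # Hypothetical helper: resolves a path against DSS_BASE_PATH,
    # falling back to the base path named above.
    def dss_path(*parts: str) -> Path:
        base = Path(os.environ.get("DSS_BASE_PATH", "/home/overbits/dss"))
        return base.joinpath(*parts)

    # e.g. dss_path("dss-mvp1", ".storybook")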

Migration completed: $(date)
🤖 Clean migration with full functionality preserved
Digital Production Factory
2025-12-09 18:45:48 -03:00
commit 276ed71f31
884 changed files with 373737 additions and 0 deletions

tools/dss_mcp/audit.py (new file)

@@ -0,0 +1,341 @@
"""
DSS MCP Audit Module
Tracks all operations for compliance, debugging, and audit trails.
Maintains immutable logs of all state-changing operations with before/after snapshots.
"""
import json
import uuid
from typing import Optional, Dict, Any
from datetime import datetime
from enum import Enum
from storage.database import get_connection # Use absolute import (tools/ is in sys.path)
class AuditEventType(Enum):
"""Types of auditable events"""
TOOL_CALL = "tool_call"
CREDENTIAL_ACCESS = "credential_access"
CREDENTIAL_CREATE = "credential_create"
CREDENTIAL_DELETE = "credential_delete"
PROJECT_CREATE = "project_create"
PROJECT_UPDATE = "project_update"
PROJECT_DELETE = "project_delete"
COMPONENT_SYNC = "component_sync"
TOKEN_SYNC = "token_sync"
STATE_TRANSITION = "state_transition"
ERROR = "error"
SECURITY_EVENT = "security_event"
class AuditLog:
"""
Persistent operation audit trail.
All operations are logged with:
- Full operation details
- User who performed it
- Timestamp
- Before/after state snapshots
- Result status
"""
@staticmethod
def log_operation(
event_type: AuditEventType,
operation_name: str,
operation_id: str,
user_id: Optional[str],
project_id: Optional[str],
args: Dict[str, Any],
result: Optional[Dict[str, Any]] = None,
error: Optional[str] = None,
before_state: Optional[Dict[str, Any]] = None,
after_state: Optional[Dict[str, Any]] = None
) -> str:
"""
Log an operation to the audit trail.
Args:
event_type: Type of event
operation_name: Human-readable operation name
operation_id: Unique operation ID
user_id: User who performed the operation
project_id: Associated project ID
args: Operation arguments (will be scrubbed of sensitive data)
result: Operation result
error: Error message if operation failed
before_state: State before operation
after_state: State after operation
Returns:
Audit log entry ID
"""
audit_id = str(uuid.uuid4())
# Scrub sensitive data from args
scrubbed_args = AuditLog._scrub_sensitive_data(args)
with get_connection() as conn:
conn.execute("""
INSERT INTO audit_log (
id, event_type, operation_name, operation_id, user_id,
project_id, args, result, error, before_state, after_state,
created_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
audit_id,
event_type.value,
operation_name,
operation_id,
user_id,
project_id,
json.dumps(scrubbed_args),
json.dumps(result) if result else None,
error,
json.dumps(before_state) if before_state else None,
json.dumps(after_state) if after_state else None,
datetime.utcnow().isoformat()
))
return audit_id
@staticmethod
def get_operation_history(
project_id: Optional[str] = None,
user_id: Optional[str] = None,
operation_name: Optional[str] = None,
limit: int = 100,
offset: int = 0
) -> list:
"""
Get operation history with optional filtering.
Args:
project_id: Filter by project
user_id: Filter by user
operation_name: Filter by operation
limit: Number of records to return
offset: Pagination offset
Returns:
List of audit log entries
"""
with get_connection() as conn:
cursor = conn.cursor()
query = "SELECT * FROM audit_log WHERE 1=1"
params = []
if project_id:
query += " AND project_id = ?"
params.append(project_id)
if user_id:
query += " AND user_id = ?"
params.append(user_id)
if operation_name:
query += " AND operation_name = ?"
params.append(operation_name)
query += " ORDER BY created_at DESC LIMIT ? OFFSET ?"
params.extend([limit, offset])
cursor.execute(query, params)
return [dict(row) for row in cursor.fetchall()]
@staticmethod
def get_audit_trail(
start_date: datetime,
end_date: datetime,
event_type: Optional[str] = None
) -> list:
"""
Get audit trail for a date range.
Useful for compliance reports and security audits.
Args:
start_date: Start of date range
end_date: End of date range
event_type: Optional event type filter
Returns:
List of audit log entries
"""
with get_connection() as conn:
cursor = conn.cursor()
query = """
SELECT * FROM audit_log
WHERE created_at >= ? AND created_at <= ?
"""
params = [start_date.isoformat(), end_date.isoformat()]
if event_type:
query += " AND event_type = ?"
params.append(event_type)
query += " ORDER BY created_at DESC"
cursor.execute(query, params)
return [dict(row) for row in cursor.fetchall()]
@staticmethod
def get_user_activity(
user_id: str,
days: int = 30
) -> Dict[str, Any]:
"""
Get user activity summary for the past N days.
Args:
user_id: User to analyze
days: Number of past days to include
Returns:
Activity summary including operation counts and patterns
"""
from datetime import timedelta
start_date = datetime.utcnow() - timedelta(days=days)
with get_connection() as conn:
cursor = conn.cursor()
# Get total operations
cursor.execute("""
SELECT COUNT(*) FROM audit_log
WHERE user_id = ? AND created_at >= ?
""", (user_id, start_date.isoformat()))
total_ops = cursor.fetchone()[0]
# Get operations by type
cursor.execute("""
SELECT event_type, COUNT(*) as count
FROM audit_log
WHERE user_id = ? AND created_at >= ?
GROUP BY event_type
ORDER BY count DESC
""", (user_id, start_date.isoformat()))
ops_by_type = {row[0]: row[1] for row in cursor.fetchall()}
# Get error count
cursor.execute("""
SELECT COUNT(*) FROM audit_log
WHERE user_id = ? AND created_at >= ? AND error IS NOT NULL
""", (user_id, start_date.isoformat()))
errors = cursor.fetchone()[0]
# Get unique projects
cursor.execute("""
SELECT COUNT(DISTINCT project_id) FROM audit_log
WHERE user_id = ? AND created_at >= ?
""", (user_id, start_date.isoformat()))
projects = cursor.fetchone()[0]
return {
"user_id": user_id,
"days": days,
"total_operations": total_ops,
"operations_by_type": ops_by_type,
"errors": errors,
"projects_touched": projects,
"average_ops_per_day": round(total_ops / days, 2) if days > 0 else 0
}
@staticmethod
def search_audit_log(
search_term: str,
limit: int = 50
) -> list:
"""
Search audit log by operation name or error message.
Args:
search_term: Term to search for
limit: Maximum results
Returns:
List of matching audit entries
"""
with get_connection() as conn:
cursor = conn.cursor()
cursor.execute("""
SELECT * FROM audit_log
WHERE operation_name LIKE ? OR error LIKE ?
ORDER BY created_at DESC
LIMIT ?
""", (f"%{search_term}%", f"%{search_term}%", limit))
return [dict(row) for row in cursor.fetchall()]
@staticmethod
def _scrub_sensitive_data(data: Dict[str, Any]) -> Dict[str, Any]:
"""
Remove sensitive data from arguments for safe logging.
Removes API tokens, passwords, and other secrets.
"""
sensitive_keys = {
'token', 'api_key', 'secret', 'password',
'credential', 'auth', 'figma_token', 'encrypted_data'
}
scrubbed = {}
for key, value in data.items():
if any(sensitive in key.lower() for sensitive in sensitive_keys):
scrubbed[key] = "***REDACTED***"
elif isinstance(value, dict):
scrubbed[key] = AuditLog._scrub_sensitive_data(value)
elif isinstance(value, list):
scrubbed[key] = [
AuditLog._scrub_sensitive_data(item)
if isinstance(item, dict) else item
for item in value
]
else:
scrubbed[key] = value
return scrubbed
@staticmethod
def ensure_audit_log_table():
"""Ensure audit_log table exists"""
with get_connection() as conn:
conn.execute("""
CREATE TABLE IF NOT EXISTS audit_log (
id TEXT PRIMARY KEY,
event_type TEXT NOT NULL,
operation_name TEXT NOT NULL,
operation_id TEXT,
user_id TEXT,
project_id TEXT,
args TEXT,
result TEXT,
error TEXT,
before_state TEXT,
after_state TEXT,
created_at TEXT DEFAULT CURRENT_TIMESTAMP
)
""")
conn.execute(
"CREATE INDEX IF NOT EXISTS idx_audit_user ON audit_log(user_id)"
)
conn.execute(
"CREATE INDEX IF NOT EXISTS idx_audit_project ON audit_log(project_id)"
)
conn.execute(
"CREATE INDEX IF NOT EXISTS idx_audit_type ON audit_log(event_type)"
)
conn.execute(
"CREATE INDEX IF NOT EXISTS idx_audit_date ON audit_log(created_at)"
)
# Initialize table on import
AuditLog.ensure_audit_log_table()
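
A brief usage sketch for the module above. The import path assumes tools/ is on sys.path, as the module's own import comment states; all IDs and argument values are illustrative.

import uuid
from dss_mcp.audit import AuditLog, AuditEventType

# Log a state-changing operation; sensitive args are redacted before storage.
audit_id = AuditLog.log_operation(
    event_type=AuditEventType.TOKEN_SYNC,
    operation_name="sync_tokens",
    operation_id=str(uuid.uuid4()),
    user_id="user-123",
    project_id="project-abc",
    args={"figma_token": "secret", "file_key": "XYZ"},
    result={"tokens_synced": 42},
)

# Query the most recent entries for that project.
recent = AuditLog.get_operation_history(project_id="project-abc", limit=10)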