Initial commit: Clean DSS implementation

Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm

Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)

Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability

Migration completed: 2025-12-09
🤖 Clean migration with full functionality preserved
This commit is contained in:
Digital Production Factory
2025-12-09 18:45:48 -03:00
commit 276ed71f31
884 changed files with 373737 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
"""
DSS Services - Core business logic for the Design System Swarm
Services:
- SandboxedFS: Secure file system operations within project boundaries
- ProjectManager: Project registry and validation
- ConfigService: Project configuration loading and saving
"""
from .sandboxed_fs import SandboxedFS
from .project_manager import ProjectManager
from .config_service import ConfigService, DSSConfig
__all__ = ['SandboxedFS', 'ProjectManager', 'ConfigService', 'DSSConfig']

View File

@@ -0,0 +1,170 @@
"""
ConfigService - Project Configuration Management
Handles loading, saving, and validating project-specific .dss/config.json files.
Uses Pydantic for schema validation with sensible defaults.
"""
import json
import os
from pathlib import Path
from typing import Optional, List, Dict, Any
from pydantic import BaseModel, Field
import logging
logger = logging.getLogger(__name__)
# === Configuration Schema ===
class FigmaConfig(BaseModel):
    """Figma integration settings."""
    # Figma file key to sync against; None until configured.
    file_id: Optional[str] = None
    # Figma team identifier; None until configured.
    team_id: Optional[str] = None
class TokensConfig(BaseModel):
    """Design token export settings."""
    # Directory tokens are exported to, relative to the project root.
    output_path: str = "./tokens"
    format: str = "css"  # css | scss | json | js
class AIConfig(BaseModel):
    """AI assistant behavior settings."""
    # File operations the assistant is allowed to perform.
    allowed_operations: List[str] = Field(default_factory=lambda: ["read", "write"])
    # Files to inject into the assistant's context by default.
    context_files: List[str] = Field(default_factory=lambda: ["./README.md"])
    # Upper bound on readable file size, in kilobytes.
    max_file_size_kb: int = 500
class DSSConfig(BaseModel):
    """
    Complete DSS project configuration schema.

    Stored in: [project_root]/.dss/config.json
    """
    # Version of this schema; presumably bumped on incompatible changes -- TODO confirm.
    schema_version: str = "1.0"
    figma: FigmaConfig = Field(default_factory=FigmaConfig)
    tokens: TokensConfig = Field(default_factory=TokensConfig)
    ai: AIConfig = Field(default_factory=AIConfig)

    class Config:
        # Allow extra fields for forward compatibility
        extra = "allow"
# === Config Service ===
class ConfigService:
    """
    Service for managing project configuration files.

    Loads .dss/config.json from project roots, validates against the
    DSSConfig schema, and provides defaults when the config is missing
    or cannot be parsed.
    """

    CONFIG_FILENAME = "config.json"
    DSS_FOLDER = ".dss"

    def __init__(self):
        """Initialize config service."""
        logger.info("ConfigService initialized")

    def get_config_path(self, project_root: str) -> Path:
        """Return the path to the config file for a project."""
        return Path(project_root) / self.DSS_FOLDER / self.CONFIG_FILENAME

    def get_config(self, project_root: str) -> DSSConfig:
        """
        Load configuration for a project.

        Args:
            project_root: Absolute path to project root directory

        Returns:
            DSSConfig object (defaults if config file missing or invalid)
        """
        config_path = self.get_config_path(project_root)
        if config_path.exists():
            try:
                with open(config_path) as f:
                    data = json.load(f)
                config = DSSConfig(**data)
                logger.debug(f"Loaded config from {config_path}")
                return config
            except Exception as e:
                # Deliberate best-effort: a corrupt or invalid config file
                # must not crash callers.  (The original caught the
                # redundant tuple `(json.JSONDecodeError, Exception)`;
                # Exception already subsumes JSONDecodeError.)
                logger.warning(f"Failed to parse config at {config_path}: {e}")
                # Fall through to return defaults
        logger.debug(f"Using default config for {project_root}")
        return DSSConfig()

    def save_config(self, project_root: str, config: DSSConfig) -> None:
        """
        Save configuration for a project.

        Args:
            project_root: Absolute path to project root directory
            config: DSSConfig object to save
        """
        config_path = self.get_config_path(project_root)
        # Ensure the .dss directory exists before writing
        config_path.parent.mkdir(parents=True, exist_ok=True)
        with open(config_path, 'w') as f:
            json.dump(config.dict(), f, indent=2)
        logger.info(f"Saved config to {config_path}")

    def update_config(self, project_root: str, updates: Dict[str, Any]) -> DSSConfig:
        """
        Update specific fields in project config and persist the result.

        Merging is one level deep: a dict value is merged into the existing
        section (e.g. {"figma": {"file_id": "x"}} keeps other figma keys),
        while any other value replaces the field wholesale.

        Args:
            project_root: Absolute path to project root directory
            updates: Dictionary of fields to update

        Returns:
            Updated DSSConfig object
        """
        config = self.get_config(project_root)
        config_dict = config.dict()
        for key, value in updates.items():
            if isinstance(value, dict) and isinstance(config_dict.get(key), dict):
                config_dict[key].update(value)
            else:
                config_dict[key] = value
        # Re-validate the merged dict through the schema before saving.
        new_config = DSSConfig(**config_dict)
        self.save_config(project_root, new_config)
        return new_config

    def init_config(self, project_root: str) -> DSSConfig:
        """
        Initialize config file for a new project.

        Creates the .dss/ folder and config.json with defaults if absent.

        Args:
            project_root: Absolute path to project root directory

        Returns:
            DSSConfig object (new or existing)
        """
        config_path = self.get_config_path(project_root)
        if config_path.exists():
            logger.debug(f"Config already exists at {config_path}")
            return self.get_config(project_root)
        config = DSSConfig()
        self.save_config(project_root, config)
        logger.info(f"Initialized new config at {config_path}")
        return config

    def config_exists(self, project_root: str) -> bool:
        """Check if config file exists for a project."""
        return self.get_config_path(project_root).exists()

View File

@@ -0,0 +1,295 @@
"""
ProjectManager - Project Registry Service
Manages the server-side registry of projects, including:
- Project registration with path validation
- Root path storage and retrieval
- Project lifecycle management
"""
import os
from pathlib import Path
from typing import Optional, List, Dict, Any
import logging
logger = logging.getLogger(__name__)
class ProjectManager:
    """
    Manages project registry with root path validation.

    Works with the existing Projects database class to add root_path support.
    Validates paths exist and are writable before registration.
    """

    def __init__(self, projects_db, config_service=None):
        """
        Initialize project manager.

        Args:
            projects_db: Projects database class (from storage.database)
            config_service: Optional ConfigService for config initialization
        """
        self.db = projects_db
        self.config_service = config_service
        logger.info("ProjectManager initialized")

    def register_project(
        self,
        name: str,
        root_path: str,
        description: str = "",
        figma_file_key: str = ""
    ) -> Dict[str, Any]:
        """
        Register a new project with validated root path.

        Args:
            name: Human-readable project name
            root_path: Absolute path to project directory
            description: Optional project description
            figma_file_key: Optional Figma file key

        Returns:
            Created project dict

        Raises:
            ValueError: If path doesn't exist, isn't a directory, or is
                already registered to another project
            PermissionError: If no write access to path
        """
        # Resolve and validate the path before touching the database.
        root_path = os.path.abspath(root_path)
        if not os.path.isdir(root_path):
            raise ValueError(f"Path does not exist or is not a directory: {root_path}")
        if not os.access(root_path, os.W_OK):
            raise PermissionError(f"No write access to path: {root_path}")
        # Reject duplicate registrations of the same directory.
        existing = self.get_by_path(root_path)
        if existing:
            raise ValueError(f"Path already registered as project: {existing['name']}")
        # Short random ID; 8 chars of a UUID4 is presumably enough for a
        # local registry -- TODO confirm collision tolerance.
        import uuid
        project_id = str(uuid.uuid4())[:8]
        project = self.db.create(
            id=project_id,
            name=name,
            description=description,
            figma_file_key=figma_file_key
        )
        # root_path lives in a column the base model may not know about.
        self._update_root_path(project_id, root_path)
        project['root_path'] = root_path
        # Best-effort: initialize .dss config when a ConfigService is wired in.
        if self.config_service:
            try:
                self.config_service.init_config(root_path)
                logger.info(f"Initialized .dss config for project {name}")
            except Exception as e:
                logger.warning(f"Failed to init config for {name}: {e}")
        logger.info(f"Registered project: {name} at {root_path}")
        return project

    def get_project(self, project_id: str) -> Optional[Dict[str, Any]]:
        """
        Get project by ID with path validation.

        Args:
            project_id: Project UUID

        Returns:
            Project dict (annotated with a 'path_valid' flag) or None
            if not found
        """
        project = self.db.get(project_id)
        if not project:
            return None
        root_path = project.get('root_path')
        if root_path and not os.path.isdir(root_path):
            logger.warning(f"Project path no longer exists: {root_path}")
            # Don't raise -- just flag the stale path for the caller.
            project['path_valid'] = False
        else:
            project['path_valid'] = True
        return project

    def list_projects(self, status: str = None, valid_only: bool = False) -> List[Dict[str, Any]]:
        """
        List all projects with optional filtering.

        Args:
            status: Filter by status (active, archived, etc.)
            valid_only: Only return projects whose root path still exists

        Returns:
            List of project dicts, each annotated with 'path_valid'
        """
        projects = self.db.list(status=status)
        for project in projects:
            root_path = project.get('root_path')
            project['path_valid'] = bool(root_path and os.path.isdir(root_path))
        if valid_only:
            projects = [p for p in projects if p.get('path_valid', False)]
        return projects

    def get_by_path(self, root_path: str) -> Optional[Dict[str, Any]]:
        """
        Find project by root path.

        Args:
            root_path: Absolute path to search for

        Returns:
            Project dict or None if not found
        """
        root_path = os.path.abspath(root_path)
        # Linear scan; fine at local-registry scale.
        for project in self.list_projects():
            if project.get('root_path') == root_path:
                return project
        return None

    def update_project(
        self,
        project_id: str,
        name: str = None,
        description: str = None,
        root_path: str = None,
        figma_file_key: str = None,
        status: str = None
    ) -> Optional[Dict[str, Any]]:
        """
        Update project fields.

        Args:
            project_id: Project UUID
            name: Optional new name
            description: Optional new description
            root_path: Optional new root path (validated)
            figma_file_key: Optional new Figma key
            status: Optional new status

        Returns:
            Updated project dict or None if not found

        Raises:
            ValueError: If the new root_path doesn't exist
            PermissionError: If the new root_path isn't writable
        """
        project = self.db.get(project_id)
        if not project:
            return None
        # Validate the new root_path before persisting anything.
        if root_path:
            root_path = os.path.abspath(root_path)
            if not os.path.isdir(root_path):
                raise ValueError(f"Path does not exist: {root_path}")
            if not os.access(root_path, os.W_OK):
                raise PermissionError(f"No write access: {root_path}")
            self._update_root_path(project_id, root_path)
        # Remaining fields go through the existing update method.
        updates = {}
        if name is not None:
            updates['name'] = name
        if description is not None:
            updates['description'] = description
        if figma_file_key is not None:
            updates['figma_file_key'] = figma_file_key
        if status is not None:
            updates['status'] = status
        if updates:
            self.db.update(project_id, **updates)
        return self.get_project(project_id)

    def delete_project(self, project_id: str, delete_config: bool = False) -> bool:
        """
        Delete a project from registry.

        Args:
            project_id: Project UUID
            delete_config: If True, also delete the project's .dss folder

        Returns:
            True if deleted, False if not found
        """
        project = self.db.get(project_id)
        if not project:
            return False
        if delete_config and project.get('root_path'):
            import shutil
            dss_path = Path(project['root_path']) / '.dss'
            if dss_path.exists():
                shutil.rmtree(dss_path)
                logger.info(f"Deleted .dss folder at {dss_path}")
        self.db.delete(project_id)
        logger.info(f"Deleted project: {project_id}")
        return True

    def _update_root_path(self, project_id: str, root_path: str) -> None:
        """
        Update root_path in database.

        Uses raw SQL since the column may not be in the existing model.
        Checks PRAGMA table_info before ALTER TABLE (the same approach as
        ensure_schema) instead of swallowing arbitrary exceptions around
        the ALTER, which could hide real database errors.
        """
        from storage.database import get_connection
        with get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("PRAGMA table_info(projects)")
            columns = [col[1] for col in cursor.fetchall()]
            if 'root_path' not in columns:
                conn.execute(
                    "ALTER TABLE projects ADD COLUMN root_path TEXT DEFAULT ''"
                )
                logger.info("Added root_path column to projects table")
            conn.execute(
                "UPDATE projects SET root_path = ? WHERE id = ?",
                (root_path, project_id)
            )

    @staticmethod
    def ensure_schema():
        """
        Ensure database schema has root_path column.

        Call this on startup to migrate existing databases.
        """
        from storage.database import get_connection
        with get_connection() as conn:
            cursor = conn.cursor()
            # Check whether the column already exists before altering.
            cursor.execute("PRAGMA table_info(projects)")
            columns = [col[1] for col in cursor.fetchall()]
            if 'root_path' not in columns:
                cursor.execute("""
                    ALTER TABLE projects ADD COLUMN root_path TEXT DEFAULT ''
                """)
                logger.info("Migration: Added root_path column to projects table")
            else:
                logger.debug("Schema check: root_path column exists")

View File

@@ -0,0 +1,231 @@
"""
SandboxedFS - Secure File System Operations
This service restricts all file operations to within a project's root directory,
preventing path traversal attacks and ensuring AI operations are safely scoped.
Security Features:
- Path resolution with escape detection
- Symlink attack prevention
- Read/write operation logging
"""
import logging
import os
from pathlib import Path
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)
class SandboxedFS:
"""
File system operations restricted to a project root.
All paths are validated to ensure they don't escape the sandbox.
This is critical for AI operations that may receive untrusted input.
"""
def __init__(self, root_path: str):
"""
Initialize sandboxed file system.
Args:
root_path: Absolute path to project root directory
Raises:
ValueError: If root_path doesn't exist or isn't a directory
"""
self.root = Path(root_path).resolve()
if not self.root.is_dir():
raise ValueError(f"Invalid root path: {root_path}")
logger.info(f"SandboxedFS initialized with root: {self.root}")
def _validate_path(self, relative_path: str) -> Path:
"""
Validate and resolve a path within the sandbox.
Args:
relative_path: Path relative to project root
Returns:
Resolved absolute Path within sandbox
Raises:
PermissionError: If path escapes sandbox
"""
# Normalize the path
clean_path = os.path.normpath(relative_path)
# Resolve full path
full_path = (self.root / clean_path).resolve()
# Security check: must be within root
try:
full_path.relative_to(self.root)
except ValueError:
logger.warning(f"Path traversal attempt blocked: {relative_path}")
raise PermissionError(f"Path escapes sandbox: {relative_path}")
return full_path
def read_file(self, relative_path: str, max_size_kb: int = 500) -> str:
"""
Read file content within sandbox.
Args:
relative_path: Path relative to project root
max_size_kb: Maximum file size in KB (default 500KB)
Returns:
File content as string
Raises:
FileNotFoundError: If file doesn't exist
PermissionError: If path escapes sandbox
ValueError: If file exceeds max size
"""
path = self._validate_path(relative_path)
if not path.is_file():
raise FileNotFoundError(f"File not found: {relative_path}")
# Check file size
size_kb = path.stat().st_size / 1024
if size_kb > max_size_kb:
raise ValueError(f"File too large: {size_kb:.1f}KB > {max_size_kb}KB limit")
content = path.read_text(encoding='utf-8')
logger.debug(f"Read file: {relative_path} ({len(content)} chars)")
return content
def write_file(self, relative_path: str, content: str) -> None:
"""
Write file content within sandbox.
Args:
relative_path: Path relative to project root
content: Content to write
Raises:
PermissionError: If path escapes sandbox
"""
path = self._validate_path(relative_path)
# Create parent directories if needed
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(content, encoding='utf-8')
logger.info(f"Wrote file: {relative_path} ({len(content)} chars)")
def delete_file(self, relative_path: str) -> None:
"""
Delete file within sandbox.
Args:
relative_path: Path relative to project root
Raises:
FileNotFoundError: If file doesn't exist
PermissionError: If path escapes sandbox
"""
path = self._validate_path(relative_path)
if not path.is_file():
raise FileNotFoundError(f"File not found: {relative_path}")
path.unlink()
logger.info(f"Deleted file: {relative_path}")
def list_directory(self, relative_path: str = ".") -> List[Dict[str, any]]:
"""
List directory contents within sandbox.
Args:
relative_path: Path relative to project root
Returns:
List of dicts with name, type, and size
Raises:
NotADirectoryError: If path isn't a directory
PermissionError: If path escapes sandbox
"""
path = self._validate_path(relative_path)
if not path.is_dir():
raise NotADirectoryError(f"Not a directory: {relative_path}")
result = []
for item in sorted(path.iterdir()):
entry = {
"name": item.name,
"type": "directory" if item.is_dir() else "file",
}
if item.is_file():
entry["size"] = item.stat().st_size
result.append(entry)
return result
def file_exists(self, relative_path: str) -> bool:
"""
Check if file exists within sandbox.
Args:
relative_path: Path relative to project root
Returns:
True if file exists, False otherwise
"""
try:
path = self._validate_path(relative_path)
return path.exists()
except PermissionError:
return False
def get_file_tree(self, max_depth: int = 3, include_hidden: bool = False) -> Dict:
"""
Get hierarchical file tree for AI context injection.
Args:
max_depth: Maximum directory depth to traverse
include_hidden: Include hidden files (starting with .)
Returns:
Nested dict representing file tree with sizes
"""
def build_tree(path: Path, depth: int) -> Dict:
if depth > max_depth:
return {"...": "truncated"}
result = {}
try:
items = sorted(path.iterdir())
except PermissionError:
return {"error": "permission denied"}
for item in items:
# Skip hidden files unless requested
if not include_hidden and item.name.startswith('.'):
# Always include .dss config folder
if item.name != '.dss':
continue
# Skip common non-essential directories
if item.name in ('node_modules', '__pycache__', '.git', 'dist', 'build'):
result[item.name + "/"] = {"...": "skipped"}
continue
if item.is_dir():
result[item.name + "/"] = build_tree(item, depth + 1)
else:
result[item.name] = item.stat().st_size
return result
return build_tree(self.root, 0)
def get_root_path(self) -> str:
"""Get the absolute root path of this sandbox."""
return str(self.root)