Initial commit: Clean DSS implementation
Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration (rough sketch below)
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
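A rough sketch of the resulting .env, assuming the paths above (only DSS_BASE_PATH is stated verbatim; the PYTHONPATH entries are inferred from the dss-mvp1 and dss-claude-plugin directories, and the real file contains additional settings):

# .env (illustrative sketch)
DSS_BASE_PATH=/home/overbits/dss
PYTHONPATH=/home/overbits/dss/dss-mvp1:/home/overbits/dss/dss-claude-plugin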
Migration completed: $(date)
🤖 Clean migration with full functionality preserved
dss-claude-plugin/core/compiler.py (new file, 179 lines)
@@ -0,0 +1,179 @@
"""
|
||||
DSS Context Compiler
|
||||
Resolves project context via 3-layer cascade: Base -> Skin -> Project
|
||||
Includes Safe Boot Protocol and Debug Provenance.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import copy
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, Any, Optional, List, Union
|
||||
from pathlib import Path
|
||||
|
||||
# Setup logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("DSSCompiler")
|
||||
|
||||
# --- SAFE BOOT PROTOCOL ---
# Hardcoded emergency skin in case the file system or JSON parsing fails catastrophically
EMERGENCY_SKIN = {
    "meta": {"id": "emergency", "version": "1.0.0"},
    "tokens": {
        "colors": {
            "primary": "#FF0000",
            "background": "#FFFFFF",
            "text": "#000000"
        },
        "spacing": {"base": "4px"}
    },
    "status": "emergency_mode"
}

class ContextCompiler:
    def __init__(self, skins_dir: str = "./skins"):
        self.skins_dir = Path(skins_dir)
        self.cache: Dict[str, Any] = {}
        self._manifest_mtimes: Dict[str, float] = {}  # Track file modification times

    def compile(self, manifest_path: str, debug: bool = False, force_refresh: bool = False) -> Dict[str, Any]:
        """
        Main entry point. Compiles context by merging:
        1. Base Skin (implicit or explicit)
        2. Extended Skin (defined in manifest)
        3. Project Overrides (defined in manifest)

        Args:
            manifest_path: Path to ds.config.json
            debug: Enable provenance tracking
            force_refresh: Bypass cache and recompile (for long-running servers)
        """
        try:
            # Check cache with mtime validation (unless force_refresh or debug mode).
            # Note: debug mode bypasses the cache because provenance must be recalculated.
            cache_key = f"{manifest_path}:debug={debug}"
            if not force_refresh and not debug and cache_key in self.cache:
                # Verify the manifest hasn't changed
                manifest_file = Path(manifest_path)
                if manifest_file.exists():
                    current_mtime = manifest_file.stat().st_mtime
                    cached_mtime = self._manifest_mtimes.get(cache_key, 0)
                    if current_mtime == cached_mtime:
                        logger.debug(f"Cache hit for {manifest_path}")
                        return self.cache[cache_key]
                    else:
                        logger.info(f"Manifest modified, invalidating cache: {manifest_path}")

            # 1. Load Project Manifest
            manifest = self._load_json(manifest_path)

            # 2. Resolve Skin
            skin_id = manifest.get("extends", {}).get("skin", "classic")
            skin = self._load_skin(skin_id)

            # 3. Resolve Base (single inheritance enforced).
            # If the skin extends another, we merge that first.
            # Simplified for Phase 1: all skins extend 'base' implicitly unless specified.
            base_skin = self._load_skin("base")

            # 4. Cascade Merge: Base -> Skin -> Project.
            # A fresh provenance list per call keeps repeated debug compiles from reusing stale logs.
            provenance: Optional[List[Dict[str, Any]]] = [] if debug else None

            # Merge Base + Skin
            context = self._deep_merge(base_skin, skin, path="base->skin", debug=debug, provenance=provenance)

            # Merge result + project overrides, wrapped in the same structure as skins
            project_overrides_wrapped = {
                "tokens": manifest.get("overrides", {}).get("tokens", {})
            }
            final_context = self._deep_merge(context, project_overrides_wrapped, path="skin->project", debug=debug, provenance=provenance)

            # Inject metadata
            final_context["_meta"] = {
                "project_id": manifest["project"]["id"],
                "compiled_at": datetime.now(timezone.utc).isoformat(),
                "debug_enabled": debug,
                "compiler_config": manifest.get("compiler", {})
            }

            if debug:
                final_context["_provenance"] = provenance

            # Cache result with mtime tracking (only non-debug results are cached)
            if not debug:
                manifest_file = Path(manifest_path)
                if manifest_file.exists():
                    self.cache[cache_key] = final_context
                    self._manifest_mtimes[cache_key] = manifest_file.stat().st_mtime
                    logger.debug(f"Cached compilation result for {manifest_path}")

            return final_context

        except Exception as e:
            logger.error(f"Compiler error: {e}")
            logger.warning("Initiating SAFE BOOT PROTOCOL")
            return self._enter_safe_mode(e)

    def _load_skin(self, skin_id: str) -> Dict[str, Any]:
        """Loads a skin by ID from the skins directory."""
        # Simple caching strategy
        if skin_id in self.cache:
            return self.cache[skin_id]

        # Security: prevent path traversal; the resolved path must sit inside the skins directory
        path = (self.skins_dir / f"{skin_id}.json").resolve()
        if self.skins_dir.resolve() not in path.parents:
            raise ValueError(f"Invalid skin ID (path traversal detected): {skin_id}")

        if not path.exists():
            logger.warning(f"Skin {skin_id} not found, falling back to base.")
            if skin_id == "base":
                # Return emergency tokens if base is missing
                return EMERGENCY_SKIN
            return self._load_skin("base")

        data = self._load_json(str(path))
        self.cache[skin_id] = data
        return data

    def _load_json(self, path: str) -> Dict[str, Any]:
        with open(path, 'r') as f:
            return json.load(f)

    def _deep_merge(self, base: Dict, override: Dict, path: str = "", debug: bool = False, provenance: Optional[List[Dict]] = None) -> Dict:
        """
        Deep merge dictionaries. Replaces arrays.
        Populates the provenance list if debug is True (thread-safe: the list is
        passed as a parameter rather than stored on the instance).
        """
        if provenance is None and debug:
            provenance = []

        result = copy.deepcopy(base)

        for key, value in override.items():
            if isinstance(value, dict) and key in result and isinstance(result[key], dict):
                # Recursive merge - pass provenance down
                result[key] = self._deep_merge(result[key], value, path=f"{path}.{key}", debug=debug, provenance=provenance)
            else:
                # Direct replacement (primitive or array)
                if debug and provenance is not None:
                    provenance.append({
                        "key": key,
                        "action": "override",
                        "layer": path,
                        "value_type": type(value).__name__
                    })
                result[key] = copy.deepcopy(value)

        return result

    def _enter_safe_mode(self, error: Exception) -> Dict[str, Any]:
        """Returns the hardcoded emergency skin with error details."""
        safe_context = copy.deepcopy(EMERGENCY_SKIN)
        safe_context["_error"] = str(error)
        return safe_context
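For orientation, here is a minimal usage sketch of the compiler above. It is illustrative only: the import path, temp-directory layout, skin contents, and token values are assumptions, while the manifest keys (project.id, extends.skin, overrides.tokens) mirror what compile() actually reads.

# Minimal usage sketch (illustrative, not part of the file above).
# Assumes the module is importable as `compiler`; file names and token values are made up.
import json
import tempfile
from pathlib import Path

from compiler import ContextCompiler

workdir = Path(tempfile.mkdtemp())
skins = workdir / "skins"
skins.mkdir()

# Base and extended skins, named the way _load_skin() expects (<skin_id>.json)
(skins / "base.json").write_text(json.dumps({
    "meta": {"id": "base", "version": "1.0.0"},
    "tokens": {"colors": {"primary": "#336699", "text": "#000000"}, "spacing": {"base": "4px"}}
}))
(skins / "classic.json").write_text(json.dumps({
    "meta": {"id": "classic", "version": "1.0.0"},
    "tokens": {"colors": {"primary": "#113355"}}
}))

# Project manifest (ds.config.json) with a single token override
manifest_path = workdir / "ds.config.json"
manifest_path.write_text(json.dumps({
    "project": {"id": "demo-project"},
    "extends": {"skin": "classic"},
    "overrides": {"tokens": {"colors": {"primary": "#FF6600"}}}
}))

ctx_compiler = ContextCompiler(skins_dir=str(skins))
context = ctx_compiler.compile(str(manifest_path), debug=True)

print(context["tokens"]["colors"]["primary"])  # "#FF6600" - project override wins the cascade
print(context["_meta"]["project_id"])          # "demo-project"
print(len(context["_provenance"]))             # number of override steps recorded in debug mode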