Files
dss/dss-claude-plugin/core/compiler.py
2025-12-11 07:13:06 -03:00

194 lines
7.3 KiB
Python

"""
DSS Context Compiler.
Resolves project context via 3-layer cascade: Base -> Skin -> Project
Includes Safe Boot Protocol and Debug Provenance.
"""
import copy
import json
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List
# Setup module-level logging for the compiler.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("DSSCompiler")

# --- SAFE BOOT PROTOCOL ---
# Hardcoded emergency skin in case file system or JSON parsing fails catastrophically.
# Mirrors the structure of a normal skin file (meta + tokens) so consumers can
# read it without special-casing; "status" marks the context as degraded.
EMERGENCY_SKIN = {
    "meta": {"id": "emergency", "version": "1.0.0"},
    "tokens": {
        "colors": {"primary": "#FF0000", "background": "#FFFFFF", "text": "#000000"},
        "spacing": {"base": "4px"},
    },
    "status": "emergency_mode",
}
class ContextCompiler:
    """
    Resolves project context via a 3-layer cascade: Base -> Skin -> Project.

    Compiled results are cached per manifest path and invalidated when the
    manifest file's mtime changes. Any failure during compilation triggers
    the Safe Boot Protocol (hardcoded emergency skin with error details).
    """

    def __init__(self, skins_dir: str = "./skins"):
        self.skins_dir = Path(skins_dir)
        # Shared cache. Skins are stored under "skin:<id>", compiled contexts
        # under "<manifest_path>:debug=<bool>" — namespaced so a skin id can
        # never collide with a compile() cache key.
        self.cache: Dict[str, Any] = {}
        self._manifest_mtimes: Dict[str, float] = {}  # manifest mtime per cache key

    def compile(
        self, manifest_path: str, debug: bool = False, force_refresh: bool = False
    ) -> Dict[str, Any]:
        """
        Main entry point.

        Compiles context by merging:
          1. Base Skin (implicit — all skins extend 'base' in Phase 1)
          2. Extended Skin (defined in manifest "extends.skin", default "classic")
          3. Project Overrides (manifest "overrides.tokens")

        Args:
            manifest_path: Path to ds.config.json
            debug: Enable provenance tracking (always bypasses the cache,
                because provenance must be recalculated each run)
            force_refresh: Bypass cache and recompile (for long-running servers)

        Returns:
            The merged context dict. On any error, the hardcoded emergency
            context (Safe Boot Protocol) with the error text under "_error".
        """
        try:
            cache_key = f"{manifest_path}:debug={debug}"
            # Cache lookup with mtime validation (skipped in debug/force mode).
            if not force_refresh and not debug and cache_key in self.cache:
                manifest_file = Path(manifest_path)
                if manifest_file.exists():
                    if manifest_file.stat().st_mtime == self._manifest_mtimes.get(cache_key, 0):
                        logger.debug(f"Cache hit for {manifest_path}")
                        # NOTE(review): callers receive the cached dict itself;
                        # mutating it would poison the cache.
                        return self.cache[cache_key]
                    logger.info(f"Manifest modified, invalidating cache: {manifest_path}")

            # 1. Load project manifest.
            manifest = self._load_json(manifest_path)

            # 2. Resolve skin declared by the manifest (default "classic").
            skin_id = manifest.get("extends", {}).get("skin", "classic")
            skin = self._load_skin(skin_id)

            # 3. Resolve base (single inheritance enforced).
            # Simplified for Phase 1: all skins implicitly extend 'base'.
            base_skin = self._load_skin("base")

            # BUGFIX: a fresh provenance list is created per call and threaded
            # through the merges. Previously _deep_merge latched the first-ever
            # value (possibly None from a non-debug run) onto
            # self.provenance_log via hasattr and never refreshed it, so later
            # debug compiles reported stale or None provenance.
            provenance = [] if debug else None
            self.provenance_log = provenance  # kept for backward compatibility

            # 4. Cascade merge: Base -> Skin -> Project.
            context = self._deep_merge(
                base_skin, skin, path="base->skin", debug=debug, provenance=provenance
            )
            # Project overrides are wrapped to mirror the skin structure.
            project_overrides_wrapped = {
                "tokens": manifest.get("overrides", {}).get("tokens", {})
            }
            final_context = self._deep_merge(
                context,
                project_overrides_wrapped,
                path="skin->project",
                debug=debug,
                provenance=provenance,
            )

            # Inject metadata. A missing "project.id" raises KeyError and is
            # routed into safe mode by the handler below (original behavior).
            final_context["_meta"] = {
                "project_id": manifest["project"]["id"],
                "compiled_at": datetime.now(timezone.utc).isoformat(),
                "debug_enabled": debug,
                "compiler_config": manifest.get("compiler", {}),
            }
            if debug:
                final_context["_provenance"] = provenance

            # Cache the result (non-debug only) alongside the manifest mtime.
            if not debug:
                manifest_file = Path(manifest_path)
                if manifest_file.exists():
                    self.cache[cache_key] = final_context
                    self._manifest_mtimes[cache_key] = manifest_file.stat().st_mtime
                    logger.debug(f"Cached compilation result for {manifest_path}")
            return final_context
        except Exception as e:
            # Deliberate broad catch: compilation must never crash the caller;
            # any failure degrades to the emergency context.
            logger.error(f"Compiler specific error: {e}")
            logger.warning("Initiating SAFE BOOT PROTOCOL")
            return self._enter_safe_mode(e)

    def _load_skin(self, skin_id: str) -> Dict[str, Any]:
        """
        Loads a skin by ID from the skins directory.

        Results are cached under a namespaced key. A missing skin falls back
        to 'base'; a missing 'base' falls back to EMERGENCY_SKIN.

        Raises:
            ValueError: if skin_id resolves outside the skins directory.
        """
        # BUGFIX: namespaced key so skin ids cannot collide with compile()
        # cache entries stored in the same dict.
        cache_key = f"skin:{skin_id}"
        if cache_key in self.cache:
            return self.cache[cache_key]

        # Security: prevent path traversal attacks. BUGFIX: the previous
        # str.startswith() check accepted sibling directories (e.g. "skins2/"
        # starts with "skins"); relative_to() is an exact containment test.
        path = (self.skins_dir / f"{skin_id}.json").resolve()
        try:
            path.relative_to(self.skins_dir.resolve())
        except ValueError:
            raise ValueError(f"Invalid skin ID (path traversal detected): {skin_id}")

        if not path.exists():
            logger.warning(f"Skin {skin_id} not found, falling back to base.")
            if skin_id == "base":
                # Last resort: emergency tokens if base itself is missing.
                return EMERGENCY_SKIN
            return self._load_skin("base")

        data = self._load_json(str(path))
        self.cache[cache_key] = data
        return data

    def _load_json(self, path: str) -> Dict[str, Any]:
        """Reads and parses a JSON file as UTF-8 (explicit encoding for portability)."""
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)

    def _deep_merge(
        self,
        base: Dict,
        override: Dict,
        path: str = "",
        debug: bool = False,
        provenance: List[Dict] = None,
    ) -> Dict:
        """
        Deep-merges two dicts without mutating either input.

        Nested dicts are merged recursively; arrays and primitives are
        replaced. When debug is True, every replacement is appended to
        `provenance` as {key, action, layer, value_type}.
        """
        if debug and provenance is None:
            # Standalone debug call: create a list and expose it for callers.
            # BUGFIX: previously latched only once via hasattr, so later debug
            # runs saw stale data; now refreshed on every top-level debug call.
            provenance = []
            self.provenance_log = provenance

        result = copy.deepcopy(base)
        for key, value in override.items():
            if isinstance(value, dict) and isinstance(result.get(key), dict):
                # Recursive merge — thread the same provenance list down.
                result[key] = self._deep_merge(
                    result[key],
                    value,
                    path=f"{path}.{key}",
                    debug=debug,
                    provenance=provenance,
                )
            else:
                # Direct replacement (primitive or array).
                if debug and provenance is not None:
                    provenance.append(
                        {
                            "key": key,
                            "action": "override",
                            "layer": path,
                            "value_type": type(value).__name__,
                        }
                    )
                result[key] = copy.deepcopy(value)
        return result

    def _enter_safe_mode(self, error: Exception) -> Dict[str, Any]:
        """Returns a copy of the hardcoded emergency skin with error details attached."""
        safe_context = copy.deepcopy(EMERGENCY_SKIN)
        safe_context["_error"] = str(error)
        return safe_context