Initial commit: Clean DSS implementation

Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm

Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)

Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability

Migration completed: $(date)
🤖 Clean migration with full functionality preserved
Digital Production Factory
2025-12-09 18:45:48 -03:00
commit 276ed71f31
884 changed files with 373737 additions and 0 deletions

View File

@@ -0,0 +1,32 @@
"""
DSS Core Module - Configuration and Context Management
Extended with Context Compiler for design system context resolution.
"""
from .config import DSSConfig, DSSMode
from .context import DSSContext
from .compiler import ContextCompiler, EMERGENCY_SKIN
from .mcp_extensions import (
    get_active_context,
    resolve_token,
    validate_manifest,
    list_skins,
    get_compiler_status,
    with_context,
    COMPILER
)

__all__ = [
    "DSSConfig",
    "DSSMode",
    "DSSContext",
    "ContextCompiler",
    "EMERGENCY_SKIN",
    "get_active_context",
    "resolve_token",
    "validate_manifest",
    "list_skins",
    "get_compiler_status",
    "with_context",
    "COMPILER"
]
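
Editor's note: a minimal consumer sketch for this public API. The package import path "core" is an assumption; adjust to wherever this module is installed on PYTHONPATH.

# Editor's sketch - import path "core" is an assumption
from core import ContextCompiler, EMERGENCY_SKIN

compiler = ContextCompiler(skins_dir="./skins")
context = compiler.compile("./ds.config.json")
# Fall back to the emergency palette if the token is absent
primary = context.get("tokens", {}).get("colors", {}).get(
    "primary", EMERGENCY_SKIN["tokens"]["colors"]["primary"])
print(primary)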

View File

@@ -0,0 +1,179 @@
"""
DSS Context Compiler
Resolves project context via 3-layer cascade: Base -> Skin -> Project
Includes Safe Boot Protocol and Debug Provenance.
"""
import json
import os
import copy
import logging
from datetime import datetime, timezone
from typing import Dict, Any, Optional, List
from pathlib import Path
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("DSSCompiler")
# --- SAFE BOOT PROTOCOL ---
# Hardcoded emergency skin in case the file system or JSON parsing fails catastrophically
EMERGENCY_SKIN = {
    "meta": {"id": "emergency", "version": "1.0.0"},
    "tokens": {
        "colors": {
            "primary": "#FF0000",
            "background": "#FFFFFF",
            "text": "#000000"
        },
        "spacing": {"base": "4px"}
    },
    "status": "emergency_mode"
}

class ContextCompiler:
    def __init__(self, skins_dir: str = "./skins"):
        self.skins_dir = Path(skins_dir)
        self.cache: Dict[str, Any] = {}
        self._manifest_mtimes: Dict[str, float] = {}  # Track file modification times

    def compile(self, manifest_path: str, debug: bool = False, force_refresh: bool = False) -> Dict[str, Any]:
        """
        Main entry point. Compiles context by merging:
        1. Base Skin (Implicit or Explicit)
        2. Extended Skin (defined in manifest)
        3. Project Overrides (defined in manifest)

        Args:
            manifest_path: Path to ds.config.json
            debug: Enable provenance tracking
            force_refresh: Bypass cache and recompile (for long-running servers)
        """
        try:
            # Check cache with mtime validation (unless force_refresh or debug mode)
            # Note: Debug mode bypasses the cache because provenance must be recalculated
            cache_key = f"{manifest_path}:debug={debug}"
            if not force_refresh and not debug and cache_key in self.cache:
                # Verify the manifest hasn't changed
                manifest_file = Path(manifest_path)
                if manifest_file.exists():
                    current_mtime = manifest_file.stat().st_mtime
                    cached_mtime = self._manifest_mtimes.get(cache_key, 0)
                    if current_mtime == cached_mtime:
                        logger.debug(f"Cache hit for {manifest_path}")
                        return self.cache[cache_key]
                    else:
                        logger.info(f"Manifest modified, invalidating cache: {manifest_path}")

            # 1. Load Project Manifest
            manifest = self._load_json(manifest_path)

            # 2. Resolve Skin
            skin_id = manifest.get("extends", {}).get("skin", "classic")
            skin = self._load_skin(skin_id)

            # 3. Resolve Base (Single Inheritance Enforced)
            # If the skin extends another, we merge that first.
            # Simplified for Phase 1: We assume all skins extend 'base' implicitly unless specified
            base_skin = self._load_skin("base")

            # Provenance entries are collected across both merge passes when debug=True
            provenance: List[Dict] = []

            # 4. Cascade Merge: Base -> Skin -> Project
            # Merge Base + Skin
            context = self._deep_merge(base_skin, skin, path="base->skin", debug=debug,
                                       provenance=provenance if debug else None)

            # Merge Result + Project Overrides
            # Wrap project overrides in the same structure as skins
            project_overrides_wrapped = {
                "tokens": manifest.get("overrides", {}).get("tokens", {})
            }
            final_context = self._deep_merge(context, project_overrides_wrapped, path="skin->project",
                                             debug=debug, provenance=provenance if debug else None)

            # Inject Metadata
            final_context["_meta"] = {
                "project_id": manifest["project"]["id"],
                "compiled_at": datetime.now(timezone.utc).isoformat(),
                "debug_enabled": debug,
                "compiler_config": manifest.get("compiler", {})
            }
            if debug:
                final_context["_provenance"] = provenance

            # Cache result with mtime tracking (only cache non-debug results)
            if not debug:
                manifest_file = Path(manifest_path)
                if manifest_file.exists():
                    self.cache[cache_key] = final_context
                    self._manifest_mtimes[cache_key] = manifest_file.stat().st_mtime
                    logger.debug(f"Cached compilation result for {manifest_path}")
            return final_context
        except Exception as e:
            logger.error(f"Compiler specific error: {e}")
            logger.warning("Initiating SAFE BOOT PROTOCOL")
            return self._enter_safe_mode(e)

    def _load_skin(self, skin_id: str) -> Dict[str, Any]:
        """Loads a skin by ID from the skins directory."""
        # Simple caching strategy
        if skin_id in self.cache:
            return self.cache[skin_id]

        # Security: Prevent path traversal attacks
        path = (self.skins_dir / f"{skin_id}.json").resolve()
        if not str(path).startswith(str(self.skins_dir.resolve())):
            raise ValueError(f"Invalid skin ID (path traversal detected): {skin_id}")

        if not path.exists():
            logger.warning(f"Skin {skin_id} not found, falling back to base.")
            if skin_id == "base":
                # Return emergency tokens if base is missing
                return EMERGENCY_SKIN
            return self._load_skin("base")

        data = self._load_json(str(path))
        self.cache[skin_id] = data
        return data

    def _load_json(self, path: str) -> Dict[str, Any]:
        with open(path, 'r') as f:
            return json.load(f)

    def _deep_merge(self, base: Dict, override: Dict, path: str = "", debug: bool = False,
                    provenance: Optional[List[Dict]] = None) -> Dict:
        """
        Deep merge dictionaries. Replaces arrays.
        Populates the provenance list if debug is True.
        Thread-safe: the caller owns the provenance list; no instance state is mutated.
        """
        if provenance is None and debug:
            # Defensive default for direct callers that pass debug=True without a list
            provenance = []

        result = copy.deepcopy(base)
        for key, value in override.items():
            if isinstance(value, dict) and key in result and isinstance(result[key], dict):
                # Recursive merge - pass provenance down
                result[key] = self._deep_merge(result[key], value, path=f"{path}.{key}",
                                               debug=debug, provenance=provenance)
            else:
                # Direct replacement (Primitive or Array)
                if debug and provenance is not None:
                    provenance.append({
                        "key": key,
                        "action": "override",
                        "layer": path,
                        "value_type": type(value).__name__
                    })
                result[key] = copy.deepcopy(value)
        return result

    def _enter_safe_mode(self, error: Exception) -> Dict[str, Any]:
        """Returns the hardcoded emergency skin with error details."""
        safe_context = copy.deepcopy(EMERGENCY_SKIN)
        safe_context["_error"] = str(error)
        return safe_context

View File

@@ -0,0 +1,161 @@
"""
DSS Configuration Module
========================
Handles configuration management for the Design System Swarm (DSS) Claude Plugin.
Supports local/remote mode detection, persistent configuration storage, and
environment variable overrides.
"""
import os
import json
import uuid
import logging
from enum import Enum
from pathlib import Path

import aiohttp
from pydantic import BaseModel, Field, ValidationError
# Configure module-level logger
logger = logging.getLogger(__name__)
CONFIG_DIR = Path.home() / ".dss"
CONFIG_FILE = CONFIG_DIR / "config.json"
DEFAULT_REMOTE_URL = "https://dss.overbits.luz.uy"
DEFAULT_LOCAL_URL = "http://localhost:6006"
class DSSMode(str, Enum):
    """Operation modes for the DSS plugin."""
    LOCAL = "local"
    REMOTE = "remote"
    AUTO = "auto"

class DSSConfig(BaseModel):
    """
    Configuration model for DSS Plugin.

    Attributes:
        mode (DSSMode): The configured operation mode (default: AUTO).
        remote_url (str): URL for the remote DSS API.
        local_url (str): URL for the local DSS API (usually localhost).
        session_id (str): Unique identifier for this client instance.
    """
    mode: DSSMode = Field(default=DSSMode.AUTO, description="Operation mode preference")
    remote_url: str = Field(default=DEFAULT_REMOTE_URL, description="Remote API endpoint")
    local_url: str = Field(default=DEFAULT_LOCAL_URL, description="Local API endpoint")
    session_id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Persistent session ID")

    class Config:
        validate_assignment = True
        extra = "ignore"  # Allow forward compatibility with new config keys

    @classmethod
    def load(cls) -> "DSSConfig":
        """
        Load configuration from ~/.dss/config.json.
        Returns a default instance if the file does not exist or is invalid.
        """
        if not CONFIG_FILE.exists():
            logger.debug(f"No config found at {CONFIG_FILE}, using defaults.")
            return cls()
        try:
            content = CONFIG_FILE.read_text(encoding="utf-8")
            data = json.loads(content)
            # Ensure complex types are handled by Pydantic validation
            return cls.model_validate(data)
        except (json.JSONDecodeError, ValidationError) as e:
            logger.warning(f"Failed to load config from {CONFIG_FILE}: {e}. Using defaults.")
            return cls()
        except Exception as e:
            logger.error(f"Unexpected error loading config: {e}")
            return cls()

    def save(self) -> None:
        """
        Save the current configuration to ~/.dss/config.json.
        Creates the directory if it does not exist.
        """
        try:
            CONFIG_DIR.mkdir(parents=True, exist_ok=True)
            # model_dump_json serializes enums and URLs correctly
            json_data = self.model_dump_json(indent=2)
            CONFIG_FILE.write_text(json_data, encoding="utf-8")
            logger.debug(f"Configuration saved to {CONFIG_FILE}")
        except Exception as e:
            logger.error(f"Failed to save config to {CONFIG_FILE}: {e}")
            raise

    async def get_active_mode(self) -> DSSMode:
        """
        Determine the actual runtime mode based on priority rules.

        Priority:
        1. DSS_MODE environment variable
        2. Configured 'mode' (if not AUTO)
        3. Auto-detection (ping local health endpoint)
        4. Fallback to REMOTE

        Returns:
            DSSMode: The resolved active mode (LOCAL or REMOTE).
        """
        # 1. Check Environment Variable
        env_mode = os.getenv("DSS_MODE")
        if env_mode:
            try:
                # Normalize string to enum
                return DSSMode(env_mode.lower())
            except ValueError:
                logger.warning(f"Invalid DSS_MODE env var '{env_mode}', ignoring.")

        # 2. Check Configuration (if explicit)
        if self.mode != DSSMode.AUTO:
            return self.mode

        # 3. Auto-detect
        logger.info("Auto-detecting DSS mode...")
        is_local_healthy = await self._check_local_health()
        if is_local_healthy:
            logger.info(f"Local server detected at {self.local_url}. Switching to LOCAL mode.")
            return DSSMode.LOCAL
        else:
            logger.info("Local server unreachable. Falling back to REMOTE mode.")

        # 4. Fallback
        return DSSMode.REMOTE

    async def _check_local_health(self) -> bool:
        """
        Ping the local server health endpoint to check availability.

        Returns:
            bool: True if the server responds with 200 OK, False otherwise.
        """
        health_url = f"{self.local_url.rstrip('/')}/health"
        try:
            timeout = aiohttp.ClientTimeout(total=2.0)  # Short timeout for responsiveness
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.get(health_url) as response:
                    if response.status == 200:
                        return True
                    logger.debug(f"Local health check returned status {response.status}")
        except aiohttp.ClientError as e:
            logger.debug(f"Local health check connection failed: {e}")
        except Exception as e:
            logger.debug(f"Unexpected error during health check: {e}")
        return False

    def get_api_url(self, active_mode: DSSMode) -> str:
        """Helper to get the correct API URL for the determined mode."""
        if active_mode == DSSMode.LOCAL:
            return self.local_url
        return self.remote_url
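
Usage sketch (editor's note): resolves the mode per the priority rules above; runs standalone.

import asyncio

async def main() -> None:
    config = DSSConfig.load()              # defaults if ~/.dss/config.json is missing
    mode = await config.get_active_mode()  # env var > explicit config > auto-detect
    print(f"mode={mode.value} api={config.get_api_url(mode)}")
    config.save()                          # persists session_id across runs

asyncio.run(main())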

View File

@@ -0,0 +1,181 @@
"""
DSS Context Module
==================
Singleton context manager for the DSS Plugin.
Handles configuration loading, mode detection, and strategy instantiation.
"""
import asyncio
import logging
from typing import Optional, Dict, Any
from .config import DSSConfig, DSSMode
# Logger setup
logger = logging.getLogger(__name__)
# Protocol/Type placeholder for Strategies (to be replaced by base class in next steps)
Strategy = Any
class DSSContext:
    """
    Singleton context manager for the DSS Plugin.
    Handles configuration loading, mode detection (Local/Remote),
    and strategy instantiation.
    """
    _instance: Optional['DSSContext'] = None
    _lock: asyncio.Lock = asyncio.Lock()

    def __init__(self) -> None:
        """Private initializer. Use get_instance() instead."""
        if DSSContext._instance is not None:
            raise RuntimeError("DSSContext is a singleton. Use get_instance() to access it.")
        self.config: Optional[DSSConfig] = None
        self.active_mode: DSSMode = DSSMode.REMOTE  # Default safe fallback
        self._capabilities: Dict[str, bool] = {}
        self._strategy_cache: Dict[str, Strategy] = {}
        self.session_id: Optional[str] = None

    @classmethod
    async def get_instance(cls) -> 'DSSContext':
        """
        Async factory method to get the singleton instance.
        Ensures config is loaded and mode is detected before returning.
        """
        if not cls._instance:
            async with cls._lock:
                # Double-check locking pattern
                if not cls._instance:
                    instance = cls()
                    await instance._initialize()
                    cls._instance = instance
        return cls._instance

    @classmethod
    def reset(cls) -> None:
        """Resets the singleton instance. Useful for testing."""
        cls._instance = None

    async def _initialize(self) -> None:
        """
        Internal initialization logic:
        1. Load Config
        2. Detect Mode
        3. Cache Capabilities
        """
        try:
            # 1. Load Configuration
            self.config = DSSConfig.load()
            self.session_id = self.config.session_id

            # 2. Detect Mode (Async check)
            self.active_mode = await self.config.get_active_mode()
            logger.info(f"DSSContext initialized. Mode: {self.active_mode.value}, Session: {self.session_id}")

            # 3. Cache Capabilities
            self._cache_capabilities()
        except Exception as e:
            logger.error(f"Failed to initialize DSSContext: {e}")
            # Fallback to defaults if initialization fails
            self.active_mode = DSSMode.REMOTE
            self._capabilities = {"limited": True}

    def _cache_capabilities(self) -> None:
        """Determines what the plugin can do based on the active mode."""
        # Base capabilities
        caps = {
            "can_read_files": False,
            "can_execute_browser": False,
            "can_screenshot": False,
            "can_connect_remote": True
        }
        if self.active_mode == DSSMode.LOCAL:
            # Local mode allows direct filesystem access and local browser control
            caps["can_read_files"] = True
            caps["can_execute_browser"] = True
            caps["can_screenshot"] = True
        elif self.active_mode == DSSMode.REMOTE:
            # Remote mode relies on API capabilities
            # Depending on remote configuration, these might differ
            caps["can_execute_browser"] = False  # Typically restricted in pure remote unless via API
            caps["can_read_files"] = False  # Security restriction
        self._capabilities = caps

    def get_capability(self, key: str) -> bool:
        """Check if a specific capability is active."""
        return self._capabilities.get(key, False)

    def get_api_url(self) -> str:
        """Get the correct API URL for the current mode."""
        if self.config is None:
            return "https://dss.overbits.luz.uy"  # Default fallback
        return self.config.get_api_url(self.active_mode)

    def get_strategy(self, strategy_type: str) -> Any:
        """
        Factory method to retrieve operational strategies.

        Args:
            strategy_type: One of 'browser', 'filesystem', 'screenshot'

        Returns:
            An instance of the requested strategy.
        """
        # Return cached strategy if available
        if strategy_type in self._strategy_cache:
            return self._strategy_cache[strategy_type]

        strategy_instance = None
        # NOTE: Strategy classes will be implemented in the next step.
        # We use local imports here to avoid circular dependency issues
        # if strategies define their own types using DSSContext.
        try:
            if strategy_type == "browser":
                # Will be implemented in Phase 2 & 3
                if self.active_mode == DSSMode.LOCAL:
                    from ..strategies.local.browser import LocalBrowserStrategy
                    strategy_instance = LocalBrowserStrategy(self)
                else:
                    from ..strategies.remote.browser import RemoteBrowserStrategy
                    strategy_instance = RemoteBrowserStrategy(self)
            elif strategy_type == "filesystem":
                # Will be implemented in Phase 2
                if self.active_mode == DSSMode.LOCAL:
                    from ..strategies.local.filesystem import LocalFilesystemStrategy
                    strategy_instance = LocalFilesystemStrategy(self)
                else:
                    from ..strategies.remote.filesystem import RemoteFilesystemStrategy
                    strategy_instance = RemoteFilesystemStrategy(self)
            elif strategy_type == "screenshot":
                # Screenshot is part of the browser strategy
                return self.get_strategy("browser")
            else:
                raise ValueError(f"Unknown strategy type: {strategy_type}")
        except ImportError as e:
            logger.error(f"Failed to import strategy {strategy_type}: {e}")
            raise NotImplementedError(f"Strategy {strategy_type} not yet implemented") from e

        # Cache and return
        self._strategy_cache[strategy_type] = strategy_instance
        return strategy_instance
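
Usage sketch (editor's note): capabilities gate strategy selection; strategies raise NotImplementedError until the Phase 2 modules land.

import asyncio

async def main() -> None:
    ctx = await DSSContext.get_instance()
    print(f"API endpoint: {ctx.get_api_url()}")
    if ctx.get_capability("can_read_files"):
        try:
            fs = ctx.get_strategy("filesystem")
            print(type(fs).__name__)
        except NotImplementedError:
            print("Filesystem strategy not shipped yet")

asyncio.run(main())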

View File

@@ -0,0 +1,113 @@
"""
MCP Extensions for Context Awareness
Implements the Factory Pattern to wrap existing tools with context
and defines 5 new tools for the Context Compiler.
"""
from typing import Any, Callable, Dict, Optional
import functools
import json
import os
from .compiler import ContextCompiler
# Singleton compiler instance
COMPILER = ContextCompiler(skins_dir=os.path.join(os.path.dirname(__file__), "skins"))
# --- FACTORY PATTERN: Context Wrapper ---
def with_context(default_manifest_path: Optional[str] = None):
    """
    Decorator that injects the compiled context into the tool's arguments.
    Use this to upgrade existing 'token extractor' tools to be 'context aware'.

    The manifest path is extracted from kwargs['manifest_path'] if present,
    otherwise falls back to the default_manifest_path provided at decoration time.
    """
    def decorator(func: Callable):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # 1. Get manifest path (runtime kwarg or decorator default)
            manifest_path = kwargs.get('manifest_path', default_manifest_path)
            if not manifest_path:
                raise ValueError("No manifest_path provided to context-aware tool")
            # 2. Compile Context
            context = COMPILER.compile(manifest_path)
            # 3. Inject into kwargs
            kwargs['dss_context'] = context
            # 4. Execute Tool
            return func(*args, **kwargs)
        return wrapper
    return decorator

# --- 5 NEW MCP TOOLS ---

def get_active_context(manifest_path: str, debug: bool = False, force_refresh: bool = False) -> str:
    """
    [Tool 1] Returns the fully resolved JSON context for the project.
    Set debug=True to see provenance (which layer defined which token).
    Set force_refresh=True to bypass the cache (for long-running servers).
    """
    context = COMPILER.compile(manifest_path, debug=debug, force_refresh=force_refresh)
    return json.dumps(context, indent=2)

def resolve_token(manifest_path: str, token_path: str, force_refresh: bool = False) -> str:
    """
    [Tool 2] Resolves a specific token value (e.g. 'colors.primary')
    through the cascade.
    Set force_refresh=True to bypass the cache (for long-running servers).
    """
    context = COMPILER.compile(manifest_path, force_refresh=force_refresh)
    keys = token_path.split('.')
    current = context.get("tokens", {})
    for k in keys:
        if isinstance(current, dict) and k in current:
            current = current[k]
        else:
            return f"Token not found: {token_path}"
    return str(current)

def validate_manifest(manifest_path: str) -> str:
    """
    [Tool 3] Validates the ds.config.json against the schema.
    """
    # In a full implementation, we would use the 'jsonschema' library here.
    # For now, we perform a basic structural check via the Compiler's loader.
    try:
        result = COMPILER.compile(manifest_path)
        # compile() safe-boots instead of raising, so check for the marker
        if result.get("status") == "emergency_mode":
            return f"Invalid: {result.get('_error', 'unknown error')}"
        return "Valid: Project manifest builds successfully."
    except Exception as e:
        return f"Invalid: {str(e)}"

def list_skins() -> str:
    """
    [Tool 4] Lists all available skins in the registry.
    """
    skins_path = COMPILER.skins_dir
    if not skins_path.exists():
        return "No skins directory found."
    skins = [f.stem for f in skins_path.glob("*.json")]
    return json.dumps(skins)

def get_compiler_status() -> str:
    """
    [Tool 5] Returns the health and configuration of the Context Compiler.
    """
    status = {
        "status": "active",
        "skins_directory": str(COMPILER.skins_dir),
        "cached_skins": list(COMPILER.cache.keys()),
        "safe_boot_ready": True
    }
    return json.dumps(status, indent=2)

# Instructions for Main Server File:
# 1. Import these tools
# 2. Register them with the MCP server instance
# 3. Apply the @with_context wrapper to legacy tools if dynamic context is needed
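
Sketch of upgrading a legacy tool with the wrapper (editor's note: extract_tokens is a hypothetical tool name, not part of this module):

@with_context(default_manifest_path="./ds.config.json")
def extract_tokens(component: str, dss_context: Dict[str, Any] = None, **kwargs) -> str:
    # dss_context is injected by the decorator before this body runs
    colors = dss_context.get("tokens", {}).get("colors", {})
    return f"{component}: {len(colors)} color tokens in scope"

print(extract_tokens("Button"))                                  # decorator default manifest
print(extract_tokens("Card", manifest_path="./ds.config.json"))  # per-call override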

View File

@@ -0,0 +1,167 @@
"""
MCP Integration Layer for DSS Context Compiler
Provides MCP-compliant tool wrappers for the 5 new context tools.
"""
from typing import Dict, Any
import json
from . import (
get_active_context,
resolve_token,
validate_manifest,
list_skins,
get_compiler_status
)
# MCP Tool Definitions

def mcp_get_resolved_context(manifest_path: str, debug: bool = False, force_refresh: bool = False) -> str:
    """
    MCP Tool: Get Active Context
    Returns the fully resolved JSON context for a project.
    Set debug=True to see provenance (which layer defined which token).
    Set force_refresh=True to bypass the cache (for long-running servers).

    Args:
        manifest_path: Path to ds.config.json
        debug: Enable debug provenance tracking
        force_refresh: Bypass cache and recompile

    Returns:
        JSON string with resolved context
    """
    try:
        return get_active_context(manifest_path, debug, force_refresh)
    except Exception as e:
        return json.dumps({"error": str(e), "status": "failed"})

def mcp_resolve_token(manifest_path: str, token_path: str, force_refresh: bool = False) -> str:
    """
    MCP Tool: Resolve Token
    Resolves a specific token value (e.g. 'colors.primary') through the cascade.
    Set force_refresh=True to bypass the cache (for long-running servers).

    Args:
        manifest_path: Path to ds.config.json
        token_path: Dot-notation path to token (e.g. 'colors.primary')
        force_refresh: Bypass cache and recompile

    Returns:
        Resolved token value or error message
    """
    try:
        return resolve_token(manifest_path, token_path, force_refresh)
    except Exception as e:
        return f"Error resolving token: {str(e)}"

def mcp_validate_manifest(manifest_path: str) -> str:
    """
    MCP Tool: Validate Manifest
    Validates the ds.config.json against the schema.

    Args:
        manifest_path: Path to ds.config.json

    Returns:
        Validation result message
    """
    try:
        return validate_manifest(manifest_path)
    except Exception as e:
        return f"Validation error: {str(e)}"

def mcp_list_skins() -> str:
    """
    MCP Tool: List Skins
    Lists all available skins in the registry.

    Returns:
        JSON array of skin IDs
    """
    try:
        return list_skins()
    except Exception as e:
        return json.dumps({"error": str(e), "skins": []})

def mcp_get_compiler_status() -> str:
    """
    MCP Tool: Get Compiler Status
    Returns the health and configuration of the Context Compiler.

    Returns:
        JSON object with compiler status
    """
    try:
        return get_compiler_status()
    except Exception as e:
        return json.dumps({"error": str(e), "status": "error"})

# MCP Tool Registry
# This can be imported by dss-mcp-server.py to register the tools
MCP_TOOLS: Dict[str, Any] = {
    "dss_get_resolved_context": {
        "function": mcp_get_resolved_context,
        "description": "Get fully resolved design system context for a project",
        "parameters": {
            "manifest_path": {
                "type": "string",
                "description": "Path to ds.config.json",
                "required": True
            },
            "debug": {
                "type": "boolean",
                "description": "Enable debug provenance tracking",
                "required": False,
                "default": False
            },
            "force_refresh": {
                "type": "boolean",
                "description": "Bypass cache and recompile",
                "required": False,
                "default": False
            }
        }
    },
    "dss_resolve_token": {
        "function": mcp_resolve_token,
        "description": "Resolve a specific design token through the cascade",
        "parameters": {
            "manifest_path": {
                "type": "string",
                "description": "Path to ds.config.json",
                "required": True
            },
            "token_path": {
                "type": "string",
                "description": "Dot-notation path to token (e.g. 'colors.primary')",
                "required": True
            },
            "force_refresh": {
                "type": "boolean",
                "description": "Bypass cache and recompile",
                "required": False,
                "default": False
            }
        }
    },
    "dss_validate_manifest": {
        "function": mcp_validate_manifest,
        "description": "Validate project manifest against schema",
        "parameters": {
            "manifest_path": {
                "type": "string",
                "description": "Path to ds.config.json",
                "required": True
            }
        }
    },
    "dss_list_skins": {
        "function": mcp_list_skins,
        "description": "List all available design system skins",
        "parameters": {}
    },
    "dss_get_compiler_status": {
        "function": mcp_get_compiler_status,
        "description": "Get Context Compiler health and configuration",
        "parameters": {}
    }
}
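
Registration sketch (editor's note): server.register_tool is a hypothetical method standing in for whatever the MCP framework in dss-mcp-server.py actually exposes.

for name, spec in MCP_TOOLS.items():
    server.register_tool(          # hypothetical API; adapt to the real server
        name=name,
        description=spec["description"],
        parameters=spec["parameters"],
        handler=spec["function"],
    )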

View File

@@ -0,0 +1,52 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "DSS Project Manifest",
  "type": "object",
  "required": ["version", "project", "extends", "stack"],
  "properties": {
    "version": {"type": "string", "pattern": "^2\\.0\\.0$"},
    "project": {
      "type": "object",
      "required": ["id", "name", "type"],
      "properties": {
        "id": {"type": "string", "pattern": "^[a-z0-9-]+$"},
        "name": {"type": "string"},
        "type": {"enum": ["web", "mobile", "desktop"]}
      }
    },
    "extends": {
      "type": "object",
      "required": ["skin", "version"],
      "properties": {
        "skin": {"type": "string"},
        "version": {"type": "string"}
      }
    },
    "stack": {
      "type": "object",
      "required": ["framework", "styling"],
      "properties": {
        "framework": {"enum": ["react", "vue", "angular", "ios", "android", "flutter", "vanilla"]},
        "styling": {"enum": ["tailwind", "css-modules", "styled-components", "emotion", "css-vars"]},
        "icons": {"enum": ["lucide", "heroicons", "material", "custom"]},
        "typescript": {"type": "boolean"}
      }
    },
    "compiler": {
      "type": "object",
      "properties": {
        "strict_mode": {"type": "boolean"},
        "validation_level": {"enum": ["error", "warning", "info"]},
        "output_format": {"enum": ["css-vars", "tailwind-config", "js-tokens"]},
        "cache_strategy": {"enum": ["aggressive", "moderate", "disabled"]}
      }
    },
    "overrides": {
      "type": "object",
      "properties": {
        "tokens": {"type": "object"},
        "files": {"type": "array", "items": {"type": "string"}}
      }
    }
  }
}
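
Editor's sketch: a minimal manifest that satisfies this schema, checked with the jsonschema package (the library the validate_manifest docstring points at). The schema filename is an assumption.

import json
from jsonschema import validate  # pip install jsonschema

with open("ds.schema.json") as f:  # filename is an assumption
    schema = json.load(f)

manifest = {
    "version": "2.0.0",
    "project": {"id": "demo-app", "name": "Demo App", "type": "web"},
    "extends": {"skin": "classic", "version": "2.0.0"},
    "stack": {"framework": "react", "styling": "tailwind"},
}
validate(instance=manifest, schema=schema)  # raises ValidationError on failure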

View File

@@ -0,0 +1,28 @@
{
  "meta": {
    "id": "base",
    "version": "1.0.0",
    "description": "Foundation tokens shared across all skins"
  },
  "tokens": {
    "colors": {
      "transparent": "transparent",
      "current": "currentColor",
      "white": "#ffffff",
      "black": "#000000"
    },
    "spacing": {
      "0": "0px",
      "1": "4px",
      "2": "8px",
      "4": "16px",
      "8": "32px"
    },
    "typography": {
      "fontFamily": {
        "sans": ["system-ui", "sans-serif"],
        "mono": ["monospace"]
      }
    }
  }
}

View File

@@ -0,0 +1,21 @@
{
  "meta": {
    "id": "classic",
    "version": "2.0.0",
    "parent": "base"
  },
  "tokens": {
    "colors": {
      "primary": "#3B82F6",
      "secondary": "#10B981",
      "danger": "#EF4444",
      "background": "#F3F4F6",
      "surface": "#FFFFFF",
      "text": "#1F2937"
    },
    "borderRadius": {
      "default": "0.25rem",
      "lg": "0.5rem"
    }
  }
}

View File

@@ -0,0 +1,33 @@
{
  "meta": {
    "id": "workbench",
    "version": "2.0.0",
    "parent": "base",
    "description": "High density technical interface skin"
  },
  "tokens": {
    "colors": {
      "primary": "#2563EB",
      "secondary": "#475569",
      "danger": "#DC2626",
      "background": "#0F172A",
      "surface": "#1E293B",
      "text": "#E2E8F0"
    },
    "spacing": {
      "1": "2px",
      "2": "4px",
      "4": "8px",
      "8": "16px"
    },
    "borderRadius": {
      "default": "0px",
      "lg": "2px"
    },
    "typography": {
      "fontFamily": {
        "sans": ["Inter", "system-ui", "sans-serif"]
      }
    }
  }
}
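
Editor's sketch: how the base -> workbench cascade resolves, assuming a manifest whose "extends" block names the workbench skin and which defines no project overrides. The import path is an assumption.

from core.compiler import ContextCompiler  # import path is an assumption

compiler = ContextCompiler(skins_dir="./skins")
ctx = compiler.compile("./ds.config.json")
tokens = ctx["tokens"]
assert tokens["colors"]["primary"] == "#2563EB"  # set by workbench.json
assert tokens["colors"]["white"] == "#ffffff"    # inherited from base.json
assert tokens["spacing"]["1"] == "2px"           # workbench overrides base's "4px"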