Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled
- Remove legacy admin-ui/js/ vanilla JS components - Add .dss/ directory with core tokens, skins, themes - Add Storybook configuration and generated stories - Add DSS management scripts (dss-services, dss-init, dss-setup, dss-reset) - Add MCP command definitions for DSS plugin - Add Figma sync architecture and scripts - Update pre-commit hooks with documentation validation - Fix JSON trailing commas in skin files 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
273 lines
8.9 KiB
Python
Executable File
273 lines
8.9 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
DSS Token Resolver - 3-Layer Cascade
|
|
Merges tokens from Core → Skin → Theme into a single style-dictionary input.
|
|
|
|
Usage: python3 scripts/resolve-tokens.py [--skin SKIN] [--theme THEME] [--output PATH]
|
|
|
|
Default: --skin shadcn --theme default --output .dss/data/_system/tokens/tokens.json
|
|
"""
|
|
|
|
import sys
|
|
import os
|
|
import json
|
|
import re
|
|
import argparse
|
|
from pathlib import Path
|
|
from datetime import datetime
|
|
from typing import Dict, Any
|
|
|
|
DSS_ROOT = Path(__file__).parent.parent
DSS_DATA = DSS_ROOT / ".dss"

# Primitive path aliases - map shorthand refs to full primitive paths
# The translation dictionary maps: {color.white} → color.base.white
PRIMITIVE_ALIASES = {
    # Base colors
    "color.white": "color.base.white",
    "color.black": "color.base.black",
    "color.transparent": "color.base.transparent",
}

# Generate aliases for neutral color scales (zinc, slate, gray, etc.)
NEUTRAL_SCALES = ["slate", "gray", "zinc", "neutral", "stone"]
SEMANTIC_SCALES = ["red", "orange", "amber", "yellow", "lime", "green", "emerald",
                   "teal", "cyan", "sky", "blue", "indigo", "violet", "purple",
                   "fuchsia", "pink", "rose"]
SCALE_VALUES = ["50", "100", "200", "300", "400", "500", "600", "700", "800", "900", "950"]

# color.zinc.50 → color.neutral.zinc.50
PRIMITIVE_ALIASES.update({
    f"color.{scale}.{step}": f"color.neutral.{scale}.{step}"
    for scale in NEUTRAL_SCALES
    for step in SCALE_VALUES
})

# color.red.500 → color.semantic.red.500
PRIMITIVE_ALIASES.update({
    f"color.{scale}.{step}": f"color.semantic.{scale}.{step}"
    for scale in SEMANTIC_SCALES
    for step in SCALE_VALUES
})
|
|
|
def load_json(path: Path) -> dict:
    """Load JSON file, return empty dict if not found"""
    # A missing layer file is not an error - the cascade simply
    # contributes nothing from that layer.
    if path.exists():
        with path.open() as fh:
            return json.load(fh)
    return {}
|
|
|
|
|
|
def deep_merge(base: dict, override: dict) -> dict:
    """Deep merge override into base, returning new dict"""
    merged = dict(base)
    for key, incoming in override.items():
        # Keys prefixed with "_" are layer metadata and never merged.
        if key.startswith("_"):
            continue
        existing = merged.get(key)
        if isinstance(existing, dict) and isinstance(incoming, dict):
            # Both sides are dicts: recurse so nested keys combine.
            merged[key] = deep_merge(existing, incoming)
        else:
            # Otherwise the override value wins outright.
            merged[key] = incoming
    return merged
|
|
|
|
|
|
def resolve_references(tokens: dict, primitives: dict) -> dict:
    """
    Resolve token references like {color.zinc.500} using primitives.
    Uses translation dictionary (PRIMITIVE_ALIASES) to map shorthand paths.
    Works recursively through the token structure.
    """
    ref_re = re.compile(r'\{([^}]+)\}')
    missing = []  # referenced paths that never resolved (for the warning report)

    def lookup(tree: dict, dotted: str) -> Any:
        """Walk a dot-separated path through nested dicts; None if absent."""
        node = tree
        for segment in dotted.split("."):
            if not (isinstance(node, dict) and segment in node):
                return None
            node = node[segment]
        # A token object carries its payload under "value" - unwrap it.
        if isinstance(node, dict) and "value" in node:
            return node["value"]
        return node

    def substitute(text: Any) -> Any:
        """Replace every {ref} in a string value; non-strings pass through."""
        if not isinstance(text, str):
            return text

        def swap(match):
            dotted = match.group(1)

            # First: try direct lookup in primitives
            hit = lookup(primitives, dotted)
            if hit is not None:
                return str(hit)

            # Second: try using translation dictionary (alias mapping)
            full_path = PRIMITIVE_ALIASES.get(dotted)
            if full_path:
                hit = lookup(primitives, full_path)
                if hit is not None:
                    return str(hit)

            # Third: try in tokens themselves (for self-references)
            hit = lookup(tokens, dotted)
            if hit is not None:
                return str(hit)

            # Track unresolved for debugging (first occurrence only)
            if dotted not in missing:
                missing.append(dotted)

            # Leave the reference text untouched if nothing matched.
            return match.group(0)

        return ref_re.sub(swap, text)

    def walk(node: Any) -> Any:
        """Recursively resolve references, dropping "_"-prefixed metadata keys."""
        if isinstance(node, dict):
            out = {}
            for key, child in node.items():
                if key.startswith("_"):
                    continue  # Skip metadata
                out[key] = substitute(child) if key == "value" else walk(child)
            return out
        if isinstance(node, list):
            return [walk(item) for item in node]
        return node

    resolved = walk(tokens)

    # Report unresolved references
    if missing:
        print(f"[WARN] {len(missing)} unresolved token references:")
        for ref in missing[:10]:  # Show first 10
            alias = PRIMITIVE_ALIASES.get(ref, "no alias")
            print(f" - {{{ref}}} (alias: {alias})")
        if len(missing) > 10:
            print(f" ... and {len(missing) - 10} more")

    return resolved
|
|
|
|
|
|
def resolve_tokens(
    skin_name: str = "shadcn",
    theme_name: str = "default"
) -> Dict[str, Any]:
    """
    Resolve tokens through the 3-layer cascade:
    1. Core primitives (base values)
    2. Skin tokens (semantic mappings)
    3. Theme overrides (brand customization)
    """
    # Load each layer (missing files load as empty dicts)
    primitives = load_json(DSS_DATA / "core" / "primitives.json")
    skin = load_json(DSS_DATA / "skins" / skin_name / "tokens.json")
    theme = load_json(DSS_DATA / "themes" / f"{theme_name}.json")

    # Report what we're loading
    print(f"[INFO] Resolving tokens:")
    print(f" Core: {len(primitives)} categories")
    print(f" Skin: {skin_name}")
    print(f" Theme: {theme_name}")

    # The skin forms the base layer (its values reference the primitives);
    # "_"-prefixed categories are metadata and are skipped.
    merged = {
        category: dict(group) if isinstance(group, dict) else group
        for category, group in skin.items()
        if not category.startswith("_")
    }

    # Theme categories override matching skin categories (deep-merged),
    # or are added wholesale when the skin has no such category.
    for category, group in theme.items():
        if category.startswith("_"):
            continue
        if category in merged:
            merged[category] = deep_merge(merged[category], group)
        else:
            merged[category] = group

    # Resolve all references using primitives
    resolved = resolve_references(merged, primitives)

    # Clean up internal metadata ("_"-prefixed keys) at every depth
    def _strip_meta(node):
        if isinstance(node, dict):
            return {
                key: _strip_meta(child)
                for key, child in node.items()
                if not key.startswith("_")
            }
        return node

    return _strip_meta(resolved)
|
|
|
|
|
|
def main():
    """CLI entry point: resolve the cascade, then print or save the result."""
    parser = argparse.ArgumentParser(description="Resolve DSS 3-layer token cascade")
    parser.add_argument("--skin", default="shadcn", help="Skin to use (default: shadcn)")
    parser.add_argument("--theme", default="default", help="Theme to use (default: default)")
    parser.add_argument("--output", default=".dss/data/_system/tokens/tokens.json",
                        help="Output path for resolved tokens")
    parser.add_argument("--dry-run", action="store_true", help="Print tokens without saving")
    args = parser.parse_args()

    banner = "=" * 60
    print(banner)
    print("DSS TOKEN RESOLVER - 3-Layer Cascade")
    print(banner)
    print("")

    # Resolve tokens
    tokens = resolve_tokens(args.skin, args.theme)

    # Count tokens: one per entry in each dict-valued top-level category
    total = sum(len(items) for items in tokens.values() if isinstance(items, dict))

    print(f"\n[OK] Resolved {total} tokens")

    if args.dry_run:
        print("\n[DRY RUN] Resolved tokens:")
        print(json.dumps(tokens, indent=2))
    else:
        # Save to output path (relative paths are anchored at the repo root)
        output_path = Path(args.output)
        if not output_path.is_absolute():
            output_path = DSS_ROOT / output_path

        output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_path, "w") as f:
            json.dump(tokens, f, indent=2)
        print(f"[OK] Saved to {output_path}")

        # Also save metadata alongside the tokens for provenance
        meta = {
            "resolved_at": datetime.now().isoformat(),
            "skin": args.skin,
            "theme": args.theme,
            "token_count": total,
            "layers": ["core/primitives", f"skins/{args.skin}", f"themes/{args.theme}"]
        }
        with open(output_path.parent / "resolved-meta.json", "w") as f:
            json.dump(meta, f, indent=2)

    print("")
    print("[OK] Token resolution complete!")
    print(" Next: Run style-dictionary build")


if __name__ == "__main__":
    main()
|