feat: Add DSS infrastructure, remove legacy admin-ui code
Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled
Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled
- Remove legacy admin-ui/js/ vanilla JS components - Add .dss/ directory with core tokens, skins, themes - Add Storybook configuration and generated stories - Add DSS management scripts (dss-services, dss-init, dss-setup, dss-reset) - Add MCP command definitions for DSS plugin - Add Figma sync architecture and scripts - Update pre-commit hooks with documentation validation - Fix JSON trailing commas in skin files 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
604
scripts/dss-init.sh
Executable file
604
scripts/dss-init.sh
Executable file
@@ -0,0 +1,604 @@
|
||||
#!/bin/bash
|
||||
# DSS Initialization Script - Full Workflow
|
||||
# Sets up complete DSS structure and manages the entire pipeline
|
||||
#
|
||||
# Usage: scripts/dss-init.sh [--reset] [--skip-analysis]
|
||||
#
|
||||
# Full Workflow:
|
||||
# 1. Reset to clean state (optional, with --reset)
|
||||
# 2. Create directory structure and database
|
||||
# 3. Analyze target projects (admin-ui, storybook)
|
||||
# 4. Figma sync (requires FIGMA_TOKEN, uses MCP)
|
||||
# 5. Build CSS with style-dictionary
|
||||
# 6. admin-ui imports from .dss/data/_system/themes/
|
||||
|
||||
set -e
|
||||
|
||||
DSS_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
# Parse arguments
|
||||
RESET=false
|
||||
SKIP_ANALYSIS=false
|
||||
for arg in "$@"; do
|
||||
case $arg in
|
||||
--reset) RESET=true ;;
|
||||
--skip-analysis) SKIP_ANALYSIS=true ;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Colors
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
CYAN='\033[0;36m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_step() { echo -e "${BLUE}[STEP]${NC} $1"; }
|
||||
log_ok() { echo -e "${GREEN}[OK]${NC} $1"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
log_info() { echo -e "${CYAN}[INFO]${NC} $1"; }
|
||||
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ DSS INITIALIZATION ║"
|
||||
echo "║ Full Workflow Pipeline ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 1: Reset (if requested)
|
||||
# ============================================================================
|
||||
if [ "$RESET" = true ]; then
|
||||
log_step "1. Resetting DSS to clean state..."
|
||||
|
||||
# Clear data directories
|
||||
rm -rf .dss/data/projects/* .dss/data/teams/* .dss/data/_system/cache/* .dss/data/_system/activity/* 2>/dev/null || true
|
||||
rm -rf .dss/data/_system/tokens/* .dss/data/_system/themes/* .dss/data/_system/components/* 2>/dev/null || true
|
||||
|
||||
# Reset database
|
||||
rm -f .dss/dss.db .dss/dss.db.old
|
||||
|
||||
# Clear admin-ui generated files
|
||||
rm -f admin-ui/css/dss-*.css 2>/dev/null || true
|
||||
rm -f admin-ui/src/components/*.stories.js admin-ui/src/components/ds-*.js 2>/dev/null || true
|
||||
|
||||
# Reset core_tokens
|
||||
mkdir -p dss/core_tokens
|
||||
cat > dss/core_tokens/tokens.json << 'EOF'
|
||||
{
|
||||
"_meta": {
|
||||
"version": "1.0.0",
|
||||
"generated": null,
|
||||
"source": "awaiting Figma sync",
|
||||
"status": "empty"
|
||||
},
|
||||
"tokens": {}
|
||||
}
|
||||
EOF
|
||||
|
||||
# Reset skins
|
||||
for skin in base classic workbench; do
|
||||
if [ "$skin" = "base" ]; then
|
||||
cat > "dss-claude-plugin/core/skins/${skin}.json" << EOF
|
||||
{
|
||||
"meta": {
|
||||
"id": "${skin}",
|
||||
"version": "1.0.0",
|
||||
"description": "${skin^} skin - awaiting Figma sync"
|
||||
},
|
||||
"tokens": {}
|
||||
}
|
||||
EOF
|
||||
else
|
||||
cat > "dss-claude-plugin/core/skins/${skin}.json" << EOF
|
||||
{
|
||||
"meta": {
|
||||
"id": "${skin}",
|
||||
"version": "1.0.0",
|
||||
"description": "${skin^} skin - awaiting Figma sync",
|
||||
"extends": "base"
|
||||
},
|
||||
"tokens": {}
|
||||
}
|
||||
EOF
|
||||
fi
|
||||
done
|
||||
|
||||
# Clear logs
|
||||
rm -f .dss/logs/*.jsonl 2>/dev/null || true
|
||||
rm -rf .dss/logs/browser-logs/* 2>/dev/null || true
|
||||
|
||||
log_ok "Reset complete"
|
||||
echo ""
|
||||
else
|
||||
log_step "1. Skipping reset (use --reset to clear first)"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# ============================================================================
|
||||
# STEP 2: Validate Environment
|
||||
# ============================================================================
|
||||
log_step "2. Validating DSS environment..."
|
||||
|
||||
STYLE_DICT_AVAILABLE=false
|
||||
FIGMA_AVAILABLE=false
|
||||
|
||||
# Check Python
|
||||
if ! command -v python3 &> /dev/null; then
|
||||
log_error "Python3 not found"
|
||||
exit 1
|
||||
fi
|
||||
log_ok "Python3: $(python3 --version | cut -d' ' -f2)"
|
||||
|
||||
# Check Node.js
|
||||
if command -v node &> /dev/null; then
|
||||
log_ok "Node.js: $(node --version)"
|
||||
else
|
||||
log_warn "Node.js not found - Storybook features limited"
|
||||
fi
|
||||
|
||||
# Check style-dictionary (project, home, or global)
|
||||
if command -v style-dictionary &> /dev/null; then
|
||||
STYLE_DICT_AVAILABLE=true
|
||||
log_ok "style-dictionary: available"
|
||||
elif [ -f "node_modules/.bin/style-dictionary" ]; then
|
||||
STYLE_DICT_AVAILABLE=true
|
||||
log_ok "style-dictionary: local install"
|
||||
elif [ -f "$HOME/node_modules/.bin/style-dictionary" ]; then
|
||||
STYLE_DICT_AVAILABLE=true
|
||||
log_ok "style-dictionary: installed (~)"
|
||||
else
|
||||
log_warn "style-dictionary not installed (npm install -g style-dictionary)"
|
||||
fi
|
||||
|
||||
# Check Figma token - load from config if not in environment
|
||||
FIGMA_CONFIG=".dss/config/figma.json"
|
||||
if [ -z "$FIGMA_TOKEN" ] && [ -f "$FIGMA_CONFIG" ]; then
|
||||
FIGMA_TOKEN=$(python3 -c "import json; print(json.load(open('$FIGMA_CONFIG')).get('token',''))" 2>/dev/null)
|
||||
export FIGMA_TOKEN
|
||||
fi
|
||||
|
||||
if [ -n "$FIGMA_TOKEN" ]; then
|
||||
FIGMA_AVAILABLE=true
|
||||
log_ok "FIGMA_TOKEN: configured (from .dss/config/figma.json)"
|
||||
# Also show UIKit reference if available
|
||||
if [ -f "$FIGMA_CONFIG" ]; then
|
||||
UIKIT_NAME=$(python3 -c "import json; d=json.load(open('$FIGMA_CONFIG')); print(d.get('uikit_reference',{}).get('name',''))" 2>/dev/null)
|
||||
[ -n "$UIKIT_NAME" ] && log_ok "UIKit reference: $UIKIT_NAME"
|
||||
fi
|
||||
else
|
||||
log_warn "FIGMA_TOKEN not set - add to .dss/config/figma.json"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 3: Create Directory Structure
|
||||
# ============================================================================
|
||||
log_step "3. Creating DSS directory structure..."
|
||||
|
||||
mkdir -p .dss/data/{_system/{tokens,themes,components,cache,activity},projects,teams}
|
||||
mkdir -p .dss/schema
|
||||
mkdir -p .dss/logs/browser-logs
|
||||
touch .dss/logs/dss-operations.jsonl
|
||||
touch .dss/logs/git-hooks.jsonl
|
||||
|
||||
# Create _system ds.config.json
|
||||
if [ ! -f ".dss/data/_system/ds.config.json" ]; then
|
||||
cat > .dss/data/_system/ds.config.json << 'EOF'
|
||||
{
|
||||
"name": "dss-system",
|
||||
"version": "1.0.0",
|
||||
"description": "DSS internal design system (dogfooding)",
|
||||
"skin": "base",
|
||||
"base_theme": "light",
|
||||
"targets": [
|
||||
{
|
||||
"name": "admin-ui",
|
||||
"path": "./admin-ui",
|
||||
"type": "web-app",
|
||||
"framework": "vanilla"
|
||||
},
|
||||
{
|
||||
"name": "storybook",
|
||||
"path": "./storybook",
|
||||
"type": "documentation",
|
||||
"framework": "storybook"
|
||||
}
|
||||
],
|
||||
"output": {
|
||||
"tokens_dir": "./.dss/data/_system/tokens",
|
||||
"themes_dir": "./.dss/data/_system/themes",
|
||||
"components_dir": "./.dss/data/_system/components",
|
||||
"formats": ["css", "scss", "json"]
|
||||
}
|
||||
}
|
||||
EOF
|
||||
fi
|
||||
log_ok "Directory structure ready"
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 4: Initialize Database
|
||||
# ============================================================================
|
||||
log_step "4. Initializing database..."
|
||||
|
||||
if [ ! -f ".dss/dss.db" ]; then
|
||||
python3 << 'PYEOF'
|
||||
import sqlite3
|
||||
conn = sqlite3.connect(".dss/dss.db")
|
||||
c = conn.cursor()
|
||||
c.execute('''CREATE TABLE IF NOT EXISTS projects (
|
||||
id TEXT PRIMARY KEY, name TEXT NOT NULL, path TEXT,
|
||||
config TEXT, created_at TEXT, updated_at TEXT)''')
|
||||
c.execute('''CREATE TABLE IF NOT EXISTS tokens (
|
||||
id TEXT PRIMARY KEY, project_id TEXT, category TEXT,
|
||||
name TEXT, value TEXT, source TEXT, created_at TEXT,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id))''')
|
||||
c.execute('''CREATE TABLE IF NOT EXISTS components (
|
||||
id TEXT PRIMARY KEY, project_id TEXT, name TEXT,
|
||||
path TEXT, analysis TEXT, created_at TEXT,
|
||||
FOREIGN KEY (project_id) REFERENCES projects(id))''')
|
||||
c.execute('''CREATE TABLE IF NOT EXISTS figma_syncs (
|
||||
id TEXT PRIMARY KEY, file_key TEXT, file_name TEXT,
|
||||
tokens_count INTEGER, status TEXT, synced_at TEXT)''')
|
||||
conn.commit()
|
||||
conn.close()
|
||||
PYEOF
|
||||
log_ok "Database initialized"
|
||||
else
|
||||
log_ok "Database exists"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 5: Analyze Target Projects
|
||||
# ============================================================================
|
||||
if [ "$SKIP_ANALYSIS" = false ]; then
|
||||
log_step "5. Analyzing target projects..."
|
||||
|
||||
# Analyze admin-ui
|
||||
if [ -d "admin-ui" ]; then
|
||||
JS_COUNT=$(find admin-ui -name "*.js" -not -path "*/node_modules/*" 2>/dev/null | wc -l)
|
||||
CSS_COUNT=$(find admin-ui -name "*.css" -not -path "*/node_modules/*" 2>/dev/null | wc -l)
|
||||
HTML_COUNT=$(find admin-ui -name "*.html" -not -path "*/node_modules/*" 2>/dev/null | wc -l)
|
||||
|
||||
cat > .dss/data/_system/analysis-admin-ui.json << EOF
|
||||
{"target":"admin-ui","analyzed_at":"$(date -Iseconds)","stats":{"js":$JS_COUNT,"css":$CSS_COUNT,"html":$HTML_COUNT},"status":"analyzed"}
|
||||
EOF
|
||||
log_ok "admin-ui: $JS_COUNT js, $CSS_COUNT css, $HTML_COUNT html"
|
||||
else
|
||||
log_warn "admin-ui directory not found"
|
||||
fi
|
||||
|
||||
# Analyze storybook
|
||||
STORIES_COUNT=$(find . -name "*.stories.js" -o -name "*.stories.ts" 2>/dev/null | grep -v node_modules | wc -l)
|
||||
MDX_COUNT=$(find . -name "*.mdx" 2>/dev/null | grep -v node_modules | wc -l)
|
||||
|
||||
cat > .dss/data/_system/analysis-storybook.json << EOF
|
||||
{"target":"storybook","analyzed_at":"$(date -Iseconds)","stats":{"stories":$STORIES_COUNT,"mdx":$MDX_COUNT},"status":"analyzed"}
|
||||
EOF
|
||||
log_ok "storybook: $STORIES_COUNT stories, $MDX_COUNT mdx"
|
||||
|
||||
echo ""
|
||||
else
|
||||
log_step "5. Skipping analysis (--skip-analysis)"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# ============================================================================
|
||||
# STEP 6: Initialize Token Structure
|
||||
# ============================================================================
|
||||
log_step "6. Checking token structure..."
|
||||
|
||||
mkdir -p .dss/data/_system/tokens
|
||||
if [ ! -f ".dss/data/_system/tokens/base.json" ]; then
|
||||
cat > .dss/data/_system/tokens/base.json << 'EOF'
|
||||
{
|
||||
"_meta": {
|
||||
"version": "1.0.0",
|
||||
"generated": null,
|
||||
"source": "awaiting Figma sync",
|
||||
"status": "empty"
|
||||
},
|
||||
"tokens": {}
|
||||
}
|
||||
EOF
|
||||
log_ok "Empty token structure created"
|
||||
else
|
||||
# Check if tokens have content
|
||||
TOKEN_STATUS=$(python3 -c "import json; d=json.load(open('.dss/data/_system/tokens/base.json')); print(d.get('_meta',{}).get('status','unknown'))" 2>/dev/null || echo "unknown")
|
||||
if [ "$TOKEN_STATUS" = "empty" ]; then
|
||||
log_warn "Tokens empty - run Figma sync to populate"
|
||||
else
|
||||
log_ok "Tokens present (status: $TOKEN_STATUS)"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create style-dictionary config (paths relative to .dss/data/_system/ where it runs)
|
||||
cat > .dss/data/_system/style-dictionary.config.json << 'EOF'
|
||||
{
|
||||
"source": ["tokens/tokens.json"],
|
||||
"platforms": {
|
||||
"css": {
|
||||
"transformGroup": "css",
|
||||
"buildPath": "themes/",
|
||||
"files": [{"destination": "tokens.css", "format": "css/variables"}]
|
||||
},
|
||||
"scss": {
|
||||
"transformGroup": "scss",
|
||||
"buildPath": "themes/",
|
||||
"files": [{"destination": "_tokens.scss", "format": "scss/variables"}]
|
||||
},
|
||||
"json": {
|
||||
"transformGroup": "js",
|
||||
"buildPath": "themes/",
|
||||
"files": [{"destination": "tokens.json", "format": "json/flat"}]
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 7: Validate 3-Layer Architecture
|
||||
# ============================================================================
|
||||
log_step "7. Validating 3-layer architecture..."
|
||||
|
||||
# Check core primitives
|
||||
if [ -f ".dss/core/primitives.json" ]; then
|
||||
PRIM_COUNT=$(python3 -c "import json; d=json.load(open('.dss/core/primitives.json')); print(sum(len(v) if isinstance(v,dict) else 0 for k,v in d.items() if not k.startswith('_')))" 2>/dev/null || echo "0")
|
||||
log_ok "Core primitives: $PRIM_COUNT tokens"
|
||||
else
|
||||
log_warn "Core primitives not found (.dss/core/primitives.json)"
|
||||
fi
|
||||
|
||||
# Check skin contract
|
||||
if [ -f ".dss/schema/skin-contract.json" ]; then
|
||||
log_ok "Skin contract defined"
|
||||
else
|
||||
log_warn "Skin contract not found (.dss/schema/skin-contract.json)"
|
||||
fi
|
||||
|
||||
# Check skins
|
||||
SKIN_COUNT=$(find .dss/skins -name "tokens.json" 2>/dev/null | wc -l)
|
||||
if [ "$SKIN_COUNT" -gt 0 ]; then
|
||||
log_ok "Skins available: $SKIN_COUNT"
|
||||
for skin in $(find .dss/skins -type d -mindepth 1 -maxdepth 1 2>/dev/null); do
|
||||
SKIN_NAME=$(basename "$skin")
|
||||
log_info " - $SKIN_NAME"
|
||||
done
|
||||
else
|
||||
log_warn "No skins found (.dss/skins/)"
|
||||
fi
|
||||
|
||||
# Check themes
|
||||
THEME_COUNT=$(find .dss/themes -name "*.json" 2>/dev/null | grep -v "_" | wc -l)
|
||||
if [ "$THEME_COUNT" -gt 0 ]; then
|
||||
log_ok "Themes available: $THEME_COUNT"
|
||||
for theme in $(find .dss/themes -name "*.json" 2>/dev/null | grep -v "_"); do
|
||||
THEME_NAME=$(basename "$theme" .json)
|
||||
log_info " - $THEME_NAME"
|
||||
done
|
||||
else
|
||||
log_warn "No themes found (.dss/themes/)"
|
||||
fi
|
||||
|
||||
# Run validation if available
|
||||
if [ -f "scripts/validate-theme.py" ] && [ -f ".dss/schema/skin-contract.json" ]; then
|
||||
log_info "Running theme/skin validation..."
|
||||
if python3 scripts/validate-theme.py --validate-skin --quiet 2>&1 | while read line; do
|
||||
echo " $line"
|
||||
done; then
|
||||
log_ok "All validations passed"
|
||||
else
|
||||
log_warn "Validation had issues - check output above"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 8: Figma Sync
|
||||
# ============================================================================
|
||||
log_step "8. Figma sync..."
|
||||
|
||||
# Check if tokens are empty and Figma is available
|
||||
TOKEN_STATUS=$(python3 -c "import json; d=json.load(open('.dss/data/_system/tokens/base.json')); print(d.get('_meta',{}).get('status','empty'))" 2>/dev/null || echo "empty")
|
||||
|
||||
if [ "$FIGMA_AVAILABLE" = true ] && [ "$TOKEN_STATUS" = "empty" ]; then
|
||||
log_info "Tokens empty - syncing from Figma..."
|
||||
|
||||
# Get file key from config
|
||||
FIGMA_FILE_KEY=$(python3 -c "import json; print(json.load(open('$FIGMA_CONFIG')).get('uikit_reference',{}).get('file_key',''))" 2>/dev/null)
|
||||
|
||||
if [ -n "$FIGMA_FILE_KEY" ]; then
|
||||
log_info "File: $FIGMA_FILE_KEY"
|
||||
|
||||
# Run Figma sync script
|
||||
if python3 "$DSS_ROOT/scripts/figma-sync.py" --file-key "$FIGMA_FILE_KEY" 2>&1 | while read line; do
|
||||
echo " $line"
|
||||
done; then
|
||||
log_ok "Figma sync complete"
|
||||
else
|
||||
log_warn "Figma sync had issues - check output above"
|
||||
fi
|
||||
else
|
||||
log_warn "No UIKit file_key in config - add to .dss/config/figma.json"
|
||||
fi
|
||||
elif [ "$TOKEN_STATUS" != "empty" ]; then
|
||||
log_ok "Tokens already synced (status: $TOKEN_STATUS)"
|
||||
else
|
||||
log_warn "FIGMA_TOKEN not set - add to .dss/config/figma.json"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 9: Resolve 3-Layer Token Cascade
|
||||
# ============================================================================
|
||||
log_step "9. Resolving 3-layer token cascade..."
|
||||
|
||||
# Check if we have the 3-layer structure
|
||||
HAS_PRIMITIVES=$([ -f ".dss/core/primitives.json" ] && echo "yes" || echo "no")
|
||||
HAS_SKINS=$([ -d ".dss/skins/shadcn" ] && echo "yes" || echo "no")
|
||||
|
||||
if [ "$HAS_PRIMITIVES" = "yes" ] && [ "$HAS_SKINS" = "yes" ]; then
|
||||
log_info "Resolving: Core → Skin → Theme"
|
||||
if python3 scripts/resolve-tokens.py 2>&1 | while read line; do
|
||||
echo " $line"
|
||||
done; then
|
||||
log_ok "Token cascade resolved"
|
||||
else
|
||||
log_warn "Token resolution had issues"
|
||||
fi
|
||||
else
|
||||
log_warn "3-layer structure incomplete - using legacy tokens"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 10: Build CSS with style-dictionary
|
||||
# ============================================================================
|
||||
log_step "10. Building CSS from tokens..."
|
||||
|
||||
# Re-check if tokens have content (may have been resolved in step 9)
|
||||
# Check tokens.json which is the style-dictionary input file
|
||||
HAS_TOKENS=$(python3 -c "
|
||||
import json
|
||||
try:
|
||||
d = json.load(open('.dss/data/_system/tokens/tokens.json'))
|
||||
has_content = bool(d) # tokens.json has direct categories like typography, effect
|
||||
print('yes' if has_content else 'no')
|
||||
except:
|
||||
print('no')
|
||||
" 2>/dev/null || echo "no")
|
||||
|
||||
# Find style-dictionary (local, parent, home, or global)
|
||||
STYLE_DICT_CMD=""
|
||||
if [ -f "$DSS_ROOT/node_modules/.bin/style-dictionary" ]; then
|
||||
STYLE_DICT_CMD="$DSS_ROOT/node_modules/.bin/style-dictionary"
|
||||
elif [ -f "$HOME/node_modules/.bin/style-dictionary" ]; then
|
||||
STYLE_DICT_CMD="$HOME/node_modules/.bin/style-dictionary"
|
||||
elif [ -f "$(dirname "$DSS_ROOT")/node_modules/.bin/style-dictionary" ]; then
|
||||
STYLE_DICT_CMD="$(dirname "$DSS_ROOT")/node_modules/.bin/style-dictionary"
|
||||
elif command -v style-dictionary &> /dev/null; then
|
||||
STYLE_DICT_CMD="style-dictionary"
|
||||
fi
|
||||
|
||||
if [ "$HAS_TOKENS" = "yes" ] && [ -n "$STYLE_DICT_CMD" ]; then
|
||||
log_info "Running style-dictionary build..."
|
||||
cd .dss/data/_system
|
||||
if $STYLE_DICT_CMD build --config style-dictionary.config.json 2>&1 | grep -v "^$"; then
|
||||
log_ok "CSS/SCSS/JSON generated in .dss/data/_system/themes/"
|
||||
else
|
||||
log_warn "style-dictionary build had issues"
|
||||
fi
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
# Show what was generated
|
||||
if [ -f ".dss/data/_system/themes/tokens.css" ]; then
|
||||
CSS_VARS=$(grep -c "^ --" .dss/data/_system/themes/tokens.css 2>/dev/null || echo "0")
|
||||
log_ok "Generated $CSS_VARS CSS variables"
|
||||
fi
|
||||
elif [ "$HAS_TOKENS" = "no" ]; then
|
||||
log_warn "No tokens - run Figma sync first"
|
||||
elif [ -z "$STYLE_DICT_CMD" ]; then
|
||||
log_warn "style-dictionary not found"
|
||||
log_info "Install: npm install style-dictionary (in project root)"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 11: Generate Storybook Stories
|
||||
# ============================================================================
|
||||
log_step "11. Generating Storybook stories..."
|
||||
|
||||
if [ "$HAS_TOKENS" = "yes" ] && [ -f "scripts/generate-storybook.py" ]; then
|
||||
log_info "Building stories from tokens..."
|
||||
if python3 scripts/generate-storybook.py 2>&1 | while read line; do
|
||||
echo " $line"
|
||||
done; then
|
||||
STORY_COUNT=$(find admin-ui/src/stories -name "*.stories.js" 2>/dev/null | wc -l)
|
||||
log_ok "Generated $STORY_COUNT Storybook stories"
|
||||
else
|
||||
log_warn "Storybook generation had issues"
|
||||
fi
|
||||
else
|
||||
if [ "$HAS_TOKENS" = "no" ]; then
|
||||
log_warn "No tokens - skipping Storybook generation"
|
||||
else
|
||||
log_warn "Storybook generator not found (scripts/generate-storybook.py)"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 12: Regenerate Hash Manifest
|
||||
# ============================================================================
|
||||
log_step "12. Updating hash manifest..."
|
||||
|
||||
if [ -f "scripts/regenerate-core-hashes.sh" ]; then
|
||||
./scripts/regenerate-core-hashes.sh 2>/dev/null
|
||||
log_ok "Hash manifest updated"
|
||||
else
|
||||
log_warn "Hash script not found"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# SUMMARY
|
||||
# ============================================================================
|
||||
|
||||
# Re-check token status after Figma sync
|
||||
FINAL_TOKEN_STATUS=$(python3 -c "import json; d=json.load(open('.dss/data/_system/tokens/base.json')); print(d.get('_meta',{}).get('status','empty'))" 2>/dev/null || echo "empty")
|
||||
FINAL_TOKEN_COUNT=$(python3 -c "import json; d=json.load(open('.dss/data/_system/tokens/base.json')); print(sum(len(v) for v in d.get('tokens',{}).values()))" 2>/dev/null || echo "0")
|
||||
|
||||
# Determine overall status
|
||||
if [ -f ".dss/data/_system/themes/tokens.css" ]; then
|
||||
STATUS="READY"
|
||||
STATUS_MSG="admin-ui can import from .dss/data/_system/themes/"
|
||||
elif [ "$FINAL_TOKEN_STATUS" = "synced" ]; then
|
||||
STATUS="TOKENS SYNCED ($FINAL_TOKEN_COUNT tokens)"
|
||||
STATUS_MSG="Install style-dictionary to build CSS: npm install -g style-dictionary"
|
||||
elif [ "$FIGMA_AVAILABLE" = true ]; then
|
||||
STATUS="AWAITING FIGMA SYNC"
|
||||
STATUS_MSG="Run: dss_sync_figma or /dss-figma to populate tokens"
|
||||
else
|
||||
STATUS="AWAITING SETUP"
|
||||
STATUS_MSG="Set FIGMA_TOKEN environment variable first"
|
||||
fi
|
||||
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ DSS INITIALIZATION COMPLETE ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
echo " Status: $STATUS"
|
||||
echo " → $STATUS_MSG"
|
||||
echo ""
|
||||
echo " 3-Layer Architecture:"
|
||||
echo " .dss/core/primitives.json ← Core Tailwind primitives (immutable)"
|
||||
echo " .dss/skins/*/tokens.json ← Skins (semantic mappings)"
|
||||
echo " .dss/themes/*.json ← Themes (brand overrides)"
|
||||
echo " .dss/schema/skin-contract ← Contract for skin compatibility"
|
||||
echo ""
|
||||
echo " Output:"
|
||||
echo " .dss/data/_system/themes/ ← CSS output for admin-ui"
|
||||
echo " .dss/data/_system/tokens/ ← Token JSON files"
|
||||
echo " admin-ui/src/stories/ ← Storybook stories"
|
||||
echo ""
|
||||
echo " Workflow:"
|
||||
echo " 1. scripts/dss-init.sh --reset # Fresh start"
|
||||
echo " 2. dss_sync_figma # Populate from Figma"
|
||||
echo " 3. scripts/dss-init.sh # Validate + Build CSS"
|
||||
echo " 4. admin-ui imports themes/ # Ready to use"
|
||||
echo ""
|
||||
126
scripts/dss-reset.sh
Executable file
126
scripts/dss-reset.sh
Executable file
@@ -0,0 +1,126 @@
|
||||
#!/bin/bash
|
||||
# DSS Full Reset Script
|
||||
# Clears all DSS data, skins, tokens, and generated files
|
||||
#
|
||||
# Usage: scripts/dss-reset.sh [--confirm]
|
||||
# Without --confirm, runs in dry-run mode showing what would be deleted
|
||||
|
||||
set -e
|
||||
|
||||
DSS_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
DRY_RUN=true
|
||||
if [ "$1" = "--confirm" ]; then
|
||||
DRY_RUN=false
|
||||
fi
|
||||
|
||||
echo "=========================================="
|
||||
echo " DSS Full Reset"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
|
||||
if [ "$DRY_RUN" = true ]; then
|
||||
echo "🔍 DRY RUN MODE - No changes will be made"
|
||||
echo " Run with --confirm to execute reset"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
run_or_show() {
|
||||
if [ "$DRY_RUN" = true ]; then
|
||||
echo " Would run: $*"
|
||||
else
|
||||
eval "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
# 1. Clear .dss/data/ (preserves .dss/config/ which is IMMUTABLE)
|
||||
echo "1. Clearing .dss/data/ structure..."
|
||||
echo " (preserving .dss/config/ - Figma credentials)"
|
||||
run_or_show "rm -rf .dss/data/projects/* .dss/data/teams/* .dss/data/_system/cache/* .dss/data/_system/activity/* 2>/dev/null || true"
|
||||
run_or_show "rm -rf .dss/data/_system/tokens/* .dss/data/_system/themes/* .dss/data/_system/components/* 2>/dev/null || true"
|
||||
run_or_show "mkdir -p .dss/data/{projects,teams,_system/{cache,activity,tokens,themes,components}}"
|
||||
|
||||
# 2. Reset database
|
||||
echo "2. Resetting database..."
|
||||
run_or_show "rm -f .dss/dss.db .dss/dss.db.old"
|
||||
|
||||
# 3. Remove admin-ui DSS CSS (keep non-dss files)
|
||||
echo "3. Removing admin-ui DSS CSS files..."
|
||||
run_or_show "rm -f admin-ui/css/dss-*.css"
|
||||
|
||||
# 4. Remove generated stories and components
|
||||
echo "4. Removing generated stories and components..."
|
||||
run_or_show "rm -f admin-ui/src/components/*.stories.js admin-ui/src/components/ds-*.js"
|
||||
|
||||
# 5. Reset core_tokens
|
||||
echo "5. Resetting core_tokens..."
|
||||
if [ "$DRY_RUN" = false ]; then
|
||||
cat > dss/core_tokens/tokens.json << 'EOF'
|
||||
{
|
||||
"_meta": {
|
||||
"version": "1.0.0",
|
||||
"generated": null,
|
||||
"source": "awaiting Figma sync",
|
||||
"status": "empty"
|
||||
},
|
||||
"tokens": {}
|
||||
}
|
||||
EOF
|
||||
else
|
||||
echo " Would reset: dss/core_tokens/tokens.json"
|
||||
fi
|
||||
|
||||
# 6. Reset skins to empty
|
||||
echo "6. Resetting skins..."
|
||||
for skin in base classic workbench; do
|
||||
if [ "$DRY_RUN" = false ]; then
|
||||
if [ "$skin" = "base" ]; then
|
||||
extends_line=""
|
||||
else
|
||||
extends_line='"extends": "base",'
|
||||
fi
|
||||
cat > "dss-claude-plugin/core/skins/${skin}.json" << EOF
|
||||
{
|
||||
"meta": {
|
||||
"id": "${skin}",
|
||||
"version": "1.0.0",
|
||||
"description": "${skin^} skin - awaiting Figma sync"${extends_line:+,
|
||||
$extends_line}
|
||||
},
|
||||
"tokens": {}
|
||||
}
|
||||
EOF
|
||||
else
|
||||
echo " Would reset: dss-claude-plugin/core/skins/${skin}.json"
|
||||
fi
|
||||
done
|
||||
|
||||
# 7. Clear caches and logs
|
||||
echo "7. Clearing caches and logs..."
|
||||
run_or_show "rm -f .dss/logs/*.jsonl 2>/dev/null || true"
|
||||
run_or_show "rm -rf .dss/logs/browser-logs/* 2>/dev/null || true"
|
||||
run_or_show "touch .dss/logs/dss-operations.jsonl .dss/logs/git-hooks.jsonl"
|
||||
|
||||
# 8. Regenerate hash manifest
|
||||
echo "8. Regenerating hash manifest..."
|
||||
if [ "$DRY_RUN" = false ]; then
|
||||
./scripts/regenerate-core-hashes.sh
|
||||
else
|
||||
echo " Would run: ./scripts/regenerate-core-hashes.sh"
|
||||
fi
|
||||
|
||||
# 9. Stop Storybook if running
|
||||
echo "9. Stopping Storybook..."
|
||||
run_or_show "pkill -f storybook 2>/dev/null || true"
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
if [ "$DRY_RUN" = true ]; then
|
||||
echo " DRY RUN COMPLETE"
|
||||
echo " Run with --confirm to execute"
|
||||
else
|
||||
echo " DSS RESET COMPLETE"
|
||||
echo " Ready for fresh Figma sync"
|
||||
fi
|
||||
echo "=========================================="
|
||||
311
scripts/dss-services.sh
Executable file
311
scripts/dss-services.sh
Executable file
@@ -0,0 +1,311 @@
|
||||
#!/bin/bash
|
||||
# DSS Services Manager
|
||||
# Start, stop, and manage all DSS development services
|
||||
#
|
||||
# Usage: scripts/dss-services.sh <action> [--service NAME]
|
||||
#
|
||||
# Actions:
|
||||
# start - Start all services (or specific service)
|
||||
# stop - Stop all services (or specific service)
|
||||
# status - Show status of all services
|
||||
# restart - Restart all services (or specific service)
|
||||
# logs - Show service logs
|
||||
|
||||
set -e
|
||||
|
||||
DSS_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
# Colors
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
CYAN='\033[0;36m'
|
||||
NC='\033[0m'
|
||||
|
||||
# Service configuration
|
||||
declare -A SERVICES=(
|
||||
["api"]="8000"
|
||||
["admin-ui"]="3456"
|
||||
["storybook"]="6006"
|
||||
)
|
||||
|
||||
declare -A SERVICE_CMDS=(
|
||||
["api"]="uvicorn apps.api.server:app --host 0.0.0.0 --port 8000 --reload"
|
||||
["admin-ui"]="npm run dev"
|
||||
["storybook"]="npm run storybook"
|
||||
)
|
||||
|
||||
declare -A SERVICE_DIRS=(
|
||||
["api"]="$DSS_ROOT"
|
||||
["admin-ui"]="$DSS_ROOT/admin-ui"
|
||||
["storybook"]="$DSS_ROOT/admin-ui"
|
||||
)
|
||||
|
||||
declare -A SERVICE_LOGS=(
|
||||
["api"]="/tmp/dss-api.log"
|
||||
["admin-ui"]="/tmp/dss-admin-ui.log"
|
||||
["storybook"]="/tmp/dss-storybook.log"
|
||||
)
|
||||
|
||||
# Logging
|
||||
log_info() { echo -e "${CYAN}[INFO]${NC} $1"; }
|
||||
log_ok() { echo -e "${GREEN}[OK]${NC} $1"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
# Check if port is in use
|
||||
is_port_active() {
|
||||
local port=$1
|
||||
lsof -i ":$port" &>/dev/null
|
||||
}
|
||||
|
||||
# Get PID using port
|
||||
get_port_pid() {
|
||||
local port=$1
|
||||
lsof -ti ":$port" 2>/dev/null | head -1
|
||||
}
|
||||
|
||||
# Start a single service
|
||||
start_service() {
|
||||
local service=$1
|
||||
local port=${SERVICES[$service]}
|
||||
local cmd=${SERVICE_CMDS[$service]}
|
||||
local dir=${SERVICE_DIRS[$service]}
|
||||
local log=${SERVICE_LOGS[$service]}
|
||||
|
||||
if is_port_active "$port"; then
|
||||
log_warn "$service already running on port $port"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_info "Starting $service on port $port..."
|
||||
|
||||
cd "$dir"
|
||||
|
||||
if [ "$service" = "api" ]; then
|
||||
# Activate venv for API server
|
||||
source "$DSS_ROOT/.venv/bin/activate"
|
||||
nohup $cmd > "$log" 2>&1 &
|
||||
else
|
||||
nohup $cmd > "$log" 2>&1 &
|
||||
fi
|
||||
|
||||
local pid=$!
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
# Wait for service to start
|
||||
local max_wait=30
|
||||
local waited=0
|
||||
while [ $waited -lt $max_wait ]; do
|
||||
if is_port_active "$port"; then
|
||||
log_ok "$service started (PID: $(get_port_pid $port), port: $port)"
|
||||
return 0
|
||||
fi
|
||||
sleep 1
|
||||
((waited++))
|
||||
done
|
||||
|
||||
log_error "$service failed to start (check $log)"
|
||||
return 1
|
||||
}
|
||||
|
||||
# Stop a single service
|
||||
stop_service() {
|
||||
local service=$1
|
||||
local port=${SERVICES[$service]}
|
||||
|
||||
if ! is_port_active "$port"; then
|
||||
log_info "$service not running"
|
||||
return 0
|
||||
fi
|
||||
|
||||
local pid=$(get_port_pid "$port")
|
||||
log_info "Stopping $service (PID: $pid)..."
|
||||
|
||||
kill "$pid" 2>/dev/null || true
|
||||
sleep 1
|
||||
|
||||
# Force kill if still running
|
||||
if is_port_active "$port"; then
|
||||
kill -9 "$(get_port_pid $port)" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
log_ok "$service stopped"
|
||||
}
|
||||
|
||||
# Show status of all services
|
||||
show_status() {
|
||||
echo ""
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ DSS SERVICES STATUS ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
printf " %-12s %-8s %-8s %-30s\n" "SERVICE" "PORT" "STATUS" "URL"
|
||||
echo " ──────────────────────────────────────────────────────────────"
|
||||
|
||||
for service in api admin-ui storybook; do
|
||||
local port=${SERVICES[$service]}
|
||||
local status="STOPPED"
|
||||
local color=$RED
|
||||
local url="-"
|
||||
|
||||
if is_port_active "$port"; then
|
||||
status="RUNNING"
|
||||
color=$GREEN
|
||||
url="http://localhost:$port"
|
||||
fi
|
||||
|
||||
printf " %-12s %-8s ${color}%-8s${NC} %-30s\n" "$service" "$port" "$status" "$url"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo " Logs:"
|
||||
for service in api admin-ui storybook; do
|
||||
echo " $service: ${SERVICE_LOGS[$service]}"
|
||||
done
|
||||
echo ""
|
||||
}
|
||||
|
||||
# Show logs for a service
|
||||
show_logs() {
|
||||
local service=$1
|
||||
local log=${SERVICE_LOGS[$service]}
|
||||
|
||||
if [ -z "$log" ]; then
|
||||
log_error "Unknown service: $service"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [ ! -f "$log" ]; then
|
||||
log_warn "No log file found: $log"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "=== $service logs ($log) ==="
|
||||
tail -50 "$log"
|
||||
}
|
||||
|
||||
# Main command handling
|
||||
ACTION=${1:-status}
|
||||
SPECIFIC_SERVICE=""
|
||||
|
||||
# Parse arguments
|
||||
shift || true
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--service|-s)
|
||||
SPECIFIC_SERVICE="$2"
|
||||
shift 2
|
||||
;;
|
||||
api|admin-ui|storybook)
|
||||
SPECIFIC_SERVICE="$1"
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
case $ACTION in
|
||||
start)
|
||||
echo ""
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ STARTING DSS SERVICES ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
|
||||
if [ -n "$SPECIFIC_SERVICE" ]; then
|
||||
start_service "$SPECIFIC_SERVICE"
|
||||
else
|
||||
for service in api admin-ui storybook; do
|
||||
start_service "$service"
|
||||
done
|
||||
fi
|
||||
|
||||
echo ""
|
||||
show_status
|
||||
;;
|
||||
|
||||
stop)
|
||||
echo ""
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ STOPPING DSS SERVICES ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
|
||||
if [ -n "$SPECIFIC_SERVICE" ]; then
|
||||
stop_service "$SPECIFIC_SERVICE"
|
||||
else
|
||||
for service in api admin-ui storybook; do
|
||||
stop_service "$service"
|
||||
done
|
||||
fi
|
||||
|
||||
echo ""
|
||||
show_status
|
||||
;;
|
||||
|
||||
restart)
|
||||
echo ""
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ RESTARTING DSS SERVICES ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
|
||||
if [ -n "$SPECIFIC_SERVICE" ]; then
|
||||
stop_service "$SPECIFIC_SERVICE"
|
||||
sleep 1
|
||||
start_service "$SPECIFIC_SERVICE"
|
||||
else
|
||||
for service in api admin-ui storybook; do
|
||||
stop_service "$service"
|
||||
done
|
||||
sleep 2
|
||||
for service in api admin-ui storybook; do
|
||||
start_service "$service"
|
||||
done
|
||||
fi
|
||||
|
||||
echo ""
|
||||
show_status
|
||||
;;
|
||||
|
||||
status)
|
||||
show_status
|
||||
;;
|
||||
|
||||
logs)
|
||||
if [ -n "$SPECIFIC_SERVICE" ]; then
|
||||
show_logs "$SPECIFIC_SERVICE"
|
||||
else
|
||||
for service in api admin-ui storybook; do
|
||||
show_logs "$service"
|
||||
echo ""
|
||||
done
|
||||
fi
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "DSS Services Manager"
|
||||
echo ""
|
||||
echo "Usage: $0 <action> [--service NAME]"
|
||||
echo ""
|
||||
echo "Actions:"
|
||||
echo " start Start all services (or specific service)"
|
||||
echo " stop Stop all services (or specific service)"
|
||||
echo " status Show status of all services"
|
||||
echo " restart Restart all services (or specific service)"
|
||||
echo " logs Show service logs"
|
||||
echo ""
|
||||
echo "Services: api, admin-ui, storybook"
|
||||
echo ""
|
||||
echo "Examples:"
|
||||
echo " $0 start # Start all services"
|
||||
echo " $0 start --service api # Start only API server"
|
||||
echo " $0 stop storybook # Stop Storybook"
|
||||
echo " $0 logs admin-ui # Show admin-ui logs"
|
||||
;;
|
||||
esac
|
||||
178
scripts/dss-setup.sh
Executable file
178
scripts/dss-setup.sh
Executable file
@@ -0,0 +1,178 @@
|
||||
#!/bin/bash
|
||||
# DSS Complete Setup Script
|
||||
# Sets up MCP, initializes DSS structure, and starts services
|
||||
#
|
||||
# Usage: scripts/dss-setup.sh [--reset] [--skip-servers]
|
||||
#
|
||||
# Flow:
|
||||
# 1. Generate MCP configuration
|
||||
# 2. Install dependencies if needed
|
||||
# 3. Initialize DSS structure (dss-init.sh)
|
||||
# 4. Start development servers
|
||||
|
||||
set -e
|
||||
|
||||
DSS_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
# Parse arguments
|
||||
RESET=false
|
||||
SKIP_SERVERS=false
|
||||
for arg in "$@"; do
|
||||
case $arg in
|
||||
--reset) RESET=true ;;
|
||||
--skip-servers) SKIP_SERVERS=true ;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Colors
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
CYAN='\033[0;36m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_step() { echo -e "${BLUE}[SETUP]${NC} $1"; }
|
||||
log_ok() { echo -e "${GREEN}[OK]${NC} $1"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
log_info() { echo -e "${CYAN}[INFO]${NC} $1"; }
|
||||
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ DSS COMPLETE SETUP ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 1: Generate MCP Configuration
|
||||
# ============================================================================
|
||||
log_step "1. Generating MCP configuration..."
|
||||
|
||||
cat > "$DSS_ROOT/.mcp.json" << EOF
|
||||
{
|
||||
"\$schema": "https://raw.githubusercontent.com/anthropics/claude-code/main/schemas/mcp-servers.schema.json",
|
||||
"mcpServers": {
|
||||
"dss": {
|
||||
"command": "$DSS_ROOT/.venv/bin/python3",
|
||||
"args": ["$DSS_ROOT/dss-claude-plugin/servers/dss-mcp-server.py"],
|
||||
"env": {
|
||||
"PYTHONPATH": "$DSS_ROOT:$DSS_ROOT/dss-claude-plugin",
|
||||
"DSS_HOME": "$DSS_ROOT/.dss",
|
||||
"DSS_DATABASE": "$DSS_ROOT/.dss/dss.db",
|
||||
"DSS_CACHE": "$DSS_ROOT/.dss/cache",
|
||||
"DSS_BASE_PATH": "$DSS_ROOT"
|
||||
},
|
||||
"description": "Design System Server MCP - local development"
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
log_ok "MCP config generated: .mcp.json"
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 2: Check/Install Dependencies
|
||||
# ============================================================================
|
||||
log_step "2. Checking dependencies..."
|
||||
|
||||
# Check Python venv
|
||||
if [ ! -d "$DSS_ROOT/.venv" ]; then
|
||||
log_info "Creating Python virtual environment..."
|
||||
python3 -m venv "$DSS_ROOT/.venv"
|
||||
fi
|
||||
|
||||
# Activate venv and check packages
|
||||
source "$DSS_ROOT/.venv/bin/activate"
|
||||
if ! python3 -c "import mcp" 2>/dev/null; then
|
||||
log_info "Installing MCP package..."
|
||||
pip install mcp 2>/dev/null || log_warn "MCP package install failed"
|
||||
fi
|
||||
log_ok "Python venv ready"
|
||||
|
||||
# Check admin-ui node_modules
|
||||
if [ ! -d "$DSS_ROOT/admin-ui/node_modules" ]; then
|
||||
log_info "Installing admin-ui dependencies..."
|
||||
cd "$DSS_ROOT/admin-ui" && npm install
|
||||
cd "$DSS_ROOT"
|
||||
fi
|
||||
log_ok "Node dependencies ready"
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 3: Initialize DSS Structure
|
||||
# ============================================================================
|
||||
log_step "3. Running DSS initialization..."
|
||||
|
||||
if [ "$RESET" = true ]; then
|
||||
"$DSS_ROOT/scripts/dss-init.sh" --reset
|
||||
else
|
||||
"$DSS_ROOT/scripts/dss-init.sh"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# ============================================================================
|
||||
# STEP 4: Start Development Servers
|
||||
# ============================================================================
|
||||
if [ "$SKIP_SERVERS" = false ]; then
|
||||
log_step "4. Starting development servers..."
|
||||
|
||||
# Kill existing processes
|
||||
pkill -f "vite.*admin-ui" 2>/dev/null || true
|
||||
pkill -f "storybook.*6006" 2>/dev/null || true
|
||||
sleep 1
|
||||
|
||||
# Start admin-ui (Vite)
|
||||
cd "$DSS_ROOT/admin-ui"
|
||||
nohup npm run dev > /tmp/dss-admin-ui.log 2>&1 &
|
||||
VITE_PID=$!
|
||||
log_info "admin-ui starting (PID: $VITE_PID)..."
|
||||
|
||||
# Start Storybook
|
||||
nohup npm run storybook > /tmp/dss-storybook.log 2>&1 &
|
||||
SB_PID=$!
|
||||
log_info "Storybook starting (PID: $SB_PID)..."
|
||||
|
||||
cd "$DSS_ROOT"
|
||||
|
||||
# Wait for servers
|
||||
sleep 5
|
||||
|
||||
# Check status
|
||||
if curl -s -o /dev/null -w "" http://localhost:3456 2>/dev/null; then
|
||||
log_ok "admin-ui running on http://localhost:3456"
|
||||
else
|
||||
log_warn "admin-ui not responding yet (check /tmp/dss-admin-ui.log)"
|
||||
fi
|
||||
|
||||
if curl -s -o /dev/null -w "" http://localhost:6006 2>/dev/null; then
|
||||
log_ok "Storybook running on http://localhost:6006"
|
||||
else
|
||||
log_warn "Storybook not responding yet (check /tmp/dss-storybook.log)"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
else
|
||||
log_step "4. Skipping servers (--skip-servers)"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# ============================================================================
|
||||
# SUMMARY
|
||||
# ============================================================================
|
||||
echo "╔══════════════════════════════════════════════════════════════╗"
|
||||
echo "║ DSS SETUP COMPLETE ║"
|
||||
echo "╚══════════════════════════════════════════════════════════════╝"
|
||||
echo ""
|
||||
echo " Services:"
|
||||
echo " admin-ui: http://localhost:3456"
|
||||
echo " Storybook: http://localhost:6006"
|
||||
echo ""
|
||||
echo " Logs:"
|
||||
echo " /tmp/dss-admin-ui.log"
|
||||
echo " /tmp/dss-storybook.log"
|
||||
echo ""
|
||||
echo " Next: Restart Claude Code to load DSS MCP server"
|
||||
echo ""
|
||||
1105
scripts/figma-sync.py
Executable file
1105
scripts/figma-sync.py
Executable file
File diff suppressed because it is too large
Load Diff
905
scripts/generate-storybook.py
Executable file
905
scripts/generate-storybook.py
Executable file
@@ -0,0 +1,905 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
DSS Storybook Generator
|
||||
Generates Storybook stories from DSS tokens and component registry.
|
||||
|
||||
Hierarchy:
|
||||
1. Primitives (Foundation) - colors, spacing, typography, radius, shadows
|
||||
2. Semantic Tokens (Design Tokens) - from skin
|
||||
3. Components - from shadcn registry
|
||||
|
||||
Usage: python3 scripts/generate-storybook.py [--output PATH] [--skin SKIN]
|
||||
|
||||
Default output: admin-ui/src/stories/
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List
|
||||
|
||||
DSS_ROOT = Path(__file__).parent.parent
|
||||
DSS_DATA = DSS_ROOT / ".dss"
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
|
||||
"""Load JSON file, return empty dict if not found"""
|
||||
if not path.exists():
|
||||
return {}
|
||||
with open(path) as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def ensure_dir(path: Path):
|
||||
"""Ensure directory exists"""
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def generate_color_primitives_story(primitives: dict, output_dir: Path):
|
||||
"""Generate story for color primitives (full Tailwind palette)"""
|
||||
colors = primitives.get("color", {})
|
||||
if not colors:
|
||||
return
|
||||
|
||||
# Build organized sections
|
||||
base_section = ""
|
||||
neutral_section = ""
|
||||
semantic_section = ""
|
||||
|
||||
# Base colors
|
||||
base = colors.get("base", {})
|
||||
base_swatches = []
|
||||
for name, data in base.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
border = "border: 1px solid #e5e7eb;" if data["value"] in ["#ffffff", "transparent"] else ""
|
||||
base_swatches.append(f'''
|
||||
<div class="color-swatch">
|
||||
<div class="swatch" style="background: {data['value']}; {border}"></div>
|
||||
<div class="label">{name}</div>
|
||||
<div class="value">{data['value']}</div>
|
||||
</div>''')
|
||||
if base_swatches:
|
||||
base_section = f'''
|
||||
<div class="color-section">
|
||||
<h2>Base</h2>
|
||||
<div class="swatch-row">{''.join(base_swatches)}</div>
|
||||
</div>'''
|
||||
|
||||
# Neutral scales
|
||||
neutrals = colors.get("neutral", {})
|
||||
neutral_palettes = []
|
||||
for scale_name, scale in neutrals.items():
|
||||
if scale_name.startswith("_"):
|
||||
continue
|
||||
if isinstance(scale, dict):
|
||||
shades = []
|
||||
for shade, data in sorted(scale.items(), key=lambda x: int(x[0]) if x[0].isdigit() else 0):
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
text_color = "#000" if int(shade) < 500 else "#fff"
|
||||
shades.append(f'''
|
||||
<div class="shade" style="background: {data['value']}; color: {text_color};">
|
||||
<span>{shade}</span>
|
||||
</div>''')
|
||||
if shades:
|
||||
neutral_palettes.append(f'''
|
||||
<div class="color-palette">
|
||||
<h3>{scale_name}</h3>
|
||||
<div class="shades">{''.join(shades)}</div>
|
||||
</div>''')
|
||||
if neutral_palettes:
|
||||
neutral_section = f'''
|
||||
<div class="color-section">
|
||||
<h2>Neutral Scales</h2>
|
||||
<div class="palette-grid">{''.join(neutral_palettes)}</div>
|
||||
</div>'''
|
||||
|
||||
# Semantic scales
|
||||
semantics = colors.get("semantic", {})
|
||||
semantic_palettes = []
|
||||
for scale_name, scale in semantics.items():
|
||||
if scale_name.startswith("_"):
|
||||
continue
|
||||
if isinstance(scale, dict):
|
||||
shades = []
|
||||
for shade, data in sorted(scale.items(), key=lambda x: int(x[0]) if x[0].isdigit() else 0):
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
text_color = "#000" if int(shade) < 500 else "#fff"
|
||||
shades.append(f'''
|
||||
<div class="shade" style="background: {data['value']}; color: {text_color};">
|
||||
<span>{shade}</span>
|
||||
</div>''')
|
||||
if shades:
|
||||
semantic_palettes.append(f'''
|
||||
<div class="color-palette">
|
||||
<h3>{scale_name}</h3>
|
||||
<div class="shades">{''.join(shades)}</div>
|
||||
</div>''')
|
||||
if semantic_palettes:
|
||||
semantic_section = f'''
|
||||
<div class="color-section">
|
||||
<h2>Semantic Scales</h2>
|
||||
<div class="palette-grid">{''.join(semantic_palettes)}</div>
|
||||
</div>'''
|
||||
|
||||
story = f'''/**
|
||||
* Color Primitives - Foundation
|
||||
* Full Tailwind color palette organized by category
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Foundation/Colors/Primitives',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'Core color primitives from Tailwind palette. Organized into Base, Neutral, and Semantic scales.'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.color-container {{ font-family: system-ui, sans-serif; }}
|
||||
.color-section {{ margin-bottom: 3rem; }}
|
||||
.color-section h2 {{ font-size: 1.25rem; border-bottom: 1px solid #e5e7eb; padding-bottom: 0.5rem; margin-bottom: 1rem; }}
|
||||
.swatch-row {{ display: flex; flex-wrap: wrap; gap: 1rem; }}
|
||||
.color-swatch {{ text-align: center; }}
|
||||
.swatch {{ width: 80px; height: 80px; border-radius: 8px; margin-bottom: 0.5rem; }}
|
||||
.label {{ font-weight: 500; font-size: 0.875rem; }}
|
||||
.value {{ font-family: monospace; font-size: 0.75rem; color: #6b7280; }}
|
||||
.palette-grid {{ display: flex; flex-wrap: wrap; gap: 2rem; }}
|
||||
.color-palette {{ min-width: 140px; }}
|
||||
.color-palette h3 {{ margin: 0 0 0.5rem; text-transform: capitalize; font-size: 0.875rem; font-weight: 600; }}
|
||||
.shades {{ display: flex; flex-direction: column; border-radius: 8px; overflow: hidden; }}
|
||||
.shade {{ padding: 0.5rem 0.75rem; font-family: monospace; font-size: 0.65rem; }}
|
||||
`;
|
||||
|
||||
export const AllColors = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="color-container">
|
||||
{base_section}
|
||||
{neutral_section}
|
||||
{semantic_section}
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "ColorPrimitives.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] ColorPrimitives.stories.js")
|
||||
|
||||
|
||||
def generate_spacing_story(primitives: dict, output_dir: Path):
|
||||
"""Generate story for spacing primitives"""
|
||||
spacing = primitives.get("spacing", {})
|
||||
if not spacing:
|
||||
return
|
||||
|
||||
items = []
|
||||
for name, data in sorted(spacing.items(), key=lambda x: float(x[0]) if x[0].replace('.', '').isdigit() else -1):
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
comment = data.get("_comment", "")
|
||||
items.append(f'''
|
||||
<div class="spacing-item">
|
||||
<div class="bar" style="width: {data['value']};"></div>
|
||||
<div class="info">
|
||||
<span class="name">{name}</span>
|
||||
<span class="value">{data['value']}</span>
|
||||
<span class="comment">{comment}</span>
|
||||
</div>
|
||||
</div>''')
|
||||
|
||||
story = f'''/**
|
||||
* Spacing Primitives - Foundation
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Foundation/Spacing',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'Spacing scale based on 4px grid. Use for margins, padding, and gaps.'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.spacing-grid {{ display: flex; flex-direction: column; gap: 0.25rem; max-width: 600px; }}
|
||||
.spacing-item {{ display: flex; align-items: center; gap: 1rem; }}
|
||||
.bar {{ height: 20px; background: var(--color-primary, #18181b); border-radius: 2px; min-width: 2px; }}
|
||||
.info {{ display: flex; gap: 1rem; font-family: monospace; font-size: 0.75rem; }}
|
||||
.name {{ font-weight: 600; min-width: 32px; }}
|
||||
.value {{ color: #6b7280; min-width: 80px; }}
|
||||
.comment {{ color: #9ca3af; font-size: 0.65rem; }}
|
||||
`;
|
||||
|
||||
export const SpacingScale = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="spacing-grid">
|
||||
{''.join(items)}
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "Spacing.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] Spacing.stories.js")
|
||||
|
||||
|
||||
def generate_typography_story(primitives: dict, resolved: dict, output_dir: Path):
|
||||
"""Generate story for typography tokens"""
|
||||
font = primitives.get("font", {})
|
||||
typography = resolved.get("typography", {})
|
||||
|
||||
# Font families
|
||||
families = font.get("family", {})
|
||||
family_samples = []
|
||||
for name, data in families.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
family_samples.append(f'''
|
||||
<div class="font-sample">
|
||||
<div class="sample-text" style="font-family: {data['value']};">
|
||||
The quick brown fox jumps over the lazy dog
|
||||
</div>
|
||||
<div class="meta">
|
||||
<span class="name">{name}</span>
|
||||
<span class="value">{data['value'][:40]}...</span>
|
||||
</div>
|
||||
</div>''')
|
||||
|
||||
# Font sizes
|
||||
sizes = font.get("size", {})
|
||||
size_samples = []
|
||||
for name, data in sorted(sizes.items(), key=lambda x: float(x[0].replace('xl', '').replace('x', '')) if x[0].replace('xl', '').replace('x', '').replace('.', '').isdigit() else 0):
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
size_samples.append(f'''
|
||||
<div class="size-sample">
|
||||
<span class="text" style="font-size: {data['value']};">Aa</span>
|
||||
<span class="name">{name}</span>
|
||||
<span class="value">{data['value']}</span>
|
||||
</div>''')
|
||||
|
||||
# Font weights
|
||||
weights = font.get("weight", {})
|
||||
weight_samples = []
|
||||
for name, data in weights.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
weight_samples.append(f'''
|
||||
<div class="weight-sample">
|
||||
<span class="text" style="font-weight: {data['value']};">Aa</span>
|
||||
<span class="name">{name}</span>
|
||||
<span class="value">{data['value']}</span>
|
||||
</div>''')
|
||||
|
||||
# Typography styles from resolved tokens
|
||||
style_samples = []
|
||||
for name, props in typography.items():
|
||||
if isinstance(props, dict):
|
||||
font_family = props.get("font-family", {}).get("value", "Inter")
|
||||
font_size = props.get("font-size", {}).get("value", "16px")
|
||||
font_weight = props.get("font-weight", {}).get("value", 400)
|
||||
line_height = props.get("line-height", {}).get("value", "1.5")
|
||||
|
||||
style_samples.append(f'''
|
||||
<div class="style-sample">
|
||||
<div class="text" style="font-family: {font_family}; font-size: {font_size}; font-weight: {font_weight}; line-height: {line_height};">
|
||||
The quick brown fox
|
||||
</div>
|
||||
<div class="meta">
|
||||
<span class="name">{name}</span>
|
||||
<span class="props">{font_size} / {font_weight}</span>
|
||||
</div>
|
||||
</div>''')
|
||||
|
||||
story = f'''/**
|
||||
* Typography - Foundation
|
||||
* Font families, sizes, weights, and composed styles
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Foundation/Typography',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'Typography primitives and composed text styles.'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.typo-container {{ font-family: system-ui, sans-serif; }}
|
||||
.section {{ margin-bottom: 3rem; }}
|
||||
.section h2 {{ font-size: 1.25rem; border-bottom: 1px solid #e5e7eb; padding-bottom: 0.5rem; margin-bottom: 1rem; }}
|
||||
.font-sample {{ margin-bottom: 1.5rem; }}
|
||||
.sample-text {{ font-size: 1.5rem; margin-bottom: 0.25rem; }}
|
||||
.meta {{ font-size: 0.75rem; font-family: monospace; }}
|
||||
.meta .name {{ font-weight: 600; margin-right: 1rem; }}
|
||||
.meta .value {{ color: #6b7280; }}
|
||||
.size-grid, .weight-grid {{ display: flex; flex-wrap: wrap; gap: 1.5rem; }}
|
||||
.size-sample, .weight-sample {{ text-align: center; min-width: 60px; }}
|
||||
.size-sample .text {{ display: block; margin-bottom: 0.25rem; }}
|
||||
.weight-sample .text {{ display: block; margin-bottom: 0.25rem; font-size: 1.5rem; }}
|
||||
.name {{ font-size: 0.75rem; font-weight: 500; display: block; }}
|
||||
.value {{ font-size: 0.65rem; color: #6b7280; font-family: monospace; }}
|
||||
.props {{ color: #6b7280; font-family: monospace; }}
|
||||
.style-sample {{ margin-bottom: 1rem; border-bottom: 1px solid #f3f4f6; padding-bottom: 1rem; }}
|
||||
.style-sample .text {{ margin-bottom: 0.25rem; color: var(--color-foreground, #18181b); }}
|
||||
`;
|
||||
|
||||
export const FontFamilies = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="typo-container">
|
||||
<div class="section">
|
||||
<h2>Font Families</h2>
|
||||
{''.join(family_samples)}
|
||||
</div>
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
|
||||
export const FontSizes = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="typo-container">
|
||||
<div class="section">
|
||||
<h2>Font Sizes</h2>
|
||||
<div class="size-grid">{''.join(size_samples)}</div>
|
||||
</div>
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
|
||||
export const FontWeights = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="typo-container">
|
||||
<div class="section">
|
||||
<h2>Font Weights</h2>
|
||||
<div class="weight-grid">{''.join(weight_samples)}</div>
|
||||
</div>
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
|
||||
export const TextStyles = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="typo-container">
|
||||
<div class="section">
|
||||
<h2>Composed Text Styles</h2>
|
||||
{''.join(style_samples)}
|
||||
</div>
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "Typography.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] Typography.stories.js")
|
||||
|
||||
|
||||
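Each of these loops assumes the same primitive shape: a category dict whose entries carry a "value" key, with "_"-prefixed keys reserved for metadata. A minimal sketch of the input being iterated (the "font" nesting and the sample names/values are assumptions, not taken from this commit):

# Illustrative only -- assumed shape read from primitives.json:
# "font": {
#   "size":   {"_comment": "...", "sm": {"value": "14px"}, "base": {"value": "16px"}},
#   "weight": {"normal": {"value": 400}, "bold": {"value": 700}}
# }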
def generate_shadows_story(primitives: dict, resolved: dict, output_dir: Path):
|
||||
"""Generate story for shadow tokens"""
|
||||
shadows = primitives.get("shadow", {})
|
||||
effects = resolved.get("effect", {})
|
||||
|
||||
# Combine both sources
|
||||
all_shadows = {}
|
||||
for name, data in shadows.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
all_shadows[name] = data["value"]
|
||||
|
||||
for name, data in effects.items():
|
||||
if isinstance(data, dict) and "value" in data and "shadow" in name:
|
||||
all_shadows[name] = data["value"]
|
||||
|
||||
items = []
|
||||
for name, value in all_shadows.items():
|
||||
items.append(f'''
|
||||
<div class="shadow-card">
|
||||
<div class="box" style="box-shadow: {value};"></div>
|
||||
<div class="name">{name}</div>
|
||||
<div class="value">{value[:50]}{"..." if len(value) > 50 else ""}</div>
|
||||
</div>''')
|
||||
|
||||
story = f'''/**
|
||||
* Shadows - Foundation
|
||||
* Box shadow scale
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Foundation/Effects/Shadows',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'Box shadow tokens for elevation and depth.'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.shadows-grid {{ display: flex; flex-wrap: wrap; gap: 2rem; padding: 2rem; background: #f9fafb; }}
|
||||
.shadow-card {{ text-align: center; }}
|
||||
.box {{ width: 120px; height: 80px; background: white; border-radius: 8px; margin-bottom: 0.5rem; }}
|
||||
.name {{ font-size: 0.75rem; font-weight: 500; }}
|
||||
.value {{ font-size: 0.6rem; color: #6b7280; font-family: monospace; max-width: 120px; word-wrap: break-word; }}
|
||||
`;
|
||||
|
||||
export const AllShadows = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="shadows-grid">
|
||||
{''.join(items)}
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "Shadows.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] Shadows.stories.js")
|
||||
|
||||
|
||||
def generate_radius_story(primitives: dict, output_dir: Path):
|
||||
"""Generate story for border radius tokens"""
|
||||
radius = primitives.get("radius", {})
|
||||
if not radius:
|
||||
return
|
||||
|
||||
items = []
|
||||
for name, data in radius.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
comment = data.get("_comment", "")
|
||||
items.append(f'''
|
||||
<div class="radius-item">
|
||||
<div class="box" style="border-radius: {data['value']};"></div>
|
||||
<div class="name">{name}</div>
|
||||
<div class="value">{data['value']}</div>
|
||||
<div class="comment">{comment}</div>
|
||||
</div>''')
|
||||
|
||||
story = f'''/**
|
||||
* Border Radius - Foundation
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Foundation/Radius',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'Border radius scale for consistent rounded corners.'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.radius-grid {{ display: flex; flex-wrap: wrap; gap: 2rem; }}
|
||||
.radius-item {{ text-align: center; }}
|
||||
.box {{ width: 80px; height: 80px; background: var(--color-primary, #18181b); margin-bottom: 0.5rem; }}
|
||||
.name {{ font-weight: 500; font-size: 0.875rem; }}
|
||||
.value {{ font-family: monospace; font-size: 0.75rem; color: #6b7280; }}
|
||||
.comment {{ font-size: 0.65rem; color: #9ca3af; }}
|
||||
`;
|
||||
|
||||
export const RadiusScale = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="radius-grid">
|
||||
{''.join(items)}
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "Radius.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] Radius.stories.js")
|
||||
|
||||
|
||||
def generate_semantic_colors_story(resolved: dict, output_dir: Path):
|
||||
"""Generate story for semantic color tokens"""
|
||||
colors = resolved.get("color", {})
|
||||
if not colors:
|
||||
return
|
||||
|
||||
# Group by semantic category
|
||||
groups = {
|
||||
"Surface": [],
|
||||
"Primary": [],
|
||||
"Secondary": [],
|
||||
"Accent": [],
|
||||
"Muted": [],
|
||||
"Destructive": [],
|
||||
"Other": []
|
||||
}
|
||||
|
||||
for name, data in colors.items():
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
value = data["value"]
|
||||
comment = data.get("comment", "")
|
||||
card = f'''
|
||||
<div class="token-card">
|
||||
<div class="swatch" style="background: {value};"></div>
|
||||
<div class="name">{name}</div>
|
||||
<div class="value">{value}</div>
|
||||
<div class="comment">{comment}</div>
|
||||
</div>'''
|
||||
|
||||
if any(x in name for x in ["background", "foreground", "card", "popover"]):
|
||||
groups["Surface"].append(card)
|
||||
elif "primary" in name:
|
||||
groups["Primary"].append(card)
|
||||
elif "secondary" in name:
|
||||
groups["Secondary"].append(card)
|
||||
elif "accent" in name:
|
||||
groups["Accent"].append(card)
|
||||
elif "muted" in name:
|
||||
groups["Muted"].append(card)
|
||||
elif "destructive" in name:
|
||||
groups["Destructive"].append(card)
|
||||
else:
|
||||
groups["Other"].append(card)
|
||||
|
||||
sections = []
|
||||
for group_name, cards in groups.items():
|
||||
if cards:
|
||||
sections.append(f'''
|
||||
<div class="token-group">
|
||||
<h3>{group_name}</h3>
|
||||
<div class="token-row">{''.join(cards)}</div>
|
||||
</div>''')
|
||||
|
||||
story = f'''/**
|
||||
* Semantic Colors - Design Tokens
|
||||
* Resolved color tokens for light theme
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Tokens/Semantic Colors',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'Semantic color tokens mapped from primitives. Use these in components via CSS variables.'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.semantic-colors {{ display: flex; flex-direction: column; gap: 2rem; }}
|
||||
.token-group h3 {{ margin: 0 0 1rem; font-size: 1rem; border-bottom: 1px solid #e5e7eb; padding-bottom: 0.5rem; }}
|
||||
.token-row {{ display: flex; flex-wrap: wrap; gap: 1rem; }}
|
||||
.token-card {{ text-align: center; }}
|
||||
.swatch {{ width: 80px; height: 60px; border-radius: 8px; border: 1px solid #e5e7eb; margin-bottom: 0.5rem; }}
|
||||
.name {{ font-size: 0.75rem; font-weight: 500; }}
|
||||
.value {{ font-family: monospace; font-size: 0.65rem; color: #6b7280; }}
|
||||
.comment {{ font-size: 0.6rem; color: #9ca3af; max-width: 80px; }}
|
||||
`;
|
||||
|
||||
export const LightTheme = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="semantic-colors">
|
||||
{''.join(sections)}
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "SemanticColors.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] SemanticColors.stories.js")
|
||||
|
||||
|
||||
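The swatches above document tokens that reach components as CSS custom properties (the Overview story's usage section shows var(--color-primary)). A rough sketch of that flattening step, illustrative only and not the actual style-dictionary build used by the pipeline:

# Illustrative only -- naming convention sketch, not the real build:
# def to_css_vars(resolved: dict) -> str:
#     lines = [":root {"]
#     for name, data in resolved.get("color", {}).items():
#         if isinstance(data, dict) and "value" in data:
#             lines.append(f"  --color-{name}: {data['value']};")
#     lines.append("}")
#     return "\n".join(lines)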
def generate_component_stories(registry: dict, output_dir: Path):
|
||||
"""Generate stories for shadcn components from registry"""
|
||||
components = registry.get("components", {})
|
||||
categories = registry.get("categories", {})
|
||||
|
||||
if not components:
|
||||
print(" [SKIP] No components in registry")
|
||||
return
|
||||
|
||||
# Generate a story file per category
|
||||
for cat_id, cat_data in categories.items():
|
||||
cat_name = cat_data.get("name", cat_id.title())
|
||||
cat_desc = cat_data.get("description", "")
|
||||
cat_components = cat_data.get("components", [])
|
||||
|
||||
component_cards = []
|
||||
for comp_id in cat_components:
|
||||
comp = components.get(comp_id, {})
|
||||
if not comp:
|
||||
continue
|
||||
|
||||
name = comp.get("name", comp_id)
|
||||
desc = comp.get("description", "")
|
||||
variants = comp.get("variants", {})
|
||||
radix = comp.get("radixPrimitive", "")
|
||||
deps = comp.get("dependencies", [])
|
||||
|
||||
variant_badges = ""
|
||||
if variants:
|
||||
for var_name, var_values in variants.items():
|
||||
badges = " ".join([f'<span class="badge">{v}</span>' for v in var_values[:4]])
|
||||
variant_badges += f'<div class="variant-row"><span class="var-name">{var_name}:</span> {badges}</div>'
|
||||
|
||||
radix_badge = f'<span class="radix-badge">{radix}</span>' if radix else ""
|
||||
deps_text = ", ".join(deps[:3]) if deps else ""
|
||||
|
||||
component_cards.append(f'''
|
||||
<div class="component-card">
|
||||
<div class="card-header">
|
||||
<h3>{name}</h3>
|
||||
{radix_badge}
|
||||
</div>
|
||||
<p class="description">{desc}</p>
|
||||
<div class="variants">{variant_badges}</div>
|
||||
{f'<div class="deps">deps: {deps_text}</div>' if deps_text else ''}
|
||||
</div>''')
|
||||
|
||||
story = f'''/**
|
||||
* {cat_name}
|
||||
* {cat_desc}
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Components/{cat_name}',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: '{cat_desc}'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.component-grid {{ display: grid; grid-template-columns: repeat(auto-fill, minmax(280px, 1fr)); gap: 1.5rem; }}
|
||||
.component-card {{
|
||||
border: 1px solid #e5e7eb;
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
background: white;
|
||||
}}
|
||||
.card-header {{
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
margin-bottom: 0.5rem;
|
||||
}}
|
||||
.card-header h3 {{ margin: 0; font-size: 1rem; }}
|
||||
.radix-badge {{
|
||||
font-size: 0.6rem;
|
||||
background: #dbeafe;
|
||||
color: #1d4ed8;
|
||||
padding: 0.125rem 0.375rem;
|
||||
border-radius: 4px;
|
||||
font-family: monospace;
|
||||
}}
|
||||
.description {{ font-size: 0.8rem; color: #6b7280; margin: 0 0 0.75rem; }}
|
||||
.variants {{ margin-bottom: 0.5rem; }}
|
||||
.variant-row {{ font-size: 0.7rem; margin-bottom: 0.25rem; }}
|
||||
.var-name {{ font-weight: 500; margin-right: 0.25rem; }}
|
||||
.badge {{
|
||||
display: inline-block;
|
||||
background: #f3f4f6;
|
||||
padding: 0.125rem 0.375rem;
|
||||
border-radius: 4px;
|
||||
margin-right: 0.25rem;
|
||||
font-family: monospace;
|
||||
font-size: 0.65rem;
|
||||
}}
|
||||
.deps {{ font-size: 0.65rem; color: #9ca3af; font-family: monospace; }}
|
||||
`;
|
||||
|
||||
export const Overview = {{
|
||||
name: 'Component Catalog',
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="component-grid">
|
||||
{''.join(component_cards)}
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
# Create safe filename
|
||||
filename = f"Components{cat_name.replace(' ', '')}.stories.js"
|
||||
with open(output_dir / filename, "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] {filename} ({len(cat_components)} components)")
|
||||
|
||||
|
||||
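The registry walked here pairs a flat component map with category groupings; a minimal sketch of the shadcn-registry.json shape these accessors expect (entries illustrative):

# Illustrative only:
# {
#   "components": {
#     "button": {"name": "Button", "description": "...",
#                "variants": {"variant": ["default", "outline"], "size": ["sm", "lg"]},
#                "radixPrimitive": "", "dependencies": []}
#   },
#   "categories": {
#     "form": {"name": "Form", "description": "...", "components": ["button"]}
#   }
# }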
def generate_overview_story(output_dir: Path):
|
||||
"""Generate overview/introduction story"""
|
||||
story = f'''/**
|
||||
* Design System Overview
|
||||
* @generated {datetime.now().isoformat()}
|
||||
*/
|
||||
export default {{
|
||||
title: 'Overview',
|
||||
tags: ['autodocs'],
|
||||
parameters: {{
|
||||
docs: {{
|
||||
description: {{
|
||||
component: 'DSS Design System - Token documentation and component library'
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}};
|
||||
|
||||
const styles = `
|
||||
.overview {{ max-width: 800px; font-family: system-ui, sans-serif; }}
|
||||
.overview h1 {{ font-size: 2rem; margin-bottom: 0.5rem; }}
|
||||
.overview .subtitle {{ color: #6b7280; margin-bottom: 2rem; }}
|
||||
.overview h2 {{ font-size: 1.25rem; margin-top: 2rem; border-bottom: 1px solid #e5e7eb; padding-bottom: 0.5rem; }}
|
||||
.overview ul {{ padding-left: 1.5rem; }}
|
||||
.overview li {{ margin: 0.5rem 0; }}
|
||||
.overview code {{ background: #f3f4f6; padding: 0.125rem 0.375rem; border-radius: 4px; font-size: 0.875rem; }}
|
||||
.layer {{ display: flex; align-items: center; gap: 0.5rem; padding: 0.75rem; background: #f9fafb; border-radius: 8px; margin: 0.5rem 0; }}
|
||||
.layer-num {{ width: 24px; height: 24px; background: #18181b; color: white; border-radius: 50%; display: flex; align-items: center; justify-content: center; font-size: 0.75rem; font-weight: 600; }}
|
||||
.stats {{ display: flex; gap: 1rem; margin: 1rem 0; }}
|
||||
.stat {{ background: #f9fafb; padding: 1rem; border-radius: 8px; text-align: center; }}
|
||||
.stat-value {{ font-size: 2rem; font-weight: 700; color: #18181b; }}
|
||||
.stat-label {{ font-size: 0.75rem; color: #6b7280; }}
|
||||
`;
|
||||
|
||||
export const Introduction = {{
|
||||
render: () => `
|
||||
<style>${{styles}}</style>
|
||||
<div class="overview">
|
||||
<h1>DSS Design System</h1>
|
||||
<p class="subtitle">Token-driven design system with 3-layer architecture</p>
|
||||
|
||||
<div class="stats">
|
||||
<div class="stat">
|
||||
<div class="stat-value">22</div>
|
||||
<div class="stat-label">Color Scales</div>
|
||||
</div>
|
||||
<div class="stat">
|
||||
<div class="stat-value">59</div>
|
||||
<div class="stat-label">Components</div>
|
||||
</div>
|
||||
<div class="stat">
|
||||
<div class="stat-value">6</div>
|
||||
<div class="stat-label">Categories</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>Architecture</h2>
|
||||
<div class="layer">
|
||||
<span class="layer-num">1</span>
|
||||
<strong>Core Primitives</strong> - Raw Tailwind values (colors, spacing, fonts)
|
||||
</div>
|
||||
<div class="layer">
|
||||
<span class="layer-num">2</span>
|
||||
<strong>Skin</strong> - Semantic mapping (primary, secondary, etc.)
|
||||
</div>
|
||||
<div class="layer">
|
||||
<span class="layer-num">3</span>
|
||||
<strong>Theme</strong> - Brand overrides
|
||||
</div>
|
||||
|
||||
<h2>Navigation</h2>
|
||||
<ul>
|
||||
<li><strong>Foundation</strong> - Core primitives (colors, spacing, typography, radius, shadows)</li>
|
||||
<li><strong>Tokens</strong> - Semantic design tokens from skin</li>
|
||||
<li><strong>Components</strong> - 59 shadcn/ui components organized by category</li>
|
||||
</ul>
|
||||
|
||||
<h2>Component Categories</h2>
|
||||
<ul>
|
||||
<li><strong>Form</strong> - Button, Input, Select, Checkbox, etc.</li>
|
||||
<li><strong>Data Display</strong> - Table, Badge, Avatar, Chart, etc.</li>
|
||||
<li><strong>Feedback</strong> - Alert, Toast, Progress, Spinner, etc.</li>
|
||||
<li><strong>Navigation</strong> - Tabs, Breadcrumb, Sidebar, etc.</li>
|
||||
<li><strong>Overlay</strong> - Dialog, Sheet, Dropdown, Tooltip, etc.</li>
|
||||
<li><strong>Layout</strong> - Card, Separator, Scroll Area, etc.</li>
|
||||
</ul>
|
||||
|
||||
<h2>Usage</h2>
|
||||
<p>Import tokens CSS in your project:</p>
|
||||
<pre><code>import '.dss/data/_system/themes/tokens.css';</code></pre>
|
||||
<p>Use CSS variables in your styles:</p>
|
||||
<pre><code>color: var(--color-primary);
|
||||
background: var(--color-background);</code></pre>
|
||||
</div>
|
||||
`
|
||||
}};
|
||||
'''
|
||||
|
||||
with open(output_dir / "Overview.stories.js", "w") as f:
|
||||
f.write(story)
|
||||
print(f" [OK] Overview.stories.js")
|
||||
|
||||
|
||||
def main():
    parser = argparse.ArgumentParser(description="Generate Storybook stories from DSS tokens")
    parser.add_argument("--output", default="admin-ui/src/stories",
                        help="Output directory for stories")
    parser.add_argument("--skin", default="shadcn", help="Skin to use")
    args = parser.parse_args()

    print("=" * 60)
    print("DSS STORYBOOK GENERATOR")
    print("=" * 60)
    print("")

    # Determine output directory
    output_dir = Path(args.output)
    if not output_dir.is_absolute():
        output_dir = DSS_ROOT / output_dir

    ensure_dir(output_dir)
    print(f"[INFO] Output: {output_dir}")

    # Load token sources
    primitives = load_json(DSS_DATA / "core" / "primitives.json")
    skin = load_json(DSS_DATA / "skins" / args.skin / "tokens.json")
    resolved = load_json(DSS_DATA / "data" / "_system" / "tokens" / "tokens.json")
    registry = load_json(DSS_DATA / "components" / "shadcn-registry.json")

    print(f"[INFO] Core primitives: {len(primitives)} categories")
    print(f"[INFO] Skin: {args.skin}")
    print(f"[INFO] Resolved tokens: {sum(len(v) if isinstance(v, dict) else 0 for v in resolved.values())} tokens")
    print(f"[INFO] Component registry: {len(registry.get('components', {}))} components")
    print("")

    print("[STEP] Generating Foundation stories...")
    generate_overview_story(output_dir)
    generate_color_primitives_story(primitives, output_dir)
    generate_spacing_story(primitives, output_dir)
    generate_typography_story(primitives, resolved, output_dir)
    generate_radius_story(primitives, output_dir)
    generate_shadows_story(primitives, resolved, output_dir)

    print("")
    print("[STEP] Generating Token stories...")
    generate_semantic_colors_story(resolved, output_dir)

    print("")
    print("[STEP] Generating Component stories...")
    generate_component_stories(registry, output_dir)

    print("")
    story_count = len(list(output_dir.glob("*.stories.js")))
    print(f"[OK] Generated {story_count} story files")
    print(f"[OK] Run: cd admin-ui && npm run storybook")


if __name__ == "__main__":
    main()
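Run from the repository root, the generator writes straight into the Storybook source tree; the argparse defaults above make both flags optional. An illustrative invocation (the script path is an assumption, since the file header falls outside this excerpt):

# Illustrative only:
#   python3 scripts/generate-storybook.py --output admin-ui/src/stories --skin shadcn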
44
scripts/regenerate-core-hashes.sh
Executable file
@@ -0,0 +1,44 @@
#!/bin/bash
# Regenerate DSS Core Structure Hashes
# Called after successful Figma sync to update hash manifest
#
# Usage: scripts/regenerate-core-hashes.sh
# This script should ONLY be called by the Figma sync pipeline

set -e

HASH_FILE=".dss/core-hashes.sha256"

echo "Regenerating DSS core hashes..."

{
  echo "# DSS Core Structure Hashes"
  echo "# Generated: $(date -Iseconds)"
  echo "# Source: Figma sync pipeline"
  echo "# DO NOT EDIT MANUALLY"
  echo ""
  echo "# Format: SHA256 filepath"

  # Hash schema files
  for f in .dss/schema/*.json; do
    [ -f "$f" ] && sha256sum "$f"
  done

  # Hash skin files
  for f in dss-claude-plugin/core/skins/*.json; do
    [ -f "$f" ] && sha256sum "$f"
  done

  # Hash core tokens
  [ -f "dss/core_tokens/tokens.json" ] && sha256sum "dss/core_tokens/tokens.json"

  # Hash _system output if exists
  if [ -d ".dss/data/_system" ]; then
    find .dss/data/_system -type f \( -name "*.json" -o -name "*.css" -o -name "*.scss" \) 2>/dev/null | while read -r f; do
      sha256sum "$f"
    done
  fi
} > "$HASH_FILE"

echo "Hash manifest updated: $HASH_FILE"
echo "Files hashed: $(grep -c "^[a-f0-9]" "$HASH_FILE")"
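The manifest uses plain sha256sum output ("<hash>  <path>"), so it can be re-checked locally without the sync pipeline. A minimal verification sketch, illustrative only and not part of this commit:

# Illustrative only:
import hashlib
from pathlib import Path

def verify_manifest(manifest: str = ".dss/core-hashes.sha256") -> bool:
    ok = True
    for line in Path(manifest).read_text().splitlines():
        if not line or line.startswith("#"):
            continue  # skip header comments and blank lines
        expected, path = line.split(maxsplit=1)
        actual = hashlib.sha256(Path(path).read_bytes()).hexdigest()
        if actual != expected:
            print(f"[FAIL] {path}")
            ok = False
    return ok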
272
scripts/resolve-tokens.py
Executable file
@@ -0,0 +1,272 @@
#!/usr/bin/env python3
"""
DSS Token Resolver - 3-Layer Cascade
Merges tokens from Core → Skin → Theme into a single style-dictionary input.

Usage: python3 scripts/resolve-tokens.py [--skin SKIN] [--theme THEME] [--output PATH]

Default: --skin shadcn --theme default --output .dss/data/_system/tokens/tokens.json
"""

import sys
import os
import json
import re
import argparse
from pathlib import Path
from datetime import datetime
from typing import Dict, Any

DSS_ROOT = Path(__file__).parent.parent
DSS_DATA = DSS_ROOT / ".dss"

# Primitive path aliases - map shorthand refs to full primitive paths
# The translation dictionary maps: {color.white} → color.base.white
PRIMITIVE_ALIASES = {
    # Base colors
    "color.white": "color.base.white",
    "color.black": "color.base.black",
    "color.transparent": "color.base.transparent",
}

# Generate aliases for neutral color scales (zinc, slate, gray, etc.)
NEUTRAL_SCALES = ["slate", "gray", "zinc", "neutral", "stone"]
SEMANTIC_SCALES = ["red", "orange", "amber", "yellow", "lime", "green", "emerald",
                   "teal", "cyan", "sky", "blue", "indigo", "violet", "purple",
                   "fuchsia", "pink", "rose"]
SCALE_VALUES = ["50", "100", "200", "300", "400", "500", "600", "700", "800", "900", "950"]

for scale in NEUTRAL_SCALES:
    for val in SCALE_VALUES:
        # color.zinc.50 → color.neutral.zinc.50
        PRIMITIVE_ALIASES[f"color.{scale}.{val}"] = f"color.neutral.{scale}.{val}"

for scale in SEMANTIC_SCALES:
    for val in SCALE_VALUES:
        # color.red.500 → color.semantic.red.500
        PRIMITIVE_ALIASES[f"color.{scale}.{val}"] = f"color.semantic.{scale}.{val}"

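A reference such as {color.zinc.500} is therefore looked up under color.neutral.zinc.500 in the primitives; a short sketch of that lookup (the hex value is a placeholder, not taken from this commit):

# Illustrative only:
# PRIMITIVE_ALIASES["color.zinc.500"] == "color.neutral.zinc.500"
# get_nested(primitives, "color.neutral.zinc.500") might return e.g. "#71717a",
# so resolve_value("{color.zinc.500}") -> "#71717a"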
def load_json(path: Path) -> dict:
    """Load JSON file, return empty dict if not found"""
    if not path.exists():
        return {}
    with open(path) as f:
        return json.load(f)


def deep_merge(base: dict, override: dict) -> dict:
    """Deep merge override into base, returning new dict"""
    result = base.copy()
    for key, value in override.items():
        if key.startswith("_"):
            continue  # Skip metadata keys
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = deep_merge(result[key], value)
        else:
            result[key] = value
    return result

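For example, a theme that only overrides color.primary leaves the rest of the skin's color block intact (values here are placeholders):

# Illustrative only:
# deep_merge({"color": {"primary": {"value": "{color.zinc.900}"},
#                       "background": {"value": "{color.white}"}}},
#            {"color": {"primary": {"value": "#7c3aed"}}})
# -> {"color": {"primary": {"value": "#7c3aed"},
#               "background": {"value": "{color.white}"}}}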
def resolve_references(tokens: dict, primitives: dict) -> dict:
    """
    Resolve token references like {color.zinc.500} using primitives.
    Uses translation dictionary (PRIMITIVE_ALIASES) to map shorthand paths.
    Works recursively through the token structure.
    """
    ref_pattern = re.compile(r'\{([^}]+)\}')
    unresolved = []

    def get_nested(obj: dict, path: str) -> Any:
        """Get nested value from dict using dot notation"""
        parts = path.split(".")
        current = obj
        for part in parts:
            if isinstance(current, dict) and part in current:
                current = current[part]
            else:
                return None
        # If we found a token object with 'value', return the value
        if isinstance(current, dict) and "value" in current:
            return current["value"]
        return current

    def resolve_value(value: Any) -> Any:
        """Resolve references in a value"""
        if not isinstance(value, str):
            return value

        def replacer(match):
            ref_path = match.group(1)

            # First: try direct lookup in primitives
            resolved = get_nested(primitives, ref_path)
            if resolved is not None:
                return str(resolved)

            # Second: try using translation dictionary (alias mapping)
            aliased_path = PRIMITIVE_ALIASES.get(ref_path)
            if aliased_path:
                resolved = get_nested(primitives, aliased_path)
                if resolved is not None:
                    return str(resolved)

            # Third: try in tokens themselves (for self-references)
            resolved = get_nested(tokens, ref_path)
            if resolved is not None:
                return str(resolved)

            # Track unresolved for debugging
            if ref_path not in unresolved:
                unresolved.append(ref_path)

            # Return original if not found
            return match.group(0)

        return ref_pattern.sub(replacer, value)

    def resolve_obj(obj: Any) -> Any:
        """Recursively resolve references in object"""
        if isinstance(obj, dict):
            result = {}
            for key, value in obj.items():
                if key.startswith("_"):
                    continue  # Skip metadata
                if key == "value":
                    result[key] = resolve_value(value)
                else:
                    result[key] = resolve_obj(value)
            return result
        elif isinstance(obj, list):
            return [resolve_obj(item) for item in obj]
        else:
            return obj

    resolved = resolve_obj(tokens)

    # Report unresolved references
    if unresolved:
        print(f"[WARN] {len(unresolved)} unresolved token references:")
        for ref in unresolved[:10]:  # Show first 10
            alias = PRIMITIVE_ALIASES.get(ref, "no alias")
            print(f" - {{{ref}}} (alias: {alias})")
        if len(unresolved) > 10:
            print(f" ... and {len(unresolved) - 10} more")

    return resolved

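The three-step lookup also lets one semantic token reference another; a short sketch of that case (token names and values are placeholders):

# Illustrative only:
# tokens = {"color": {"ring":    {"value": "{color.primary}"},
#                     "primary": {"value": "{color.zinc.900}"}}}
# For "{color.primary}": steps 1-2 miss (not a primitive, no alias);
# step 3 finds tokens["color"]["primary"] and returns its raw value,
# so ring resolves to "{color.zinc.900}" in this pass (self-references
# are substituted one level at a time, not chased transitively).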
def resolve_tokens(
    skin_name: str = "shadcn",
    theme_name: str = "default"
) -> Dict[str, Any]:
    """
    Resolve tokens through the 3-layer cascade:
    1. Core primitives (base values)
    2. Skin tokens (semantic mappings)
    3. Theme overrides (brand customization)
    """
    # Load each layer
    primitives = load_json(DSS_DATA / "core" / "primitives.json")
    skin = load_json(DSS_DATA / "skins" / skin_name / "tokens.json")
    theme = load_json(DSS_DATA / "themes" / f"{theme_name}.json")

    # Report what we're loading
    print(f"[INFO] Resolving tokens:")
    print(f" Core: {len(primitives)} categories")
    print(f" Skin: {skin_name}")
    print(f" Theme: {theme_name}")

    # Start with skin as base (it references primitives)
    merged = {}

    # Copy skin tokens
    for category, tokens in skin.items():
        if category.startswith("_"):
            continue
        merged[category] = tokens.copy() if isinstance(tokens, dict) else tokens

    # Override with theme tokens
    for category, tokens in theme.items():
        if category.startswith("_"):
            continue
        if category in merged:
            merged[category] = deep_merge(merged[category], tokens)
        else:
            merged[category] = tokens

    # Resolve all references using primitives
    resolved = resolve_references(merged, primitives)

    # Clean up internal metadata
    def clean_tokens(obj):
        if isinstance(obj, dict):
            return {
                k: clean_tokens(v)
                for k, v in obj.items()
                if not k.startswith("_")
            }
        return obj

    resolved = clean_tokens(resolved)

    return resolved

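The resolved output is plain nested dicts of {"value": ...} leaves with all "_" metadata keys stripped, ready to hand to the style-dictionary build. A sketch of programmatic use (the returned values are illustrative):

# Illustrative only:
# tokens = resolve_tokens(skin_name="shadcn", theme_name="default")
# tokens["color"]["primary"]  ->  e.g. {"value": "#18181b", "comment": "..."}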
def main():
    parser = argparse.ArgumentParser(description="Resolve DSS 3-layer token cascade")
    parser.add_argument("--skin", default="shadcn", help="Skin to use (default: shadcn)")
    parser.add_argument("--theme", default="default", help="Theme to use (default: default)")
    parser.add_argument("--output", default=".dss/data/_system/tokens/tokens.json",
                        help="Output path for resolved tokens")
    parser.add_argument("--dry-run", action="store_true", help="Print tokens without saving")
    args = parser.parse_args()

    print("=" * 60)
    print("DSS TOKEN RESOLVER - 3-Layer Cascade")
    print("=" * 60)
    print("")

    # Resolve tokens
    tokens = resolve_tokens(args.skin, args.theme)

    # Count tokens
    total = 0
    for category, items in tokens.items():
        if isinstance(items, dict):
            total += len(items)

    print(f"\n[OK] Resolved {total} tokens")

    if args.dry_run:
        print("\n[DRY RUN] Resolved tokens:")
        print(json.dumps(tokens, indent=2))
    else:
        # Save to output path
        output_path = Path(args.output)
        if not output_path.is_absolute():
            output_path = DSS_ROOT / output_path

        output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_path, "w") as f:
            json.dump(tokens, f, indent=2)
        print(f"[OK] Saved to {output_path}")

        # Also save metadata
        meta_path = output_path.parent / "resolved-meta.json"
        with open(meta_path, "w") as f:
            json.dump({
                "resolved_at": datetime.now().isoformat(),
                "skin": args.skin,
                "theme": args.theme,
                "token_count": total,
                "layers": ["core/primitives", f"skins/{args.skin}", f"themes/{args.theme}"]
            }, f, indent=2)

    print("")
    print("[OK] Token resolution complete!")
    print(" Next: Run style-dictionary build")


if __name__ == "__main__":
    main()
@@ -1,27 +0,0 @@
#!/bin/bash
# Generate .mcp.json with absolute paths for current setup

DSS_ROOT="$(cd "$(dirname "$0")/.." && pwd)"

cat > "$DSS_ROOT/.mcp.json" << EOF
{
  "\$schema": "https://raw.githubusercontent.com/anthropics/claude-code/main/schemas/mcp-servers.schema.json",
  "mcpServers": {
    "dss": {
      "command": "$DSS_ROOT/venv/bin/python3",
      "args": ["$DSS_ROOT/dss-claude-plugin/servers/dss-mcp-server.py"],
      "env": {
        "PYTHONPATH": "$DSS_ROOT:$DSS_ROOT/dss-claude-plugin",
        "DSS_HOME": "$DSS_ROOT/.dss",
        "DSS_DATABASE": "$DSS_ROOT/.dss/dss.db",
        "DSS_CACHE": "$DSS_ROOT/.dss/cache",
        "DSS_BASE_PATH": "$DSS_ROOT"
      },
      "description": "Design System Server MCP - local development"
    }
  }
}
EOF

echo "Generated .mcp.json for: $DSS_ROOT"
echo "Restart Claude Code to load the DSS MCP server."
278
scripts/validate-theme.py
Executable file
@@ -0,0 +1,278 @@
#!/usr/bin/env python3
"""
DSS Theme Validation Script
Validates that themes only override tokens defined in the skin contract.

Usage: python3 scripts/validate-theme.py [--theme THEME_NAME] [--skin SKIN_NAME]

Defaults to validating all themes against the skin contract.
"""

import sys
import os
import json
from pathlib import Path
from typing import Dict, List, Set, Tuple

DSS_ROOT = Path(__file__).parent.parent
DSS_DATA = DSS_ROOT / ".dss"


def load_json(path: Path) -> dict:
    """Load JSON file"""
    if not path.exists():
        return {}
    with open(path) as f:
        return json.load(f)

def get_contract_tokens(contract: dict) -> Dict[str, Set[str]]:
    """Extract required token names from contract by category"""
    required = contract.get("required_tokens", {})
    result = {}
    for category, data in required.items():
        if isinstance(data, dict) and "required" in data:
            result[category] = set(data["required"])
    return result


def get_theme_tokens(theme: dict) -> Dict[str, Set[str]]:
    """Extract token names from theme by category"""
    result = {}
    for key, value in theme.items():
        if key.startswith("_"):
            continue
        if isinstance(value, dict):
            # Check if it's a token (has 'value' key) or a category
            if "value" in value:
                # Single token at root level
                if "root" not in result:
                    result["root"] = set()
                result["root"].add(key)
            else:
                # Category with nested tokens
                tokens = set()
                for token_name, token_data in value.items():
                    if isinstance(token_data, dict):
                        tokens.add(token_name)
                if tokens:
                    result[key] = tokens
    return result


def get_skin_tokens(skin: dict) -> Dict[str, Set[str]]:
    """Extract token names from skin by category"""
    return get_theme_tokens(skin)  # Same structure

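get_contract_tokens expects the contract's required_tokens block to nest a "required" list per category; a minimal sketch of that shape (category and token names illustrative, not taken from the actual skin-contract.json):

# Illustrative only:
# {
#   "required_tokens": {
#     "color":  {"required": ["background", "foreground", "primary"]},
#     "radius": {"required": ["sm", "md", "lg"]}
#   }
# }
# get_contract_tokens(contract) -> {"color": {"background", "foreground", "primary"},
#                                   "radius": {"sm", "md", "lg"}}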
def validate_theme(
    theme_path: Path,
    contract_path: Path,
    skin_path: Path = None
) -> Tuple[bool, List[str], List[str]]:
    """
    Validate a theme against the skin contract.

    Returns: (is_valid, errors, warnings)
    """
    errors = []
    warnings = []

    contract = load_json(contract_path)
    theme = load_json(theme_path)

    if not contract:
        errors.append(f"Contract not found: {contract_path}")
        return False, errors, warnings

    if not theme:
        errors.append(f"Theme not found: {theme_path}")
        return False, errors, warnings

    contract_tokens = get_contract_tokens(contract)
    theme_tokens = get_theme_tokens(theme)

    # Load skin if provided for additional context
    skin_tokens = {}
    if skin_path and skin_path.exists():
        skin = load_json(skin_path)
        skin_tokens = get_skin_tokens(skin)

    # Check each category in the theme
    for category, tokens in theme_tokens.items():
        # Handle dark mode variants
        base_category = category.replace("-dark", "")

        if base_category not in contract_tokens:
            # Category not in contract - check if skin provides it
            if base_category in skin_tokens:
                warnings.append(
                    f"Category '{category}' not in contract but exists in skin. "
                    f"Consider adding to contract for stability."
                )
            else:
                errors.append(
                    f"Category '{category}' is not defined in the skin contract. "
                    f"Theme should only override contract-defined tokens."
                )
            continue

        # Check each token in the category
        contract_category = contract_tokens[base_category]
        for token in tokens:
            if token not in contract_category:
                # Token not in contract
                if skin_tokens.get(base_category) and token in skin_tokens[base_category]:
                    warnings.append(
                        f"Token '{category}.{token}' exists in skin but not in contract. "
                        f"May break on skin updates."
                    )
                else:
                    errors.append(
                        f"Token '{category}.{token}' is not in the skin contract. "
                        f"Valid tokens: {sorted(contract_category)}"
                    )

    is_valid = len(errors) == 0
    return is_valid, errors, warnings

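A quick way to exercise the validator from a REPL or another script, using the same paths that main() below assembles:

# Illustrative only:
# ok, errors, warnings = validate_theme(
#     DSS_DATA / "themes" / "default.json",
#     DSS_DATA / "schema" / "skin-contract.json",
#     DSS_DATA / "skins" / "shadcn" / "tokens.json",
# )
# if not ok:
#     print("\n".join(errors))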
def validate_skin(
    skin_path: Path,
    contract_path: Path
) -> Tuple[bool, List[str], List[str]]:
    """
    Validate that a skin provides all required contract tokens.

    Returns: (is_valid, errors, warnings)
    """
    errors = []
    warnings = []

    contract = load_json(contract_path)
    skin = load_json(skin_path)

    if not contract:
        errors.append(f"Contract not found: {contract_path}")
        return False, errors, warnings

    if not skin:
        errors.append(f"Skin not found: {skin_path}")
        return False, errors, warnings

    contract_tokens = get_contract_tokens(contract)
    skin_tokens = get_skin_tokens(skin)

    # Check all required categories exist
    for category, required in contract_tokens.items():
        if category not in skin_tokens:
            errors.append(
                f"Skin missing required category: '{category}'. "
                f"Required tokens: {sorted(required)}"
            )
            continue

        # Check all required tokens exist
        skin_category = skin_tokens[category]
        missing = required - skin_category
        if missing:
            errors.append(
                f"Skin missing required tokens in '{category}': {sorted(missing)}"
            )

        # Note extra tokens (not an error, just info)
        extra = skin_category - required
        if extra:
            warnings.append(
                f"Skin has extra tokens in '{category}' (OK): {sorted(extra)}"
            )

    is_valid = len(errors) == 0
    return is_valid, errors, warnings

def main():
    import argparse

    parser = argparse.ArgumentParser(description="Validate DSS themes and skins")
    parser.add_argument("--theme", help="Theme name to validate (default: all)")
    parser.add_argument("--skin", help="Skin name to validate (default: shadcn)")
    parser.add_argument("--validate-skin", action="store_true", help="Validate skin against contract")
    parser.add_argument("--quiet", "-q", action="store_true", help="Only show errors")
    args = parser.parse_args()

    contract_path = DSS_DATA / "schema" / "skin-contract.json"

    print("=" * 60)
    print("DSS THEME/SKIN VALIDATION")
    print("=" * 60)

    all_valid = True

    # Validate skin if requested
    if args.validate_skin or args.skin:
        skin_name = args.skin or "shadcn"
        skin_path = DSS_DATA / "skins" / skin_name / "tokens.json"

        print(f"\n[SKIN] Validating: {skin_name}")
        print("-" * 40)

        is_valid, errors, warnings = validate_skin(skin_path, contract_path)

        if errors:
            all_valid = False
            for err in errors:
                print(f" [ERROR] {err}")

        if warnings and not args.quiet:
            for warn in warnings:
                print(f" [WARN] {warn}")

        if is_valid:
            print(f" [OK] Skin '{skin_name}' provides all contract tokens")

    # Validate themes
    themes_dir = DSS_DATA / "themes"
    skin_path = DSS_DATA / "skins" / (args.skin or "shadcn") / "tokens.json"

    if args.theme:
        themes = [args.theme]
    else:
        themes = [
            p.stem for p in themes_dir.glob("*.json")
            if not p.stem.startswith("_")
        ] if themes_dir.exists() else []

    for theme_name in themes:
        theme_path = themes_dir / f"{theme_name}.json"

        print(f"\n[THEME] Validating: {theme_name}")
        print("-" * 40)

        is_valid, errors, warnings = validate_theme(
            theme_path, contract_path, skin_path
        )

        if errors:
            all_valid = False
            for err in errors:
                print(f" [ERROR] {err}")

        if warnings and not args.quiet:
            for warn in warnings:
                print(f" [WARN] {warn}")

        if is_valid:
            print(f" [OK] Theme '{theme_name}' is valid")

    print("\n" + "=" * 60)
    if all_valid:
        print("[OK] All validations passed!")
        sys.exit(0)
    else:
        print("[FAIL] Validation errors found")
        sys.exit(1)


if __name__ == "__main__":
    main()