feat: Add DSS infrastructure, remove legacy admin-ui code

Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled

- Remove legacy admin-ui/js/ vanilla JS components
- Add .dss/ directory with core tokens, skins, themes
- Add Storybook configuration and generated stories
- Add DSS management scripts (dss-services, dss-init, dss-setup, dss-reset)
- Add MCP command definitions for DSS plugin
- Add Figma sync architecture and scripts
- Update pre-commit hooks with documentation validation
- Fix JSON trailing commas in skin files

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

.githooks/pre-commit
@@ -4,12 +4,22 @@
 echo "🛡️ DSS Immutability Check..."
 
-# List of protected files (core principles only)
+# List of protected files (core principles and config)
 PROTECTED_FILES=(
     ".knowledge/dss-principles.json"
     ".knowledge/dss-architecture.json"
     ".clauderc"
     "PROJECT_CONFIG.md"
+    ".dss/config/figma.json"
 )
 
+# DSS Core Structure - ONLY modifiable by Figma sync
+# These paths require ALLOW_FIGMA_SYNC=true to modify
+DSS_CORE_PATHS=(
+    ".dss/data/_system/"
+    ".dss/schema/"
+    "dss-claude-plugin/core/skins/"
+    "dss/core_tokens/"
+)
+
 # Check if any protected files are being modified
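For illustration (not part of the diff): the hook logic below accepts two bypass variables, so a deliberate edit to a protected file and a Figma-sync update to the DSS core paths would be committed as

    ALLOW_CORE_CHANGES=true git commit -m "chore: update PROJECT_CONFIG.md"
    ALLOW_FIGMA_SYNC=true git commit -m "chore: sync core tokens from Figma"

The commit messages here are made up; only the variable names come from the hook.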
@@ -43,6 +53,76 @@ if [ ${#MODIFIED_PROTECTED[@]} -gt 0 ]; then
     echo "✅ ALLOW_CORE_CHANGES=true detected. Proceeding with commit."
 fi
 
+# Check DSS Core paths (Figma sync only)
+MODIFIED_DSS_CORE=()
+for path in "${DSS_CORE_PATHS[@]}"; do
+    if git diff --cached --name-only | grep -q "^${path}"; then
+        while IFS= read -r file; do
+            MODIFIED_DSS_CORE+=("$file")
+        done < <(git diff --cached --name-only | grep "^${path}")
+    fi
+done
+
+if [ ${#MODIFIED_DSS_CORE[@]} -gt 0 ]; then
+    echo ""
+    echo "🔒 DSS CORE STRUCTURE PROTECTION"
+    echo "   The following paths can ONLY be modified via Figma sync:"
+    for file in "${MODIFIED_DSS_CORE[@]}"; do
+        echo "   - $file"
+    done
+    echo ""
+    echo "   Source of truth: Figma → DSS Pipeline → These files"
+    echo ""
+    echo "   To proceed (Figma sync only): ALLOW_FIGMA_SYNC=true"
+    echo ""
+
+    if [ "$ALLOW_FIGMA_SYNC" != "true" ]; then
+        echo "❌ Commit blocked. DSS core structure is Figma-sync only."
+        exit 1
+    fi
+
+    echo "✅ ALLOW_FIGMA_SYNC=true detected. Proceeding with Figma sync commit."
+
+    # Verify that the hash manifest is also being updated
+    if ! git diff --cached --name-only | grep -q "^\.dss/core-hashes\.sha256$"; then
+        echo ""
+        echo "⚠️ WARNING: core-hashes.sha256 not updated!"
+        echo "   Figma sync should regenerate: .dss/core-hashes.sha256"
+        echo "   Run: scripts/regenerate-core-hashes.sh"
+    fi
+fi
+
+# Hash verification for DSS core files
+HASH_FILE=".dss/core-hashes.sha256"
+if [ -f "$HASH_FILE" ] && [ ${#MODIFIED_DSS_CORE[@]} -gt 0 ]; then
+    echo ""
+    echo "🔐 Verifying DSS core file hashes..."
+    HASH_FAILURES=()
+
+    for file in "${MODIFIED_DSS_CORE[@]}"; do
+        if grep -q " ${file}$" "$HASH_FILE" 2>/dev/null; then
+            EXPECTED=$(grep " ${file}$" "$HASH_FILE" | cut -d' ' -f1)
+            # Hash the staged version of the file, not the working tree
+            ACTUAL=$(git show ":${file}" 2>/dev/null | sha256sum | cut -d' ' -f1)
+            if [ "$EXPECTED" != "$ACTUAL" ] && [ "$ALLOW_FIGMA_SYNC" != "true" ]; then
+                HASH_FAILURES+=("$file")
+            fi
+        fi
+    done
+
+    if [ ${#HASH_FAILURES[@]} -gt 0 ]; then
+        echo "❌ Hash verification failed for:"
+        for file in "${HASH_FAILURES[@]}"; do
+            echo "   - $file"
+        done
+        echo ""
+        echo "   These files have been modified outside the Figma sync pipeline."
+        echo "   Revert the changes or run Figma sync to update them properly."
+        exit 1
+    fi
+    echo "✅ Hash verification passed."
+fi
+
 echo "✅ Immutability check passed."
 echo ""
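The manifest format has to match what the hook parses above with grep and cut, i.e. sha256sum's "hash, two spaces, path" lines. scripts/regenerate-core-hashes.sh itself is not shown in this commit; a minimal sketch, assuming it simply rehashes the four DSS core paths:

    find .dss/data/_system .dss/schema dss-claude-plugin/core/skins dss/core_tokens \
        -type f -print0 | xargs -0 sha256sum > .dss/core-hashes.sha256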
@@ -59,6 +139,24 @@ else
     echo "⚠️ Warning: scripts/verify-quality.sh not found, skipping quality checks"
 fi
 
+# Run the Python validation hook (documentation, schemas, terminology)
+echo ""
+echo "📚 Running Documentation & Schema Checks..."
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+if [ -f "${SCRIPT_DIR}/pre-commit-python" ]; then
+    if ! python3 "${SCRIPT_DIR}/pre-commit-python"; then
+        echo ""
+        echo "❌ Validation checks failed. Please fix the errors above."
+        echo "To bypass (not recommended): git commit --no-verify"
+        exit 1
+    fi
+elif [ -f ".git/hooks/pre-commit" ] && file ".git/hooks/pre-commit" | grep -q Python; then
+    if ! python3 ".git/hooks/pre-commit"; then
+        echo "❌ Python validation checks failed."
+        exit 1
+    fi
+fi
+
 echo ""
 echo "✅ All pre-commit checks passed!"
 exit 0

.githooks/pre-commit-python (new executable file, 364 lines)
@@ -0,0 +1,364 @@
#!/usr/bin/env python3
"""
DSS Pre-Commit Hook
Enforces DSS architectural guardrails before allowing commits.

Validators:
1. Immutable file protection
2. Temp folder discipline
3. Schema validation
4. Terminology checks
5. Audit logging
"""

import sys
import os
import json
import subprocess
from pathlib import Path
from datetime import datetime
import re

# Configuration
DSS_ROOT = Path("/home/overbits/dss")
IMMUTABLE_FILES = [
    ".dss/schema/*.schema.json",
    ".dss-boundaries.yaml",
    "API_SPECIFICATION_IMMUTABLE.md",
    "dss-claude-plugin/.mcp.json",
    "dss-mvp1/dss/validators/schema.py",
]

AUDIT_LOG = DSS_ROOT / ".dss/logs/git-hooks.jsonl"
TEMP_DIR = DSS_ROOT / ".dss/temp"

class Colors:
    RED = '\033[0;31m'
    GREEN = '\033[0;32m'
    YELLOW = '\033[1;33m'
    NC = '\033[0m'  # No Color

def log_audit(validator, status, details):
    """Log hook events to the audit trail."""
    AUDIT_LOG.parent.mkdir(parents=True, exist_ok=True)

    log_entry = {
        "timestamp": datetime.utcnow().isoformat(),
        "hook": "pre-commit",
        "validator": validator,
        "status": status,
        "details": details,
    }

    with open(AUDIT_LOG, "a") as f:
        f.write(json.dumps(log_entry) + "\n")
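
# Illustrative only, not part of this commit: given the log_entry structure
# above, one appended line of .dss/logs/git-hooks.jsonl would look like
#   {"timestamp": "2025-01-01T12:00:00", "hook": "pre-commit",
#    "validator": "schema_validation", "status": "passed",
#    "details": {"files_checked": 12}}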

def get_staged_files():
    """Get the list of staged files."""
    result = subprocess.run(
        ["git", "diff", "--cached", "--name-only", "--diff-filter=ACM"],
        capture_output=True,
        text=True,
        cwd=DSS_ROOT
    )
    return [Path(f) for f in result.stdout.strip().split("\n") if f]

def check_immutable_files(staged_files):
    """Validate that immutable files are not modified."""
    from fnmatch import fnmatch

    violations = []

    for file_path in staged_files:
        for pattern in IMMUTABLE_FILES:
            if fnmatch(str(file_path), pattern):
                # Only block if the file exists in the last commit (modification, not addition)
                result = subprocess.run(
                    ["git", "ls-tree", "--name-only", "HEAD", str(file_path)],
                    capture_output=True,
                    text=True,
                    cwd=DSS_ROOT
                )
                if result.stdout.strip():  # File exists in HEAD
                    violations.append(str(file_path))

    if violations:
        # Check for a bypass via environment variable or commit message
        bypass = os.environ.get("DSS_IMMUTABLE_BYPASS") == "1"

        if bypass:
            log_audit("immutable_files", "bypass", {
                "files": violations,
                "method": "environment_variable"
            })
        else:
            # Fall back to the commit message (note: at pre-commit time,
            # COMMIT_EDITMSG may still hold the previous commit's message)
            commit_msg_file = DSS_ROOT / ".git/COMMIT_EDITMSG"
            if commit_msg_file.exists():
                commit_msg = commit_msg_file.read_text()
                if "[IMMUTABLE-UPDATE]" in commit_msg:
                    bypass = True
                    log_audit("immutable_files", "bypass", {
                        "files": violations,
                        "commit_message": commit_msg.split("\n")[0],
                        "method": "commit_message"
                    })

        if not bypass:
            print(f"{Colors.RED}✗ IMMUTABLE FILE VIOLATION{Colors.NC}")
            print("\nThe following protected files cannot be modified:")
            for v in violations:
                print(f"  - {v}")
            print("\nTo update immutable files:")
            print("  1. Use commit message: [IMMUTABLE-UPDATE] Reason for change")
            print("  2. Include justification in the commit body")
            print("\nProtected files:")
            for pattern in IMMUTABLE_FILES:
                print(f"  - {pattern}")

            log_audit("immutable_files", "rejected", {"files": violations})
            return False

    log_audit("immutable_files", "passed", {"files_checked": len(staged_files)})
    return True

def check_temp_folder(staged_files):
    """Validate that temp files live only in .dss/temp/."""
    violations = []

    # Patterns that indicate temp files
    temp_patterns = [
        r".*\.tmp$",
        r".*\.temp$",
        r".*~$",
        r".*\.swp$",
        r".*\.swo$",
        r".*\.backup$",
        r".*\.bak$",
        r"^temp/",
        r"^tmp/",
        r"^scratch/",
    ]

    for file_path in staged_files:
        file_str = str(file_path)

        # Flag files that match temp patterns but are NOT in .dss/temp/
        if any(re.match(pattern, file_str) for pattern in temp_patterns):
            if not file_str.startswith(".dss/temp/"):
                violations.append(file_str)

    if violations:
        print(f"{Colors.RED}✗ TEMP FOLDER VIOLATION{Colors.NC}")
        print("\nTemp files must be created in .dss/temp/ only:")
        for v in violations:
            print(f"  - {v}")
        print("\nAll temporary files MUST go in: .dss/temp/[session-id]/")
        print("Use the get_temp_dir() helper function (see sketch below).")

        log_audit("temp_folder", "rejected", {"files": violations})
        return False

    log_audit("temp_folder", "passed", {"files_checked": len(staged_files)})
    return True
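
# Not part of this commit: check_temp_folder() above points users at a
# get_temp_dir() helper that is not defined in this file. A minimal sketch,
# assuming it creates per-session directories under TEMP_DIR:
def get_temp_dir(session_id="default"):
    """Hypothetical helper: return (and create) .dss/temp/<session-id>/."""
    temp_dir = TEMP_DIR / session_id
    temp_dir.mkdir(parents=True, exist_ok=True)
    return temp_dir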

def check_schemas(staged_files):
    """Validate JSON and YAML syntax."""
    violations = []

    for file_path in staged_files:
        if file_path.suffix in [".json", ".yaml", ".yml"]:
            full_path = DSS_ROOT / file_path

            try:
                if file_path.suffix == ".json":
                    with open(full_path) as f:
                        json.load(f)
                elif file_path.suffix in [".yaml", ".yml"]:
                    try:
                        import yaml
                        with open(full_path) as f:
                            yaml.safe_load(f)
                    except ImportError:
                        # PyYAML not available, skip validation
                        continue
            except Exception as e:
                violations.append({
                    "file": str(file_path),
                    "error": str(e)
                })

    if violations:
        print(f"{Colors.RED}✗ SCHEMA VALIDATION FAILED{Colors.NC}")
        print("\nInvalid JSON/YAML files:")
        for v in violations:
            print(f"  - {v['file']}")
            print(f"    Error: {v['error']}")

        log_audit("schema_validation", "rejected", {"violations": violations})
        return False

    log_audit("schema_validation", "passed", {"files_checked": len(staged_files)})
    return True

def check_documentation(staged_files):
    """Check that new implementations have documentation."""
    warnings = []

    # Track staged Python files that need docstrings
    python_files = [f for f in staged_files if f.suffix == ".py"]

    for file_path in python_files:
        full_path = DSS_ROOT / file_path

        if not full_path.exists():
            continue

        try:
            content = full_path.read_text()

            # Find classes without docstrings
            class_pattern = r'class\s+(\w+)[^:]*:\s*\n\s*(?!""")'
            missing_class_docs = re.findall(class_pattern, content)

            # Find public functions (no leading underscore) without docstrings
            func_pattern = r'def\s+([a-zA-Z]\w*)\([^)]*\):\s*\n\s*(?!""")'
            missing_func_docs = re.findall(func_pattern, content)

            if missing_class_docs:
                warnings.append({
                    "file": str(file_path),
                    "type": "class",
                    "items": missing_class_docs[:5]  # Limit to first 5
                })

            if missing_func_docs:
                warnings.append({
                    "file": str(file_path),
                    "type": "function",
                    "items": missing_func_docs[:5]  # Limit to first 5
                })

        except Exception:
            continue

    # Check whether significant code changes come with knowledge updates
    code_extensions = [".py", ".ts", ".tsx", ".js", ".jsx"]
    code_files_changed = [f for f in staged_files if f.suffix in code_extensions]
    knowledge_files_changed = [f for f in staged_files if ".knowledge" in str(f)]

    # If many code files changed but no knowledge updates, warn
    if len(code_files_changed) > 5 and len(knowledge_files_changed) == 0:
        warnings.append({
            "file": "general",
            "type": "knowledge",
            "items": [f"Changed {len(code_files_changed)} code files but no .knowledge/ updates"]
        })

    if warnings:
        print(f"{Colors.YELLOW}⚠ DOCUMENTATION WARNING{Colors.NC}")
        print("\nMissing documentation found (non-blocking):")
        for w in warnings:
            if w["type"] == "class":
                print(f"  - {w['file']}: Classes without docstrings: {', '.join(w['items'])}")
            elif w["type"] == "function":
                print(f"  - {w['file']}: Functions without docstrings: {', '.join(w['items'])}")
            elif w["type"] == "knowledge":
                print(f"  - {w['items'][0]}")
        print("\n  Tip: Add docstrings to new classes/functions")
        print("  Tip: Update .knowledge/ files when adding major features\n")

        log_audit("documentation", "warning", {"warnings": warnings})
    else:
        log_audit("documentation", "passed", {"files_checked": len(staged_files)})

    # Always return True (warnings only); change to False to make this blocking
    return True


def check_terminology(staged_files):
    """Check for deprecated terminology (warn only)."""
    warnings = []

    deprecated_terms = {
        "swarm": "Design System Server / DSS",
        "organism": "component",
    }

    for file_path in staged_files:
        # Only check text files
        if file_path.suffix in [".py", ".js", ".ts", ".md", ".txt", ".json", ".yaml", ".yml"]:
            full_path = DSS_ROOT / file_path

            try:
                content = full_path.read_text()

                for old_term, new_term in deprecated_terms.items():
                    if re.search(rf"\b{old_term}\b", content, re.IGNORECASE):
                        warnings.append({
                            "file": str(file_path),
                            "term": old_term,
                            "suggested": new_term
                        })
            except Exception:
                # Skip binary or unreadable files
                continue

    if warnings:
        print(f"{Colors.YELLOW}⚠ TERMINOLOGY WARNING{Colors.NC}")
        print("\nDeprecated terminology found (non-blocking):")
        for w in warnings:
            print(f"  - {w['file']}: '{w['term']}' → use '{w['suggested']}'")
        print()

        log_audit("terminology", "warning", {"warnings": warnings})
    else:
        log_audit("terminology", "passed", {"files_checked": len(staged_files)})

    # Always return True (warnings only)
    return True

def main():
    """Run all validators."""
    print(f"{Colors.GREEN}Running DSS pre-commit validations...{Colors.NC}\n")

    staged_files = get_staged_files()

    if not staged_files:
        print("No files to validate.")
        return 0

    validators = [
        ("Immutable File Protection", check_immutable_files),
        ("Temp Folder Discipline", check_temp_folder),
        ("Schema Validation", check_schemas),
        ("Documentation Check", check_documentation),
        ("Terminology Check", check_terminology),
    ]

    results = []
    for name, validator in validators:
        print(f"• {name}...", end=" ")
        result = validator(staged_files)
        results.append(result)

        if result:
            print(f"{Colors.GREEN}✓{Colors.NC}")
        else:
            print(f"{Colors.RED}✗{Colors.NC}")
        print()

    if all(results):
        print(f"\n{Colors.GREEN}✓ All validations passed{Colors.NC}")
        log_audit("pre_commit", "success", {"files": len(staged_files)})
        return 0
    else:
        print(f"\n{Colors.RED}✗ Pre-commit validation failed{Colors.NC}")
        print("Fix the issues above and try again.\n")
        log_audit("pre_commit", "failed", {"files": len(staged_files)})
        return 1

if __name__ == "__main__":
    sys.exit(main())
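
For reference (not shown in this commit): hooks kept in a .githooks/ directory are typically enabled by pointing git at that directory and making the files executable, e.g.

    git config core.hooksPath .githooks
    chmod +x .githooks/pre-commit .githooks/pre-commit-python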