Initial commit: Clean DSS implementation

Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm

Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)

Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability

Migration completed: $(date)
🤖 Clean migration with full functionality preserved
This commit is contained in:
Digital Production Factory
2025-12-09 18:45:48 -03:00
commit 276ed71f31
884 changed files with 373737 additions and 0 deletions

View File

@@ -0,0 +1,121 @@
#!/bin/bash
#
# DSS - Docker Discovery
# Container status, images, networks, volumes
#
set -e

PROJECT_PATH="${1:-.}"

# Bail out early (still emitting machine-readable JSON) when the docker
# CLI is not installed at all.
if ! command -v docker > /dev/null 2>&1; then
cat <<EOF
{
"scan_type": "docker",
"available": false,
"message": "Docker not installed or not in PATH"
}
EOF
exit 0
fi

# The CLI exists but the daemon may be down, or the user may lack
# permission to talk to the socket; report that case separately.
if ! docker info > /dev/null 2>&1; then
cat <<EOF
{
"scan_type": "docker",
"available": true,
"daemon_running": false,
"message": "Docker daemon not running or no permissions"
}
EOF
exit 0
fi
# Get running containers
#
# Reads `docker ps` in a pipe-delimited format and emits one JSON object
# per container.  Objects are joined with commas so the caller can embed
# the result directly inside a JSON array; the previous space-separated
# join produced invalid JSON when more than one container was running.
get_containers() {
  local containers=()
  local id name image status ports
  # Split each line on '|' in a single read instead of five cut forks
  # per line; the ports column is last, so it absorbs any remainder.
  while IFS='|' read -r id name image status ports; do
    if [[ -n "$id" ]]; then
      # Escape double quotes in the ports string for JSON safety.
      ports=${ports//\"/\\\"}
      containers+=("{\"id\":\"$id\",\"name\":\"$name\",\"image\":\"$image\",\"status\":\"$status\",\"ports\":\"$ports\"}")
    fi
  done < <(docker ps --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.Ports}}' 2>/dev/null)
  local IFS=','
  echo "${containers[*]}"
}
# Get images
#
# Emits up to 20 local images as JSON objects, comma-joined so the
# result forms a valid JSON array body for the caller (the previous
# space-separated join produced invalid JSON for multiple images).
get_images() {
  local images=()
  local repo tag size
  while IFS='|' read -r repo tag size; do
    if [[ -n "$repo" ]]; then
      images+=("{\"repository\":\"$repo\",\"tag\":\"$tag\",\"size\":\"$size\"}")
    fi
  done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.Size}}' 2>/dev/null | head -20)
  local IFS=','
  echo "${images[*]}"
}
# Check for docker-compose files
#
# For every compose file present, emit {"file":...,"services":[...]}.
# Fragments are comma-joined so the caller can splice them into a JSON
# array (the previous space-join broke the JSON).
get_compose_info() {
  local compose_files=()
  local file services services_json
  for file in "docker-compose.yml" "docker-compose.yaml" "compose.yml" "compose.yaml"; do
    if [[ -f "$PROJECT_PATH/$file" ]]; then
      # NOTE(review): heuristic — assumes service keys are indented by
      # exactly two spaces (the conventional compose layout).  The old
      # single-space pattern matched nothing on standard files.
      services=$(grep -E "^  [a-zA-Z]" "$PROJECT_PATH/$file" 2>/dev/null | sed 's/://g' | tr -d ' ' | head -10)
      # jq may be absent; fall back to an empty list rather than
      # emitting invalid JSON.
      services_json=$(printf '%s\n' "$services" | jq -R -s 'split("\n") | map(select(. != ""))' 2>/dev/null || echo '[]')
      compose_files+=("{\"file\":\"$file\",\"services\":$services_json}")
    fi
  done
  local IFS=','
  echo "${compose_files[*]}"
}
# Get resource usage
#
# Samples one round of `docker stats` (capped at 10 containers) and
# wraps the per-container JSON lines into a JSON array.
get_stats() {
  local -a rows=()
  mapfile -t rows < <(docker stats --no-stream --format '{"name":"{{.Name}}","cpu":"{{.CPUPerc}}","memory":"{{.MemUsage}}"}' 2>/dev/null | head -10)
  local IFS=','
  echo "[${rows[*]}]"
}
# Build output
# Each helper returns a single string of JSON fragments.  Assigning the
# string directly is equivalent to the original `IFS=,` echo, which is a
# no-op when applied to a plain (non-array) variable.
containers=$(get_containers)
images=$(get_images)
compose=$(get_compose_info)
stats=$(get_stats)

containers_json=$containers
images_json=$images
compose_json=$compose

cat <<EOF
{
"scan_type": "docker",
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"available": true,
"daemon_running": true,
"docker_version": "$(docker --version | cut -d' ' -f3 | tr -d ',')",
"containers": {
"running": $(docker ps -q 2>/dev/null | wc -l),
"total": $(docker ps -aq 2>/dev/null | wc -l),
"list": [${containers_json:-}]
},
"images": {
"total": $(docker images -q 2>/dev/null | wc -l),
"list": [${images_json:-}]
},
"compose_files": [${compose_json:-}],
"resource_usage": ${stats:-[]}
}
EOF

View File

@@ -0,0 +1,153 @@
#!/bin/bash
#
# DSS - Environment Variable Analysis
# Checks environment configuration (names only, no values)
#
set -e

PROJECT_PATH="${1:-.}"

# Environment variables every deployment is expected to define.
REQUIRED_VARS=(
  "NODE_ENV"
  "PORT"
)

# Optional but recommended variables.
RECOMMENDED_VARS=(
  "LOG_LEVEL"
  "DATABASE_URL"
  "API_URL"
)

# Name fragments that indicate sensitive values which must never be
# hardcoded in source files.
SENSITIVE_PATTERNS=(
  "API_KEY"
  "SECRET"
  "PASSWORD"
  "TOKEN"
  "PRIVATE"
  "AWS_"
  "FIGMA_TOKEN"
)
# Find env files
#
# Reports each known dotenv file with its variable count and whether it
# appears to contain real (non-placeholder) values.  Fragments are
# emitted space-separated, matching the main block's join logic.
find_env_files() {
  local files=()
  local pattern var_count has_values
  for pattern in ".env" ".env.local" ".env.development" ".env.production" ".env.example"; do
    if [[ -f "$PROJECT_PATH/$pattern" ]]; then
      # grep -c prints the count even when it is 0 (and then exits 1),
      # so the fallback must replace — not append to — the captured
      # output.  The old `|| echo 0` produced "0\n0" and broke the JSON.
      var_count=$(grep -cE "^[A-Z_]+=" "$PROJECT_PATH/$pattern" 2>/dev/null) || var_count=0
      has_values="false"
      # A file "has real values" when at least one assignment is
      # non-empty and none look like placeholders.
      if grep -qE "^[A-Z_]+=.+" "$PROJECT_PATH/$pattern" 2>/dev/null; then
        if ! grep -qE "^[A-Z_]+=(your_|<|placeholder)" "$PROJECT_PATH/$pattern" 2>/dev/null; then
          has_values="true"
        fi
      fi
      files+=("{\"file\":\"$pattern\",\"variables\":$var_count,\"has_real_values\":$has_values}")
    fi
  done
  echo "${files[@]}"
}
# Get var names from env files (not values)
#
# Collects the unique variable names across all .env* files and emits
# them as space-separated quoted strings (the main block joins them).
#
# Fixes: the original placed a redirection inside the `for` word list
# (`for file in .env* 2>/dev/null`), which is a bash syntax error that
# aborted the whole script, and its de-duplication compared quoted
# entries against bare names, so it never matched.
get_env_var_names() {
  local -a names=()
  local -a quoted=()
  local file varname seen n
  for file in "$PROJECT_PATH"/.env*; do
    # An unmatched glob leaves the literal pattern; -f filters it out.
    [[ -f "$file" ]] || continue
    while IFS= read -r varname; do
      [[ -n "$varname" ]] || continue
      seen="false"
      for n in "${names[@]}"; do
        if [[ "$n" == "$varname" ]]; then
          seen="true"
          break
        fi
      done
      if [[ "$seen" == "false" ]]; then
        names+=("$varname")
        quoted+=("\"$varname\"")
      fi
    done < <(grep -oE "^[A-Z_][A-Z0-9_]*" "$file" 2>/dev/null)
  done
  echo "${quoted[@]}"
}
# Check for hardcoded sensitive vars in code
#
# Greps source files for assignments of sensitive-looking names to
# string literals.  Fragments are emitted space-separated (main joins
# them).  Only file names are reported, never matched content.
#
# Fix: the original passed find-style `! -path` arguments to grep,
# which grep rejects with a usage error, so the scan silently reported
# nothing.  Directory exclusion now uses grep's own --exclude-dir.
check_hardcoded_secrets() {
  local findings=()
  local pattern found file
  for pattern in "${SENSITIVE_PATTERNS[@]}"; do
    found=$(grep -rEl "${pattern}.*=.*['\"][^'\"]+['\"]" "$PROJECT_PATH" \
      --include="*.js" --include="*.ts" --include="*.py" \
      --exclude-dir=node_modules --exclude-dir=.git \
      2>/dev/null | head -5) || true
    if [[ -n "$found" ]]; then
      while IFS= read -r file; do
        if [[ -n "$file" ]]; then
          findings+=("{\"file\":\"${file#$PROJECT_PATH/}\",\"pattern\":\"$pattern\"}")
        fi
      done <<< "$found"
    fi
  done
  echo "${findings[@]}"
}
# Check current environment
#
# Reports which required variables are set or missing in the current
# shell environment, plus any recommended variables that happen to be
# set.  Uses indirect expansion to read each name.
check_current_env() {
  local -a report=()
  local name
  for name in "${REQUIRED_VARS[@]}"; do
    if [[ -z "${!name}" ]]; then
      report+=("{\"var\":\"$name\",\"status\":\"missing\"}")
    else
      report+=("{\"var\":\"$name\",\"status\":\"set\"}")
    fi
  done
  for name in "${RECOMMENDED_VARS[@]}"; do
    if [[ -n "${!name}" ]]; then
      report+=("{\"var\":\"$name\",\"status\":\"set\",\"required\":false}")
    fi
  done
  echo "${report[@]}"
}
# Build output
env_files=$(find_env_files)
var_names=$(get_env_var_names)
hardcoded=$(check_hardcoded_secrets)
current_env=$(check_current_env)

# The helpers return space-separated fragments (none of which should
# contain spaces — file paths with spaces would break this; TODO confirm),
# so split them back into real arrays.  The original applied array
# operations to plain strings: the "arrays" always had length 1, which
# made readiness report "has_hardcoded_secrets" even with zero findings,
# and the JSON lists were space- instead of comma-joined (invalid JSON).
read -r -a env_files_arr <<< "$env_files"
read -r -a var_names_arr <<< "$var_names"
read -r -a hardcoded_arr <<< "$hardcoded"
read -r -a current_arr <<< "$current_env"

files_json=$(IFS=,; echo "${env_files_arr[*]}")
names_json=$(IFS=,; echo "${var_names_arr[*]}")
hardcoded_json=$(IFS=,; echo "${hardcoded_arr[*]}")
current_json=$(IFS=,; echo "${current_arr[*]}")

# Calculate readiness score from real element counts.
total_files=${#env_files_arr[@]}
hardcoded_count=${#hardcoded_arr[@]}
readiness="ready"
[[ $total_files -eq 0 ]] && readiness="missing_config"
[[ $hardcoded_count -gt 0 ]] && readiness="has_hardcoded_secrets"

cat <<EOF
{
"scan_type": "environment",
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"project_path": "$PROJECT_PATH",
"readiness": "$readiness",
"env_files": [${files_json:-}],
"variables_defined": [${names_json:-}],
"current_environment": [${current_json:-}],
"hardcoded_secrets": [${hardcoded_json:-}],
"recommendations": [
"Use .env.example for template (no real values)",
"Add .env* to .gitignore",
"Use environment variables for all secrets",
"Consider using a secrets manager for production"
]
}
EOF

View File

@@ -0,0 +1,102 @@
#!/bin/bash
#
# DSS - Service & Port Discovery
# Lists running services, bound ports, and process relationships
#
# Emits a single JSON document on stdout; see the heredoc at the bottom
# of the script for the schema.  Read-only: nothing is modified.
set -e
# Get listening ports
#
# Emits one JSON object per listening TCP socket.  Objects are joined
# with commas so the result can be embedded directly in a JSON array
# (the previous space-join produced invalid JSON downstream).
get_listening_ports() {
  local ports=()
  local line port process
  # Use ss if available, fallback to netstat.
  # NOTE(review): the awk column indices ($5 for the local address with
  # ss, $7 for the owning process) assume a Netid-prefixed layout —
  # verify against the ss/netstat versions actually deployed.
  if command -v ss &> /dev/null; then
    while IFS= read -r line; do
      port=$(echo "$line" | awk '{print $5}' | grep -oE '[0-9]+$') || port=""
      process=$(echo "$line" | awk '{print $7}' | sed 's/users:(("//' | sed 's/",.*//')
      if [[ -n "$port" && "$port" =~ ^[0-9]+$ ]]; then
        ports+=("{\"port\":$port,\"process\":\"$process\",\"state\":\"LISTEN\"}")
      fi
    done < <(ss -tlnp 2>/dev/null | tail -n +2)
  elif command -v netstat &> /dev/null; then
    while IFS= read -r line; do
      port=$(echo "$line" | awk '{print $4}' | grep -oE '[0-9]+$') || port=""
      process=$(echo "$line" | awk '{print $7}')
      if [[ -n "$port" && "$port" =~ ^[0-9]+$ ]]; then
        ports+=("{\"port\":$port,\"process\":\"$process\",\"state\":\"LISTEN\"}")
      fi
    done < <(netstat -tlnp 2>/dev/null | grep LISTEN)
  fi
  local IFS=','
  echo "${ports[*]}"
}
# Check common development ports
#
# Probes a list of well-known development-service ports and reports the
# active ones.  Entries are "port:label" pairs; results are comma-joined
# JSON objects (comma-joined so the caller can embed them in a JSON
# array — the previous space-join produced invalid JSON).
check_dev_ports() {
  local common_ports=(
    "3000:Node.js/React Dev"
    "3456:DSS Worker"
    "5000:Flask/Python"
    "5173:Vite"
    "8000:Django/FastAPI"
    "8080:Generic HTTP"
    "8888:Jupyter"
    "9000:PHP-FPM"
    "27017:MongoDB"
    "5432:PostgreSQL"
    "3306:MySQL"
    "6379:Redis"
  )
  local status=()
  local entry port name
  for entry in "${common_ports[@]}"; do
    port="${entry%%:*}"
    name="${entry#*:}"
    if ss -tln 2>/dev/null | grep -q ":$port " || netstat -tln 2>/dev/null | grep -q ":$port "; then
      status+=("{\"port\":$port,\"name\":\"$name\",\"active\":true}")
    fi
  done
  local IFS=','
  echo "${status[*]}"
}
# Get service health for known ports
#
# Probes known local services over HTTP and emits comma-joined JSON
# objects.  The worker's health response body is embedded verbatim; if
# the second fetch returns an empty body we substitute `null` so the
# emitted JSON stays valid (previously it produced `"response":}`).
check_health() {
  local results=()
  local health
  # Check DSS Worker
  if curl -s --connect-timeout 2 "http://localhost:3456/health" > /dev/null 2>&1; then
    health=$(curl -s "http://localhost:3456/health" 2>/dev/null) || health=""
    results+=("{\"service\":\"dss-worker\",\"port\":3456,\"healthy\":true,\"response\":${health:-null}}")
  fi
  # Check if port 8000 responds
  if curl -s --connect-timeout 2 "http://localhost:8000" > /dev/null 2>&1; then
    results+=("{\"service\":\"orchestrator\",\"port\":8000,\"healthy\":true}")
  fi
  local IFS=','
  echo "${results[*]}"
}
# Build output
# Each helper returns a single string of JSON fragments; assigning it
# directly is equivalent to the original `IFS=,` echo, which is a no-op
# on a plain (non-array) variable.
listening=$(get_listening_ports)
dev_ports=$(check_dev_ports)
health=$(check_health)

listening_json=$listening
dev_json=$dev_ports
health_json=$health

cat <<EOF
{
"scan_type": "ports",
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"hostname": "$(hostname)",
"listening_ports": [${listening_json:-}],
"dev_services": [${dev_json:-}],
"health_checks": [${health_json:-}]
}
EOF

View File

@@ -0,0 +1,117 @@
#!/bin/bash
#
# DSS - Secret Scanner
# Non-destructive scan for potential exposed secrets
# Outputs JSON with risk report (no actual secret values)
#
set -e

PROJECT_PATH="${1:-.}"

# Extended regexes that flag likely hardcoded credentials.  The last
# three match well-known token shapes (AWS access key id, GitHub PAT,
# "sk-" style API keys).
SECRET_PATTERNS=(
  "password\s*[:=]\s*['\"][^'\"]+['\"]"
  "api[_-]?key\s*[:=]\s*['\"][^'\"]+['\"]"
  "secret[_-]?key\s*[:=]\s*['\"][^'\"]+['\"]"
  "access[_-]?token\s*[:=]\s*['\"][^'\"]+['\"]"
  "private[_-]?key\s*[:=]\s*['\"][^'\"]+['\"]"
  "aws[_-]?access"
  "AKIA[0-9A-Z]{16}"
  "ghp_[a-zA-Z0-9]{36}"
  "sk-[a-zA-Z0-9]{48}"
)

# Extended-regex alternation of directory names to skip during scans.
IGNORE_DIRS="node_modules|\.git|dist|build|__pycache__|\.next|venv"

# Scan results accumulate here; each entry is one JSON object string.
declare -a findings
# Scan source files for one secret pattern.
#
# Appends a {"file","pattern","matches"} JSON object to the global
# `findings` array for every matching file (first 20), recording match
# counts only — never the matched text.
#
# Fixes: the regex is now JSON-escaped before being embedded in the
# "pattern" field (previously sequences like `\s` produced invalid JSON
# escapes), and a zero-match count no longer emits "0" twice (`grep -c`
# prints the 0 itself before exiting non-zero).
scan_for_secrets() {
  local pattern="$1"
  local results file count esc
  # JSON-escape backslashes, then quotes, before truncating for display.
  esc=${pattern//\\/\\\\}
  esc=${esc//\"/\\\"}
  results=$(grep -rEil "$pattern" "$PROJECT_PATH" \
    --include="*.js" --include="*.ts" --include="*.py" \
    --include="*.json" --include="*.yaml" --include="*.yml" \
    --include="*.env*" --include="*.config.*" \
    2>/dev/null | grep -vE "$IGNORE_DIRS" | head -20 || true)
  if [[ -n "$results" ]]; then
    while IFS= read -r file; do
      if [[ -n "$file" ]]; then
        # Count matching lines without revealing content; replace the
        # value on failure rather than appending a second "0".
        count=$(grep -cEi "$pattern" "$file" 2>/dev/null) || count=0
        findings+=("{\"file\":\"${file#$PROJECT_PATH/}\",\"pattern\":\"${esc:0:30}...\",\"matches\":$count}")
      fi
    done <<< "$results"
  fi
}
# Check for common secret files
#
# Flags well-known credential file names and records whether each one is
# listed in .gitignore.  Appends to the global `findings` array.
#
# Fixes: the original excluded directories with `! -path "*/$IGNORE_DIRS/*"`,
# a single glob containing the whole regex alternation, so nothing was
# ever excluded; filtering now reuses the $IGNORE_DIRS regex, matching
# scan_for_secrets.  The .gitignore lookup is a fixed-string match so
# names like ".env" are not interpreted as regexes.
#
# NOTE(review): `find -name` matches basenames only, so the entries with
# slashes ("config/secrets.yml", ".aws/credentials") never match —
# preserved from the original; consider -path if they matter.
check_secret_files() {
  local risky_files=(
    ".env"
    ".env.local"
    ".env.production"
    "credentials.json"
    "secrets.json"
    "config/secrets.yml"
    ".aws/credentials"
    "id_rsa"
    "id_ed25519"
    "*.pem"
    "*.key"
  )
  local pattern found file in_gitignore
  for pattern in "${risky_files[@]}"; do
    found=$(find "$PROJECT_PATH" -name "$pattern" -type f 2>/dev/null | grep -vE "$IGNORE_DIRS" | head -5) || true
    if [[ -n "$found" ]]; then
      while IFS= read -r file; do
        if [[ -n "$file" ]]; then
          # Record whether the file name appears in .gitignore.
          in_gitignore="false"
          if [[ -f "$PROJECT_PATH/.gitignore" ]]; then
            grep -qF -- "$(basename "$file")" "$PROJECT_PATH/.gitignore" 2>/dev/null && in_gitignore="true"
          fi
          findings+=("{\"file\":\"${file#$PROJECT_PATH/}\",\"type\":\"risky_file\",\"in_gitignore\":$in_gitignore}")
        fi
      done <<< "$found"
    fi
  done
}
# Run scans
for pattern in "${SECRET_PATTERNS[@]}"; do
  scan_for_secrets "$pattern"
done
check_secret_files

# Derive the risk level from the number of findings (same thresholds as
# the original cascading one-liners, expressed as an if/elif ladder).
total_findings=${#findings[@]}
if [[ $total_findings -gt 30 ]]; then
  risk_score="critical"
elif [[ $total_findings -gt 15 ]]; then
  risk_score="high"
elif [[ $total_findings -gt 5 ]]; then
  risk_score="medium"
else
  risk_score="low"
fi

# Output JSON
joined=$(IFS=,; echo "${findings[*]}")
cat <<EOF
{
"scan_type": "secrets",
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"project_path": "$PROJECT_PATH",
"risk_level": "$risk_score",
"total_findings": $total_findings,
"findings": [${joined:-}],
"recommendations": [
"Review all findings and remove hardcoded secrets",
"Use environment variables for sensitive data",
"Add secret files to .gitignore",
"Consider using a secrets manager"
]
}
EOF

330
demo/tools/discovery/discover.sh Executable file
View File

@@ -0,0 +1,330 @@
#!/bin/bash
#
# Design System Server (DSS) - Project Discovery Script
#
# Non-intrusive analysis of project structure, dependencies, and health.
# Outputs JSON for UI consumption.
#
# Usage: ./discover.sh [project_path] [--full]
#
set -e

PROJECT_PATH="${1:-.}"
FULL_SCAN="${2:-}"
OUTPUT_DIR="${PROJECT_PATH}/.dss"
TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

# Create output directory
mkdir -p "$OUTPUT_DIR"

# Colors for terminal output, enabled only when stdout is a TTY.
if [ -t 1 ]; then
  GREEN='\033[0;32m'
  BLUE='\033[0;34m'
  YELLOW='\033[1;33m'
  NC='\033[0m'
else
  GREEN=''
  BLUE=''
  YELLOW=''
  NC=''
fi
# Print a tagged status line to stderr (colorized when interactive).
log() {
  local msg="$1"
  echo -e "${BLUE}[DSS]${NC} ${msg}" >&2
}
# === Project Type Detection ===
# Emits a space-separated list of detected ecosystems based on the
# presence of manifest files ("unknown" when nothing matched).
detect_project_type() {
  local found=()
  [ -f "$PROJECT_PATH/package.json" ] && found+=("nodejs")
  { [ -f "$PROJECT_PATH/requirements.txt" ] || [ -f "$PROJECT_PATH/pyproject.toml" ]; } && found+=("python")
  [ -f "$PROJECT_PATH/Cargo.toml" ] && found+=("rust")
  [ -f "$PROJECT_PATH/go.mod" ] && found+=("go")
  { [ -f "$PROJECT_PATH/pom.xml" ] || [ -f "$PROJECT_PATH/build.gradle" ]; } && found+=("java")
  [ -f "$PROJECT_PATH/Gemfile" ] && found+=("ruby")
  [ -f "$PROJECT_PATH/composer.json" ] && found+=("php")
  echo "${found[@]:-unknown}"
}
# === Framework Detection ===
# Greps manifest files for well-known dependency names and prints the
# matching framework tags, space-separated ("none" if nothing matched).
detect_frameworks() {
  local hits=()
  if [ -f "$PROJECT_PATH/package.json" ]; then
    local manifest spec
    manifest=$(cat "$PROJECT_PATH/package.json")
    # Table of 'grep-pattern:tag' pairs, checked in the original order.
    for spec in \
      '"react":react' '"vue":vue' '"@angular/core":angular' '"svelte":svelte' \
      '"next":nextjs' '"nuxt":nuxt' '"express":express' '"fastify":fastify' \
      '"tailwindcss":tailwind' '"@emotion":emotion' '"styled-components":styled-components'; do
      if grep -q "${spec%:*}" <<< "$manifest"; then
        hits+=("${spec##*:}")
      fi
    done
  fi
  if [ -f "$PROJECT_PATH/requirements.txt" ]; then
    local py
    for py in fastapi django flask; do
      grep -q "$py" "$PROJECT_PATH/requirements.txt" && hits+=("$py")
    done
  fi
  echo "${hits[@]:-none}"
}
# === Design System Detection ===
# Emits a small JSON object describing whether a design system (library
# or custom tokens) and a Figma integration are present.
detect_design_system() {
  local info='{"detected":false}'
  # Well-known component-library packages in package.json.
  if [ -f "$PROJECT_PATH/package.json" ]; then
    if grep -qE '"(@chakra-ui|@mui|antd|@radix-ui|@headlessui)"' "$PROJECT_PATH/package.json"; then
      info='{"detected":true,"type":"library"}'
    fi
  fi
  # Custom design-token files near the project root take precedence.
  if find "$PROJECT_PATH" -maxdepth 3 -name "tokens.css" -o -name "tokens.json" -o -name "design-tokens.*" 2>/dev/null | grep -q .; then
    info='{"detected":true,"type":"custom","has_tokens":true}'
  fi
  # A Figma config appends a flag to whatever was detected above.
  if find "$PROJECT_PATH" -maxdepth 3 -name ".figmarc" -o -name "figma.config.*" 2>/dev/null | grep -q .; then
    info=$(echo "$info" | sed 's/}$/,"figma_connected":true}/')
  fi
  echo "$info"
}
# === File Statistics ===
# Counts project files by category, skipping vendored/generated trees
# (node_modules, .git, dist, __pycache__ for the total; each category
# applies the exclusions the original used).
get_file_stats() {
  local n_total n_js n_css n_py n_comp
  n_total=$(find "$PROJECT_PATH" -type f ! -path "*/node_modules/*" ! -path "*/.git/*" ! -path "*/dist/*" ! -path "*/__pycache__/*" 2>/dev/null | wc -l)
  n_js=$(find "$PROJECT_PATH" -type f \( -name "*.js" -o -name "*.jsx" -o -name "*.ts" -o -name "*.tsx" \) ! -path "*/node_modules/*" 2>/dev/null | wc -l)
  n_css=$(find "$PROJECT_PATH" -type f \( -name "*.css" -o -name "*.scss" -o -name "*.less" \) ! -path "*/node_modules/*" 2>/dev/null | wc -l)
  n_py=$(find "$PROJECT_PATH" -type f -name "*.py" ! -path "*/__pycache__/*" 2>/dev/null | wc -l)
  n_comp=$(find "$PROJECT_PATH" -type f \( -name "*.jsx" -o -name "*.tsx" -o -name "*.vue" -o -name "*.svelte" \) ! -path "*/node_modules/*" 2>/dev/null | wc -l)
cat <<EOF
{
"total": $n_total,
"javascript": $n_js,
"css": $n_css,
"python": $n_py,
"components": $n_comp
}
EOF
}
# === Dependency Analysis ===
# Summarizes dependency counts from package.json (via jq, falling back
# to 0 if jq is unavailable) and/or requirements.txt.  When both exist,
# the Python summary wins — mirroring the original control flow.
analyze_dependencies() {
  local summary='{"production":[],"development":[],"total":0}'
  if [ -f "$PROJECT_PATH/package.json" ]; then
    local prod dev
    prod=$(jq '.dependencies | length // 0' "$PROJECT_PATH/package.json" 2>/dev/null || echo 0)
    dev=$(jq '.devDependencies | length // 0' "$PROJECT_PATH/package.json" 2>/dev/null || echo 0)
    summary="{\"production\":$prod,\"development\":$dev,\"total\":$((prod + dev))}"
  fi
  if [ -f "$PROJECT_PATH/requirements.txt" ]; then
    local count
    count=$(grep -v "^#" "$PROJECT_PATH/requirements.txt" | grep -v "^$" | wc -l)
    summary="{\"python\":$count,\"total\":$count}"
  fi
  echo "$summary"
}
# === Git Analysis ===
# Summarizes repository state (branch, commit/contributor counts, last
# commit date, dirty-file count) as JSON.
#
# Fix: the original `cd "$PROJECT_PATH"` changed the caller's working
# directory whenever the function was not run in a subshell, breaking
# any later use of a relative PROJECT_PATH; `git -C` queries the repo
# without touching the cwd.
analyze_git() {
  if [ ! -d "$PROJECT_PATH/.git" ]; then
    echo '{"is_repo":false}'
    return
  fi
  local branch commits contributors last_commit uncommitted
  branch=$(git -C "$PROJECT_PATH" branch --show-current 2>/dev/null || echo "unknown")
  commits=$(git -C "$PROJECT_PATH" rev-list --count HEAD 2>/dev/null || echo 0)
  contributors=$(git -C "$PROJECT_PATH" log --format='%ae' 2>/dev/null | sort -u | wc -l)
  last_commit=$(git -C "$PROJECT_PATH" log -1 --format='%ci' 2>/dev/null || echo "unknown")
  uncommitted=$(git -C "$PROJECT_PATH" status --porcelain 2>/dev/null | wc -l)
cat <<EOF
{
"is_repo": true,
"branch": "$branch",
"commits": $commits,
"contributors": $contributors,
"last_commit": "$last_commit",
"uncommitted_changes": $uncommitted
}
EOF
}
# === Component Discovery ===
# Lists up to 50 UI component files (.jsx/.tsx/.vue) as a JSON array of
# {name, path, file} objects, skipping vendored/generated trees.
discover_components() {
  local entries=()
  local path base stem rel
  while IFS= read -r path; do
    [ -n "$path" ] || continue
    base=$(basename "$path")
    # Component name = file name without its last extension.
    stem=${base%.*}
    rel=$(dirname "$path" | sed "s|^$PROJECT_PATH/||")
    entries+=("{\"name\":\"$stem\",\"path\":\"$rel\",\"file\":\"$base\"}")
  done < <(find "$PROJECT_PATH" -type f \( -name "*.jsx" -o -name "*.tsx" -o -name "*.vue" \) ! -path "*/node_modules/*" ! -path "*/.next/*" ! -path "*/dist/*" 2>/dev/null | head -50)
  local joined
  joined=$(IFS=,; echo "${entries[*]}")
  echo "[$joined]"
}
# === Health Score ===
# Starts from 100 and deducts points for missing project-hygiene files;
# emits the score, a letter grade, and the list of issues found.
calculate_health_score() {
  local score=100
  local issues=()

  # A package.json should be accompanied by some lock file.
  if [ -f "$PROJECT_PATH/package.json" ]; then
    if [ ! -f "$PROJECT_PATH/package-lock.json" ] && [ ! -f "$PROJECT_PATH/yarn.lock" ] && [ ! -f "$PROJECT_PATH/pnpm-lock.yaml" ]; then
      score=$((score - 10))
      issues+=("\"No lock file found\"")
    fi
  fi
  # Repositories should carry a .gitignore.
  if [ -d "$PROJECT_PATH/.git" ] && [ ! -f "$PROJECT_PATH/.gitignore" ]; then
    score=$((score - 5))
    issues+=("\"Missing .gitignore\"")
  fi
  # Some form of README.
  if [ ! -f "$PROJECT_PATH/README.md" ] && [ ! -f "$PROJECT_PATH/README" ]; then
    score=$((score - 5))
    issues+=("\"Missing README\"")
  fi
  # Any conventional test directory near the root.
  if ! find "$PROJECT_PATH" -maxdepth 3 -type d \( -name "test" -o -name "tests" -o -name "__tests__" -o -name "spec" \) 2>/dev/null | grep -q .; then
    score=$((score - 10))
    issues+=("\"No test directory found\"")
  fi
  # TypeScript present but no tsconfig.json.
  if [ -f "$PROJECT_PATH/package.json" ] && ! [ -f "$PROJECT_PATH/tsconfig.json" ]; then
    if grep -q "typescript" "$PROJECT_PATH/package.json" 2>/dev/null; then
      score=$((score - 5))
      issues+=("\"TypeScript installed but no tsconfig.json\"")
    fi
  fi

  # Letter grade: A >= 90, B >= 80, C >= 70, D >= 60, else F.
  local grade
  if [ "$score" -ge 90 ]; then grade="A"
  elif [ "$score" -ge 80 ]; then grade="B"
  elif [ "$score" -ge 70 ]; then grade="C"
  elif [ "$score" -ge 60 ]; then grade="D"
  else grade="F"
  fi

  local joined_issues
  joined_issues=$(IFS=,; echo "${issues[*]}")
cat <<EOF
{
"score": $score,
"grade": "$grade",
"issues": [$joined_issues]
}
EOF
}
# === CSS Analysis ===
# Reports stylesheet counts, preprocessor use, and whether CSS custom
# properties (--x) or preprocessor variables ($x) appear.
#
# Fix: `grep -c` prints "0" itself before exiting non-zero, so the old
# `|| echo 0` fallback emitted "0\n0" into the JSON whenever no CSS
# files existed.
analyze_css() {
  local css_files total_files
  css_files=$(find "$PROJECT_PATH" -type f \( -name "*.css" -o -name "*.scss" \) ! -path "*/node_modules/*" 2>/dev/null)
  total_files=$(echo "$css_files" | grep -c .) || total_files=0
  local has_variables=false
  local has_custom_properties=false
  local preprocessor="none"
  if echo "$css_files" | grep -q ".scss"; then
    preprocessor="sass"
  fi
  if [ -n "$css_files" ]; then
    # NOTE(review): iterating an unquoted word-split list breaks on
    # paths containing whitespace — behavior preserved from the original.
    local file
    for file in $css_files; do
      grep -q -- "--" "$file" 2>/dev/null && has_custom_properties=true
      grep -q "\\\$" "$file" 2>/dev/null && has_variables=true
    done
  fi
cat <<EOF
{
"files": $total_files,
"preprocessor": "$preprocessor",
"has_css_variables": $has_custom_properties,
"has_preprocessor_variables": $has_variables
}
EOF
}
# === Main Discovery ===
log "Starting project discovery..."

# Run every analyzer up front; each returns a JSON fragment (a string)
# that is interpolated into the final document below.
PROJECT_TYPES=$(detect_project_type)
FRAMEWORKS=$(detect_frameworks)
DESIGN_SYSTEM=$(detect_design_system)
FILE_STATS=$(get_file_stats)
DEPENDENCIES=$(analyze_dependencies)
GIT_INFO=$(analyze_git)
HEALTH=$(calculate_health_score)
CSS_INFO=$(analyze_css)

# Component enumeration only runs with --full (it walks the file tree).
if [ "$FULL_SCAN" = "--full" ]; then
COMPONENTS=$(discover_components)
else
COMPONENTS="[]"
fi

# Build final JSON
# jq turns the space-separated type/framework lists into JSON arrays;
# the fallbacks keep the document valid when jq is unavailable.
cat > "$OUTPUT_DIR/discovery.json" <<EOF
{
"meta": {
"version": "1.0.0",
"timestamp": "$TIMESTAMP",
"project_path": "$PROJECT_PATH",
"full_scan": $([ "$FULL_SCAN" = "--full" ] && echo true || echo false)
},
"project": {
"types": $(echo "$PROJECT_TYPES" | jq -R 'split(" ")' 2>/dev/null || echo '["unknown"]'),
"frameworks": $(echo "$FRAMEWORKS" | jq -R 'split(" ")' 2>/dev/null || echo '[]')
},
"design_system": $DESIGN_SYSTEM,
"files": $FILE_STATS,
"dependencies": $DEPENDENCIES,
"git": $GIT_INFO,
"health": $HEALTH,
"css": $CSS_INFO,
"components": $COMPONENTS
}
EOF

log "Discovery complete: $OUTPUT_DIR/discovery.json"
# Output the JSON
cat "$OUTPUT_DIR/discovery.json"