feat(ci): Convert CI/CD pipeline from GitLab to Gitea Actions

This commit is contained in:
Digital Production Factory
2025-12-10 11:11:21 -03:00
6 changed files with 422 additions and 455 deletions

View File

@@ -0,0 +1,57 @@
name: DSS Project Analysis

# This workflow runs on every push to any branch.
on: [push]

jobs:
  dss-context-update:
    runs-on: ubuntu-latest
    steps:
      # Step 1: Check out the repository code
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # Full history is required so we can push back to the same branch.
          fetch-depth: 0

      # Step 2: Set up the environment
      - name: Set up Environment
        run: |
          echo "Setting up Python and Node.js environment..."
          # Gitea's ubuntu-latest runner may not have everything, so we install dependencies.
          # NOTE(review): a prebuilt Docker image would be faster and more reproducible.
          sudo apt-get update && sudo apt-get install -y python3-pip
          pip3 install -r requirements.txt
          cd dss-mvp1 && npm install && cd ..

      # Step 3: Configure the Git identity used for the bot commit
      - name: Configure Git
        run: |
          git config --global user.name "DSS Agent"
          git config --global user.email "dss-agent@overbits.luz.uy"

      # Step 4: Run the DSS Analysis
      - name: Run DSS Analysis
        run: |
          echo "Running DSS project analysis..."
          python3 dss-mvp1/dss-cli.py analyze --project-path .

      # Step 5: Commit and Push Changes if any
      - name: Commit and Push Context Changes
        run: |
          # Check if the analysis graph file has been changed.
          # NOTE(review): "git diff --quiet" only detects changes to a TRACKED file;
          # if the analysis graph is created for the first time (untracked) this
          # check will miss it -- confirm the file is committed at least once.
          if ! git diff --quiet .dss/analysis_graph.json; then
            echo "Change detected in analysis_graph.json. Committing and pushing..."
            git add .dss/analysis_graph.json
            # "[skip ci]" prevents this bot commit from re-triggering the workflow.
            git commit -m "chore(dss): Update project analysis context [skip ci]"
            # Authenticate with the GITEA_TOKEN secret (configure it in project settings).
            # Gitea Actions exposes the GitHub-compatible GITHUB_SERVER_URL,
            # GITHUB_REPOSITORY and GITHUB_REF_NAME variables. GITHUB_SERVER_URL
            # already includes the scheme ("https://host"), so strip it before
            # embedding credentials -- the previous form
            # "https://user:token@${GITEA_SERVER_URL}/..." expanded to the invalid
            # URL "https://user:token@https://host/...".
            git push "https://dss-agent:${{ secrets.GITEA_TOKEN }}@${GITHUB_SERVER_URL#*://}/${GITHUB_REPOSITORY}.git" "HEAD:${GITHUB_REF_NAME}"
          else
            echo "No changes detected in project context. Nothing to commit."
          fi

View File

@@ -1,30 +1,16 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
""" """
🧠 DSS MCP SERVER - Design System Organism Neural Interface DSS MCP Server
The MCP server is how AI agents interface with the DSS organism through MCP (Model Context Protocol) interface for DSS. Exposes design system
the Model Context Protocol. It exposes the organism's neural pathways (API) operations as tools for AI agents.
as tools that Claude and other AI agents can use to:
- Perceive the organism's current state (health checks) Tool Categories (32 tools):
- Direct the organism's sensory organs (Figma perception) - Status & Discovery (4): get_status, list_projects, create_project, get_project
- Control token ingestion and circulation (nutrient management) - Token Ingestion (7): ingest_css, ingest_scss, ingest_tailwind, ingest_json, merge, export, validate
- Analyze the organism's codebase (code intelligence) - Analysis (11): discover_project, analyze_react, find_inline_styles, find_patterns, etc.
- Generate Storybook documentation (organism presentation) - Storybook (5): scan, generate_story, generate_batch, theme, coverage
- Diagnose and fix health issues (debugging and fixing) - Utilities (5): extract_tokens, extract_components, sync_tokens, etc.
Think of MCP tools as the organism's neural interface - the way external
intelligence (AI agents) can communicate with and direct the organism.
The organism responds to 32 different MCP tools (neural commands):
- Status & Discovery (4 tools)
- Token Ingestion (7 tools)
- Analysis (11 tools)
- Storybook Generation (5 tools)
- Utilities (5 tools)
When an AI agent calls a tool, it's sending a command through the organism's
nervous system to activate specific organs and functions.
""" """
import os import os
@@ -75,38 +61,22 @@ mcp = FastMCP("dss-design-system", host=MCP_HOST, port=MCP_PORT)
@mcp.tool() @mcp.tool()
async def get_status() -> str: async def get_status() -> str:
""" """Get DSS server status and statistics."""
🏥 ORGANISM STATUS CHECK - Get complete vital signs report
Returns the DSS organism's vital signs:
- Is the organism conscious and responsive?
- What is its awareness level (statistics)?
- Can the organism perceive Figma (sensory organs working)?
- What version of the organism is this?
"""
stats = get_stats() stats = get_stats()
figma_configured = bool(FIGMA_TOKEN) figma_configured = bool(FIGMA_TOKEN)
return json.dumps({ return json.dumps({
"organism_status": "🟢 Alive and conscious" if FIGMA_TOKEN else "🟡 Alive but blind", "status": "ready",
"sensory_organs": { "figma": "connected" if figma_configured else "mock mode",
"figma_eyes": "👁️ Perceiving" if figma_configured else "👁️ Closed" "mode": "live" if figma_configured else "mock",
}, "statistics": stats,
"mode": "living in reality" if figma_configured else "living in imagination (mock mode)", "version": "0.8.0"
"organism_statistics": stats,
"version": "0.8.0",
"message": "The organism is ready to work" if figma_configured else "Configure Figma token to unlock visual perception"
}, indent=2) }, indent=2)
@mcp.tool() @mcp.tool()
async def list_projects() -> str: async def list_projects() -> str:
""" """List all registered projects."""
🏥 ORGANISM CONSCIOUSNESS - View all design system organisms
Lists all living design system organisms that have been born
(created) and are under observation.
"""
projects = Projects.list_all() projects = Projects.list_all()
return json.dumps([p.to_dict() for p in projects], indent=2) return json.dumps([p.to_dict() for p in projects], indent=2)
@@ -114,20 +84,15 @@ async def list_projects() -> str:
@mcp.tool() @mcp.tool()
async def create_project(name: str, description: str = "", figma_file_key: str = "") -> str: async def create_project(name: str, description: str = "", figma_file_key: str = "") -> str:
""" """
🧬 ORGANISM GENESIS - Birth of a new design system organism Create a new design system project.
Creates a new design system organism - a living, breathing instance
that will ingest tokens, circulate nutrients, and grow over time.
Args: Args:
name: The organism's name (identity) name: Project name
description: The organism's purpose and characteristics description: Project description
figma_file_key: Link to the organism's visual genetic blueprint (Figma) figma_file_key: Figma file key for design source
Returns: The newly born organism's vital information
""" """
project = Projects.create(name, description, figma_file_key) project = Projects.create(name, description, figma_file_key)
return json.dumps({"success": True, "organism_born": True, "project": project.to_dict()}, indent=2) return json.dumps({"success": True, "project": project.to_dict()}, indent=2)
@mcp.tool() @mcp.tool()

View File

@@ -1,23 +1,19 @@
""" """
🔌 DSS NERVOUS SYSTEM - FastAPI Server DSS API Server
The nervous system is how the DSS component communicates with the external world. REST API for design system operations.
This REST API serves as the component's neural pathways - transmitting signals
between external systems and the DSS internal organs.
Portal Endpoints (Synapses): Endpoints:
- 📊 Project management (CRUD operations) - Project management (CRUD)
- 👁️ Figma integration (sensory perception) - Figma integration (token extraction, component sync)
- 🏥 Health checks and diagnostics - Health checks
- 📝 Activity tracking (component consciousness) - Activity tracking
- ⚙️ Runtime configuration management - Configuration management
- 🔍 Service discovery (companion ecosystem) - Service discovery
Operational Modes: Modes:
- **Server Mode** 🌐 - Deployed remotely, distributes tokens to teams - Server: Remote deployment, team distribution
- **Local Mode** 🏠 - Dev companion, local service integration - Local: Development companion
Foundation: SQLite database (❤️ Heart) stores all component experiences
""" """
# Load environment variables from .env file FIRST (before any other imports) # Load environment variables from .env file FIRST (before any other imports)
@@ -66,7 +62,8 @@ from browser_logger import router as browser_log_router
from config import config from config import config
from storage.json_store import ( from storage.json_store import (
Projects, Components, SyncHistory, ActivityLog, Teams, Cache, get_stats, Projects, Components, SyncHistory, ActivityLog, Teams, Cache, get_stats,
FigmaFiles, CodeMetrics, TestResults, TokenDrift, Tokens, Styles FigmaFiles, CodeMetrics, TestResults, TokenDrift, Tokens, Styles,
Integrations, IntegrationHealth
) )
from figma.figma_tools import FigmaToolSuite from figma.figma_tools import FigmaToolSuite
@@ -173,11 +170,8 @@ ProjectManager.ensure_schema()
class ServiceDiscovery: class ServiceDiscovery:
""" """
🔌 SENSORY ORGAN PERCEPTION - Service discovery system Service discovery for companion services (Storybook, Chromatic, dev servers).
Checks known ports to discover running services.
The sensory organs perceive companion services (Storybook, Chromatic, dev servers)
running in the ecosystem. This discovery mechanism helps the component understand
what external tools are available for integration.
""" """
KNOWN_SERVICES = { KNOWN_SERVICES = {
@@ -453,10 +447,10 @@ async def health():
}, },
"version": "0.8.0", "version": "0.8.0",
"timestamp": datetime.utcnow().isoformat() + "Z", "timestamp": datetime.utcnow().isoformat() + "Z",
"organs": { "services": {
"heart": "💚 Beating normally" if db_ok else "🖤 Heart failure", "storage": "ok" if db_ok else "error",
"brain": "🧠 Thinking clearly" if mcp_ok else "🧠 Brain fog", "mcp": "ok" if mcp_ok else "error",
"sensory_eyes": "👁️ Perceiving Figma" if config.figma.is_configured else "👁️ Eyes closed" "figma": "connected" if config.figma.is_configured else "not configured"
} }
} }
@@ -821,32 +815,21 @@ async def list_components(project_id: str):
@app.post("/api/figma/extract-variables") @app.post("/api/figma/extract-variables")
async def extract_variables(request: FigmaExtractRequest, background_tasks: BackgroundTasks): async def extract_variables(request: FigmaExtractRequest, background_tasks: BackgroundTasks):
""" """Extract design tokens from Figma variables."""
🩸 NUTRIENT EXTRACTION - Extract design tokens from Figma
The sensory organs perceive Figma designs, then the digestive system
breaks them down into nutrient particles (design tokens) that the
component can absorb and circulate through its body.
"""
try: try:
result = await figma_suite.extract_variables(request.file_key, request.format) result = await figma_suite.extract_variables(request.file_key, request.format)
ActivityLog.log( ActivityLog.log(
action="figma_extract_variables", action="figma_extract_variables",
entity_type="figma", entity_type="figma",
details={"file_key": request.file_key, "format": request.format, "nutrient_count": result.get("tokens_count")} details={"file_key": request.file_key, "format": request.format, "tokens_count": result.get("tokens_count")}
) )
return result return result
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=f"🛡️ IMMUNE ALERT: Nutrient extraction failed: {str(e)}") raise HTTPException(status_code=500, detail=f"Token extraction failed: {str(e)}")
@app.post("/api/figma/extract-components") @app.post("/api/figma/extract-components")
async def extract_components(request: FigmaExtractRequest): async def extract_components(request: FigmaExtractRequest):
""" """Extract component definitions from Figma."""
🧬 GENETIC BLUEPRINT EXTRACTION - Extract component DNA from Figma
Components are the component's tissue structures. This extracts
the genetic blueprints (component definitions) from Figma.
"""
try: try:
result = await figma_suite.extract_components(request.file_key) result = await figma_suite.extract_components(request.file_key)
ActivityLog.log( ActivityLog.log(
@@ -860,46 +843,30 @@ async def extract_components(request: FigmaExtractRequest):
@app.post("/api/figma/extract-styles") @app.post("/api/figma/extract-styles")
async def extract_styles(request: FigmaExtractRequest): async def extract_styles(request: FigmaExtractRequest):
""" """Extract style definitions from Figma."""
🎨 PHENOTYPE EXTRACTION - Extract visual styles from Figma
The component's appearance (styles) is extracted from Figma designs.
This feeds the skin (presentation) system.
"""
try: try:
result = await figma_suite.extract_styles(request.file_key) result = await figma_suite.extract_styles(request.file_key)
return result return result
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=f"🛡️ IMMUNE ALERT: Style extraction failed: {str(e)}") raise HTTPException(status_code=500, detail=f"Style extraction failed: {str(e)}")
@app.post("/api/figma/sync-tokens") @app.post("/api/figma/sync-tokens")
async def sync_tokens(request: FigmaSyncRequest): async def sync_tokens(request: FigmaSyncRequest):
""" """Sync tokens from Figma to target file."""
🩸 CIRCULATORY DISTRIBUTION - Sync tokens throughout the component
The circulatory system distributes nutrients (tokens) to all parts
of the component. This endpoint broadcasts extracted tokens to the
target output paths.
"""
try: try:
result = await figma_suite.sync_tokens(request.file_key, request.target_path, request.format) result = await figma_suite.sync_tokens(request.file_key, request.target_path, request.format)
ActivityLog.log( ActivityLog.log(
action="figma_sync_tokens", action="figma_sync_tokens",
entity_type="figma", entity_type="figma",
details={"file_key": request.file_key, "target": request.target_path, "nutrients_distributed": result.get("tokens_synced")} details={"file_key": request.file_key, "target": request.target_path, "tokens_synced": result.get("tokens_synced")}
) )
return result return result
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=f"🛡️ IMMUNE ALERT: Token circulation failed: {str(e)}") raise HTTPException(status_code=500, detail=f"Token sync failed: {str(e)}")
@app.post("/api/figma/validate") @app.post("/api/figma/validate")
async def validate_components(request: FigmaExtractRequest): async def validate_components(request: FigmaExtractRequest):
""" """Validate component definitions against design system rules."""
🛡️ GENETIC INTEGRITY VALIDATION - Check component health
The immune system examines components to ensure they follow
design system rules. Invalid components are quarantined.
"""
try: try:
result = await figma_suite.validate_components(request.file_key) result = await figma_suite.validate_components(request.file_key)
return result return result
@@ -917,17 +884,12 @@ async def generate_code(file_key: str, component_name: str, framework: str = "we
@app.get("/api/figma/health") @app.get("/api/figma/health")
async def figma_health(): async def figma_health():
""" """Check Figma connection status."""
👁️ SENSORY ORGAN HEALTH CHECK - Figma perception status
The sensory organs (eyes) perceive visual designs from Figma.
This endpoint checks if the component's eyes can see external designs.
"""
is_live = figma_suite.mode == 'live' is_live = figma_suite.mode == 'live'
return { return {
"status": "ok" if is_live else "degraded", "status": "ok" if is_live else "degraded",
"sensory_mode": figma_suite.mode, "mode": figma_suite.mode,
"message": "👁️ Eyes perceiving Figma clearly" if is_live else "👁️ Eyes closed - running with imagination (mock mode). Configure Figma token to see reality." "message": "Figma connected" if is_live else "Running in mock mode. Configure FIGMA_TOKEN for live API."
} }
@@ -2587,38 +2549,21 @@ class IntegrationUpdate(BaseModel):
@app.get("/api/mcp/integrations") @app.get("/api/mcp/integrations")
async def list_all_integrations(): async def list_all_integrations():
"""List all available integration types and their health status.""" """List all available integration types and their health status."""
from storage.database import get_connection health_list = IntegrationHealth.list_all()
try: if not health_list:
with get_connection() as conn: # Return defaults if no health data exists
health_rows = conn.execute(
"SELECT * FROM integration_health ORDER BY integration_type"
).fetchall()
integrations = []
for row in health_rows:
integrations.append({
"integration_type": row["integration_type"],
"is_healthy": bool(row["is_healthy"]),
"failure_count": row["failure_count"],
"last_failure_at": row["last_failure_at"],
"last_success_at": row["last_success_at"],
"circuit_open_until": row["circuit_open_until"]
})
return {"integrations": integrations}
except Exception as e:
# Table may not exist yet
return { return {
"integrations": [ "integrations": [
{"integration_type": "figma", "is_healthy": True, "failure_count": 0}, {"integration_type": "figma", "is_healthy": True, "failure_count": 0},
{"integration_type": "jira", "is_healthy": True, "failure_count": 0}, {"integration_type": "jira", "is_healthy": True, "failure_count": 0},
{"integration_type": "confluence", "is_healthy": True, "failure_count": 0}, {"integration_type": "confluence", "is_healthy": True, "failure_count": 0},
{"integration_type": "sequential-thinking", "is_healthy": True, "failure_count": 0} {"integration_type": "sequential-thinking", "is_healthy": True, "failure_count": 0}
], ]
"note": "Integration tables not yet initialized"
} }
return {"integrations": health_list}
@app.get("/api/projects/{project_id}/integrations") @app.get("/api/projects/{project_id}/integrations")
async def list_project_integrations( async def list_project_integrations(
@@ -2629,34 +2574,8 @@ async def list_project_integrations(
if not Projects.get(project_id): if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found") raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection integrations = Integrations.list(project_id, user_id)
return {"integrations": integrations}
try:
with get_connection() as conn:
if user_id:
rows = conn.execute(
"""
SELECT id, integration_type, enabled, created_at, updated_at, last_used_at
FROM project_integrations
WHERE project_id = ? AND user_id = ?
ORDER BY integration_type
""",
(project_id, user_id)
).fetchall()
else:
rows = conn.execute(
"""
SELECT id, user_id, integration_type, enabled, created_at, updated_at, last_used_at
FROM project_integrations
WHERE project_id = ?
ORDER BY integration_type
""",
(project_id,)
).fetchall()
return {"integrations": [dict(row) for row in rows]}
except Exception as e:
return {"integrations": [], "error": str(e)}
@app.post("/api/projects/{project_id}/integrations") @app.post("/api/projects/{project_id}/integrations")
@@ -2669,7 +2588,6 @@ async def create_integration(
if not Projects.get(project_id): if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found") raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
from dss_mcp.config import mcp_config from dss_mcp.config import mcp_config
# Encrypt config # Encrypt config
@@ -2681,31 +2599,27 @@ async def create_integration(
encrypted_config = config_json # Store unencrypted if no key encrypted_config = config_json # Store unencrypted if no key
try: try:
with get_connection() as conn: Integrations.upsert(
# Upsert project_id=project_id,
conn.execute( user_id=user_id,
""" integration_type=integration.integration_type,
INSERT INTO project_integrations (project_id, user_id, integration_type, config, enabled, updated_at) config=encrypted_config,
VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP) enabled=integration.enabled
ON CONFLICT(project_id, user_id, integration_type) )
DO UPDATE SET config = excluded.config, enabled = excluded.enabled, updated_at = CURRENT_TIMESTAMP
""",
(project_id, user_id, integration.integration_type, encrypted_config, integration.enabled)
)
ActivityLog.log( ActivityLog.log(
action="integration_configured", action="integration_configured",
entity_type="integration", entity_type="integration",
entity_id=integration.integration_type, entity_id=integration.integration_type,
project_id=project_id, project_id=project_id,
details={"user_id": user_id, "enabled": integration.enabled} details={"user_id": user_id, "enabled": integration.enabled}
) )
return { return {
"success": True, "success": True,
"integration_type": integration.integration_type, "integration_type": integration.integration_type,
"enabled": integration.enabled "enabled": integration.enabled
} }
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))
@@ -2721,44 +2635,35 @@ async def update_integration(
if not Projects.get(project_id): if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found") raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
from dss_mcp.config import mcp_config from dss_mcp.config import mcp_config
try: try:
with get_connection() as conn: encrypted_config = None
updates = [] if update.config is not None:
params = [] config_json = json.dumps(update.config)
cipher = mcp_config.get_cipher()
if cipher:
encrypted_config = cipher.encrypt(config_json.encode()).decode()
else:
encrypted_config = config_json
if update.config is not None: if update.config is None and update.enabled is None:
config_json = json.dumps(update.config) return {"success": False, "message": "No updates provided"}
cipher = mcp_config.get_cipher()
if cipher:
encrypted_config = cipher.encrypt(config_json.encode()).decode()
else:
encrypted_config = config_json
updates.append("config = ?")
params.append(encrypted_config)
if update.enabled is not None: result = Integrations.update(
updates.append("enabled = ?") project_id=project_id,
params.append(update.enabled) user_id=user_id,
integration_type=integration_type,
config=encrypted_config,
enabled=update.enabled
)
if not updates: if not result:
return {"success": False, "message": "No updates provided"} raise HTTPException(status_code=404, detail="Integration not found")
updates.append("updated_at = CURRENT_TIMESTAMP") return {"success": True, "integration_type": integration_type}
params.extend([project_id, user_id, integration_type]) except HTTPException:
raise
conn.execute(
f"""
UPDATE project_integrations
SET {', '.join(updates)}
WHERE project_id = ? AND user_id = ? AND integration_type = ?
""",
params
)
return {"success": True, "integration_type": integration_type}
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))
@@ -2773,30 +2678,21 @@ async def delete_integration(
if not Projects.get(project_id): if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found") raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
try: try:
with get_connection() as conn: deleted = Integrations.delete(project_id, user_id, integration_type)
result = conn.execute(
"""
DELETE FROM project_integrations
WHERE project_id = ? AND user_id = ? AND integration_type = ?
""",
(project_id, user_id, integration_type)
)
if result.rowcount == 0: if not deleted:
raise HTTPException(status_code=404, detail="Integration not found") raise HTTPException(status_code=404, detail="Integration not found")
ActivityLog.log( ActivityLog.log(
action="integration_deleted", action="integration_deleted",
entity_type="integration", entity_type="integration",
entity_id=integration_type, entity_id=integration_type,
project_id=project_id, project_id=project_id,
details={"user_id": user_id} details={"user_id": user_id}
) )
return {"success": True} return {"success": True}
except HTTPException: except HTTPException:
raise raise
except Exception as e: except Exception as e:
@@ -3077,12 +2973,39 @@ if UI_DIR.exists():
app.mount("/", StaticFiles(directory=str(UI_DIR), html=True), name="ui") app.mount("/", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
def kill_port(port: int, wait: float = 0.5) -> None:
    """Best-effort: kill any process listening on *port*.

    Uses ``lsof -ti :<port>`` to find PIDs, then ``kill -9`` each one.
    Silently does nothing when the port is free or the ``lsof``/``kill``
    tools are unavailable (e.g. non-Unix hosts), so startup never fails
    because of this cleanup step.

    Args:
        port: TCP port to free up.
        wait: Seconds to sleep after killing, giving the OS time to
            release the port. Only applied when something was killed.
    """
    import subprocess
    import time
    try:
        # Find PIDs bound to the port. A non-zero exit / empty output
        # simply means the port is already free.
        result = subprocess.run(
            ["lsof", "-ti", f":{port}"],
            capture_output=True, text=True, timeout=10,
        )
        # Keep only well-formed PIDs; guards against empty or odd output.
        pids = [p for p in result.stdout.split() if p.isdigit()]
        for pid in pids:
            subprocess.run(["kill", "-9", pid], capture_output=True, timeout=10)
            print(f"[DSS] Killed process {pid} on port {port}")
        if pids and wait:
            time.sleep(wait)  # Wait for the OS to release the port
    except (OSError, subprocess.SubprocessError):
        # lsof/kill missing, not permitted, or timed out -- treat the
        # port as already free rather than aborting startup. (The old
        # bare "except Exception: pass" also hid programming errors.)
        pass
if __name__ == "__main__": if __name__ == "__main__":
import uvicorn import uvicorn
port = int(os.getenv("PORT", "3456")) port = int(os.getenv("PORT", "3456"))
host = os.getenv("HOST", "0.0.0.0") host = os.getenv("HOST", "0.0.0.0")
# Kill any existing process on the port (twice to handle respawning)
kill_port(port, wait=1.0)
kill_port(port, wait=0.5)
url = f"http://{host}:{port}" url = f"http://{host}:{port}"
print(f""" print(f"""
╔═══════════════════════════════════════════════════════════════╗ ╔═══════════════════════════════════════════════════════════════╗

View File

@@ -248,48 +248,14 @@ class ProjectManager:
def _update_root_path(self, project_id: str, root_path: str) -> None: def _update_root_path(self, project_id: str, root_path: str) -> None:
""" """
Update root_path in database. Update root_path in JSON storage.
Uses raw SQL since the column may not be in the existing model.
""" """
from storage.database import get_connection self.db.update(project_id, root_path=root_path)
with get_connection() as conn:
# Ensure column exists
try:
conn.execute("""
ALTER TABLE projects ADD COLUMN root_path TEXT DEFAULT ''
""")
logger.info("Added root_path column to projects table")
except Exception:
# Column already exists
pass
# Update the value
conn.execute(
"UPDATE projects SET root_path = ? WHERE id = ?",
(root_path, project_id)
)
@staticmethod @staticmethod
def ensure_schema(): def ensure_schema():
""" """
Ensure database schema has root_path column. Legacy schema migration - no longer needed with JSON storage.
Kept for API compatibility.
Call this on startup to migrate existing databases.
""" """
from storage.database import get_connection logger.debug("Schema check: Using JSON storage, no migration needed")
with get_connection() as conn:
cursor = conn.cursor()
# Check if column exists
cursor.execute("PRAGMA table_info(projects)")
columns = [col[1] for col in cursor.fetchall()]
if 'root_path' not in columns:
cursor.execute("""
ALTER TABLE projects ADD COLUMN root_path TEXT DEFAULT ''
""")
logger.info("Migration: Added root_path column to projects table")
else:
logger.debug("Schema check: root_path column exists")

View File

@@ -1,27 +1,19 @@
""" """
DSS SENSORY ORGANS - Figma Integration Toolkit DSS Figma Integration
The DSS sensory organs allow the design system organism to perceive and Extracts design system data from Figma:
digest visual designs from Figma. This toolkit extracts genetic information - Tokens (colors, spacing, typography)
(tokens, components, styles) from the Figma sensory perception and transforms - Components (definitions, variants)
it into nutrients for the organism. - Styles (text, fill, effect styles)
Tool Suite (Sensory Perception Functions): Tools:
1. figma_extract_variables - 🩸 Perceive design tokens as blood nutrients 1. figma_extract_variables - Extract design tokens
2. figma_extract_components - 🧬 Perceive component DNA blueprints 2. figma_extract_components - Extract component definitions
3. figma_extract_styles - 🎨 Perceive visual expressions and patterns 3. figma_extract_styles - Extract style definitions
4. figma_sync_tokens - 🔄 Distribute nutrients through circulatory system 4. figma_sync_tokens - Sync tokens to codebase
5. figma_visual_diff - 👁️ Detect changes in visual expression 5. figma_visual_diff - Compare versions
6. figma_validate_components - 🧬 Verify genetic code integrity 6. figma_validate_components - Validate component structure
7. figma_generate_code - 📝 Encode genetic information into code 7. figma_generate_code - Generate component code
Architecture:
- Sensory Perception: HTTPx client with SQLite caching (organism's memory)
- Token Metabolism: Design token transformation pipeline
- Code Generation: Genetic encoding into multiple framework languages
Framework: DSS Organism Framework
See: docs/DSS_ORGANISM_GUIDE.md#sensory-organs
""" """
import json import json
@@ -66,57 +58,34 @@ class StyleDefinition:
class FigmaClient: class FigmaClient:
""" """
👁️ FIGMA SENSORY RECEPTOR - Organism's visual perception system Figma API client with caching.
The sensory receptor connects the DSS organism to Figma's visual information.
It perceives visual designs and caches genetic information (tokens, components)
in the organism's short-term memory (SQLite cache) for efficient digestion.
Features: Features:
- Real-time sensory perception (live Figma API connection) - Live API connection or mock mode
- Memory caching (SQLite persistence with TTL) - Response caching with TTL
- Rate limiting awareness (respects Figma's biological constraints) - Rate limit handling
- Mock perception mode (for organism development without external connection)
""" """
def __init__(self, token: Optional[str] = None): def __init__(self, token: Optional[str] = None):
# Establish sensory connection (use provided token or config default)
self.token = token or config.figma.token self.token = token or config.figma.token
self.base_url = "https://api.figma.com/v1" self.base_url = "https://api.figma.com/v1"
self.cache_ttl = config.figma.cache_ttl self.cache_ttl = config.figma.cache_ttl
self._use_real_api = bool(self.token) # Real sensory perception vs mock dreams self._use_real_api = bool(self.token)
def _cache_key(self, endpoint: str) -> str: def _cache_key(self, endpoint: str) -> str:
return f"figma:{hashlib.md5(endpoint.encode()).hexdigest()}" return f"figma:{hashlib.md5(endpoint.encode()).hexdigest()}"
async def _request(self, endpoint: str) -> Dict[str, Any]: async def _request(self, endpoint: str) -> Dict[str, Any]:
""" """Fetch data from Figma API with caching."""
👁️ SENSORY PERCEPTION - Fetch visual information from Figma
The sensory receptor reaches out to Figma to perceive visual designs.
If the organism is in development mode, it uses dream data (mocks).
Otherwise, it queries the external Figma organism and stores perceived
information in its own memory (SQLite cache) for quick recall.
Flow:
1. Check if sensory is in development mode (mock perception)
2. Check organism's memory cache for previous perception
3. If memory miss, perceive from external source (Figma API)
4. Store new perception in memory for future recognition
5. Log the perceptual event
"""
if not self._use_real_api: if not self._use_real_api:
# Sensory hallucinations for development (mock perception)
return self._get_mock_data(endpoint) return self._get_mock_data(endpoint)
cache_key = self._cache_key(endpoint) cache_key = self._cache_key(endpoint)
# Check organism memory first (short-term memory - SQLite)
cached = Cache.get(cache_key) cached = Cache.get(cache_key)
if cached is not None: if cached is not None:
return cached return cached
# Perceive from external source (live Figma perception)
async with httpx.AsyncClient(timeout=30.0) as client: async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.get( response = await client.get(
f"{self.base_url}{endpoint}", f"{self.base_url}{endpoint}",
@@ -125,14 +94,12 @@ class FigmaClient:
response.raise_for_status() response.raise_for_status()
data = response.json() data = response.json()
# Store perception in organism memory for future recognition
Cache.set(cache_key, data, ttl=self.cache_ttl) Cache.set(cache_key, data, ttl=self.cache_ttl)
# Log the perceptual event
ActivityLog.log( ActivityLog.log(
action="figma_sensory_perception", action="figma_api_request",
entity_type="sensory_organs", entity_type="figma",
details={"endpoint": endpoint, "cached": False, "perception": "live"} details={"endpoint": endpoint, "cached": False}
) )
return data return data
@@ -219,22 +186,16 @@ class FigmaClient:
class FigmaToolSuite: class FigmaToolSuite:
""" """
👁️ SENSORY ORGANS DIGESTION CENTER - Transform visual perception into nutrients Figma extraction toolkit.
The sensory digestion center transforms raw visual information from Figma Capabilities:
into usable nutrients (tokens, components) that the DSS organism can - Extract tokens, components, styles from Figma
incorporate into its body. This complete toolkit: - Validate component structure
- Generate component code (React, Vue, Web Components)
- Sync tokens to codebase
- Compare visual versions
- Perceives visual designs (sensory organs) Modes: live (API) or mock (development)
- Extracts genetic code (tokens, components, styles)
- Validates genetic integrity (schema validation)
- Encodes information (code generation for multiple frameworks)
- Distributes nutrients (token syncing to codebase)
- Detects mutations (visual diffs)
The organism can operate in two modes:
- LIVE: Directly perceiving from external Figma organism
- MOCK: Using dream data for development without external dependency
""" """
def __init__(self, token: Optional[str] = None, output_dir: str = "./output"): def __init__(self, token: Optional[str] = None, output_dir: str = "./output"):
@@ -245,34 +206,21 @@ class FigmaToolSuite:
@property @property
def mode(self) -> str: def mode(self) -> str:
""" """Return mode: 'live' (API) or 'mock' (development)."""
Return sensory perception mode: 'live' (external Figma) or 'mock' (dreams/development)
"""
return "live" if self._is_real_api else "mock" return "live" if self._is_real_api else "mock"
# === Tool 1: Extract Variables/Tokens === # === Tool 1: Extract Variables/Tokens ===
async def extract_variables(self, file_key: str, format: str = "css") -> Dict[str, Any]: async def extract_variables(self, file_key: str, format: str = "css") -> Dict[str, Any]:
""" """
🩸 EXTRACT CIRCULATORY TOKENS - Perceive design tokens as nutrients Extract design tokens from Figma variables.
The sensory organs perceive design tokens (variables) from Figma and
convert them into circulatory nutrients (design tokens) that flow through
the organism's body. These are the fundamental nutrients that color blood,
determine tissue spacing, and define typographic patterns.
Args: Args:
file_key: Figma file key (visual perception target) file_key: Figma file key
format: Output format for encoded nutrients (css, json, scss, js) format: Output format (css, json, scss, js)
Returns: Returns:
Dict with extracted tokens ready for circulation: Dict with: success, tokens_count, collections, output_path, tokens, formatted_output
- success: Perception completed without errors
- tokens_count: Number of nutrients extracted
- collections: Token collections (by system)
- output_path: File where nutrients are stored
- tokens: Complete nutrient definitions
- formatted_output: Encoded output in requested format
""" """
data = await self.client.get_variables(file_key) data = await self.client.get_variables(file_key)
@@ -326,23 +274,13 @@ class FigmaToolSuite:
async def extract_components(self, file_key: str) -> Dict[str, Any]: async def extract_components(self, file_key: str) -> Dict[str, Any]:
""" """
🧬 EXTRACT GENETIC BLUEPRINTS - Perceive component DNA Extract component definitions from Figma.
The sensory organs perceive component definitions (visual DNA) from Figma
and extract genetic blueprints that describe how tissues are constructed.
Components are the fundamental building blocks (genes) that encode
the organism's form, function, and behavior patterns.
Args: Args:
file_key: Figma file key (visual genetic source) file_key: Figma file key
Returns: Returns:
Dict with extracted component DNA: Dict with: success, components_count, component_sets_count, output_path, components
- success: Genetic extraction successful
- components_count: Number of DNA blueprints found
- component_sets_count: Number of genetic variant groups
- output_path: File where genetic information is stored
- components: Complete component definitions with properties
""" """
definitions: List[ComponentDefinition] = [] definitions: List[ComponentDefinition] = []
component_sets_count = 0 component_sets_count = 0
@@ -423,23 +361,13 @@ class FigmaToolSuite:
async def extract_styles(self, file_key: str) -> Dict[str, Any]: async def extract_styles(self, file_key: str) -> Dict[str, Any]:
""" """
🎨 EXTRACT VISUAL EXPRESSION PATTERNS - Perceive style definitions Extract style definitions from Figma.
The sensory organs perceive visual expressions (text, color, effect styles)
from Figma and categorize them by their biological purpose: how text
appears (typography), how colors flow (pigmentation), and how depth
and dimension manifest through effects.
Args: Args:
file_key: Figma file key (visual style source) file_key: Figma file key
Returns: Returns:
Dict with extracted style definitions organized by type: Dict with: success, styles_count, by_type, output_path, styles
- success: Style extraction successful
- styles_count: Total style definitions found
- by_type: Styles organized by category (TEXT, FILL, EFFECT, GRID)
- output_path: File where style definitions are stored
- styles: Complete style information by type
""" """
definitions: List[StyleDefinition] = [] definitions: List[StyleDefinition] = []
by_type = {"TEXT": [], "FILL": [], "EFFECT": [], "GRID": []} by_type = {"TEXT": [], "FILL": [], "EFFECT": [], "GRID": []}
@@ -510,25 +438,15 @@ class FigmaToolSuite:
async def sync_tokens(self, file_key: str, target_path: str, format: str = "css") -> Dict[str, Any]: async def sync_tokens(self, file_key: str, target_path: str, format: str = "css") -> Dict[str, Any]:
""" """
🔄 CIRCULATE NUTRIENTS - Distribute tokens through the organism Sync design tokens from Figma to codebase.
The organism absorbs nutrients from Figma's visual designs and circulates
them through its body by syncing to the code codebase. This ensures the
organism's physical form (code) stays synchronized with its genetic design
(Figma tokens).
Args: Args:
file_key: Figma file key (nutrient source) file_key: Figma file key
target_path: Codebase file path (circulation destination) target_path: Target file path
format: Output format for encoded nutrients format: Output format
Returns: Returns:
Dict with sync result: Dict with: success, has_changes, tokens_synced, target_path, backup_created
- success: Circulation completed
- has_changes: Whether genetic material changed
- tokens_synced: Number of nutrients distributed
- target_path: Location where nutrients were circulated
- backup_created: Whether old nutrients were preserved
""" """
# Extract current tokens # Extract current tokens
result = await self.extract_variables(file_key, format) result = await self.extract_variables(file_key, format)
@@ -597,47 +515,37 @@ class FigmaToolSuite:
async def validate_components(self, file_key: str, schema_path: Optional[str] = None) -> Dict[str, Any]: async def validate_components(self, file_key: str, schema_path: Optional[str] = None) -> Dict[str, Any]:
""" """
🧬 GENETIC INTEGRITY CHECK - Validate component DNA health Validate component definitions against rules.
The immune system examines extracted component DNA against genetic
rules (schema) to ensure all components are healthy, properly named,
and fully documented. Invalid components are flagged as mutations that
could endanger the organism's health.
Args: Args:
file_key: Figma file key (genetic source) file_key: Figma file key
schema_path: Optional path to validation rules (genetic schema) schema_path: Optional validation schema path
Returns: Returns:
Dict with validation results: Dict with: success, valid, components_checked, issues, summary
- success: Validation completed without system errors
- valid: Whether all genetic material is healthy
- components_checked: Number of DNA blueprints examined
- issues: List of genetic problems found
- summary: Count of errors, warnings, and info messages
""" """
components = await self.extract_components(file_key) components = await self.extract_components(file_key)
issues: List[Dict[str, Any]] = [] issues: List[Dict[str, Any]] = []
# Run genetic integrity checks # Run validation checks
for comp in components["components"]: for comp in components["components"]:
# Rule 1: 🧬 Genetic naming convention (capitalize first letter) # Rule 1: Naming convention (capitalize first letter)
if not comp["name"][0].isupper(): if not comp["name"][0].isupper():
issues.append({ issues.append({
"component": comp["name"], "component": comp["name"],
"rule": "naming-convention", "rule": "naming-convention",
"severity": "warning", "severity": "warning",
"message": f"🧬 Genetic mutation detected: '{comp['name']}' should follow naming convention (start with capital letter)" "message": f"'{comp['name']}' should start with capital letter"
}) })
# Rule 2: 📋 Genetic documentation (description required) # Rule 2: Description required
if not comp.get("description"): if not comp.get("description"):
issues.append({ issues.append({
"component": comp["name"], "component": comp["name"],
"rule": "description-required", "rule": "description-required",
"severity": "info", "severity": "info",
"message": f"📝 Genetic annotation missing: '{comp['name']}' should have a description to document its biological purpose" "message": f"'{comp['name']}' should have a description"
}) })
return { return {
@@ -657,25 +565,15 @@ class FigmaToolSuite:
async def generate_code(self, file_key: str, component_name: str, async def generate_code(self, file_key: str, component_name: str,
framework: str = "webcomponent") -> Dict[str, Any]: framework: str = "webcomponent") -> Dict[str, Any]:
""" """
📝 ENCODE GENETIC MATERIAL - Generate component code from DNA Generate component code from Figma definition.
The organism translates genetic blueprints (component DNA) from Figma
into executable code that can be expressed in multiple biological contexts
(frameworks). This genetic encoding allows the component DNA to manifest
as living tissue in different ecosystems.
Args: Args:
file_key: Figma file key (genetic source) file_key: Figma file key
component_name: Name of component DNA to encode component_name: Component to generate
framework: Target biological context (webcomponent, react, vue) framework: Target framework (webcomponent, react, vue)
Returns: Returns:
Dict with generated code: Dict with: success, component, framework, output_path, code
- success: Genetic encoding successful
- component: Component name
- framework: Target framework
- output_path: File where genetic code is written
- code: The encoded genetic material ready for expression
""" """
components = await self.extract_components(file_key) components = await self.extract_components(file_key)
@@ -685,7 +583,7 @@ class FigmaToolSuite:
if not comp: if not comp:
return { return {
"success": False, "success": False,
"error": f"🛡️ Genetic material not found: Component '{component_name}' does not exist in the perceived DNA" "error": f"Component '{component_name}' not found"
} }
# Generate code based on framework # Generate code based on framework

View File

@@ -960,6 +960,164 @@ class TokenDrift:
return None return None
# === Integrations ===
class Integrations:
    """Project integration configuration storage.

    Each project's integrations live in a single JSON file under the
    project directory; records are keyed by (user_id, integration_type).
    """

    @staticmethod
    def _integrations_path(project_id: str) -> Path:
        # One integrations.json per project.
        return PROJECTS_DIR / project_id / "integrations.json"

    @staticmethod
    def list(project_id: str, user_id: Optional[int] = None) -> List[Dict]:
        """List integrations for a project.

        Args:
            project_id: Project identifier.
            user_id: If given, only return integrations owned by this user.

        Returns:
            List of integration dicts (possibly empty).
        """
        data = read_json(Integrations._integrations_path(project_id), {"integrations": []})
        integrations = data.get("integrations", [])
        if user_id is not None:
            integrations = [i for i in integrations if i.get("user_id") == user_id]
        return integrations

    @staticmethod
    def get(project_id: str, user_id: int, integration_type: str) -> Optional[Dict]:
        """Return the user's integration of the given type, or None if absent."""
        for i in Integrations.list(project_id, user_id):
            if i.get("integration_type") == integration_type:
                return i
        return None

    @staticmethod
    def upsert(project_id: str, user_id: int, integration_type: str,
               config: str, enabled: bool = True) -> Dict:
        """Create or update an integration.

        If the user already has an integration of this type, its config,
        enabled flag, and updated_at are overwritten; otherwise a new
        record with a fresh short id is appended.

        Returns:
            The created or updated integration dict.
        """
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        now = datetime.utcnow().isoformat()
        # Update in place if the record already exists.
        for i in data["integrations"]:
            if i.get("user_id") == user_id and i.get("integration_type") == integration_type:
                i["config"] = config
                i["enabled"] = enabled
                i["updated_at"] = now
                write_json(path, data)
                return i
        # No match: create a new record.
        new_integration = {
            "id": str(uuid.uuid4())[:8],  # short id is enough within one project file
            "project_id": project_id,
            "user_id": user_id,
            "integration_type": integration_type,
            "config": config,
            "enabled": enabled,
            "created_at": now,
            "updated_at": now,
            "last_used_at": None
        }
        data["integrations"].append(new_integration)
        write_json(path, data)
        return new_integration

    @staticmethod
    def update(project_id: str, user_id: int, integration_type: str,
               config: Optional[str] = None, enabled: Optional[bool] = None) -> Optional[Dict]:
        """Update selected fields of an existing integration.

        Only fields passed as non-None are changed; updated_at is always
        refreshed on a match.

        Returns:
            The updated integration dict, or None if no match was found.
        """
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        for i in data["integrations"]:
            if i.get("user_id") == user_id and i.get("integration_type") == integration_type:
                if config is not None:
                    i["config"] = config
                if enabled is not None:
                    i["enabled"] = enabled
                i["updated_at"] = datetime.utcnow().isoformat()
                write_json(path, data)
                return i
        return None

    @staticmethod
    def delete(project_id: str, user_id: int, integration_type: str) -> bool:
        """Delete an integration.

        Returns:
            True if a record was removed, False if nothing matched.
        """
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        original_len = len(data["integrations"])
        data["integrations"] = [
            i for i in data["integrations"]
            if not (i.get("user_id") == user_id and i.get("integration_type") == integration_type)
        ]
        if len(data["integrations"]) < original_len:
            # Only rewrite the file when something actually changed.
            write_json(path, data)
            return True
        return False
class IntegrationHealth:
    """System-wide integration health tracking.

    Persists per-integration-type health state (success/failure timestamps,
    failure count, circuit-breaker expiry) in a single system JSON file.
    """

    @staticmethod
    def _health_path() -> Path:
        # All integration types share one system-level health file.
        return SYSTEM_DIR / "integration_health.json"

    @staticmethod
    def list_all() -> List[Dict]:
        """Return health status for every tracked integration type."""
        data = read_json(IntegrationHealth._health_path(), {"health": {}})
        return [
            {"integration_type": k, **v}
            for k, v in data.get("health", {}).items()
        ]

    @staticmethod
    def get(integration_type: str) -> Optional[Dict]:
        """Return health for one integration type, or None if never tracked."""
        data = read_json(IntegrationHealth._health_path(), {"health": {}})
        if integration_type in data.get("health", {}):
            return {"integration_type": integration_type, **data["health"][integration_type]}
        return None

    @staticmethod
    def update(integration_type: str, is_healthy: bool = True,
               failure_count: Optional[int] = None,
               circuit_open_until: Optional[str] = None) -> Dict:
        """Record a health observation for an integration.

        Creates the entry on first use. Updates last_success_at or
        last_failure_at depending on is_healthy; failure_count and
        circuit_open_until are only written when explicitly provided
        (note: they are NOT reset automatically on success).

        Returns:
            The stored health dict augmented with integration_type.
        """
        path = IntegrationHealth._health_path()
        data = read_json(path, {"health": {}})
        # Create the entry with defaults on first observation.
        entry = data["health"].setdefault(integration_type, {
            "is_healthy": True,
            "failure_count": 0,
            "last_failure_at": None,
            "last_success_at": None,
            "circuit_open_until": None
        })
        now = datetime.utcnow().isoformat()
        entry["is_healthy"] = is_healthy
        if is_healthy:
            entry["last_success_at"] = now
        else:
            entry["last_failure_at"] = now
        if failure_count is not None:
            entry["failure_count"] = failure_count
        if circuit_open_until is not None:
            entry["circuit_open_until"] = circuit_open_until
        write_json(path, data)
        return {"integration_type": integration_type, **entry}
# === Stats === # === Stats ===
def get_stats() -> Dict: def get_stats() -> Dict: