feat(ci): Convert CI/CD pipeline from GitLab to Gitea Actions

This commit is contained in:
Digital Production Factory
2025-12-10 11:11:21 -03:00
6 changed files with 422 additions and 455 deletions

View File

@@ -0,0 +1,57 @@
name: DSS Project Analysis

# This workflow runs on every push to any branch.
on: [push]

jobs:
  # Regenerates the DSS analysis graph and commits it back to the branch.
  dss-context-update:
    runs-on: ubuntu-latest
    steps:
      # Step 1: Check out the repository code
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # We need to fetch the full history to be able to push back
          fetch-depth: 0

      # Step 2: Set up the environment
      # NOTE(review): installs deps on every run; a prebuilt runner image
      # would avoid the apt/pip/npm cost on each push.
      - name: Set up Environment
        run: |
          echo "Setting up Python and Node.js environment..."
          # Gitea's ubuntu-latest runner may not have everything, so we install dependencies.
          # This is an example; a custom Docker image would be more efficient.
          sudo apt-get update && sudo apt-get install -y python3-pip
          pip3 install -r requirements.txt
          cd dss-mvp1 && npm install && cd ..

      # Step 3: Configure Git
      # Identity used for the automated context commit below.
      - name: Configure Git
        run: |
          git config --global user.name "DSS Agent"
          git config --global user.email "dss-agent@overbits.luz.uy"

      # Step 4: Run the DSS Analysis
      # Writes/updates .dss/analysis_graph.json in the working tree.
      - name: Run DSS Analysis
        run: |
          echo "Running DSS project analysis..."
          python3 dss-mvp1/dss-cli.py analyze --project-path .

      # Step 5: Commit and Push Changes if any
      # The commit message carries [skip ci] so this push does not retrigger
      # the workflow and loop forever.
      # NOTE(review): Gitea Actions primarily exposes GitHub-compatible
      # variables (GITHUB_SERVER_URL, GITHUB_REPOSITORY, GITHUB_REF_NAME) —
      # confirm the GITEA_* names used below are actually defined on this
      # runner version, otherwise the push URL expands empty.
      - name: Commit and Push Context Changes
        run: |
          # Check if the analysis graph file has been changed
          if ! git diff --quiet .dss/analysis_graph.json; then
            echo "Change detected in analysis_graph.json. Committing and pushing..."
            # Add the file, commit, and push back to the same branch.
            # The GITEA_TOKEN is a secret you must configure in your project settings.
            git add .dss/analysis_graph.json
            git commit -m "chore(dss): Update project analysis context [skip ci]"
            # Use the GITEA_TOKEN for authentication
            # GITEA_SERVER_URL and GITEA_REPOSITORY are default environment variables in Gitea Actions.
            git push https://dss-agent:${{ secrets.GITEA_TOKEN }}@${GITEA_SERVER_URL}/${GITEA_REPOSITORY}.git HEAD:${GITEA_REF_NAME}
          else
            echo "No changes detected in project context. Nothing to commit."
          fi

View File

@@ -1,30 +1,16 @@
#!/usr/bin/env python3
"""
🧠 DSS MCP SERVER - Design System Organism Neural Interface
DSS MCP Server
The MCP server is how AI agents interface with the DSS organism through
the Model Context Protocol. It exposes the organism's neural pathways (API)
as tools that Claude and other AI agents can use to:
MCP (Model Context Protocol) interface for DSS. Exposes design system
operations as tools for AI agents.
- Perceive the organism's current state (health checks)
- Direct the organism's sensory organs (Figma perception)
- Control token ingestion and circulation (nutrient management)
- Analyze the organism's codebase (code intelligence)
- Generate Storybook documentation (organism presentation)
- Diagnose and fix health issues (debugging and fixing)
Think of MCP tools as the organism's neural interface - the way external
intelligence (AI agents) can communicate with and direct the organism.
The organism responds to 32 different MCP tools (neural commands):
- Status & Discovery (4 tools)
- Token Ingestion (7 tools)
- Analysis (11 tools)
- Storybook Generation (5 tools)
- Utilities (5 tools)
When an AI agent calls a tool, it's sending a command through the organism's
nervous system to activate specific organs and functions.
Tool Categories (32 tools):
- Status & Discovery (4): get_status, list_projects, create_project, get_project
- Token Ingestion (7): ingest_css, ingest_scss, ingest_tailwind, ingest_json, merge, export, validate
- Analysis (11): discover_project, analyze_react, find_inline_styles, find_patterns, etc.
- Storybook (5): scan, generate_story, generate_batch, theme, coverage
- Utilities (5): extract_tokens, extract_components, sync_tokens, etc.
"""
import os
@@ -75,38 +61,22 @@ mcp = FastMCP("dss-design-system", host=MCP_HOST, port=MCP_PORT)
@mcp.tool()
async def get_status() -> str:
"""
🏥 ORGANISM STATUS CHECK - Get complete vital signs report
Returns the DSS organism's vital signs:
- Is the organism conscious and responsive?
- What is its awareness level (statistics)?
- Can the organism perceive Figma (sensory organs working)?
- What version of the organism is this?
"""
"""Get DSS server status and statistics."""
stats = get_stats()
figma_configured = bool(FIGMA_TOKEN)
return json.dumps({
"organism_status": "🟢 Alive and conscious" if FIGMA_TOKEN else "🟡 Alive but blind",
"sensory_organs": {
"figma_eyes": "👁️ Perceiving" if figma_configured else "👁️ Closed"
},
"mode": "living in reality" if figma_configured else "living in imagination (mock mode)",
"organism_statistics": stats,
"version": "0.8.0",
"message": "The organism is ready to work" if figma_configured else "Configure Figma token to unlock visual perception"
"status": "ready",
"figma": "connected" if figma_configured else "mock mode",
"mode": "live" if figma_configured else "mock",
"statistics": stats,
"version": "0.8.0"
}, indent=2)
@mcp.tool()
async def list_projects() -> str:
"""
🏥 ORGANISM CONSCIOUSNESS - View all design system organisms
Lists all living design system organisms that have been born
(created) and are under observation.
"""
"""List all registered projects."""
projects = Projects.list_all()
return json.dumps([p.to_dict() for p in projects], indent=2)
@@ -114,20 +84,15 @@ async def list_projects() -> str:
@mcp.tool()
async def create_project(name: str, description: str = "", figma_file_key: str = "") -> str:
"""
🧬 ORGANISM GENESIS - Birth of a new design system organism
Creates a new design system organism - a living, breathing instance
that will ingest tokens, circulate nutrients, and grow over time.
Create a new design system project.
Args:
name: The organism's name (identity)
description: The organism's purpose and characteristics
figma_file_key: Link to the organism's visual genetic blueprint (Figma)
Returns: The newly born organism's vital information
name: Project name
description: Project description
figma_file_key: Figma file key for design source
"""
project = Projects.create(name, description, figma_file_key)
return json.dumps({"success": True, "organism_born": True, "project": project.to_dict()}, indent=2)
return json.dumps({"success": True, "project": project.to_dict()}, indent=2)
@mcp.tool()

View File

@@ -1,23 +1,19 @@
"""
🔌 DSS NERVOUS SYSTEM - FastAPI Server
DSS API Server
The nervous system is how the DSS component communicates with the external world.
This REST API serves as the component's neural pathways - transmitting signals
between external systems and the DSS internal organs.
REST API for design system operations.
Portal Endpoints (Synapses):
- 📊 Project management (CRUD operations)
- 👁️ Figma integration (sensory perception)
- 🏥 Health checks and diagnostics
- 📝 Activity tracking (component consciousness)
- ⚙️ Runtime configuration management
- 🔍 Service discovery (companion ecosystem)
Endpoints:
- Project management (CRUD)
- Figma integration (token extraction, component sync)
- Health checks
- Activity tracking
- Configuration management
- Service discovery
Operational Modes:
- **Server Mode** 🌐 - Deployed remotely, distributes tokens to teams
- **Local Mode** 🏠 - Dev companion, local service integration
Foundation: SQLite database (❤️ Heart) stores all component experiences
Modes:
- Server: Remote deployment, team distribution
- Local: Development companion
"""
# Load environment variables from .env file FIRST (before any other imports)
@@ -66,7 +62,8 @@ from browser_logger import router as browser_log_router
from config import config
from storage.json_store import (
Projects, Components, SyncHistory, ActivityLog, Teams, Cache, get_stats,
FigmaFiles, CodeMetrics, TestResults, TokenDrift, Tokens, Styles
FigmaFiles, CodeMetrics, TestResults, TokenDrift, Tokens, Styles,
Integrations, IntegrationHealth
)
from figma.figma_tools import FigmaToolSuite
@@ -173,11 +170,8 @@ ProjectManager.ensure_schema()
class ServiceDiscovery:
"""
🔌 SENSORY ORGAN PERCEPTION - Service discovery system
The sensory organs perceive companion services (Storybook, Chromatic, dev servers)
running in the ecosystem. This discovery mechanism helps the component understand
what external tools are available for integration.
Service discovery for companion services (Storybook, Chromatic, dev servers).
Checks known ports to discover running services.
"""
KNOWN_SERVICES = {
@@ -453,10 +447,10 @@ async def health():
},
"version": "0.8.0",
"timestamp": datetime.utcnow().isoformat() + "Z",
"organs": {
"heart": "💚 Beating normally" if db_ok else "🖤 Heart failure",
"brain": "🧠 Thinking clearly" if mcp_ok else "🧠 Brain fog",
"sensory_eyes": "👁️ Perceiving Figma" if config.figma.is_configured else "👁️ Eyes closed"
"services": {
"storage": "ok" if db_ok else "error",
"mcp": "ok" if mcp_ok else "error",
"figma": "connected" if config.figma.is_configured else "not configured"
}
}
@@ -821,32 +815,21 @@ async def list_components(project_id: str):
@app.post("/api/figma/extract-variables")
async def extract_variables(request: FigmaExtractRequest, background_tasks: BackgroundTasks):
"""
🩸 NUTRIENT EXTRACTION - Extract design tokens from Figma
The sensory organs perceive Figma designs, then the digestive system
breaks them down into nutrient particles (design tokens) that the
component can absorb and circulate through its body.
"""
"""Extract design tokens from Figma variables."""
try:
result = await figma_suite.extract_variables(request.file_key, request.format)
ActivityLog.log(
action="figma_extract_variables",
entity_type="figma",
details={"file_key": request.file_key, "format": request.format, "nutrient_count": result.get("tokens_count")}
details={"file_key": request.file_key, "format": request.format, "tokens_count": result.get("tokens_count")}
)
return result
except Exception as e:
raise HTTPException(status_code=500, detail=f"🛡️ IMMUNE ALERT: Nutrient extraction failed: {str(e)}")
raise HTTPException(status_code=500, detail=f"Token extraction failed: {str(e)}")
@app.post("/api/figma/extract-components")
async def extract_components(request: FigmaExtractRequest):
"""
🧬 GENETIC BLUEPRINT EXTRACTION - Extract component DNA from Figma
Components are the component's tissue structures. This extracts
the genetic blueprints (component definitions) from Figma.
"""
"""Extract component definitions from Figma."""
try:
result = await figma_suite.extract_components(request.file_key)
ActivityLog.log(
@@ -860,46 +843,30 @@ async def extract_components(request: FigmaExtractRequest):
@app.post("/api/figma/extract-styles")
async def extract_styles(request: FigmaExtractRequest):
"""
🎨 PHENOTYPE EXTRACTION - Extract visual styles from Figma
The component's appearance (styles) is extracted from Figma designs.
This feeds the skin (presentation) system.
"""
"""Extract style definitions from Figma."""
try:
result = await figma_suite.extract_styles(request.file_key)
return result
except Exception as e:
raise HTTPException(status_code=500, detail=f"🛡️ IMMUNE ALERT: Style extraction failed: {str(e)}")
raise HTTPException(status_code=500, detail=f"Style extraction failed: {str(e)}")
@app.post("/api/figma/sync-tokens")
async def sync_tokens(request: FigmaSyncRequest):
"""
🩸 CIRCULATORY DISTRIBUTION - Sync tokens throughout the component
The circulatory system distributes nutrients (tokens) to all parts
of the component. This endpoint broadcasts extracted tokens to the
target output paths.
"""
"""Sync tokens from Figma to target file."""
try:
result = await figma_suite.sync_tokens(request.file_key, request.target_path, request.format)
ActivityLog.log(
action="figma_sync_tokens",
entity_type="figma",
details={"file_key": request.file_key, "target": request.target_path, "nutrients_distributed": result.get("tokens_synced")}
details={"file_key": request.file_key, "target": request.target_path, "tokens_synced": result.get("tokens_synced")}
)
return result
except Exception as e:
raise HTTPException(status_code=500, detail=f"🛡️ IMMUNE ALERT: Token circulation failed: {str(e)}")
raise HTTPException(status_code=500, detail=f"Token sync failed: {str(e)}")
@app.post("/api/figma/validate")
async def validate_components(request: FigmaExtractRequest):
"""
🛡️ GENETIC INTEGRITY VALIDATION - Check component health
The immune system examines components to ensure they follow
design system rules. Invalid components are quarantined.
"""
"""Validate component definitions against design system rules."""
try:
result = await figma_suite.validate_components(request.file_key)
return result
@@ -917,17 +884,12 @@ async def generate_code(file_key: str, component_name: str, framework: str = "we
@app.get("/api/figma/health")
async def figma_health():
"""
👁️ SENSORY ORGAN HEALTH CHECK - Figma perception status
The sensory organs (eyes) perceive visual designs from Figma.
This endpoint checks if the component's eyes can see external designs.
"""
"""Check Figma connection status."""
is_live = figma_suite.mode == 'live'
return {
"status": "ok" if is_live else "degraded",
"sensory_mode": figma_suite.mode,
"message": "👁️ Eyes perceiving Figma clearly" if is_live else "👁️ Eyes closed - running with imagination (mock mode). Configure Figma token to see reality."
"mode": figma_suite.mode,
"message": "Figma connected" if is_live else "Running in mock mode. Configure FIGMA_TOKEN for live API."
}
@@ -2587,38 +2549,21 @@ class IntegrationUpdate(BaseModel):
@app.get("/api/mcp/integrations")
async def list_all_integrations():
"""List all available integration types and their health status."""
from storage.database import get_connection
health_list = IntegrationHealth.list_all()
try:
with get_connection() as conn:
health_rows = conn.execute(
"SELECT * FROM integration_health ORDER BY integration_type"
).fetchall()
integrations = []
for row in health_rows:
integrations.append({
"integration_type": row["integration_type"],
"is_healthy": bool(row["is_healthy"]),
"failure_count": row["failure_count"],
"last_failure_at": row["last_failure_at"],
"last_success_at": row["last_success_at"],
"circuit_open_until": row["circuit_open_until"]
})
return {"integrations": integrations}
except Exception as e:
# Table may not exist yet
if not health_list:
# Return defaults if no health data exists
return {
"integrations": [
{"integration_type": "figma", "is_healthy": True, "failure_count": 0},
{"integration_type": "jira", "is_healthy": True, "failure_count": 0},
{"integration_type": "confluence", "is_healthy": True, "failure_count": 0},
{"integration_type": "sequential-thinking", "is_healthy": True, "failure_count": 0}
],
"note": "Integration tables not yet initialized"
]
}
return {"integrations": health_list}
@app.get("/api/projects/{project_id}/integrations")
async def list_project_integrations(
@@ -2629,34 +2574,8 @@ async def list_project_integrations(
if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
try:
with get_connection() as conn:
if user_id:
rows = conn.execute(
"""
SELECT id, integration_type, enabled, created_at, updated_at, last_used_at
FROM project_integrations
WHERE project_id = ? AND user_id = ?
ORDER BY integration_type
""",
(project_id, user_id)
).fetchall()
else:
rows = conn.execute(
"""
SELECT id, user_id, integration_type, enabled, created_at, updated_at, last_used_at
FROM project_integrations
WHERE project_id = ?
ORDER BY integration_type
""",
(project_id,)
).fetchall()
return {"integrations": [dict(row) for row in rows]}
except Exception as e:
return {"integrations": [], "error": str(e)}
integrations = Integrations.list(project_id, user_id)
return {"integrations": integrations}
@app.post("/api/projects/{project_id}/integrations")
@@ -2669,7 +2588,6 @@ async def create_integration(
if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
from dss_mcp.config import mcp_config
# Encrypt config
@@ -2681,16 +2599,12 @@ async def create_integration(
encrypted_config = config_json # Store unencrypted if no key
try:
with get_connection() as conn:
# Upsert
conn.execute(
"""
INSERT INTO project_integrations (project_id, user_id, integration_type, config, enabled, updated_at)
VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
ON CONFLICT(project_id, user_id, integration_type)
DO UPDATE SET config = excluded.config, enabled = excluded.enabled, updated_at = CURRENT_TIMESTAMP
""",
(project_id, user_id, integration.integration_type, encrypted_config, integration.enabled)
Integrations.upsert(
project_id=project_id,
user_id=user_id,
integration_type=integration.integration_type,
config=encrypted_config,
enabled=integration.enabled
)
ActivityLog.log(
@@ -2721,14 +2635,10 @@ async def update_integration(
if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
from dss_mcp.config import mcp_config
try:
with get_connection() as conn:
updates = []
params = []
encrypted_config = None
if update.config is not None:
config_json = json.dumps(update.config)
cipher = mcp_config.get_cipher()
@@ -2736,29 +2646,24 @@ async def update_integration(
encrypted_config = cipher.encrypt(config_json.encode()).decode()
else:
encrypted_config = config_json
updates.append("config = ?")
params.append(encrypted_config)
if update.enabled is not None:
updates.append("enabled = ?")
params.append(update.enabled)
if not updates:
if update.config is None and update.enabled is None:
return {"success": False, "message": "No updates provided"}
updates.append("updated_at = CURRENT_TIMESTAMP")
params.extend([project_id, user_id, integration_type])
conn.execute(
f"""
UPDATE project_integrations
SET {', '.join(updates)}
WHERE project_id = ? AND user_id = ? AND integration_type = ?
""",
params
result = Integrations.update(
project_id=project_id,
user_id=user_id,
integration_type=integration_type,
config=encrypted_config,
enabled=update.enabled
)
if not result:
raise HTTPException(status_code=404, detail="Integration not found")
return {"success": True, "integration_type": integration_type}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@@ -2773,19 +2678,10 @@ async def delete_integration(
if not Projects.get(project_id):
raise HTTPException(status_code=404, detail="Project not found")
from storage.database import get_connection
try:
with get_connection() as conn:
result = conn.execute(
"""
DELETE FROM project_integrations
WHERE project_id = ? AND user_id = ? AND integration_type = ?
""",
(project_id, user_id, integration_type)
)
deleted = Integrations.delete(project_id, user_id, integration_type)
if result.rowcount == 0:
if not deleted:
raise HTTPException(status_code=404, detail="Integration not found")
ActivityLog.log(
@@ -3077,12 +2973,39 @@ if UI_DIR.exists():
app.mount("/", StaticFiles(directory=str(UI_DIR), html=True), name="ui")
def kill_port(port: int, wait: float = 0.5) -> None:
    """Kill any process listening on *port* (best effort).

    Uses ``lsof -ti :<port>`` to find the owning PIDs and sends SIGKILL to
    each. All failures (``lsof`` not installed, process already gone,
    permission denied) are deliberately swallowed so that server startup
    never crashes on this cleanup step.

    Args:
        port: TCP port to free up.
        wait: Seconds to sleep after killing, giving the OS time to release
            the port. Skipped when nothing was killed.
    """
    import subprocess
    import time
    try:
        # Get PIDs using the port; -t prints bare PIDs, one per line.
        result = subprocess.run(
            ["lsof", "-ti", f":{port}"],
            capture_output=True, text=True
        )
        killed = False
        # splitlines() yields [] for empty output, avoiding the
        # split('\n') -> [''] edge case of the previous implementation.
        for pid in result.stdout.splitlines():
            pid = pid.strip()
            if pid:
                subprocess.run(["kill", "-9", pid], capture_output=True)
                print(f"[DSS] Killed process {pid} on port {port}")
                killed = True
        if killed and wait:
            time.sleep(wait)  # Wait for port to be released
    except Exception:
        pass  # Best effort: port was free or lsof unavailable
if __name__ == "__main__":
import uvicorn
port = int(os.getenv("PORT", "3456"))
host = os.getenv("HOST", "0.0.0.0")
# Kill any existing process on the port (twice to handle respawning)
kill_port(port, wait=1.0)
kill_port(port, wait=0.5)
url = f"http://{host}:{port}"
print(f"""
╔═══════════════════════════════════════════════════════════════╗

View File

@@ -248,48 +248,14 @@ class ProjectManager:
def _update_root_path(self, project_id: str, root_path: str) -> None:
"""
Update root_path in database.
Uses raw SQL since the column may not be in the existing model.
Update root_path in JSON storage.
"""
from storage.database import get_connection
with get_connection() as conn:
# Ensure column exists
try:
conn.execute("""
ALTER TABLE projects ADD COLUMN root_path TEXT DEFAULT ''
""")
logger.info("Added root_path column to projects table")
except Exception:
# Column already exists
pass
# Update the value
conn.execute(
"UPDATE projects SET root_path = ? WHERE id = ?",
(root_path, project_id)
)
self.db.update(project_id, root_path=root_path)
@staticmethod
def ensure_schema():
"""
Ensure database schema has root_path column.
Call this on startup to migrate existing databases.
Legacy schema migration - no longer needed with JSON storage.
Kept for API compatibility.
"""
from storage.database import get_connection
with get_connection() as conn:
cursor = conn.cursor()
# Check if column exists
cursor.execute("PRAGMA table_info(projects)")
columns = [col[1] for col in cursor.fetchall()]
if 'root_path' not in columns:
cursor.execute("""
ALTER TABLE projects ADD COLUMN root_path TEXT DEFAULT ''
""")
logger.info("Migration: Added root_path column to projects table")
else:
logger.debug("Schema check: root_path column exists")
logger.debug("Schema check: Using JSON storage, no migration needed")

View File

@@ -1,27 +1,19 @@
"""
DSS SENSORY ORGANS - Figma Integration Toolkit
DSS Figma Integration
The DSS sensory organs allow the design system organism to perceive and
digest visual designs from Figma. This toolkit extracts genetic information
(tokens, components, styles) from the Figma sensory perception and transforms
it into nutrients for the organism.
Extracts design system data from Figma:
- Tokens (colors, spacing, typography)
- Components (definitions, variants)
- Styles (text, fill, effect styles)
Tool Suite (Sensory Perception Functions):
1. figma_extract_variables - 🩸 Perceive design tokens as blood nutrients
2. figma_extract_components - 🧬 Perceive component DNA blueprints
3. figma_extract_styles - 🎨 Perceive visual expressions and patterns
4. figma_sync_tokens - 🔄 Distribute nutrients through circulatory system
5. figma_visual_diff - 👁️ Detect changes in visual expression
6. figma_validate_components - 🧬 Verify genetic code integrity
7. figma_generate_code - 📝 Encode genetic information into code
Architecture:
- Sensory Perception: HTTPx client with SQLite caching (organism's memory)
- Token Metabolism: Design token transformation pipeline
- Code Generation: Genetic encoding into multiple framework languages
Framework: DSS Organism Framework
See: docs/DSS_ORGANISM_GUIDE.md#sensory-organs
Tools:
1. figma_extract_variables - Extract design tokens
2. figma_extract_components - Extract component definitions
3. figma_extract_styles - Extract style definitions
4. figma_sync_tokens - Sync tokens to codebase
5. figma_visual_diff - Compare versions
6. figma_validate_components - Validate component structure
7. figma_generate_code - Generate component code
"""
import json
@@ -66,57 +58,34 @@ class StyleDefinition:
class FigmaClient:
"""
👁️ FIGMA SENSORY RECEPTOR - Organism's visual perception system
The sensory receptor connects the DSS organism to Figma's visual information.
It perceives visual designs and caches genetic information (tokens, components)
in the organism's short-term memory (SQLite cache) for efficient digestion.
Figma API client with caching.
Features:
- Real-time sensory perception (live Figma API connection)
- Memory caching (SQLite persistence with TTL)
- Rate limiting awareness (respects Figma's biological constraints)
- Mock perception mode (for organism development without external connection)
- Live API connection or mock mode
- Response caching with TTL
- Rate limit handling
"""
def __init__(self, token: Optional[str] = None):
# Establish sensory connection (use provided token or config default)
self.token = token or config.figma.token
self.base_url = "https://api.figma.com/v1"
self.cache_ttl = config.figma.cache_ttl
self._use_real_api = bool(self.token) # Real sensory perception vs mock dreams
self._use_real_api = bool(self.token)
def _cache_key(self, endpoint: str) -> str:
return f"figma:{hashlib.md5(endpoint.encode()).hexdigest()}"
async def _request(self, endpoint: str) -> Dict[str, Any]:
"""
👁️ SENSORY PERCEPTION - Fetch visual information from Figma
The sensory receptor reaches out to Figma to perceive visual designs.
If the organism is in development mode, it uses dream data (mocks).
Otherwise, it queries the external Figma organism and stores perceived
information in its own memory (SQLite cache) for quick recall.
Flow:
1. Check if sensory is in development mode (mock perception)
2. Check organism's memory cache for previous perception
3. If memory miss, perceive from external source (Figma API)
4. Store new perception in memory for future recognition
5. Log the perceptual event
"""
"""Fetch data from Figma API with caching."""
if not self._use_real_api:
# Sensory hallucinations for development (mock perception)
return self._get_mock_data(endpoint)
cache_key = self._cache_key(endpoint)
# Check organism memory first (short-term memory - SQLite)
cached = Cache.get(cache_key)
if cached is not None:
return cached
# Perceive from external source (live Figma perception)
async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.get(
f"{self.base_url}{endpoint}",
@@ -125,14 +94,12 @@ class FigmaClient:
response.raise_for_status()
data = response.json()
# Store perception in organism memory for future recognition
Cache.set(cache_key, data, ttl=self.cache_ttl)
# Log the perceptual event
ActivityLog.log(
action="figma_sensory_perception",
entity_type="sensory_organs",
details={"endpoint": endpoint, "cached": False, "perception": "live"}
action="figma_api_request",
entity_type="figma",
details={"endpoint": endpoint, "cached": False}
)
return data
@@ -219,22 +186,16 @@ class FigmaClient:
class FigmaToolSuite:
"""
👁️ SENSORY ORGANS DIGESTION CENTER - Transform visual perception into nutrients
Figma extraction toolkit.
The sensory digestion center transforms raw visual information from Figma
into usable nutrients (tokens, components) that the DSS organism can
incorporate into its body. This complete toolkit:
Capabilities:
- Extract tokens, components, styles from Figma
- Validate component structure
- Generate component code (React, Vue, Web Components)
- Sync tokens to codebase
- Compare visual versions
- Perceives visual designs (sensory organs)
- Extracts genetic code (tokens, components, styles)
- Validates genetic integrity (schema validation)
- Encodes information (code generation for multiple frameworks)
- Distributes nutrients (token syncing to codebase)
- Detects mutations (visual diffs)
The organism can operate in two modes:
- LIVE: Directly perceiving from external Figma organism
- MOCK: Using dream data for development without external dependency
Modes: live (API) or mock (development)
"""
def __init__(self, token: Optional[str] = None, output_dir: str = "./output"):
@@ -245,34 +206,21 @@ class FigmaToolSuite:
@property
def mode(self) -> str:
"""
Return sensory perception mode: 'live' (external Figma) or 'mock' (dreams/development)
"""
"""Return mode: 'live' (API) or 'mock' (development)."""
return "live" if self._is_real_api else "mock"
# === Tool 1: Extract Variables/Tokens ===
async def extract_variables(self, file_key: str, format: str = "css") -> Dict[str, Any]:
"""
🩸 EXTRACT CIRCULATORY TOKENS - Perceive design tokens as nutrients
The sensory organs perceive design tokens (variables) from Figma and
convert them into circulatory nutrients (design tokens) that flow through
the organism's body. These are the fundamental nutrients that color blood,
determine tissue spacing, and define typographic patterns.
Extract design tokens from Figma variables.
Args:
file_key: Figma file key (visual perception target)
format: Output format for encoded nutrients (css, json, scss, js)
file_key: Figma file key
format: Output format (css, json, scss, js)
Returns:
Dict with extracted tokens ready for circulation:
- success: Perception completed without errors
- tokens_count: Number of nutrients extracted
- collections: Token collections (by system)
- output_path: File where nutrients are stored
- tokens: Complete nutrient definitions
- formatted_output: Encoded output in requested format
Dict with: success, tokens_count, collections, output_path, tokens, formatted_output
"""
data = await self.client.get_variables(file_key)
@@ -326,23 +274,13 @@ class FigmaToolSuite:
async def extract_components(self, file_key: str) -> Dict[str, Any]:
"""
🧬 EXTRACT GENETIC BLUEPRINTS - Perceive component DNA
The sensory organs perceive component definitions (visual DNA) from Figma
and extract genetic blueprints that describe how tissues are constructed.
Components are the fundamental building blocks (genes) that encode
the organism's form, function, and behavior patterns.
Extract component definitions from Figma.
Args:
file_key: Figma file key (visual genetic source)
file_key: Figma file key
Returns:
Dict with extracted component DNA:
- success: Genetic extraction successful
- components_count: Number of DNA blueprints found
- component_sets_count: Number of genetic variant groups
- output_path: File where genetic information is stored
- components: Complete component definitions with properties
Dict with: success, components_count, component_sets_count, output_path, components
"""
definitions: List[ComponentDefinition] = []
component_sets_count = 0
@@ -423,23 +361,13 @@ class FigmaToolSuite:
async def extract_styles(self, file_key: str) -> Dict[str, Any]:
"""
🎨 EXTRACT VISUAL EXPRESSION PATTERNS - Perceive style definitions
The sensory organs perceive visual expressions (text, color, effect styles)
from Figma and categorize them by their biological purpose: how text
appears (typography), how colors flow (pigmentation), and how depth
and dimension manifest through effects.
Extract style definitions from Figma.
Args:
file_key: Figma file key (visual style source)
file_key: Figma file key
Returns:
Dict with extracted style definitions organized by type:
- success: Style extraction successful
- styles_count: Total style definitions found
- by_type: Styles organized by category (TEXT, FILL, EFFECT, GRID)
- output_path: File where style definitions are stored
- styles: Complete style information by type
Dict with: success, styles_count, by_type, output_path, styles
"""
definitions: List[StyleDefinition] = []
by_type = {"TEXT": [], "FILL": [], "EFFECT": [], "GRID": []}
@@ -510,25 +438,15 @@ class FigmaToolSuite:
async def sync_tokens(self, file_key: str, target_path: str, format: str = "css") -> Dict[str, Any]:
"""
🔄 CIRCULATE NUTRIENTS - Distribute tokens through the organism
The organism absorbs nutrients from Figma's visual designs and circulates
them through its body by syncing to the code codebase. This ensures the
organism's physical form (code) stays synchronized with its genetic design
(Figma tokens).
Sync design tokens from Figma to codebase.
Args:
file_key: Figma file key (nutrient source)
target_path: Codebase file path (circulation destination)
format: Output format for encoded nutrients
file_key: Figma file key
target_path: Target file path
format: Output format
Returns:
Dict with sync result:
- success: Circulation completed
- has_changes: Whether genetic material changed
- tokens_synced: Number of nutrients distributed
- target_path: Location where nutrients were circulated
- backup_created: Whether old nutrients were preserved
Dict with: success, has_changes, tokens_synced, target_path, backup_created
"""
# Extract current tokens
result = await self.extract_variables(file_key, format)
@@ -597,47 +515,37 @@ class FigmaToolSuite:
async def validate_components(self, file_key: str, schema_path: Optional[str] = None) -> Dict[str, Any]:
"""
🧬 GENETIC INTEGRITY CHECK - Validate component DNA health
The immune system examines extracted component DNA against genetic
rules (schema) to ensure all components are healthy, properly named,
and fully documented. Invalid components are flagged as mutations that
could endanger the organism's health.
Validate component definitions against rules.
Args:
file_key: Figma file key (genetic source)
schema_path: Optional path to validation rules (genetic schema)
file_key: Figma file key
schema_path: Optional validation schema path
Returns:
Dict with validation results:
- success: Validation completed without system errors
- valid: Whether all genetic material is healthy
- components_checked: Number of DNA blueprints examined
- issues: List of genetic problems found
- summary: Count of errors, warnings, and info messages
Dict with: success, valid, components_checked, issues, summary
"""
components = await self.extract_components(file_key)
issues: List[Dict[str, Any]] = []
# Run genetic integrity checks
# Run validation checks
for comp in components["components"]:
# Rule 1: 🧬 Genetic naming convention (capitalize first letter)
# Rule 1: Naming convention (capitalize first letter)
if not comp["name"][0].isupper():
issues.append({
"component": comp["name"],
"rule": "naming-convention",
"severity": "warning",
"message": f"🧬 Genetic mutation detected: '{comp['name']}' should follow naming convention (start with capital letter)"
"message": f"'{comp['name']}' should start with capital letter"
})
# Rule 2: 📋 Genetic documentation (description required)
# Rule 2: Description required
if not comp.get("description"):
issues.append({
"component": comp["name"],
"rule": "description-required",
"severity": "info",
"message": f"📝 Genetic annotation missing: '{comp['name']}' should have a description to document its biological purpose"
"message": f"'{comp['name']}' should have a description"
})
return {
@@ -657,25 +565,15 @@ class FigmaToolSuite:
async def generate_code(self, file_key: str, component_name: str,
framework: str = "webcomponent") -> Dict[str, Any]:
"""
📝 ENCODE GENETIC MATERIAL - Generate component code from DNA
The organism translates genetic blueprints (component DNA) from Figma
into executable code that can be expressed in multiple biological contexts
(frameworks). This genetic encoding allows the component DNA to manifest
as living tissue in different ecosystems.
Generate component code from Figma definition.
Args:
file_key: Figma file key (genetic source)
component_name: Name of component DNA to encode
framework: Target biological context (webcomponent, react, vue)
file_key: Figma file key
component_name: Component to generate
framework: Target framework (webcomponent, react, vue)
Returns:
Dict with generated code:
- success: Genetic encoding successful
- component: Component name
- framework: Target framework
- output_path: File where genetic code is written
- code: The encoded genetic material ready for expression
Dict with: success, component, framework, output_path, code
"""
components = await self.extract_components(file_key)
@@ -685,7 +583,7 @@ class FigmaToolSuite:
if not comp:
return {
"success": False,
"error": f"🛡️ Genetic material not found: Component '{component_name}' does not exist in the perceived DNA"
"error": f"Component '{component_name}' not found"
}
# Generate code based on framework

View File

@@ -960,6 +960,164 @@ class TokenDrift:
return None
# === Integrations ===
class Integrations:
    """Per-project storage of integration configuration records.

    Records live in ``<project>/integrations.json`` as a flat list under the
    ``"integrations"`` key; each record is identified by the pair
    ``(user_id, integration_type)``.
    """

    @staticmethod
    def _integrations_path(project_id: str) -> Path:
        # Location of the project's integration store on disk.
        return PROJECTS_DIR / project_id / "integrations.json"

    @staticmethod
    def list(project_id: str, user_id: int = None) -> List[Dict]:
        """Return all integration records for a project.

        When *user_id* is given, only that user's records are returned.
        """
        stored = read_json(Integrations._integrations_path(project_id), {"integrations": []})
        records = stored.get("integrations", [])
        if user_id is None:
            return records
        return [rec for rec in records if rec.get("user_id") == user_id]

    @staticmethod
    def get(project_id: str, user_id: int, integration_type: str) -> Optional[Dict]:
        """Return one user's record of the given type, or ``None`` if absent."""
        matching = (
            rec for rec in Integrations.list(project_id, user_id)
            if rec.get("integration_type") == integration_type
        )
        return next(matching, None)

    @staticmethod
    def upsert(project_id: str, user_id: int, integration_type: str,
               config: str, enabled: bool = True) -> Dict:
        """Create a record, or overwrite config/enabled on the existing one.

        Returns the stored record either way.
        """
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        # NOTE(review): utcnow() is naive UTC; kept for compatibility with
        # timestamps already on disk.
        timestamp = datetime.utcnow().isoformat()
        existing = next(
            (rec for rec in data["integrations"]
             if rec.get("user_id") == user_id
             and rec.get("integration_type") == integration_type),
            None,
        )
        if existing is not None:
            existing["config"] = config
            existing["enabled"] = enabled
            existing["updated_at"] = timestamp
            write_json(path, data)
            return existing
        # No match: append a fresh record with a short random id.
        created = {
            "id": str(uuid.uuid4())[:8],
            "project_id": project_id,
            "user_id": user_id,
            "integration_type": integration_type,
            "config": config,
            "enabled": enabled,
            "created_at": timestamp,
            "updated_at": timestamp,
            "last_used_at": None
        }
        data["integrations"].append(created)
        write_json(path, data)
        return created

    @staticmethod
    def update(project_id: str, user_id: int, integration_type: str,
               config: str = None, enabled: bool = None) -> Optional[Dict]:
        """Patch selected fields of an existing record.

        Only non-``None`` arguments are applied; ``updated_at`` is refreshed
        whenever the record is found. Returns the record, or ``None`` when no
        matching record exists (nothing is written in that case).
        """
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        for rec in data["integrations"]:
            if rec.get("user_id") != user_id:
                continue
            if rec.get("integration_type") != integration_type:
                continue
            if config is not None:
                rec["config"] = config
            if enabled is not None:
                rec["enabled"] = enabled
            rec["updated_at"] = datetime.utcnow().isoformat()
            write_json(path, data)
            return rec
        return None

    @staticmethod
    def delete(project_id: str, user_id: int, integration_type: str) -> bool:
        """Remove a record; return ``True`` if something was deleted."""
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})

        def _is_target(rec: Dict) -> bool:
            return (rec.get("user_id") == user_id
                    and rec.get("integration_type") == integration_type)

        kept = [rec for rec in data["integrations"] if not _is_target(rec)]
        if len(kept) == len(data["integrations"]):
            # Nothing matched; avoid a needless rewrite of the file.
            return False
        data["integrations"] = kept
        write_json(path, data)
        return True
class IntegrationHealth:
    """System-wide health tracking for integrations.

    State is kept in ``integration_health.json`` under ``SYSTEM_DIR`` as a
    mapping of integration type -> health fields.
    """

    @staticmethod
    def _health_path() -> Path:
        # Single shared health file for all integrations.
        return SYSTEM_DIR / "integration_health.json"

    @staticmethod
    def list_all() -> List[Dict]:
        """Return every tracked integration's health, flattened into dicts."""
        stored = read_json(IntegrationHealth._health_path(), {"health": {}})
        flattened = []
        for name, fields in stored.get("health", {}).items():
            flattened.append({"integration_type": name, **fields})
        return flattened

    @staticmethod
    def get(integration_type: str) -> Optional[Dict]:
        """Return health for one integration, or ``None`` if untracked."""
        stored = read_json(IntegrationHealth._health_path(), {"health": {}})
        health_map = stored.get("health", {})
        if integration_type not in health_map:
            return None
        return {"integration_type": integration_type, **health_map[integration_type]}

    @staticmethod
    def update(integration_type: str, is_healthy: bool = True,
               failure_count: int = None, circuit_open_until: str = None) -> Dict:
        """Record a health observation and persist it.

        Creates the entry on first use. Sets ``last_success_at`` or
        ``last_failure_at`` depending on *is_healthy*; ``failure_count`` and
        ``circuit_open_until`` are only written when passed explicitly.
        Returns the updated entry with its type folded in.
        """
        path = IntegrationHealth._health_path()
        data = read_json(path, {"health": {}})
        # First observation for this type seeds a default entry.
        entry = data["health"].setdefault(integration_type, {
            "is_healthy": True,
            "failure_count": 0,
            "last_failure_at": None,
            "last_success_at": None,
            "circuit_open_until": None
        })
        # NOTE(review): naive-UTC timestamp, matching the rest of the store.
        stamp = datetime.utcnow().isoformat()
        entry["is_healthy"] = is_healthy
        if is_healthy:
            entry["last_success_at"] = stamp
        else:
            entry["last_failure_at"] = stamp
        if failure_count is not None:
            entry["failure_count"] = failure_count
        if circuit_open_until is not None:
            entry["circuit_open_until"] = circuit_open_until
        write_json(path, data)
        return {"integration_type": integration_type, **entry}
# === Stats ===
def get_stats() -> Dict: