Replace SQLite with JSON file storage

- Remove database.py (SQLite) from tools/storage/ and dss-mvp1/
- Add json_store.py with full JSON-based storage layer
- Update 16 files to use new json_store imports
- Storage now mirrors DSS canonical structure:
  .dss/data/
  ├── _system/    (config, cache, activity)
  ├── projects/   (per-project: tokens, components, styles)
  └── teams/      (team definitions)
- Remove Docker files (not needed)
- Update DSS_CORE.json to v1.1.0

Philosophy: "Eat our own food" - storage structure matches DSS design

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
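The new storage layer's own diff is suppressed below for size, but the imports updated across this commit (`read_json`, `write_json`, `append_jsonl`, `read_jsonl`, `DATA_DIR`, `SYSTEM_DIR`, plus the table-named classes) outline its public surface. A minimal sketch of the low-level file helpers, assuming atomic rename-into-place writes; the real signatures in `json_store.py` are not shown here:

```python
# Hypothetical sketch of the json_store primitives implied by this commit's
# imports (read_json, write_json, DATA_DIR, SYSTEM_DIR). Bodies are assumed.
import json
import os
import tempfile
from pathlib import Path
from typing import Any, Optional

DATA_DIR = Path(__file__).parent.parent.parent / ".dss" / "data"
SYSTEM_DIR = DATA_DIR / "_system"


def read_json(path: Path, default: Optional[Any] = None) -> Any:
    """Return parsed JSON from path, or default if the file is missing."""
    if not path.exists():
        return default
    return json.loads(path.read_text(encoding="utf-8"))


def write_json(path: Path, data: Any) -> None:
    """Write JSON atomically: dump to a temp file, then rename into place."""
    path.parent.mkdir(parents=True, exist_ok=True)
    fd, tmp = tempfile.mkstemp(dir=path.parent, suffix=".tmp")
    with os.fdopen(fd, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, sort_keys=True)
    os.replace(tmp, path)  # atomic on POSIX and Windows
```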
@@ -1,62 +0,0 @@
-# Python
-__pycache__/
-*.py[cod]
-*$py.class
-*.so
-.Python
-env/
-venv/
-ENV/
-*.egg-info/
-dist/
-build/
-
-# DSS data
-.dss/
-*.db
-*.db-wal
-*.db-shm
-
-# Environment
-.env
-.env.local
-.env.production
-
-# IDE
-.vscode/
-.idea/
-*.swp
-*.swo
-*~
-
-# Git
-.git/
-.gitignore
-
-# Documentation
-docs/
-*.md
-!README.md
-
-# Tests
-tests/
-pytest_cache/
-.coverage
-htmlcov/
-
-# Node
-node_modules/
-npm-debug.log
-
-# OS
-.DS_Store
-Thumbs.db
-
-# Backups
-backups/
-*.backup
-*.bak
-
-# Logs
-*.log
-logs/
@@ -1,6 +1,6 @@
 {
   "$schema": "dss-core-v1",
-  "version": "1.0.0",
+  "version": "1.1.0",
   "last_updated": "2025-12-10",
   "purpose": "Single source of truth for AI agents working with DSS",

@@ -53,7 +53,7 @@
   "layers": {
     "router": "MCP Server (36 tools), REST API (34 endpoints), CLI",
     "messaging": "Circuit breaker, Activity log, Event emitter",
-    "workflows": "Figma client, Token ingestion, Storybook generator, Analysis engine, Context compiler, Storage (SQLite)"
+    "workflows": "Figma client, Token ingestion, Storybook generator, Analysis engine, Context compiler, Storage (JSON files)"
   },
   "ports": {
     "rest_api": 3456,

@@ -62,8 +62,17 @@
   "dependencies": {
     "python": ">=3.10",
     "node": ">=18",
-    "db": "sqlite3",
-    "services": ["figma-api", "storybook", "nginx"]
+    "services": ["figma-api", "storybook"]
   },
+  "storage": {
+    "type": "JSON files",
+    "location": ".dss/data/",
+    "structure": {
+      "_system": "config, cache, activity logs",
+      "projects/{id}": "manifest, tokens/, components/, styles/, figma/, metrics/",
+      "teams/{id}": "manifest, members, access"
+    },
+    "philosophy": "Eat our own food - storage mirrors DSS canonical structure"
+  },

@@ -98,7 +107,8 @@
   "rest_api": "tools/api/server.py",
   "token_parsers": "tools/ingest/",
   "analysis": "tools/analyze/",
-  "database": ".dss/dss.db",
+  "storage": "tools/storage/json_store.py",
+  "data": ".dss/data/",
   "schemas": ".dss/schema/",
   "admin_ui": "admin-ui/",
   "skills": "dss-claude-plugin/skills/",

@@ -201,8 +211,8 @@
     "debounce_ms": 250
   },
   "storage": {
-    "db": ".dss/dss.db",
-    "cache": ".dss/cache"
+    "data": ".dss/data/",
+    "cache": ".dss/data/_system/cache/"
   }
 },

@@ -213,6 +223,7 @@
 },

 "changelog": [
+  {"version": "1.1.0", "date": "2025-12-10", "notes": "Migrate from SQLite to JSON file storage"},
   {"version": "1.0.0", "date": "2025-12-10", "notes": "Initial core definition"}
 ]
 }
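The new `storage` block above doubles as a directory map for `.dss/data/`. A sketch of how paths could resolve under that layout; the helper names are illustrative, only the layout itself comes from the commit:

```python
# Illustrative path helpers for the canonical layout declared in DSS_CORE.json.
# Function names are hypothetical; the directory structure is from the commit.
from pathlib import Path

DATA_DIR = Path(".dss/data")


def project_dir(project_id: str) -> Path:
    """projects/{id}: manifest, tokens/, components/, styles/, figma/, metrics/."""
    return DATA_DIR / "projects" / project_id


def token_file(project_id: str, collection: str) -> Path:
    return project_dir(project_id) / "tokens" / f"{collection}.json"


def team_manifest(team_id: str) -> Path:
    """teams/{id}: manifest, members, access."""
    return DATA_DIR / "teams" / team_id / "manifest.json"
```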
Dockerfile (deleted, 57 lines)
@@ -1,57 +0,0 @@
-# Design System Server (DSS) - Docker Image
-# Version: 0.8.0
-
-FROM python:3.11-slim
-
-LABEL maintainer="DSS Team"
-LABEL version="0.8.0"
-LABEL description="Design System Server with MCP integration"
-
-# Set environment variables
-ENV PYTHONUNBUFFERED=1 \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PIP_NO_CACHE_DIR=1 \
-    PIP_DISABLE_PIP_VERSION_CHECK=1
-
-# Install system dependencies
-RUN apt-get update && apt-get install -y \
-    sqlite3 \
-    curl \
-    && rm -rf /var/lib/apt/lists/*
-
-# Create app user
-RUN useradd -m -u 1000 dss && \
-    mkdir -p /app && \
-    chown -R dss:dss /app
-
-# Set working directory
-WORKDIR /app
-
-# Copy requirements
-COPY requirements.txt .
-
-# Install Python dependencies
-RUN pip install --no-cache-dir -r requirements.txt
-
-# Copy application code
-COPY --chown=dss:dss . .
-
-# Create data directories
-RUN mkdir -p /app/.dss/cache && \
-    chown -R dss:dss /app/.dss
-
-# Switch to app user
-USER dss
-
-# Expose port
-EXPOSE 3456
-
-# Health check
-HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
-    CMD curl -f http://localhost:3456/health || exit 1
-
-# Set working directory to tools/api
-WORKDIR /app/tools/api
-
-# Run server
-CMD ["python3", "-m", "uvicorn", "server:app", "--host", "0.0.0.0", "--port", "3456"]
@@ -34,7 +34,7 @@ import sys
 sys.path.insert(0, str(Path(__file__).parent.parent))

 from config import config
-from storage.database import (
+from storage.json_store import (
     Projects, Components, SyncHistory, ActivityLog, Teams, Cache, get_stats
 )
 from figma.figma_tools import FigmaToolSuite
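Note that the swap preserves every imported name (`Projects`, `Components`, `SyncHistory`, `ActivityLog`, `Teams`, `Cache`, `get_stats`), so call sites only change the module path. A sketch of how `Projects` might keep the old class API on top of per-project manifest files; the internals here are assumed, not from the commit:

```python
# Sketch: same Projects surface as the old SQLite layer, backed by one
# manifest.json per project directory. Internals assumed.
import json
from pathlib import Path
from typing import Dict, List, Optional

DATA_DIR = Path(".dss/data")


class Projects:
    @staticmethod
    def _manifest(id: str) -> Path:
        return DATA_DIR / "projects" / id / "manifest.json"

    @staticmethod
    def get(id: str) -> Optional[Dict]:
        path = Projects._manifest(id)
        return json.loads(path.read_text()) if path.exists() else None

    @staticmethod
    def list(status: str = None) -> List[Dict]:
        root = DATA_DIR / "projects"
        rows = [json.loads(p.read_text()) for p in root.glob("*/manifest.json")]
        return [r for r in rows if status is None or r.get("status") == status]
```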
@@ -1,54 +0,0 @@
-version: '3.8'
-
-services:
-  dss:
-    build: .
-    container_name: dss-server
-    restart: unless-stopped
-    ports:
-      - "3456:3456"
-    env_file:
-      - .env
-    environment:
-      - NODE_ENV=production
-      - HOST=0.0.0.0
-      - PORT=3456
-      - DATABASE_PATH=/app/.dss/dss.db
-      - PYTHONPATH=/app/tools
-    volumes:
-      # Persistent data
-      - dss-data:/app/.dss
-      # Optional: Mount custom config
-      # - ./custom.env:/app/.env:ro
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:3456/health"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-      start_period: 40s
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "10m"
-        max-file: "3"
-
-  # Optional: Redis for caching (if using Celery)
-  # redis:
-  #   image: redis:7-alpine
-  #   container_name: dss-redis
-  #   restart: unless-stopped
-  #   ports:
-  #     - "127.0.0.1:6379:6379"
-  #   volumes:
-  #     - redis-data:/data
-  #   command: redis-server --appendonly yes
-
-volumes:
-  dss-data:
-    driver: local
-  # redis-data:
-  #   driver: local
-
-networks:
-  default:
-    name: dss-network
@@ -10,7 +10,7 @@ from .security import TimestampConflictResolver
 from ..models.project import Project
 from ..models.theme import DesignToken
 from ..models.component import Component
-from ..storage.database import get_connection
+from storage.json_store import Projects, Components, Tokens


 MergeStrategy = Literal["overwrite", "keep_local", "fork", "skip"]
@@ -20,7 +20,7 @@ from .importer import DSSArchiveImporter, ImportAnalysis
 from .merger import SmartMerger, ConflictResolutionMode, MergeAnalysis
 from .security import DatabaseLockingStrategy, MemoryLimitManager
 from ..models.project import Project
-from ..storage.database import get_connection
+from storage.json_store import Projects, ActivityLog


 @dataclass
@@ -190,7 +190,7 @@ class StatusDashboard:

         # Database stats
         try:
-            from dss.storage.database import get_stats, ActivityLog, SyncHistory, Projects, Components
+            from storage.json_store import get_stats, ActivityLog, SyncHistory, Projects, Components

             stats = get_stats()
             data.projects_count = stats.get("projects", 0)
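`get_stats` survives the migration by name as well. Where the SQLite version ran `COUNT(*)` per table, a JSON-backed version can count files; a sketch consistent with the `stats.get("projects", 0)` call above, though the real key set is not shown in this commit:

```python
# Sketch: get_stats over the JSON layout, counting manifests instead of rows.
# Key names beyond "projects" are assumed.
from pathlib import Path
from typing import Dict

DATA_DIR = Path(".dss/data")


def get_stats() -> Dict[str, int]:
    return {
        "projects": len(list((DATA_DIR / "projects").glob("*/manifest.json"))),
        "teams": len(list((DATA_DIR / "teams").glob("*/manifest.json"))),
    }
```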
@@ -1,848 +0,0 @@
-"""
-Design System Server (DSS) - SQLite Storage Layer
-
-High-efficiency local-first database for:
-- Component definitions (relational)
-- Sync history (time-series)
-- Team/User RBAC
-- Figma API cache (TTL-based)
-
-Design tokens stored as flat JSON files for git-friendly diffs.
-"""
-
-import sqlite3
-import json
-import time
-import hashlib
-from pathlib import Path
-from datetime import datetime
-from typing import Optional, Dict, List, Any
-from contextlib import contextmanager
-from dataclasses import dataclass, asdict
-
-# Database location
-DB_DIR = Path(__file__).parent.parent.parent / ".dss"
-DB_PATH = DB_DIR / "dss.db"
-
-# Ensure directory exists
-DB_DIR.mkdir(parents=True, exist_ok=True)
-
-
-@contextmanager
-def get_connection():
-    """Context manager for database connections with WAL mode for performance."""
-    conn = sqlite3.connect(DB_PATH, timeout=30.0)
-    conn.row_factory = sqlite3.Row
-    conn.execute("PRAGMA journal_mode=WAL")    # Write-Ahead Logging for concurrency
-    conn.execute("PRAGMA synchronous=NORMAL")  # Balance safety/speed
-    conn.execute("PRAGMA cache_size=-64000")   # 64MB cache
-    conn.execute("PRAGMA temp_store=MEMORY")   # Temp tables in memory
-    try:
-        yield conn
-        conn.commit()
-    except Exception:
-        conn.rollback()
-        raise
-    finally:
-        conn.close()
-
-
-def init_database():
-    """Initialize all database tables."""
-    with get_connection() as conn:
-        cursor = conn.cursor()
-
-        # === Projects ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS projects (
-                id TEXT PRIMARY KEY,
-                uuid TEXT UNIQUE,
-                name TEXT NOT NULL,
-                description TEXT,
-                figma_file_key TEXT,
-                status TEXT DEFAULT 'active',
-                created_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                updated_at TEXT DEFAULT CURRENT_TIMESTAMP
-            )
-        """)
-
-        # === Components ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS components (
-                id TEXT PRIMARY KEY,
-                uuid TEXT UNIQUE,
-                project_id TEXT NOT NULL,
-                name TEXT NOT NULL,
-                figma_key TEXT,
-                description TEXT,
-                properties TEXT,  -- JSON
-                variants TEXT,    -- JSON array
-                code_generated INTEGER DEFAULT 0,
-                created_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                FOREIGN KEY (project_id) REFERENCES projects(id)
-            )
-        """)
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_components_project ON components(project_id)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_components_name ON components(name)")
-
-        # === Styles ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS styles (
-                id TEXT PRIMARY KEY,
-                project_id TEXT NOT NULL,
-                name TEXT NOT NULL,
-                type TEXT NOT NULL,  -- TEXT, FILL, EFFECT, GRID
-                figma_key TEXT,
-                properties TEXT,     -- JSON
-                created_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                FOREIGN KEY (project_id) REFERENCES projects(id)
-            )
-        """)
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_styles_project ON styles(project_id)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_styles_type ON styles(type)")
-
-        # === Tokens (metadata, actual values in JSON files) ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS token_collections (
-                id TEXT PRIMARY KEY,
-                project_id TEXT NOT NULL,
-                name TEXT NOT NULL,
-                file_path TEXT NOT NULL,
-                token_count INTEGER DEFAULT 0,
-                last_synced TEXT,
-                FOREIGN KEY (project_id) REFERENCES projects(id)
-            )
-        """)
-
-        # === Sync History (append-only, time-series) ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS sync_history (
-                id INTEGER PRIMARY KEY AUTOINCREMENT,
-                project_id TEXT NOT NULL,
-                sync_type TEXT NOT NULL,  -- tokens, components, styles, full
-                status TEXT NOT NULL,     -- success, failed, partial
-                items_synced INTEGER DEFAULT 0,
-                changes TEXT,             -- JSON diff summary
-                error_message TEXT,
-                started_at TEXT NOT NULL,
-                completed_at TEXT,
-                duration_ms INTEGER,
-                FOREIGN KEY (project_id) REFERENCES projects(id)
-            )
-        """)
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_sync_project ON sync_history(project_id)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_sync_time ON sync_history(started_at DESC)")
-
-        # === Activity Log (Enhanced Audit Trail) ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS activity_log (
-                id INTEGER PRIMARY KEY AUTOINCREMENT,
-                project_id TEXT,
-                user_id TEXT,
-                user_name TEXT,     -- Denormalized for faster display
-                team_context TEXT,  -- ui, ux, qa, all
-                action TEXT NOT NULL,  -- Created, Updated, Deleted, Extracted, Synced, etc.
-                entity_type TEXT,   -- project, component, token, figma_file, etc.
-                entity_id TEXT,
-                entity_name TEXT,   -- Denormalized for faster display
-                category TEXT,      -- design_system, code, configuration, team
-                severity TEXT DEFAULT 'info',  -- info, warning, critical
-                description TEXT,   -- Human-readable description
-                details TEXT,       -- JSON with full context
-                ip_address TEXT,    -- For security audit
-                user_agent TEXT,    -- Browser/client info
-                created_at TEXT DEFAULT CURRENT_TIMESTAMP
-            )
-        """)
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_activity_time ON activity_log(created_at DESC)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_activity_project ON activity_log(project_id)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_activity_user ON activity_log(user_id)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_activity_action ON activity_log(action)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_activity_category ON activity_log(category)")
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_activity_entity ON activity_log(entity_type, entity_id)")
-
-        # === Teams ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS teams (
-                id TEXT PRIMARY KEY,
-                name TEXT NOT NULL,
-                description TEXT,
-                settings TEXT,  -- JSON
-                created_at TEXT DEFAULT CURRENT_TIMESTAMP
-            )
-        """)
-
-        # === Users ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS users (
-                id TEXT PRIMARY KEY,
-                email TEXT UNIQUE NOT NULL,
-                name TEXT,
-                avatar_url TEXT,
-                created_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                last_login TEXT
-            )
-        """)
-
-        # === Team Members (RBAC) ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS team_members (
-                team_id TEXT NOT NULL,
-                user_id TEXT NOT NULL,
-                role TEXT NOT NULL,  -- SUPER_ADMIN, TEAM_LEAD, DEVELOPER, VIEWER
-                joined_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                PRIMARY KEY (team_id, user_id),
-                FOREIGN KEY (team_id) REFERENCES teams(id),
-                FOREIGN KEY (user_id) REFERENCES users(id)
-            )
-        """)
-
-        # === Project Team Access ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS project_access (
-                project_id TEXT NOT NULL,
-                team_id TEXT NOT NULL,
-                access_level TEXT DEFAULT 'read',  -- read, write, admin
-                granted_at TEXT DEFAULT CURRENT_TIMESTAMP,
-                PRIMARY KEY (project_id, team_id),
-                FOREIGN KEY (project_id) REFERENCES projects(id),
-                FOREIGN KEY (team_id) REFERENCES teams(id)
-            )
-        """)
-
-        # === Figma Cache (TTL-based) ===
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS figma_cache (
-                cache_key TEXT PRIMARY KEY,
-                value BLOB NOT NULL,
-                created_at INTEGER NOT NULL,
-                expires_at INTEGER NOT NULL
-            )
-        """)
-        cursor.execute("CREATE INDEX IF NOT EXISTS idx_cache_expires ON figma_cache(expires_at)")
-
-        conn.commit()
-        print(f"[Storage] Database initialized at {DB_PATH}")
-
-
-# === Cache Operations ===
-
-class Cache:
-    """TTL-based cache using SQLite."""
-
-    DEFAULT_TTL = 300  # 5 minutes
-
-    @staticmethod
-    def set(key: str, value: Any, ttl: int = DEFAULT_TTL) -> None:
-        """Store a value with TTL."""
-        now = int(time.time())
-        expires = now + ttl
-        data = json.dumps(value).encode() if not isinstance(value, bytes) else value
-
-        with get_connection() as conn:
-            conn.execute(
-                "INSERT OR REPLACE INTO figma_cache (cache_key, value, created_at, expires_at) VALUES (?, ?, ?, ?)",
-                (key, data, now, expires)
-            )
-
-    @staticmethod
-    def get(key: str) -> Optional[Any]:
-        """Get a value if not expired."""
-        now = int(time.time())
-
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute(
-                "SELECT value FROM figma_cache WHERE cache_key = ? AND expires_at > ?",
-                (key, now)
-            )
-            row = cursor.fetchone()
-
-            if row:
-                try:
-                    return json.loads(row[0])
-                except (json.JSONDecodeError, TypeError):
-                    return row[0]
-            return None
-
-    @staticmethod
-    def delete(key: str) -> None:
-        """Delete a cache entry."""
-        with get_connection() as conn:
-            conn.execute("DELETE FROM figma_cache WHERE cache_key = ?", (key,))
-
-    @staticmethod
-    def clear_expired() -> int:
-        """Remove all expired entries. Returns count deleted."""
-        now = int(time.time())
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("DELETE FROM figma_cache WHERE expires_at <= ?", (now,))
-            return cursor.rowcount
-
-    @staticmethod
-    def clear_all() -> None:
-        """Clear entire cache."""
-        with get_connection() as conn:
-            conn.execute("DELETE FROM figma_cache")
-
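The `Cache` class above is the TTL store the JSON backend has to replace. Since DSS_CORE.json now points the cache at `.dss/data/_system/cache/`, one file per key with an embedded expiry is a natural shape; a sketch, with the per-key file layout assumed:

```python
# Sketch: TTL cache as one JSON file per key under _system/cache/.
# The per-key format is assumed; only the directory comes from the commit.
import hashlib
import json
import time
from pathlib import Path
from typing import Any, Optional

CACHE_DIR = Path(".dss/data/_system/cache")


def _path(key: str) -> Path:
    return CACHE_DIR / f"{hashlib.sha256(key.encode()).hexdigest()}.json"


def cache_set(key: str, value: Any, ttl: int = 300) -> None:
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    _path(key).write_text(json.dumps({"expires_at": time.time() + ttl, "value": value}))


def cache_get(key: str) -> Optional[Any]:
    p = _path(key)
    if not p.exists():
        return None
    entry = json.loads(p.read_text())
    if entry["expires_at"] <= time.time():
        p.unlink(missing_ok=True)  # lazy expiry, like clear_expired() above
        return None
    return entry["value"]
```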
-# === Project Operations ===
-
-class Projects:
-    """Project CRUD operations."""
-
-    @staticmethod
-    def create(id: str, name: str, description: str = "", figma_file_key: str = "") -> Dict:
-        with get_connection() as conn:
-            conn.execute(
-                "INSERT INTO projects (id, name, description, figma_file_key) VALUES (?, ?, ?, ?)",
-                (id, name, description, figma_file_key)
-            )
-        return Projects.get(id)
-
-    @staticmethod
-    def get(id: str) -> Optional[Dict]:
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("SELECT * FROM projects WHERE id = ?", (id,))
-            row = cursor.fetchone()
-            return dict(row) if row else None
-
-    @staticmethod
-    def list(status: str = None) -> List[Dict]:
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            if status:
-                cursor.execute("SELECT * FROM projects WHERE status = ? ORDER BY updated_at DESC", (status,))
-            else:
-                cursor.execute("SELECT * FROM projects ORDER BY updated_at DESC")
-            return [dict(row) for row in cursor.fetchall()]
-
-    @staticmethod
-    def update(id: str, **kwargs) -> Optional[Dict]:
-        if not kwargs:
-            return Projects.get(id)
-
-        fields = ", ".join(f"{k} = ?" for k in kwargs.keys())
-        values = list(kwargs.values()) + [id]
-
-        with get_connection() as conn:
-            conn.execute(
-                f"UPDATE projects SET {fields}, updated_at = CURRENT_TIMESTAMP WHERE id = ?",
-                values
-            )
-        return Projects.get(id)
-
-    @staticmethod
-    def delete(id: str) -> bool:
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("DELETE FROM projects WHERE id = ?", (id,))
-            return cursor.rowcount > 0
-
-
-# === Component Operations ===
-
-class Components:
-    """Component CRUD operations."""
-
-    @staticmethod
-    def upsert(project_id: str, components: List[Dict]) -> int:
-        """Bulk upsert components. Returns count."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            count = 0
-            for comp in components:
-                cursor.execute("""
-                    INSERT OR REPLACE INTO components
-                    (id, project_id, name, figma_key, description, properties, variants, updated_at)
-                    VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
-                """, (
-                    comp.get('id') or f"{project_id}-{comp['name']}",
-                    project_id,
-                    comp['name'],
-                    comp.get('figma_key') or comp.get('key'),
-                    comp.get('description', ''),
-                    json.dumps(comp.get('properties', {})),
-                    json.dumps(comp.get('variants', []))
-                ))
-                count += 1
-            return count
-
-    @staticmethod
-    def list(project_id: str) -> List[Dict]:
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute(
-                "SELECT * FROM components WHERE project_id = ? ORDER BY name",
-                (project_id,)
-            )
-            results = []
-            for row in cursor.fetchall():
-                comp = dict(row)
-                comp['properties'] = json.loads(comp['properties'] or '{}')
-                comp['variants'] = json.loads(comp['variants'] or '[]')
-                results.append(comp)
-            return results
-
-    @staticmethod
-    def get(id: str) -> Optional[Dict]:
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("SELECT * FROM components WHERE id = ?", (id,))
-            row = cursor.fetchone()
-            if row:
-                comp = dict(row)
-                comp['properties'] = json.loads(comp['properties'] or '{}')
-                comp['variants'] = json.loads(comp['variants'] or '[]')
-                return comp
-            return None
-
-
-# === Sync History ===
-
-class SyncHistory:
-    """Append-only sync history log."""
-
-    @staticmethod
-    def start(project_id: str, sync_type: str) -> int:
-        """Start a sync, returns sync ID."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute(
-                "INSERT INTO sync_history (project_id, sync_type, status, started_at) VALUES (?, ?, 'running', ?)",
-                (project_id, sync_type, datetime.utcnow().isoformat())
-            )
-            return cursor.lastrowid
-
-    @staticmethod
-    def complete(sync_id: int, status: str, items_synced: int = 0, changes: Dict = None, error: str = None):
-        """Complete a sync with results."""
-        started = None
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("SELECT started_at FROM sync_history WHERE id = ?", (sync_id,))
-            row = cursor.fetchone()
-            if row:
-                started = datetime.fromisoformat(row[0])
-
-        completed = datetime.utcnow()
-        duration_ms = int((completed - started).total_seconds() * 1000) if started else 0
-
-        with get_connection() as conn:
-            conn.execute("""
-                UPDATE sync_history SET
-                    status = ?, items_synced = ?, changes = ?, error_message = ?,
-                    completed_at = ?, duration_ms = ?
-                WHERE id = ?
-            """, (
-                status, items_synced,
-                json.dumps(changes) if changes else None,
-                error,
-                completed.isoformat(), duration_ms,
-                sync_id
-            ))
-
-    @staticmethod
-    def recent(project_id: str = None, limit: int = 20) -> List[Dict]:
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            if project_id:
-                cursor.execute(
-                    "SELECT * FROM sync_history WHERE project_id = ? ORDER BY started_at DESC LIMIT ?",
-                    (project_id, limit)
-                )
-            else:
-                cursor.execute(
-                    "SELECT * FROM sync_history ORDER BY started_at DESC LIMIT ?",
-                    (limit,)
-                )
-            results = []
-            for row in cursor.fetchall():
-                sync = dict(row)
-                sync['changes'] = json.loads(sync['changes']) if sync['changes'] else None
-                results.append(sync)
-            return results
-
-
-# === Activity Log (Enhanced Audit System) ===
-
-class ActivityLog:
-    """Enhanced activity tracking for comprehensive audit trail."""
-
-    # Action categories for better organization
-    CATEGORIES = {
-        'design_system': ['extract_tokens', 'extract_components', 'sync_tokens', 'validate_tokens'],
-        'code': ['analyze_components', 'find_inline_styles', 'generate_code', 'get_quick_wins'],
-        'configuration': ['config_updated', 'figma_token_updated', 'mode_changed', 'service_configured'],
-        'project': ['project_created', 'project_updated', 'project_deleted'],
-        'team': ['team_context_changed', 'project_context_changed'],
-        'storybook': ['scan_storybook', 'generate_story', 'generate_theme']
-    }
-
-    @staticmethod
-    def log(action: str,
-            entity_type: str = None,
-            entity_id: str = None,
-            entity_name: str = None,
-            project_id: str = None,
-            user_id: str = None,
-            user_name: str = None,
-            team_context: str = None,
-            description: str = None,
-            category: str = None,
-            severity: str = 'info',
-            details: Dict = None,
-            ip_address: str = None,
-            user_agent: str = None):
-        """
-        Log an activity with enhanced audit information.
-
-        Args:
-            action: Action performed (e.g., 'project_created', 'tokens_extracted')
-            entity_type: Type of entity affected (e.g., 'project', 'component')
-            entity_id: ID of the affected entity
-            entity_name: Human-readable name of the entity
-            project_id: Project context
-            user_id: User who performed the action
-            user_name: Human-readable user name
-            team_context: Team context (ui, ux, qa, all)
-            description: Human-readable description of the action
-            category: Category (design_system, code, configuration, etc.)
-            severity: info, warning, critical
-            details: Additional JSON details
-            ip_address: Client IP for security audit
-            user_agent: Browser/client information
-        """
-        # Auto-detect category if not provided
-        if not category:
-            for cat, actions in ActivityLog.CATEGORIES.items():
-                if action in actions:
-                    category = cat
-                    break
-            if not category:
-                category = 'other'
-
-        # Generate description if not provided
-        if not description:
-            description = ActivityLog._generate_description(action, entity_type, entity_name, details)
-
-        with get_connection() as conn:
-            conn.execute("""
-                INSERT INTO activity_log (
-                    project_id, user_id, user_name, team_context,
-                    action, entity_type, entity_id, entity_name,
-                    category, severity, description, details,
-                    ip_address, user_agent
-                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-            """, (
-                project_id, user_id, user_name, team_context,
-                action, entity_type, entity_id, entity_name,
-                category, severity, description,
-                json.dumps(details) if details else None,
-                ip_address, user_agent
-            ))
-
-    @staticmethod
-    def _generate_description(action: str, entity_type: str, entity_name: str, details: Dict) -> str:
-        """Generate human-readable description from action data."""
-        entity_str = f"{entity_type} '{entity_name}'" if entity_name else (entity_type or "item")
-
-        action_map = {
-            'project_created': f"Created project {entity_str}",
-            'project_updated': f"Updated {entity_str}",
-            'project_deleted': f"Deleted {entity_str}",
-            'extract_tokens': f"Extracted design tokens from Figma",
-            'extract_components': f"Extracted components from Figma",
-            'sync_tokens': f"Synced tokens to file",
-            'config_updated': "Updated configuration",
-            'figma_token_updated': "Updated Figma API token",
-            'team_context_changed': f"Switched to team context",
-            'project_context_changed': f"Switched to project {entity_name}",
-        }
-
-        return action_map.get(action, f"{action.replace('_', ' ').title()}")
-
-    @staticmethod
-    def recent(project_id: str = None, limit: int = 50, offset: int = 0) -> List[Dict]:
-        """Get recent activity with pagination."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            if project_id:
-                cursor.execute(
-                    "SELECT * FROM activity_log WHERE project_id = ? ORDER BY created_at DESC LIMIT ? OFFSET ?",
-                    (project_id, limit, offset)
-                )
-            else:
-                cursor.execute(
-                    "SELECT * FROM activity_log ORDER BY created_at DESC LIMIT ? OFFSET ?",
-                    (limit, offset)
-                )
-            results = []
-            for row in cursor.fetchall():
-                activity = dict(row)
-                activity['details'] = json.loads(activity['details']) if activity['details'] else None
-                results.append(activity)
-            return results
-
-    @staticmethod
-    def search(
-        project_id: str = None,
-        user_id: str = None,
-        action: str = None,
-        category: str = None,
-        entity_type: str = None,
-        severity: str = None,
-        start_date: str = None,
-        end_date: str = None,
-        limit: int = 100,
-        offset: int = 0
-    ) -> List[Dict]:
-        """Advanced search/filter for audit logs."""
-        conditions = []
-        params = []
-
-        if project_id:
-            conditions.append("project_id = ?")
-            params.append(project_id)
-        if user_id:
-            conditions.append("user_id = ?")
-            params.append(user_id)
-        if action:
-            conditions.append("action = ?")
-            params.append(action)
-        if category:
-            conditions.append("category = ?")
-            params.append(category)
-        if entity_type:
-            conditions.append("entity_type = ?")
-            params.append(entity_type)
-        if severity:
-            conditions.append("severity = ?")
-            params.append(severity)
-        if start_date:
-            conditions.append("created_at >= ?")
-            params.append(start_date)
-        if end_date:
-            conditions.append("created_at <= ?")
-            params.append(end_date)
-
-        where_clause = " AND ".join(conditions) if conditions else "1=1"
-        params.extend([limit, offset])
-
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute(f"""
-                SELECT * FROM activity_log
-                WHERE {where_clause}
-                ORDER BY created_at DESC
-                LIMIT ? OFFSET ?
-            """, params)
-
-            results = []
-            for row in cursor.fetchall():
-                activity = dict(row)
-                activity['details'] = json.loads(activity['details']) if activity['details'] else None
-                results.append(activity)
-            return results
-
-    @staticmethod
-    def count(
-        project_id: str = None,
-        user_id: str = None,
-        action: str = None,
-        category: str = None
-    ) -> int:
-        """Count activities matching filters."""
-        conditions = []
-        params = []
-
-        if project_id:
-            conditions.append("project_id = ?")
-            params.append(project_id)
-        if user_id:
-            conditions.append("user_id = ?")
-            params.append(user_id)
-        if action:
-            conditions.append("action = ?")
-            params.append(action)
-        if category:
-            conditions.append("category = ?")
-            params.append(category)
-
-        where_clause = " AND ".join(conditions) if conditions else "1=1"
-
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute(f"SELECT COUNT(*) FROM activity_log WHERE {where_clause}", params)
-            return cursor.fetchone()[0]
-
-    @staticmethod
-    def get_categories() -> List[str]:
-        """Get list of all categories used."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("SELECT DISTINCT category FROM activity_log WHERE category IS NOT NULL ORDER BY category")
-            return [row[0] for row in cursor.fetchall()]
-
-    @staticmethod
-    def get_actions() -> List[str]:
-        """Get list of all actions used."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("SELECT DISTINCT action FROM activity_log ORDER BY action")
-            return [row[0] for row in cursor.fetchall()]
-
-    @staticmethod
-    def get_stats_by_category() -> Dict[str, int]:
-        """Get activity count by category."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("""
-                SELECT category, COUNT(*) as count
-                FROM activity_log
-                GROUP BY category
-                ORDER BY count DESC
-            """)
-            return {row[0]: row[1] for row in cursor.fetchall()}
-
-    @staticmethod
-    def get_stats_by_user() -> Dict[str, int]:
-        """Get activity count by user."""
-        with get_connection() as conn:
-            cursor = conn.cursor()
-            cursor.execute("""
-                SELECT COALESCE(user_name, user_id, 'Unknown') as user, COUNT(*) as count
-                FROM activity_log
-                GROUP BY user_name, user_id
-                ORDER BY count DESC
-            """)
-            return {row[0]: row[1] for row in cursor.fetchall()}
-
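`sync_history` and `activity_log` above are append-only time series, and the `append_jsonl`/`read_jsonl` imports elsewhere in this commit suggest they become JSONL files under `_system/`. A sketch of those two helpers; only the names come from the commit, the bodies and file naming are assumed:

```python
# Sketch: append-only logs as JSONL, matching the append_jsonl/read_jsonl
# names imported from json_store in this commit (implementations assumed).
import json
from pathlib import Path
from typing import Dict, List

SYSTEM_DIR = Path(".dss/data/_system")


def append_jsonl(name: str, record: Dict) -> None:
    """Append one record as a single JSON line; O(1), no rewrite of old data."""
    path = SYSTEM_DIR / f"{name}.jsonl"
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("a", encoding="utf-8") as f:
        f.write(json.dumps(record) + "\n")


def read_jsonl(name: str, limit: int = 50) -> List[Dict]:
    """Return the most recent records, newest first."""
    path = SYSTEM_DIR / f"{name}.jsonl"
    if not path.exists():
        return []
    lines = path.read_text(encoding="utf-8").splitlines()
    return [json.loads(line) for line in lines[-limit:]][::-1]
```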
# === Teams & RBAC ===
|
||||
|
||||
class Teams:
|
||||
"""Team and role management."""
|
||||
|
||||
@staticmethod
|
||||
def create(id: str, name: str, description: str = "") -> Dict:
|
||||
with get_connection() as conn:
|
||||
conn.execute(
|
||||
"INSERT INTO teams (id, name, description) VALUES (?, ?, ?)",
|
||||
(id, name, description)
|
||||
)
|
||||
return Teams.get(id)
|
||||
|
||||
@staticmethod
|
||||
def get(id: str) -> Optional[Dict]:
|
||||
with get_connection() as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("SELECT * FROM teams WHERE id = ?", (id,))
|
||||
row = cursor.fetchone()
|
||||
if row:
|
||||
team = dict(row)
|
||||
team['settings'] = json.loads(team['settings']) if team['settings'] else {}
|
||||
return team
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def list() -> List[Dict]:
|
||||
with get_connection() as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("SELECT * FROM teams ORDER BY name")
|
||||
return [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
@staticmethod
|
||||
def add_member(team_id: str, user_id: str, role: str):
|
||||
with get_connection() as conn:
|
||||
conn.execute(
|
||||
"INSERT OR REPLACE INTO team_members (team_id, user_id, role) VALUES (?, ?, ?)",
|
||||
(team_id, user_id, role)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_members(team_id: str) -> List[Dict]:
|
||||
with get_connection() as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("""
|
||||
SELECT u.*, tm.role, tm.joined_at
|
||||
FROM team_members tm
|
||||
JOIN users u ON u.id = tm.user_id
|
||||
WHERE tm.team_id = ?
|
||||
ORDER BY tm.role, u.name
|
||||
""", (team_id,))
|
||||
return [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
@staticmethod
|
||||
def get_user_role(team_id: str, user_id: str) -> Optional[str]:
|
||||
with get_connection() as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
"SELECT role FROM team_members WHERE team_id = ? AND user_id = ?",
|
||||
(team_id, user_id)
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
return row[0] if row else None
|
||||
|
||||
|
||||
# === Database Stats ===
|
||||
|
||||
def get_stats() -> Dict:
|
||||
"""Get database statistics."""
|
||||
with get_connection() as conn:
|
||||
cursor = conn.cursor()
|
||||
|
||||
stats = {}
|
||||
|
||||
# Table counts
|
||||
tables = ['projects', 'components', 'styles', 'sync_history', 'activity_log', 'teams', 'users', 'figma_cache']
|
||||
for table in tables:
|
||||
cursor.execute(f"SELECT COUNT(*) FROM {table}")
|
||||
stats[table] = cursor.fetchone()[0]
|
||||
|
||||
# Database file size
|
||||
if DB_PATH.exists():
|
||||
stats['db_size_mb'] = round(DB_PATH.stat().st_size / (1024 * 1024), 2)
|
||||
|
||||
# Cache stats
|
||||
now = int(time.time())
|
||||
cursor.execute("SELECT COUNT(*) FROM figma_cache WHERE expires_at > ?", (now,))
|
||||
stats['cache_valid'] = cursor.fetchone()[0]
|
||||
|
||||
return stats
|
||||
|
||||
|
||||
# Initialize on import
|
||||
init_database()
|
||||
|
||||
|
||||
# === CLI for testing ===
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
cmd = sys.argv[1]
|
||||
|
||||
if cmd == "stats":
|
||||
print(json.dumps(get_stats(), indent=2))
|
||||
|
||||
elif cmd == "init":
|
||||
init_database()
|
||||
print("Database initialized")
|
||||
|
||||
elif cmd == "cache-test":
|
||||
Cache.set("test_key", {"foo": "bar"}, ttl=60)
|
||||
print(f"Set: test_key")
|
||||
print(f"Get: {Cache.get('test_key')}")
|
||||
|
||||
elif cmd == "clear-cache":
|
||||
Cache.clear_all()
|
||||
print("Cache cleared")
|
||||
|
||||
else:
|
||||
print("Usage: python database.py [stats|init|cache-test|clear-cache]")
|
||||
print(f"\nDatabase: {DB_PATH}")
|
||||
print(f"Stats: {json.dumps(get_stats(), indent=2)}")
|
||||
@@ -39,7 +39,7 @@ sys.path.insert(0, str(Path(__file__).parent.parent))

 from mcp.server.fastmcp import FastMCP
 from config import config
-from storage.database import Projects, Components, SyncHistory, ActivityLog, get_stats
+from storage.json_store import Projects, Components, SyncHistory, ActivityLog, get_stats
 from figma.figma_tools import FigmaToolSuite

 # Import new ingestion modules
@@ -64,10 +64,9 @@ from browser_logger import router as browser_log_router

 # Legacy imports (will gradually migrate these)
 from config import config
-from storage.database import (
+from storage.json_store import (
     Projects, Components, SyncHistory, ActivityLog, Teams, Cache, get_stats,
-    FigmaFiles, ESREDefinitions, TokenDriftDetector, CodeMetrics, TestResults,
-    get_connection
+    FigmaFiles, CodeMetrics, TestResults, TokenDrift, Tokens, Styles
 )
 from figma.figma_tools import FigmaToolSuite
@@ -405,16 +404,15 @@ async def health():
     import psutil
     from pathlib import Path

-    # ❤️ Check Heart (database) connectivity
+    # ❤️ Check Heart (storage) connectivity
     db_ok = False
     try:
-        with get_connection() as conn:
-            conn.execute("SELECT 1").fetchone()
-        db_ok = True
+        from storage.json_store import DATA_DIR
+        db_ok = DATA_DIR.exists()
     except Exception as e:
         import traceback
         error_trace = traceback.format_exc()
-        print(f"🏥 VITAL SIGN: Heart (database) error: {type(e).__name__}: {e}", flush=True)
+        print(f"🏥 VITAL SIGN: Heart (storage) error: {type(e).__name__}: {e}", flush=True)
         print(f" Traceback:\n{error_trace}", flush=True)
         pass
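`DATA_DIR.exists()` is a much weaker probe than `SELECT 1`: it proves the directory is present, not that it is writable. If the health endpoint should also catch full disks or permission drift, a round-trip write is a cheap upgrade; a sketch, not part of this commit:

```python
# Sketch: stricter storage probe than DATA_DIR.exists(); writes and removes
# a scratch file to confirm the data volume is writable. Not in this commit.
from pathlib import Path


def storage_ok(data_dir: Path) -> bool:
    try:
        probe = data_dir / "_system" / ".health-probe"
        probe.parent.mkdir(parents=True, exist_ok=True)
        probe.write_text("ok")
        probe.unlink()
        return True
    except OSError:
        return False
```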
@@ -12,7 +12,7 @@ from datetime import datetime, timedelta
 from typing import Optional, Dict, Any
 from atlassian import Jira, Confluence

-from storage.database import get_connection
+from storage.json_store import read_json, write_json, SYSTEM_DIR


 class AtlassianAuth:
@@ -11,7 +11,7 @@ from typing import Optional, Dict, Any
 from datetime import datetime
 from enum import Enum

-from storage.database import get_connection  # Use absolute import (tools/ is in sys.path)
+from storage.json_store import ActivityLog, append_jsonl, read_jsonl, SYSTEM_DIR  # JSON storage


 class AuditEventType(Enum):
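Here the audit trail switches from SQL inserts to JSONL appends. `append_jsonl` exists per the import above; a usage sketch, with the event shape and argument convention assumed:

```python
# Sketch: recording an audit event via append_jsonl. The function name comes
# from the import above; its signature and the record shape are assumed.
from datetime import datetime, timezone

from storage.json_store import append_jsonl


def record_audit(action: str, entity_id: str, severity: str = "info") -> None:
    append_jsonl("audit", {
        "ts": datetime.now(timezone.utc).isoformat(),
        "action": action,
        "entity_id": entity_id,
        "severity": severity,
    })
```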
@@ -17,7 +17,7 @@ from pathlib import Path
 import sys
 sys.path.insert(0, str(Path(__file__).parent.parent.parent))

-from storage.database import get_connection, Projects
+from storage.json_store import Projects, Components, Tokens
 from analyze.scanner import ProjectScanner
 from ..config import mcp_config
@@ -22,7 +22,7 @@ from pathlib import Path
 # Note: sys.path is set up by the importing module (server.py)
 # Do NOT modify sys.path here as it causes relative import issues

-from storage.database import get_connection
+from storage.json_store import Projects, ActivityLog
 from .config import mcp_config, integration_config
 from .context.project_context import get_context_manager, ProjectContext
 from .tools.project_tools import PROJECT_TOOLS, ProjectTools
@@ -12,7 +12,7 @@ from datetime import datetime, timedelta
 from enum import Enum

 from ..config import mcp_config
-from storage.database import get_connection
+from storage.json_store import Cache, read_json, write_json, SYSTEM_DIR


 class CircuitState(Enum):
@@ -13,7 +13,7 @@ from datetime import datetime
 from enum import Enum

 from .config import mcp_config
-from storage.database import get_connection  # Use absolute import (tools/ is in sys.path)
+from storage.json_store import ActivityLog, read_json, write_json, DATA_DIR  # JSON storage


 class OperationStatus(Enum):
@@ -16,7 +16,7 @@ from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
 from cryptography.hazmat.backends import default_backend

 from .config import mcp_config
-from storage.database import get_connection  # Use absolute import (tools/ is in sys.path)
+from storage.json_store import read_json, write_json, SYSTEM_DIR  # JSON storage


 class CredentialVault:
@@ -20,7 +20,7 @@ from mcp import types
 from ..context.project_context import get_context_manager
 from ..security import CredentialVault
 from ..audit import AuditLog, AuditEventType
-from storage.database import get_connection  # Use absolute import (tools/ is in sys.path)
+from storage.json_store import Projects, Components, Tokens, ActivityLog  # JSON storage


 # Tool definitions (metadata for Claude)
@@ -38,7 +38,7 @@ import httpx
 sys.path.insert(0, str(Path(__file__).parent.parent))

 from config import config
-from storage.database import Cache, ActivityLog
+from storage.json_store import Cache, ActivityLog

 @dataclass
 class DesignToken:
tools/storage/json_store.py (new file, 1026 lines; diff suppressed because it is too large)