dss/tools/storage/json_store.py
Commit d6c25cb4db by Bruno Sarlo: Simplify code documentation, remove organism terminology
- Remove biological metaphors from docstrings (organism, sensory, genetic, nutrient, etc.)
- Simplify documentation to be minimal and structured for fast model parsing
- Complete SQLite to JSON storage migration (project_manager.py, json_store.py)
- Add Integrations and IntegrationHealth classes to json_store.py
- Add kill_port() function to server.py for port conflict handling
- All 33 tests pass

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-10 11:02:00 -03:00

"""
DSS JSON Storage Layer

Pure JSON file-based storage following the DSS canonical structure.
No SQLite - everything is JSON for git-friendly diffs.

Structure:
    .dss/data/
    ├── _system/    # DSS internal (config, cache, activity)
    ├── projects/   # Per-project data (tokens, components, etc.)
    └── teams/      # Team definitions
"""

import json
import time
import shutil
import hashlib
import fcntl
import uuid
from pathlib import Path
from datetime import datetime, date, timedelta
from typing import Optional, Dict, List, Any, Union
from contextlib import contextmanager

# Base paths
DATA_DIR = Path(__file__).parent.parent.parent / ".dss" / "data"
SYSTEM_DIR = DATA_DIR / "_system"
PROJECTS_DIR = DATA_DIR / "projects"
TEAMS_DIR = DATA_DIR / "teams"

# Ensure directories exist
for d in [DATA_DIR, SYSTEM_DIR, SYSTEM_DIR / "cache", SYSTEM_DIR / "activity", PROJECTS_DIR, TEAMS_DIR]:
    d.mkdir(parents=True, exist_ok=True)

# === File Locking Utilities ===

@contextmanager
def file_lock(path: Path, exclusive: bool = True):
    """Context manager for file locking."""
    lock_path = path.with_suffix(path.suffix + ".lock")
    lock_path.parent.mkdir(parents=True, exist_ok=True)
    with open(lock_path, 'w') as lock_file:
        try:
            fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
            yield
        finally:
            fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN)

def read_json(path: Path, default: Any = None) -> Any:
    """Read JSON file with locking."""
    if not path.exists():
        return default
    with file_lock(path, exclusive=False):
        try:
            return json.loads(path.read_text())
        except (json.JSONDecodeError, IOError):
            return default

def write_json(path: Path, data: Any, indent: int = 2) -> None:
    """Write JSON file with locking."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with file_lock(path, exclusive=True):
        path.write_text(json.dumps(data, indent=indent, default=str))

def append_jsonl(path: Path, record: Dict) -> None:
    """Append to JSON Lines file."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with file_lock(path, exclusive=True):
        with open(path, 'a') as f:
            f.write(json.dumps(record, default=str) + '\n')

def read_jsonl(path: Path, limit: int = None, offset: int = 0) -> List[Dict]:
    """Read JSON Lines file with pagination (newest records first)."""
    if not path.exists():
        return []
    records = []
    with file_lock(path, exclusive=False):
        with open(path, 'r') as f:
            lines = f.readlines()
    # Reverse for newest first
    lines = list(reversed(lines))
    for i, line in enumerate(lines):
        if i < offset:
            continue
        if limit and len(records) >= limit:
            break
        try:
            records.append(json.loads(line.strip()))
        except json.JSONDecodeError:
            continue
    return records
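
# Usage sketch for the helpers above (hypothetical paths and records; kept as
# comments so nothing executes at import time):
#
#     write_json(DATA_DIR / "example.json", {"hello": "world"})
#     read_json(DATA_DIR / "example.json")          # -> {"hello": "world"}
#     append_jsonl(DATA_DIR / "log.jsonl", {"n": 1})
#     append_jsonl(DATA_DIR / "log.jsonl", {"n": 2})
#     read_jsonl(DATA_DIR / "log.jsonl", limit=1)   # -> [{"n": 2}]  (newest first)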

# === Cache (TTL-based) ===

class Cache:
    """TTL-based cache using JSON files."""

    CACHE_DIR = SYSTEM_DIR / "cache"
    DEFAULT_TTL = 300  # 5 minutes

    @staticmethod
    def _key_to_path(key: str) -> Path:
        """Convert cache key to file path."""
        key_hash = hashlib.md5(key.encode()).hexdigest()
        return Cache.CACHE_DIR / f"{key_hash}.json"

    @staticmethod
    def set(key: str, value: Any, ttl: int = None) -> None:
        """Store a value with TTL."""
        ttl = ttl or Cache.DEFAULT_TTL
        data = {
            "key": key,
            "value": value,
            "created_at": int(time.time()),
            "expires_at": int(time.time()) + ttl
        }
        write_json(Cache._key_to_path(key), data)

    @staticmethod
    def get(key: str) -> Optional[Any]:
        """Get a value if not expired."""
        path = Cache._key_to_path(key)
        data = read_json(path)
        if not data:
            return None
        if data.get("expires_at", 0) <= int(time.time()):
            path.unlink(missing_ok=True)
            return None
        return data.get("value")

    @staticmethod
    def delete(key: str) -> None:
        """Delete a cache entry."""
        Cache._key_to_path(key).unlink(missing_ok=True)

    @staticmethod
    def clear_expired() -> int:
        """Remove all expired entries."""
        count = 0
        now = int(time.time())
        for path in Cache.CACHE_DIR.glob("*.json"):
            data = read_json(path)
            if data and data.get("expires_at", 0) <= now:
                path.unlink(missing_ok=True)
                count += 1
        return count

    @staticmethod
    def clear_all() -> None:
        """Clear entire cache."""
        for path in Cache.CACHE_DIR.glob("*.json"):
            path.unlink(missing_ok=True)
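
# Usage sketch (hypothetical key and value; comments only, nothing runs at import):
#
#     Cache.set("figma:file:abc123", {"name": "Design Kit"}, ttl=600)
#     Cache.get("figma:file:abc123")   # -> {"name": "Design Kit"} while the TTL holds
#     Cache.get("figma:file:abc123")   # -> None once expires_at has passed
#     Cache.clear_expired()            # -> number of entries removed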

# === Projects ===

class Projects:
    """Project CRUD operations using JSON files."""

    @staticmethod
    def _project_dir(project_id: str) -> Path:
        return PROJECTS_DIR / project_id

    @staticmethod
    def _manifest_path(project_id: str) -> Path:
        return Projects._project_dir(project_id) / "manifest.json"

    @staticmethod
    def _init_project_structure(project_id: str) -> None:
        """Initialize project folder structure."""
        base = Projects._project_dir(project_id)
        for subdir in ["tokens", "components", "styles", "figma", "metrics"]:
            (base / subdir).mkdir(parents=True, exist_ok=True)

    @staticmethod
    def create(id: str, name: str, description: str = "", figma_file_key: str = "") -> Dict:
        """Create a new project."""
        Projects._init_project_structure(id)
        now = datetime.utcnow().isoformat()
        manifest = {
            "id": id,
            "name": name,
            "description": description,
            "figma_file_key": figma_file_key,
            "status": "active",
            "created_at": now,
            "updated_at": now
        }
        write_json(Projects._manifest_path(id), manifest)
        # Initialize empty token files for canonical structure
        for token_type in ["colors", "spacing", "typography", "borders", "shadows", "motion"]:
            token_path = Projects._project_dir(id) / "tokens" / f"{token_type}.json"
            if not token_path.exists():
                write_json(token_path, {"$type": token_type, "tokens": {}})
        return manifest

    @staticmethod
    def get(id: str) -> Optional[Dict]:
        """Get project by ID."""
        return read_json(Projects._manifest_path(id))

    @staticmethod
    def list(status: str = None) -> List[Dict]:
        """List all projects, optionally filtered by status."""
        projects = []
        for project_dir in PROJECTS_DIR.iterdir():
            if project_dir.is_dir() and not project_dir.name.startswith("_"):
                manifest = read_json(project_dir / "manifest.json")
                if manifest:
                    if status is None or manifest.get("status") == status:
                        projects.append(manifest)
        # Sort by updated_at descending
        projects.sort(key=lambda p: p.get("updated_at", ""), reverse=True)
        return projects

    @staticmethod
    def update(id: str, **kwargs) -> Optional[Dict]:
        """Update project fields."""
        manifest = Projects.get(id)
        if not manifest:
            return None
        manifest.update(kwargs)
        manifest["updated_at"] = datetime.utcnow().isoformat()
        write_json(Projects._manifest_path(id), manifest)
        return manifest

    @staticmethod
    def delete(id: str) -> bool:
        """Delete a project (moves to _archived)."""
        project_dir = Projects._project_dir(id)
        if not project_dir.exists():
            return False
        # Move to archived instead of hard delete
        archived_dir = PROJECTS_DIR / "_archived"
        archived_dir.mkdir(exist_ok=True)
        shutil.move(str(project_dir), str(archived_dir / f"{id}_{int(time.time())}"))
        return True
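
# Usage sketch (hypothetical project data; comments only):
#
#     Projects.create("web-app", "Web App", description="Marketing site")
#     Projects.update("web-app", status="paused")
#     Projects.list(status="paused")   # -> [manifest for "web-app", ...]
#     Projects.delete("web-app")       # moves the folder under projects/_archived/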

# === Components ===

class Components:
    """Component operations using JSON files."""

    @staticmethod
    def _components_dir(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "components"

    @staticmethod
    def _component_path(project_id: str, component_id: str) -> Path:
        # Sanitize component name for filesystem
        safe_name = component_id.replace("/", "_").replace("\\", "_")
        return Components._components_dir(project_id) / f"{safe_name}.json"

    @staticmethod
    def upsert(project_id: str, components: List[Dict]) -> int:
        """Bulk upsert components."""
        count = 0
        now = datetime.utcnow().isoformat()
        for comp in components:
            comp_id = comp.get("id") or f"{project_id}-{comp['name']}"
            existing = Components.get(comp_id, project_id)
            component_data = {
                "id": comp_id,
                "project_id": project_id,
                "name": comp["name"],
                "figma_key": comp.get("figma_key") or comp.get("key"),
                "description": comp.get("description", ""),
                "properties": comp.get("properties", {}),
                "variants": comp.get("variants", []),
                "code_generated": comp.get("code_generated", False),
                "created_at": existing.get("created_at", now) if existing else now,
                "updated_at": now
            }
            write_json(Components._component_path(project_id, comp_id), component_data)
            count += 1
        return count

    @staticmethod
    def list(project_id: str) -> List[Dict]:
        """List all components for a project."""
        components = []
        comp_dir = Components._components_dir(project_id)
        if not comp_dir.exists():
            return []
        for path in comp_dir.glob("*.json"):
            comp = read_json(path)
            if comp:
                components.append(comp)
        components.sort(key=lambda c: c.get("name", ""))
        return components

    @staticmethod
    def get(id: str, project_id: str = None) -> Optional[Dict]:
        """Get component by ID."""
        if project_id:
            return read_json(Components._component_path(project_id, id))
        # Search all projects
        for project_dir in PROJECTS_DIR.iterdir():
            if project_dir.is_dir():
                comp = read_json(Components._component_path(project_dir.name, id))
                if comp:
                    return comp
        return None

# === Tokens ===

class Tokens:
    """Token operations following DSS canonical structure."""

    CANONICAL_TYPES = ["colors", "spacing", "typography", "borders", "shadows", "motion"]

    @staticmethod
    def _tokens_dir(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "tokens"

    @staticmethod
    def get_all(project_id: str) -> Dict[str, Dict]:
        """Get all tokens for a project, organized by type."""
        tokens = {}
        tokens_dir = Tokens._tokens_dir(project_id)
        for token_type in Tokens.CANONICAL_TYPES:
            path = tokens_dir / f"{token_type}.json"
            data = read_json(path, {"$type": token_type, "tokens": {}})
            tokens[token_type] = data.get("tokens", {})
        return tokens

    @staticmethod
    def get_by_type(project_id: str, token_type: str) -> Dict:
        """Get tokens of a specific type."""
        path = Tokens._tokens_dir(project_id) / f"{token_type}.json"
        data = read_json(path, {"$type": token_type, "tokens": {}})
        return data.get("tokens", {})

    @staticmethod
    def set_by_type(project_id: str, token_type: str, tokens: Dict) -> None:
        """Set tokens of a specific type."""
        path = Tokens._tokens_dir(project_id) / f"{token_type}.json"
        write_json(path, {
            "$type": token_type,
            "updated_at": datetime.utcnow().isoformat(),
            "tokens": tokens
        })

    @staticmethod
    def merge(project_id: str, token_type: str, new_tokens: Dict, strategy: str = "LAST") -> Dict:
        """Merge tokens with strategy."""
        existing = Tokens.get_by_type(project_id, token_type)
        if strategy == "FIRST":
            # Keep existing, only add new
            merged = {**new_tokens, **existing}
        elif strategy == "LAST":
            # Override with new
            merged = {**existing, **new_tokens}
        elif strategy == "MERGE_METADATA":
            # Deep merge: combine dict values key-by-key (one level)
            merged = existing.copy()
            for key, value in new_tokens.items():
                if key in merged and isinstance(merged[key], dict) and isinstance(value, dict):
                    merged[key] = {**merged[key], **value}
                else:
                    merged[key] = value
        else:
            merged = {**existing, **new_tokens}
        Tokens.set_by_type(project_id, token_type, merged)
        return merged
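
# Merge-strategy sketch (hypothetical token values; comments only):
#
#     Tokens.set_by_type("web-app", "colors", {"primary": "#003366"})
#     Tokens.merge("web-app", "colors",
#                  {"primary": "#0055AA", "accent": "#FF6600"}, strategy="FIRST")
#     # -> {"primary": "#003366", "accent": "#FF6600"}  (existing keys win, new keys added)
#     Tokens.merge("web-app", "colors", {"primary": "#0055AA"}, strategy="LAST")
#     # -> {"primary": "#0055AA", "accent": "#FF6600"}  (incoming keys win)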

# === Styles ===

class Styles:
    """Style operations."""

    STYLE_TYPES = ["TEXT", "FILL", "EFFECT", "GRID"]

    @staticmethod
    def _styles_dir(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "styles"

    @staticmethod
    def upsert(project_id: str, style_type: str, styles: List[Dict]) -> int:
        """Upsert styles of a given type."""
        path = Styles._styles_dir(project_id) / f"{style_type.lower()}.json"
        existing_data = read_json(path, {"$type": style_type, "styles": []})
        existing_styles = {s["id"]: s for s in existing_data.get("styles", [])}
        now = datetime.utcnow().isoformat()
        for style in styles:
            style["updated_at"] = now
            if style["id"] not in existing_styles:
                style["created_at"] = now
            else:
                # Preserve the original creation timestamp on update
                style.setdefault("created_at", existing_styles[style["id"]].get("created_at"))
            existing_styles[style["id"]] = style
        write_json(path, {
            "$type": style_type,
            "updated_at": now,
            "styles": list(existing_styles.values())
        })
        return len(styles)

    @staticmethod
    def list(project_id: str, style_type: str = None) -> List[Dict]:
        """List styles, optionally filtered by type."""
        styles = []
        styles_dir = Styles._styles_dir(project_id)
        if not styles_dir.exists():
            return []
        types_to_check = [style_type.lower()] if style_type else [t.lower() for t in Styles.STYLE_TYPES]
        for st in types_to_check:
            path = styles_dir / f"{st}.json"
            data = read_json(path)
            if data:
                styles.extend(data.get("styles", []))
        return styles

# === Sync History ===

class SyncHistory:
    """Sync history using JSON Lines."""

    @staticmethod
    def _history_path(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "figma" / "sync-history.jsonl"

    @staticmethod
    def start(project_id: str, sync_type: str) -> str:
        """Start a sync, returns sync ID."""
        sync_id = str(uuid.uuid4())[:8]
        record = {
            "id": sync_id,
            "project_id": project_id,
            "sync_type": sync_type,
            "status": "running",
            "started_at": datetime.utcnow().isoformat(),
            "completed_at": None,
            "items_synced": 0,
            "changes": None,
            "error_message": None,
            "duration_ms": None
        }
        append_jsonl(SyncHistory._history_path(project_id), record)
        return sync_id

    @staticmethod
    def complete(project_id: str, sync_id: str, status: str, items_synced: int = 0,
                 changes: Dict = None, error: str = None) -> None:
        """Complete a sync."""
        path = SyncHistory._history_path(project_id)
        records = read_jsonl(path, limit=1000)
        # Find and update the record
        completed_at = datetime.utcnow().isoformat()
        for record in records:
            if record.get("id") == sync_id:
                started = datetime.fromisoformat(record["started_at"])
                duration_ms = int((datetime.utcnow() - started).total_seconds() * 1000)
                # Append completion record
                completion = {
                    "id": sync_id,
                    "project_id": project_id,
                    "sync_type": record.get("sync_type"),
                    "status": status,
                    "started_at": record["started_at"],
                    "completed_at": completed_at,
                    "items_synced": items_synced,
                    "changes": changes,
                    "error_message": error,
                    "duration_ms": duration_ms
                }
                append_jsonl(path, completion)
                break

    @staticmethod
    def recent(project_id: str = None, limit: int = 20) -> List[Dict]:
        """Get recent sync history."""
        if project_id:
            return read_jsonl(SyncHistory._history_path(project_id), limit=limit)
        # Aggregate from all projects
        all_records = []
        for project_dir in PROJECTS_DIR.iterdir():
            if project_dir.is_dir() and not project_dir.name.startswith("_"):
                records = read_jsonl(project_dir / "figma" / "sync-history.jsonl", limit=limit)
                all_records.extend(records)
        # Sort by started_at descending
        all_records.sort(key=lambda r: r.get("started_at", ""), reverse=True)
        return all_records[:limit]
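
# Lifecycle sketch (hypothetical values; comments only). complete() appends a
# second record with the same id; read_jsonl returns newest first, so the
# completed record shadows the "running" one in recent() output:
#
#     sync_id = SyncHistory.start("web-app", "tokens")
#     # ... run the sync ...
#     SyncHistory.complete("web-app", sync_id, "success", items_synced=42)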

# === Activity Log ===

class ActivityLog:
    """Activity logging using daily JSON Lines files."""

    CATEGORIES = {
        'design_system': ['extract_tokens', 'extract_components', 'sync_tokens', 'validate_tokens'],
        'code': ['analyze_components', 'find_inline_styles', 'generate_code', 'get_quick_wins'],
        'configuration': ['config_updated', 'figma_token_updated', 'mode_changed', 'service_configured'],
        'project': ['project_created', 'project_updated', 'project_deleted'],
        'team': ['team_context_changed', 'project_context_changed'],
        'storybook': ['scan_storybook', 'generate_story', 'generate_theme']
    }

    @staticmethod
    def _log_path(day: date = None) -> Path:
        day = day or date.today()
        return SYSTEM_DIR / "activity" / f"{day.isoformat()}.jsonl"

    @staticmethod
    def log(action: str,
            entity_type: str = None,
            entity_id: str = None,
            entity_name: str = None,
            project_id: str = None,
            user_id: str = None,
            user_name: str = None,
            team_context: str = None,
            description: str = None,
            category: str = None,
            severity: str = 'info',
            details: Dict = None,
            ip_address: str = None,
            user_agent: str = None) -> None:
        """Log an activity."""
        # Auto-detect category
        if not category:
            for cat, actions in ActivityLog.CATEGORIES.items():
                if action in actions:
                    category = cat
                    break
        category = category or 'other'
        # Generate description if not provided
        if not description:
            entity_str = f"{entity_type} '{entity_name}'" if entity_name else (entity_type or "item")
            description = f"{action.replace('_', ' ').title()} {entity_str}"
        record = {
            "id": str(uuid.uuid4())[:12],
            "timestamp": datetime.utcnow().isoformat(),
            "action": action,
            "entity_type": entity_type,
            "entity_id": entity_id,
            "entity_name": entity_name,
            "project_id": project_id,
            "user_id": user_id,
            "user_name": user_name,
            "team_context": team_context,
            "category": category,
            "severity": severity,
            "description": description,
            "details": details,
            "ip_address": ip_address,
            "user_agent": user_agent
        }
        append_jsonl(ActivityLog._log_path(), record)

    @staticmethod
    def recent(project_id: str = None, limit: int = 50, offset: int = 0, days: int = 7) -> List[Dict]:
        """Get recent activity."""
        all_records = []
        # Read from recent days
        for i in range(days):
            day = date.today() - timedelta(days=i)
            records = read_jsonl(ActivityLog._log_path(day), limit=limit * 2)
            if project_id:
                records = [r for r in records if r.get("project_id") == project_id]
            all_records.extend(records)
        # Sort by timestamp descending
        all_records.sort(key=lambda r: r.get("timestamp", ""), reverse=True)
        return all_records[offset:offset + limit]

    @staticmethod
    def search(project_id: str = None, user_id: str = None, action: str = None,
               category: str = None, severity: str = None, days: int = 30,
               limit: int = 100, offset: int = 0) -> List[Dict]:
        """Search activity logs."""
        all_records = []
        for i in range(days):
            day = date.today() - timedelta(days=i)
            records = read_jsonl(ActivityLog._log_path(day))
            for r in records:
                if project_id and r.get("project_id") != project_id:
                    continue
                if user_id and r.get("user_id") != user_id:
                    continue
                if action and r.get("action") != action:
                    continue
                if category and r.get("category") != category:
                    continue
                if severity and r.get("severity") != severity:
                    continue
                all_records.append(r)
        all_records.sort(key=lambda r: r.get("timestamp", ""), reverse=True)
        return all_records[offset:offset + limit]
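
# Usage sketch (hypothetical entities; comments only). Category is inferred
# from CATEGORIES when omitted ("sync_tokens" -> "design_system"):
#
#     ActivityLog.log("sync_tokens", entity_type="project", entity_name="Web App",
#                     project_id="web-app")
#     ActivityLog.recent(project_id="web-app", limit=10)
#     ActivityLog.search(category="design_system", days=30)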

# === Teams ===

class Teams:
    """Team management using JSON files."""

    @staticmethod
    def _team_dir(team_id: str) -> Path:
        return TEAMS_DIR / team_id

    @staticmethod
    def _manifest_path(team_id: str) -> Path:
        return Teams._team_dir(team_id) / "manifest.json"

    @staticmethod
    def create(id: str, name: str, description: str = "") -> Dict:
        """Create a team."""
        team_dir = Teams._team_dir(id)
        team_dir.mkdir(parents=True, exist_ok=True)
        now = datetime.utcnow().isoformat()
        manifest = {
            "id": id,
            "name": name,
            "description": description,
            "settings": {},
            "created_at": now
        }
        write_json(Teams._manifest_path(id), manifest)
        write_json(team_dir / "members.json", {"members": []})
        write_json(team_dir / "access.json", {"projects": {}})
        return manifest

    @staticmethod
    def get(id: str) -> Optional[Dict]:
        """Get team by ID."""
        return read_json(Teams._manifest_path(id))

    @staticmethod
    def list() -> List[Dict]:
        """List all teams."""
        teams = []
        for team_dir in TEAMS_DIR.iterdir():
            if team_dir.is_dir():
                manifest = read_json(team_dir / "manifest.json")
                if manifest:
                    teams.append(manifest)
        teams.sort(key=lambda t: t.get("name", ""))
        return teams

    @staticmethod
    def add_member(team_id: str, user_id: str, role: str) -> None:
        """Add or update team member."""
        path = Teams._team_dir(team_id) / "members.json"
        data = read_json(path, {"members": []})
        # Update existing member's role, or add a new member (for/else)
        members = data.get("members", [])
        for m in members:
            if m.get("user_id") == user_id:
                m["role"] = role
                m["updated_at"] = datetime.utcnow().isoformat()
                break
        else:
            members.append({
                "user_id": user_id,
                "role": role,
                "joined_at": datetime.utcnow().isoformat()
            })
        data["members"] = members
        write_json(path, data)

    @staticmethod
    def get_members(team_id: str) -> List[Dict]:
        """Get team members."""
        path = Teams._team_dir(team_id) / "members.json"
        data = read_json(path, {"members": []})
        return data.get("members", [])

    @staticmethod
    def get_user_role(team_id: str, user_id: str) -> Optional[str]:
        """Get user's role in team."""
        members = Teams.get_members(team_id)
        for m in members:
            if m.get("user_id") == user_id:
                return m.get("role")
        return None

    @staticmethod
    def set_project_access(team_id: str, project_id: str, access_level: str) -> None:
        """Set team's access level to a project."""
        path = Teams._team_dir(team_id) / "access.json"
        data = read_json(path, {"projects": {}})
        data["projects"][project_id] = {
            "access_level": access_level,
            "granted_at": datetime.utcnow().isoformat()
        }
        write_json(path, data)
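
# Usage sketch (hypothetical ids and roles; comments only):
#
#     Teams.create("design-core", "Design Core")
#     Teams.add_member("design-core", "u-42", "editor")
#     Teams.get_user_role("design-core", "u-42")        # -> "editor"
#     Teams.set_project_access("design-core", "web-app", "write")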

# === Figma Files ===

class FigmaFiles:
    """Figma file management."""

    @staticmethod
    def _files_path(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "figma" / "files.json"

    @staticmethod
    def create(project_id: str, figma_url: str, file_name: str, file_key: str) -> Dict:
        """Add a Figma file to project."""
        path = FigmaFiles._files_path(project_id)
        data = read_json(path, {"files": []})
        file_id = str(uuid.uuid4())[:8]
        now = datetime.utcnow().isoformat()
        new_file = {
            "id": file_id,
            "project_id": project_id,
            "figma_url": figma_url,
            "file_name": file_name,
            "file_key": file_key,
            "sync_status": "pending",
            "last_synced": None,
            "created_at": now
        }
        data["files"].append(new_file)
        write_json(path, data)
        return new_file

    @staticmethod
    def list(project_id: str) -> List[Dict]:
        """List Figma files for project."""
        data = read_json(FigmaFiles._files_path(project_id), {"files": []})
        return data.get("files", [])

    @staticmethod
    def update_sync_status(project_id: str, file_id: str, status: str) -> Optional[Dict]:
        """Update sync status."""
        path = FigmaFiles._files_path(project_id)
        data = read_json(path, {"files": []})
        for f in data.get("files", []):
            if f.get("id") == file_id:
                f["sync_status"] = status
                if status == "synced":
                    f["last_synced"] = datetime.utcnow().isoformat()
                write_json(path, data)
                return f
        return None

# === Metrics ===

class CodeMetrics:
    """Code metrics storage."""

    @staticmethod
    def _metrics_path(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "metrics" / "code.json"

    @staticmethod
    def record(project_id: str, component_id: str, metrics: Dict) -> None:
        """Record code metrics for component."""
        path = CodeMetrics._metrics_path(project_id)
        data = read_json(path, {"components": {}})
        metrics["updated_at"] = datetime.utcnow().isoformat()
        data["components"][component_id] = metrics
        write_json(path, data)

    @staticmethod
    def get(project_id: str, component_id: str = None) -> Union[Dict, List[Dict]]:
        """Get metrics."""
        data = read_json(CodeMetrics._metrics_path(project_id), {"components": {}})
        if component_id:
            return data["components"].get(component_id)
        return data["components"]

class TestResults:
    """Test results storage."""

    @staticmethod
    def _results_path(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "metrics" / "tests.json"

    @staticmethod
    def record(project_id: str, component_id: str, test_type: str,
               passed: bool, score: float = None, failures: List[str] = None) -> Dict:
        """Record test result."""
        path = TestResults._results_path(project_id)
        data = read_json(path, {"results": []})
        result = {
            "id": str(uuid.uuid4())[:8],
            "component_id": component_id,
            "test_type": test_type,
            "passed": passed,
            "score": score,
            "failures": failures or [],
            "run_at": datetime.utcnow().isoformat()
        }
        data["results"].append(result)
        write_json(path, data)
        return result

    @staticmethod
    def list(project_id: str, component_id: str = None, test_type: str = None) -> List[Dict]:
        """List test results."""
        data = read_json(TestResults._results_path(project_id), {"results": []})
        results = data.get("results", [])
        if component_id:
            results = [r for r in results if r.get("component_id") == component_id]
        if test_type:
            results = [r for r in results if r.get("test_type") == test_type]
        results.sort(key=lambda r: r.get("run_at", ""), reverse=True)
        return results
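
# Usage sketch (hypothetical component and test data; comments only):
#
#     TestResults.record("web-app", "Button", "a11y", passed=False,
#                        score=0.82, failures=["missing aria-label"])
#     TestResults.list("web-app", component_id="Button", test_type="a11y")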

class TokenDrift:
    """Token drift tracking."""

    @staticmethod
    def _drift_path(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "metrics" / "drift.json"

    @staticmethod
    def record(project_id: str, component_id: str, property_name: str,
               hardcoded_value: str, file_path: str, line_number: int,
               severity: str = "warning", suggested_token: str = None) -> Dict:
        """Record token drift."""
        path = TokenDrift._drift_path(project_id)
        data = read_json(path, {"drift": []})
        drift = {
            "id": str(uuid.uuid4())[:8],
            "component_id": component_id,
            "property_name": property_name,
            "hardcoded_value": hardcoded_value,
            "suggested_token": suggested_token,
            "severity": severity,
            "file_path": file_path,
            "line_number": line_number,
            "status": "pending",
            "detected_at": datetime.utcnow().isoformat()
        }
        data["drift"].append(drift)
        write_json(path, data)
        return drift

    @staticmethod
    def list(project_id: str, status: str = None, severity: str = None) -> List[Dict]:
        """List drift issues."""
        data = read_json(TokenDrift._drift_path(project_id), {"drift": []})
        drift = data.get("drift", [])
        if status:
            drift = [d for d in drift if d.get("status") == status]
        if severity:
            drift = [d for d in drift if d.get("severity") == severity]
        return drift

    @staticmethod
    def update_status(project_id: str, drift_id: str, status: str) -> Optional[Dict]:
        """Update drift status."""
        path = TokenDrift._drift_path(project_id)
        data = read_json(path, {"drift": []})
        for d in data.get("drift", []):
            if d.get("id") == drift_id:
                d["status"] = status
                write_json(path, data)
                return d
        return None

# === Integrations ===

class Integrations:
    """Project integration configuration storage."""

    @staticmethod
    def _integrations_path(project_id: str) -> Path:
        return PROJECTS_DIR / project_id / "integrations.json"

    @staticmethod
    def list(project_id: str, user_id: int = None) -> List[Dict]:
        """List integrations for a project."""
        data = read_json(Integrations._integrations_path(project_id), {"integrations": []})
        integrations = data.get("integrations", [])
        if user_id is not None:
            integrations = [i for i in integrations if i.get("user_id") == user_id]
        return integrations

    @staticmethod
    def get(project_id: str, user_id: int, integration_type: str) -> Optional[Dict]:
        """Get specific integration."""
        integrations = Integrations.list(project_id, user_id)
        for i in integrations:
            if i.get("integration_type") == integration_type:
                return i
        return None

    @staticmethod
    def upsert(project_id: str, user_id: int, integration_type: str,
               config: str, enabled: bool = True) -> Dict:
        """Create or update integration."""
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        now = datetime.utcnow().isoformat()
        # Find existing
        for i in data["integrations"]:
            if i.get("user_id") == user_id and i.get("integration_type") == integration_type:
                i["config"] = config
                i["enabled"] = enabled
                i["updated_at"] = now
                write_json(path, data)
                return i
        # Create new
        new_integration = {
            "id": str(uuid.uuid4())[:8],
            "project_id": project_id,
            "user_id": user_id,
            "integration_type": integration_type,
            "config": config,
            "enabled": enabled,
            "created_at": now,
            "updated_at": now,
            "last_used_at": None
        }
        data["integrations"].append(new_integration)
        write_json(path, data)
        return new_integration

    @staticmethod
    def update(project_id: str, user_id: int, integration_type: str,
               config: str = None, enabled: bool = None) -> Optional[Dict]:
        """Update integration fields."""
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        for i in data["integrations"]:
            if i.get("user_id") == user_id and i.get("integration_type") == integration_type:
                if config is not None:
                    i["config"] = config
                if enabled is not None:
                    i["enabled"] = enabled
                i["updated_at"] = datetime.utcnow().isoformat()
                write_json(path, data)
                return i
        return None

    @staticmethod
    def delete(project_id: str, user_id: int, integration_type: str) -> bool:
        """Delete integration."""
        path = Integrations._integrations_path(project_id)
        data = read_json(path, {"integrations": []})
        original_len = len(data["integrations"])
        data["integrations"] = [
            i for i in data["integrations"]
            if not (i.get("user_id") == user_id and i.get("integration_type") == integration_type)
        ]
        if len(data["integrations"]) < original_len:
            write_json(path, data)
            return True
        return False
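
# Usage sketch (hypothetical integration config; comments only). `config` is
# stored as an opaque string here, e.g. pre-serialized JSON:
#
#     Integrations.upsert("web-app", user_id=1, integration_type="jira",
#                         config='{"base_url": "https://example.atlassian.net"}')
#     Integrations.get("web-app", user_id=1, integration_type="jira")
#     Integrations.delete("web-app", user_id=1, integration_type="jira")  # -> True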

class IntegrationHealth:
    """Integration health tracking."""

    @staticmethod
    def _health_path() -> Path:
        return SYSTEM_DIR / "integration_health.json"

    @staticmethod
    def list_all() -> List[Dict]:
        """List all integration health status."""
        data = read_json(IntegrationHealth._health_path(), {"health": {}})
        return [
            {"integration_type": k, **v}
            for k, v in data.get("health", {}).items()
        ]

    @staticmethod
    def get(integration_type: str) -> Optional[Dict]:
        """Get health for specific integration."""
        data = read_json(IntegrationHealth._health_path(), {"health": {}})
        if integration_type in data.get("health", {}):
            return {"integration_type": integration_type, **data["health"][integration_type]}
        return None

    @staticmethod
    def update(integration_type: str, is_healthy: bool = True,
               failure_count: int = None, circuit_open_until: str = None) -> Dict:
        """Update integration health."""
        path = IntegrationHealth._health_path()
        data = read_json(path, {"health": {}})
        if integration_type not in data["health"]:
            data["health"][integration_type] = {
                "is_healthy": True,
                "failure_count": 0,
                "last_failure_at": None,
                "last_success_at": None,
                "circuit_open_until": None
            }
        now = datetime.utcnow().isoformat()
        data["health"][integration_type]["is_healthy"] = is_healthy
        if is_healthy:
            data["health"][integration_type]["last_success_at"] = now
        else:
            data["health"][integration_type]["last_failure_at"] = now
        if failure_count is not None:
            data["health"][integration_type]["failure_count"] = failure_count
        if circuit_open_until is not None:
            data["health"][integration_type]["circuit_open_until"] = circuit_open_until
        write_json(path, data)
        return {"integration_type": integration_type, **data["health"][integration_type]}
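
# Usage sketch (hypothetical failure data; comments only). Callers can layer a
# simple circuit breaker on top of these fields:
#
#     IntegrationHealth.update("figma", is_healthy=False, failure_count=3,
#                              circuit_open_until="2025-12-10T12:00:00")
#     health = IntegrationHealth.get("figma")
#     # skip outbound calls while health["circuit_open_until"] is in the future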

# === Stats ===

def get_stats() -> Dict:
    """Get storage statistics."""
    stats = {
        # Count only real project/team directories, skipping internal ones like _archived
        "projects": len([p for p in PROJECTS_DIR.iterdir()
                         if p.is_dir() and not p.name.startswith("_")]) if PROJECTS_DIR.exists() else 0,
        "teams": len([t for t in TEAMS_DIR.iterdir() if t.is_dir()]) if TEAMS_DIR.exists() else 0,
        "cache_files": len(list((SYSTEM_DIR / "cache").glob("*.json"))) if (SYSTEM_DIR / "cache").exists() else 0,
        "activity_days": len(list((SYSTEM_DIR / "activity").glob("*.jsonl"))) if (SYSTEM_DIR / "activity").exists() else 0,
    }
    # Calculate total size
    total_size = 0
    for path in DATA_DIR.rglob("*"):
        if path.is_file():
            total_size += path.stat().st_size
    stats["total_size_mb"] = round(total_size / (1024 * 1024), 2)
    return stats

# === Initialization ===

def init_storage() -> None:
    """Initialize storage directories."""
    for d in [DATA_DIR, SYSTEM_DIR, SYSTEM_DIR / "cache", SYSTEM_DIR / "activity", PROJECTS_DIR, TEAMS_DIR]:
        d.mkdir(parents=True, exist_ok=True)
    print(f"[Storage] JSON storage initialized at {DATA_DIR}")

# Initialize on import
init_storage()

# === CLI ===

if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1:
        cmd = sys.argv[1]
        if cmd == "stats":
            print(json.dumps(get_stats(), indent=2))
        elif cmd == "init":
            init_storage()
            print("Storage initialized")
        elif cmd == "cache-test":
            Cache.set("test_key", {"foo": "bar"}, ttl=60)
            print("Set: test_key")
            print(f"Get: {Cache.get('test_key')}")
        elif cmd == "clear-cache":
            Cache.clear_all()
            print("Cache cleared")
    else:
        print("Usage: python json_store.py [stats|init|cache-test|clear-cache]")
        print(f"\nData directory: {DATA_DIR}")
        print(f"Stats: {json.dumps(get_stats(), indent=2)}")