feat: Enterprise DSS architecture implementation
Complete implementation of enterprise design system validation:

Phase 1 - @dss/rules npm package:
- CLI with validate and init commands
- 16 rules across 5 categories (colors, spacing, typography, components, a11y)
- dss-ignore support (inline and next-line)
- Break-glass [dss-skip] for emergency merges
- CI workflow templates (Gitea, GitHub, GitLab)

Phase 2 - Metrics dashboard:
- FastAPI metrics API with SQLite storage
- Portfolio-wide metrics aggregation
- Project drill-down with file:line:column violations
- Trend charts and history tracking

Phase 3 - Local analysis cache:
- LocalAnalysisCache for offline-capable validation
- Mode detection (LOCAL/REMOTE/CI)
- Stale cache warnings with recommendations

Phase 4 - Project onboarding:
- dss-init command for project setup
- Creates ds.config.json, .dss/ folder structure
- Updates .gitignore and package.json scripts
- Optional CI workflow setup

Architecture decisions:
- No commit-back: CI uploads to dashboard, not git
- Three-tier: Dashboard (read-only) → CI (authoritative) → Local (advisory)
- Pull-based rules via npm for version control

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
dss-claude-plugin/core/__init__.py
@@ -2,11 +2,21 @@
 DSS Core Module - Configuration and Context Management.

 Extended with Context Compiler for design system context resolution.
+
+Enterprise Architecture:
+- LOCAL mode: Uses LocalAnalysisCache for fast, offline-capable validation
+- REMOTE mode: Full analysis via API
+- CI mode: Authoritative enforcement, uploads metrics to dashboard
 """

 from .compiler import EMERGENCY_SKIN, ContextCompiler
 from .config import DSSConfig, DSSMode
 from .context import DSSContext
+from .local_cache import (
+    LocalAnalysisCache,
+    LocalCacheValidator,
+    get_project_cache,
+)
 from .mcp_extensions import (
     COMPILER,
     get_active_context,
@@ -23,6 +33,9 @@ __all__ = [
     "DSSContext",
     "ContextCompiler",
     "EMERGENCY_SKIN",
+    "LocalAnalysisCache",
+    "LocalCacheValidator",
+    "get_project_cache",
     "get_active_context",
     "resolve_token",
     "validate_manifest",
dss-claude-plugin/core/config.py
@@ -3,8 +3,14 @@ DSS Configuration Module
 ========================

 Handles configuration management for the Design System Server (DSS) Claude Plugin.
-Supports local/remote mode detection, persistent configuration storage, and
+Supports local/remote/CI mode detection, persistent configuration storage, and
 environment variable overrides.
+
+Enterprise Architecture:
+- LOCAL: Developer workstation, reads from .dss/ cache, advisory validation
+- REMOTE: Headless/server mode, full analysis, metrics upload
+- CI: CI/CD pipeline, authoritative enforcement, blocking validation
+- AUTO: Detect environment automatically (CI env vars -> CI, else LOCAL with cache)
 """

 import json
@@ -13,6 +19,7 @@ import os
 import uuid
 from enum import Enum
 from pathlib import Path
 from typing import Optional

 import aiohttp
 from pydantic import BaseModel, Field, ValidationError
@@ -24,14 +31,28 @@ CONFIG_DIR = Path.home() / ".dss"
 CONFIG_FILE = CONFIG_DIR / "config.json"
 DEFAULT_REMOTE_URL = "https://dss.overbits.luz.uy"
 DEFAULT_LOCAL_URL = "http://localhost:6006"
+DEFAULT_DASHBOARD_URL = "https://dss.overbits.luz.uy/api/metrics"
+
+# CI environment variables that indicate we're running in a pipeline
+CI_ENV_VARS = [
+    "CI",
+    "GITEA_ACTIONS",
+    "GITHUB_ACTIONS",
+    "GITLAB_CI",
+    "JENKINS_URL",
+    "CIRCLECI",
+    "TRAVIS",
+    "BUILDKITE",
+]


 class DSSMode(str, Enum):
     """Operation modes for the DSS plugin."""

-    LOCAL = "local"
-    REMOTE = "remote"
-    AUTO = "auto"
+    LOCAL = "local"  # Developer workstation - advisory, uses cache
+    REMOTE = "remote"  # Headless server - full analysis
+    CI = "ci"  # CI/CD pipeline - authoritative enforcement
+    AUTO = "auto"  # Auto-detect based on environment


 class DSSConfig(BaseModel):
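Because DSSMode subclasses str, mode values round-trip cleanly between env vars, JSON config, and enum members; a small illustration (not part of the commit):

# Example (illustrative, not part of the commit):
# DSSMode is a str-backed enum, so values coming from DSS_MODE or config.json
# can be parsed and compared as plain strings.
assert DSSMode("ci") is DSSMode.CI
assert DSSMode.LOCAL == "local"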
@@ -42,15 +63,21 @@ class DSSConfig(BaseModel):
         mode (DSSMode): The configured operation mode (default: AUTO).
         remote_url (str): URL for the remote DSS API.
         local_url (str): URL for the local DSS API (usually localhost).
+        dashboard_url (str): URL for metrics dashboard API.
         session_id (str): Unique identifier for this client instance.
+        project_path (str): Current project path (for local analysis).
+        rules_version (str): Pinned @dss/rules version for this project.
     """

     mode: DSSMode = Field(default=DSSMode.AUTO, description="Operation mode preference")
     remote_url: str = Field(default=DEFAULT_REMOTE_URL, description="Remote API endpoint")
     local_url: str = Field(default=DEFAULT_LOCAL_URL, description="Local API endpoint")
+    dashboard_url: str = Field(default=DEFAULT_DASHBOARD_URL, description="Metrics dashboard API")
     session_id: str = Field(
         default_factory=lambda: str(uuid.uuid4()), description="Persistent session ID"
     )
+    project_path: Optional[str] = Field(default=None, description="Current project path")
+    rules_version: Optional[str] = Field(default=None, description="Pinned @dss/rules version")

     class Config:
         validate_assignment = True
@@ -101,38 +128,75 @@ class DSSConfig(BaseModel):
         Determine the actual runtime mode based on priority rules.

         Priority:
-        1. DSS_MODE environment variable
-        2. Configured 'mode' (if not AUTO)
-        3. Auto-detection (ping local health endpoint)
-        4. Fallback to REMOTE
+        1. DSS_MODE environment variable (explicit override)
+        2. CI environment detection (GITEA_ACTIONS, CI, GITHUB_ACTIONS, etc.)
+        3. Configured 'mode' (if not AUTO)
+        4. Auto-detection (check for .dss/ folder, ping local health)
+        5. Fallback to LOCAL (developer-first)

         Returns:
-            DSSMode: The resolved active mode (LOCAL or REMOTE).
+            DSSMode: The resolved active mode (LOCAL, REMOTE, or CI).
         """
-        # 1. Check Environment Variable
+        # 1. Check Environment Variable (explicit override)
         env_mode = os.getenv("DSS_MODE")
         if env_mode:
             try:
                 # Normalize string to enum
-                return DSSMode(env_mode.lower())
+                resolved = DSSMode(env_mode.lower())
+                logger.info(f"Mode set via DSS_MODE env var: {resolved.value}")
+                return resolved
             except ValueError:
                 logger.warning(f"Invalid DSS_MODE env var '{env_mode}', ignoring.")

-        # 2. Check Configuration (if explicit)
+        # 2. Check CI environment variables
+        if self._is_ci_environment():
+            logger.info("CI environment detected. Using CI mode (authoritative enforcement).")
+            return DSSMode.CI
+
+        # 3. Check Configuration (if explicit, not AUTO)
         if self.mode != DSSMode.AUTO:
+            logger.info(f"Using configured mode: {self.mode.value}")
             return self.mode

-        # 3. Auto-detect
+        # 4. Auto-detect based on environment
         logger.info("Auto-detecting DSS mode...")
-        is_local_healthy = await self._check_local_health()

-        if is_local_healthy:
-            logger.info(f"Local server detected at {self.local_url}. Switching to LOCAL mode.")
+        # Check for local .dss/ folder (indicates project setup)
+        if self._has_local_dss_folder():
+            logger.info("Found .dss/ folder. Using LOCAL mode with cache.")
             return DSSMode.LOCAL
-        else:
-            logger.info("Local server unreachable. Fallback to REMOTE mode.")
-            # 4. Fallback
-            return DSSMode.REMOTE
+
+        # Check if local server is running
+        is_local_healthy = await self._check_local_health()
+        if is_local_healthy:
+            logger.info(f"Local server detected at {self.local_url}. Using LOCAL mode.")
+            return DSSMode.LOCAL
+
+        # 5. Fallback to LOCAL (developer-first, will use stale cache if available)
+        logger.info("Fallback to LOCAL mode (offline-capable with cache).")
+        return DSSMode.LOCAL
+
+    def _is_ci_environment(self) -> bool:
+        """Check if running in a CI/CD environment."""
+        for env_var in CI_ENV_VARS:
+            if os.getenv(env_var):
+                logger.debug(f"CI detected via {env_var} env var")
+                return True
+        return False
+
+    def _has_local_dss_folder(self) -> bool:
+        """Check if current directory or project has .dss/ folder."""
+        # Check current working directory
+        cwd_dss = Path.cwd() / ".dss"
+        if cwd_dss.exists() and cwd_dss.is_dir():
+            return True
+
+        # Check configured project path
+        if self.project_path:
+            project_dss = Path(self.project_path) / ".dss"
+            if project_dss.exists() and project_dss.is_dir():
+                return True
+
+        return False

     async def _check_local_health(self) -> bool:
         """
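How the resolution order plays out in practice, as a sketch. The resolver method name (get_active_mode) and the import path are assumptions, since the hunk above starts below the method signature:

# Example (illustrative; method name and import path are assumptions):
import asyncio
import os

from core.config import DSSConfig, DSSMode

async def demo() -> None:
    config = DSSConfig()  # mode defaults to AUTO

    os.environ["DSS_MODE"] = "remote"  # priority 1: explicit override wins
    assert await config.get_active_mode() == DSSMode.REMOTE

    del os.environ["DSS_MODE"]
    os.environ["CI"] = "true"  # priority 2: any CI env var forces CI mode
    assert await config.get_active_mode() == DSSMode.CI

asyncio.run(demo())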
@@ -161,3 +225,46 @@ class DSSConfig(BaseModel):
         if active_mode == DSSMode.LOCAL:
             return self.local_url
         return self.remote_url
+
+    def get_mode_behavior(self, active_mode: DSSMode) -> dict:
+        """
+        Get behavior configuration for the active mode.
+
+        Returns dict with:
+        - blocking: Whether validation errors block operations
+        - upload_metrics: Whether to upload metrics to dashboard
+        - use_cache: Whether to use local .dss/ cache
+        - cache_ttl: Cache time-to-live in seconds
+        """
+        behaviors = {
+            DSSMode.LOCAL: {
+                "blocking": False,  # Advisory only
+                "upload_metrics": False,
+                "use_cache": True,
+                "cache_ttl": 3600,  # 1 hour
+                "show_stale_warning": True,
+            },
+            DSSMode.REMOTE: {
+                "blocking": True,
+                "upload_metrics": True,
+                "use_cache": False,
+                "cache_ttl": 0,
+                "show_stale_warning": False,
+            },
+            DSSMode.CI: {
+                "blocking": True,  # Authoritative enforcement
+                "upload_metrics": True,
+                "use_cache": False,
+                "cache_ttl": 0,
+                "show_stale_warning": False,
+            },
+            DSSMode.AUTO: {
+                # AUTO resolves to another mode, shouldn't reach here
+                "blocking": False,
+                "upload_metrics": False,
+                "use_cache": True,
+                "cache_ttl": 3600,
+                "show_stale_warning": True,
+            },
+        }
+        return behaviors.get(active_mode, behaviors[DSSMode.LOCAL])
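A sketch of how a caller might act on get_mode_behavior; the enforce helper and exit handling are illustrative, not part of the commit:

# Example (illustrative, not part of the commit):
import sys

def enforce(config: DSSConfig, active_mode: DSSMode, error_count: int) -> None:
    behavior = config.get_mode_behavior(active_mode)
    if error_count and behavior["blocking"]:
        sys.exit(1)  # CI/REMOTE: authoritative, fail the run
    elif error_count:
        print(f"{error_count} violation(s) found (advisory, not blocking)")  # LOCAL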
dss-claude-plugin/core/context.py
@@ -4,6 +4,11 @@ DSS Context Module

 Singleton context manager for the DSS Plugin.
 Handles configuration loading, mode detection, and strategy instantiation.
+
+Enterprise Architecture:
+- LOCAL: Uses LocalAnalysisCache for fast, offline-capable validation
+- REMOTE: Full analysis via API
+- CI: Authoritative enforcement, uploads metrics to dashboard
 """

 import asyncio
@@ -11,6 +16,7 @@ import logging
 from typing import Any, Dict, Optional

 from .config import DSSConfig, DSSMode
+from .local_cache import LocalAnalysisCache, LocalCacheValidator, get_project_cache

 # Logger setup
 logger = logging.getLogger(__name__)
@@ -44,6 +50,8 @@ class DSSContext:
         self._capabilities: Dict[str, bool] = {}
         self._strategy_cache: Dict[str, Strategy] = {}
         self.session_id: Optional[str] = None
+        self._local_cache: Optional[LocalAnalysisCache] = None
+        self._cache_validator: Optional[LocalCacheValidator] = None

     @classmethod
     async def get_instance(cls) -> "DSSContext":
@@ -91,7 +99,11 @@ class DSSContext:
             f"DSSContext initialized. Mode: {self.active_mode.value}, Session: {self.session_id}"
         )

-        # 3. Cache Capabilities
+        # 3. Initialize local cache for LOCAL mode
+        if self.active_mode == DSSMode.LOCAL:
+            self._init_local_cache()
+
+        # 4. Cache Capabilities
         self._cache_capabilities()

         except Exception as e:
@@ -100,6 +112,27 @@ class DSSContext:
             self.active_mode = DSSMode.REMOTE
             self._capabilities = {"limited": True}

+    def _init_local_cache(self) -> None:
+        """Initialize local cache for LOCAL mode."""
+        try:
+            project_path = self.config.project_path if self.config else None
+            self._local_cache = get_project_cache(project_path)
+            self._cache_validator = LocalCacheValidator(self._local_cache)
+
+            # Log cache status
+            status = self._local_cache.get_cache_status()
+            if status.get("exists"):
+                if status.get("is_stale"):
+                    logger.warning(f"Local cache is stale: {status.get('recommendation')}")
+                else:
+                    logger.info(f"Local cache ready. Rules version: {status.get('rules_version')}")
+            else:
+                logger.info("No local cache found. Run `npx dss-rules validate` to populate.")
+        except Exception as e:
+            logger.warning(f"Failed to initialize local cache: {e}")
+            self._local_cache = None
+            self._cache_validator = None
+
     def _cache_capabilities(self) -> None:
         """Determines what the plugin can do based on the active mode."""
         # Base capabilities
@@ -192,3 +225,88 @@ class DSSContext:
         # Cache and return
         self._strategy_cache[strategy_type] = strategy_instance
         return strategy_instance
+
+    # === Local Cache Access Methods ===
+
+    def get_local_cache(self) -> Optional[LocalAnalysisCache]:
+        """
+        Get the local analysis cache instance.
+
+        Returns:
+            LocalAnalysisCache instance or None if not in LOCAL mode.
+        """
+        return self._local_cache
+
+    def get_cache_validator(self) -> Optional[LocalCacheValidator]:
+        """
+        Get the local cache validator instance.
+
+        Returns:
+            LocalCacheValidator instance or None if not in LOCAL mode.
+        """
+        return self._cache_validator
+
+    def get_cache_status(self) -> Dict[str, Any]:
+        """
+        Get current cache status.
+
+        Returns:
+            Cache status dict with freshness info and recommendations.
+        """
+        if self._local_cache is None:
+            return {
+                "available": False,
+                "mode": self.active_mode.value,
+                "message": f"Local cache not available in {self.active_mode.value} mode",
+            }
+
+        status = self._local_cache.get_cache_status()
+        status["available"] = True
+        status["mode"] = self.active_mode.value
+        return status
+
+    def validate_file_local(self, file_path: str) -> Dict[str, Any]:
+        """
+        Validate a file using local cache (LOCAL mode only).
+
+        Args:
+            file_path: Path to file to validate.
+
+        Returns:
+            Validation result dict.
+        """
+        if self._cache_validator is None:
+            return {
+                "file": file_path,
+                "error": "Local cache not available",
+                "mode": self.active_mode.value,
+            }
+
+        return self._cache_validator.validate_file(file_path)
+
+    def get_validation_summary(self) -> Dict[str, Any]:
+        """
+        Get summary of validation state from local cache.
+
+        Returns:
+            Summary dict with counts and status.
+        """
+        if self._cache_validator is None:
+            return {
+                "error": "Local cache not available",
+                "mode": self.active_mode.value,
+            }
+
+        return self._cache_validator.get_summary()
+
+    def get_mode_behavior(self) -> Dict[str, Any]:
+        """
+        Get behavior configuration for current mode.
+
+        Returns:
+            Dict with blocking, upload_metrics, use_cache flags.
+        """
+        if self.config is None:
+            return {"blocking": False, "upload_metrics": False, "use_cache": False}
+
+        return self.config.get_mode_behavior(self.active_mode)
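Putting the new context methods together: get_instance, validate_file_local, and get_mode_behavior are from this diff, while the import path, file path, and driver code are illustrative:

# Example (illustrative driver around the methods added above):
import asyncio

from core.context import DSSContext  # import path is an assumption

async def main() -> None:
    ctx = await DSSContext.get_instance()
    result = ctx.validate_file_local("src/components/Button.tsx")  # hypothetical file
    print(f"errors: {result.get('error_count', 0)}")
    if result.get("error_count") and ctx.get_mode_behavior()["blocking"]:
        raise SystemExit(1)

asyncio.run(main())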
dss-claude-plugin/core/local_cache.py (new file, 402 lines; shown without diff markers)
@@ -0,0 +1,402 @@
"""
|
||||
DSS Local Analysis Cache Module.
|
||||
|
||||
Handles reading and writing to the local .dss/ folder for developer workstation mode.
|
||||
Provides offline-capable validation using cached analysis results.
|
||||
|
||||
Enterprise Architecture:
|
||||
- LOCAL mode reads from .dss/cache/ for fast, offline-capable feedback
|
||||
- Cache is populated by `dss-rules validate` or periodic sync
|
||||
- Stale cache shows warnings but doesn't block (advisory mode)
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Cache file names within .dss/
|
||||
ANALYSIS_CACHE_FILE = "analysis_cache.json"
|
||||
RULES_CACHE_FILE = "rules_cache.json"
|
||||
VIOLATIONS_CACHE_FILE = "violations_cache.json"
|
||||
METADATA_FILE = "metadata.json"
|
||||
|
||||
# Default cache TTL in seconds (1 hour)
|
||||
DEFAULT_CACHE_TTL = 3600
|
||||
|
||||
# Stale cache threshold (24 hours - show warning but still use)
|
||||
STALE_THRESHOLD = 86400
|
||||
|
||||
|
||||
class LocalAnalysisCache:
    """
    Manages local .dss/ folder cache for developer workstations.

    Provides:
    - Fast, offline-capable validation results
    - Cached rule definitions from @dss/rules
    - Violation history for incremental feedback
    """

    def __init__(self, project_path: Optional[str] = None):
        """
        Initialize cache with project path.

        Args:
            project_path: Path to project root. Defaults to current directory.
        """
        self.project_path = Path(project_path) if project_path else Path.cwd()
        self.dss_dir = self.project_path / ".dss"
        self.cache_dir = self.dss_dir / "cache"
        self._ensure_structure()

    def _ensure_structure(self) -> None:
        """Ensure .dss/ folder structure exists."""
        try:
            self.dss_dir.mkdir(parents=True, exist_ok=True)
            self.cache_dir.mkdir(parents=True, exist_ok=True)

            # Create .gitignore if it doesn't exist
            gitignore_path = self.dss_dir / ".gitignore"
            if not gitignore_path.exists():
                gitignore_path.write_text("# DSS local cache - do not commit\n*\n!.gitignore\n")
                logger.debug(f"Created .gitignore in {self.dss_dir}")
        except Exception as e:
            logger.warning(f"Failed to create .dss/ structure: {e}")
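After construction, _ensure_structure leaves a self-excluding cache folder behind; roughly (the path below is made up):

# Example (illustrative; the path is made up):
#   .dss/
#   ├── .gitignore   ("*" plus "!.gitignore": ignore everything except itself)
#   └── cache/
cache = LocalAnalysisCache("/tmp/demo-project")
print((cache.dss_dir / ".gitignore").read_text())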
    def get_cache_status(self) -> Dict[str, Any]:
        """
        Get current cache status including freshness.

        Returns:
            Dict with cache status, age, and recommendation.
        """
        metadata = self._read_metadata()

        if not metadata:
            return {
                "exists": False,
                "age_seconds": None,
                "is_fresh": False,
                "is_stale": True,
                "recommendation": "Run `npx dss-rules validate` to populate cache",
            }

        last_updated = metadata.get("last_updated")
        if not last_updated:
            return {
                "exists": True,
                "age_seconds": None,
                "is_fresh": False,
                "is_stale": True,
                "recommendation": "Cache missing timestamp, run validation",
            }

        try:
            last_dt = datetime.fromisoformat(last_updated.replace("Z", "+00:00"))
            now = datetime.now(timezone.utc)
            age_seconds = (now - last_dt).total_seconds()

            is_fresh = age_seconds < DEFAULT_CACHE_TTL
            is_stale = age_seconds > STALE_THRESHOLD

            if is_fresh:
                recommendation = "Cache is fresh"
            elif is_stale:
                recommendation = f"Cache is {int(age_seconds / 3600)}h old. Run `npx dss-rules validate` to refresh"
            else:
                recommendation = "Cache is usable but consider refreshing"

            return {
                "exists": True,
                "age_seconds": int(age_seconds),
                "is_fresh": is_fresh,
                "is_stale": is_stale,
                "last_updated": last_updated,
                "rules_version": metadata.get("rules_version"),
                "recommendation": recommendation,
            }
        except Exception as e:
            logger.warning(f"Failed to parse cache timestamp: {e}")
            return {
                "exists": True,
                "age_seconds": None,
                "is_fresh": False,
                "is_stale": True,
                "recommendation": "Cache timestamp invalid, run validation",
            }
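On a fresh checkout with no metadata yet, get_cache_status points the developer at the populate step; a quick illustration (path is made up):

# Example (illustrative; path is made up):
cache = LocalAnalysisCache("/tmp/empty-project")
status = cache.get_cache_status()
assert status["exists"] is False and status["is_stale"] is True
print(status["recommendation"])  # -> Run `npx dss-rules validate` to populate cache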
    def get_analysis_results(self) -> Optional[Dict[str, Any]]:
        """
        Get cached analysis results.

        Returns:
            Analysis results dict or None if not cached.
        """
        return self._read_cache_file(ANALYSIS_CACHE_FILE)

    def get_violations(self, file_path: Optional[str] = None) -> List[Dict[str, Any]]:
        """
        Get cached violations, optionally filtered by file.

        Args:
            file_path: Optional file path to filter violations.

        Returns:
            List of violation dicts.
        """
        violations = self._read_cache_file(VIOLATIONS_CACHE_FILE)
        if not violations:
            return []

        violation_list = violations.get("violations", [])

        if file_path:
            # Normalize path for comparison
            norm_path = str(Path(file_path).resolve())
            return [
                v for v in violation_list
                if v.get("file", "").endswith(file_path) or norm_path in v.get("file", "")
            ]

        return violation_list

    def get_rules(self) -> Optional[Dict[str, Any]]:
        """
        Get cached rule definitions.

        Returns:
            Rules dict or None if not cached.
        """
        return self._read_cache_file(RULES_CACHE_FILE)
    def save_analysis_results(self, results: Dict[str, Any]) -> bool:
        """
        Save analysis results to cache.

        Args:
            results: Analysis results from validation.

        Returns:
            True if saved successfully.
        """
        success = self._write_cache_file(ANALYSIS_CACHE_FILE, results)
        if success:
            self._update_metadata({"last_analysis": datetime.now(timezone.utc).isoformat()})
        return success

    def save_violations(
        self, violations: List[Dict[str, Any]], metadata: Optional[Dict[str, Any]] = None
    ) -> bool:
        """
        Save violations to cache.

        Args:
            violations: List of violation dicts.
            metadata: Optional metadata (rules_version, commit, etc.)

        Returns:
            True if saved successfully.
        """
        data = {
            "violations": violations,
            "count": len(violations),
            "saved_at": datetime.now(timezone.utc).isoformat(),
            **(metadata or {}),
        }
        success = self._write_cache_file(VIOLATIONS_CACHE_FILE, data)
        if success:
            meta_update = {
                "last_updated": datetime.now(timezone.utc).isoformat(),
                "violation_count": len(violations),
            }
            # Only carry rules_version forward when provided, so an omitted
            # value does not clobber a previously recorded version.
            if metadata and metadata.get("rules_version"):
                meta_update["rules_version"] = metadata["rules_version"]
            self._update_metadata(meta_update)
        return success

    def save_rules(self, rules: Dict[str, Any], version: str) -> bool:
        """
        Save rule definitions to cache.

        Args:
            rules: Rule definitions dict.
            version: Rules package version.

        Returns:
            True if saved successfully.
        """
        data = {
            "rules": rules,
            "version": version,
            "cached_at": datetime.now(timezone.utc).isoformat(),
        }
        success = self._write_cache_file(RULES_CACHE_FILE, data)
        if success:
            self._update_metadata({"rules_version": version})
        return success
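A round trip through the save/read APIs above; the violation payload shape follows the file/severity fields used by the validator below, and the rule name and paths are hypothetical:

# Example (illustrative; rule name and paths are hypothetical):
cache = LocalAnalysisCache("/tmp/demo-project")
cache.save_violations(
    [{"file": "src/Button.tsx", "severity": "error", "rule": "colors/no-raw-hex"}],
    metadata={"rules_version": "1.2.0"},
)
assert len(cache.get_violations("src/Button.tsx")) == 1
assert cache.get_cache_status()["rules_version"] == "1.2.0"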
    def clear_cache(self) -> bool:
        """
        Clear all cached data.

        Returns:
            True if cleared successfully.
        """
        try:
            for file in [ANALYSIS_CACHE_FILE, VIOLATIONS_CACHE_FILE, RULES_CACHE_FILE]:
                cache_file = self.cache_dir / file
                if cache_file.exists():
                    cache_file.unlink()

            # Reset metadata
            self._write_metadata({"cleared_at": datetime.now(timezone.utc).isoformat()})
            logger.info("Cache cleared")
            return True
        except Exception as e:
            logger.error(f"Failed to clear cache: {e}")
            return False
    def _read_cache_file(self, filename: str) -> Optional[Dict[str, Any]]:
        """Read a cache file."""
        cache_file = self.cache_dir / filename
        if not cache_file.exists():
            return None

        try:
            return json.loads(cache_file.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError) as e:
            # Catching Exception alongside JSONDecodeError was redundant;
            # read or parse failures are the realistic cases here.
            logger.warning(f"Failed to read cache file {filename}: {e}")
            return None

    def _write_cache_file(self, filename: str, data: Dict[str, Any]) -> bool:
        """Write a cache file."""
        cache_file = self.cache_dir / filename
        try:
            cache_file.write_text(json.dumps(data, indent=2), encoding="utf-8")
            return True
        except Exception as e:
            logger.error(f"Failed to write cache file {filename}: {e}")
            return False

    def _read_metadata(self) -> Optional[Dict[str, Any]]:
        """Read metadata file."""
        metadata_file = self.dss_dir / METADATA_FILE
        if not metadata_file.exists():
            return None

        try:
            return json.loads(metadata_file.read_text(encoding="utf-8"))
        except Exception:
            return None

    def _write_metadata(self, data: Dict[str, Any]) -> bool:
        """Write metadata file."""
        metadata_file = self.dss_dir / METADATA_FILE
        try:
            metadata_file.write_text(json.dumps(data, indent=2), encoding="utf-8")
            return True
        except Exception as e:
            logger.error(f"Failed to write metadata: {e}")
            return False

    def _update_metadata(self, updates: Dict[str, Any]) -> bool:
        """Update metadata file with new values."""
        existing = self._read_metadata() or {}
        existing.update(updates)
        return self._write_metadata(existing)
class LocalCacheValidator:
    """
    Validator that uses local cache for offline-capable feedback.

    Used in LOCAL mode to provide fast, advisory validation without
    requiring network access to the dashboard.
    """

    def __init__(self, cache: LocalAnalysisCache):
        """
        Initialize validator with cache.

        Args:
            cache: LocalAnalysisCache instance.
        """
        self.cache = cache

    def validate_file(self, file_path: str) -> Dict[str, Any]:
        """
        Validate a single file using cached violations.

        Args:
            file_path: Path to file to validate.

        Returns:
            Validation result dict.
        """
        cache_status = self.cache.get_cache_status()
        violations = self.cache.get_violations(file_path)

        result = {
            "file": file_path,
            "violations": violations,
            "error_count": len([v for v in violations if v.get("severity") == "error"]),
            "warning_count": len([v for v in violations if v.get("severity") == "warning"]),
            "cache_status": cache_status,
            "mode": "local_cache",
        }

        if cache_status.get("is_stale"):
            result["warning"] = cache_status["recommendation"]

        return result

    def get_file_status(self, file_path: str) -> str:
        """
        Get simple status for a file.

        Returns:
            'pass', 'fail', or 'unknown'.
        """
        violations = self.cache.get_violations(file_path)
        errors = [v for v in violations if v.get("severity") == "error"]

        if not violations:
            # No cached data for this file
            return "unknown"

        return "fail" if errors else "pass"
    def get_summary(self) -> Dict[str, Any]:
        """
        Get summary of cached validation state.

        Returns:
            Summary dict with counts and status.
        """
        cache_status = self.cache.get_cache_status()
        analysis = self.cache.get_analysis_results()
        all_violations = self.cache.get_violations()

        errors = [v for v in all_violations if v.get("severity") == "error"]
        warnings = [v for v in all_violations if v.get("severity") == "warning"]

        return {
            "cache_status": cache_status,
            "total_violations": len(all_violations),
            "error_count": len(errors),
            "warning_count": len(warnings),
            "rules_version": cache_status.get("rules_version"),
            "last_updated": cache_status.get("last_updated"),
            "analysis": analysis,
        }
def get_project_cache(project_path: Optional[str] = None) -> LocalAnalysisCache:
    """
    Factory function to get cache for a project.

    Args:
        project_path: Path to project root.

    Returns:
        LocalAnalysisCache instance.
    """
    return LocalAnalysisCache(project_path)
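End to end, the validator layer reads as below; the output fields are the ones built in get_summary:

# Example (illustrative):
validator = LocalCacheValidator(get_project_cache())
summary = validator.get_summary()
print(f"{summary['error_count']} errors, {summary['warning_count']} warnings")
print(summary["cache_status"]["recommendation"])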