Files
dss/apps/api/browser_logger.py
Bruno Sarlo 41fba59bf7 Major refactor: Consolidate DSS into unified package structure
- Create new dss/ Python package at project root
- Move MCP core from tools/dss_mcp/ to dss/mcp/
- Move storage layer from tools/storage/ to dss/storage/
- Move domain logic from dss-mvp1/dss/ to dss/
- Move services from tools/api/services/ to dss/services/
- Move API server to apps/api/
- Move CLI to apps/cli/
- Move Storybook assets to storybook/
- Create unified dss/__init__.py with comprehensive exports
- Merge configuration into dss/settings.py (Pydantic-based)
- Create pyproject.toml for proper package management
- Update startup scripts for new paths
- Remove old tools/ and dss-mvp1/ directories

Architecture changes:
- DSS is now MCP-first with 40+ tools for Claude Code
- Clean imports: from dss import Projects, Components, FigmaToolSuite
- No more sys.path.insert() hacking
- apps/ contains thin application wrappers (API, CLI)
- Single unified Python package for all DSS logic

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-10 12:46:43 -03:00

69 lines
2.2 KiB
Python

import os
import logging
from logging.handlers import RotatingFileHandler
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import List, Any, Optional
# --- Configuration ---
# Use a project-local logs directory to avoid permission issues with
# system-wide log paths. Layout: <project_root>/.dss/logs/browser-logs/
_this_dir = os.path.dirname(os.path.abspath(__file__))
_project_root = os.path.dirname(os.path.dirname(_this_dir))
LOG_DIR = os.path.join(_project_root, ".dss", "logs", "browser-logs")
LOG_FILE = os.path.join(LOG_DIR, "browser.log")
# Ensure the log directory exists before the file handler opens the file.
os.makedirs(LOG_DIR, exist_ok=True)

# --- Logging Setup ---
# A dedicated logger keeps browser-originated logs separate from app logs.
browser_logger = logging.getLogger("browser_logger")
browser_logger.setLevel(logging.INFO)
# Don't propagate to the root logger: these records belong only in the
# browser log file, not interleaved with application output.
browser_logger.propagate = False

# Rotating file handler: 10MB max size, keep last 5 backups
handler = RotatingFileHandler(LOG_FILE, maxBytes=10*1024*1024, backupCount=5)
formatter = logging.Formatter(
    '%(asctime)s [%(levelname)s] [BROWSER] %(message)s'
)
handler.setFormatter(formatter)
# Guard against duplicate handlers if this module is imported under more
# than one module path (which would double-write every log line).
if not browser_logger.handlers:
    browser_logger.addHandler(handler)
# --- API Router ---
# Router exposing the browser-log ingestion endpoint; intended to be
# mounted on the main FastAPI application by the API server.
router = APIRouter()
class LogEntry(BaseModel):
    """A single console log entry forwarded from the browser."""

    # Browser console level, e.g. "error", "warn", "debug"; anything else
    # is treated as info by the receiving endpoint.
    level: str
    # Client-side timestamp string (format chosen by the browser sender).
    timestamp: str
    # The rendered console message text.
    message: str
    # Optional extra console arguments — shape depends on the sender;
    # NOTE(review): currently unused by the receiving endpoint.
    data: Optional[List[Any]] = None
class LogBatch(BaseModel):
    """Request body for POST /api/logs/browser: a batch of log entries."""

    logs: List[LogEntry]
@router.post("/api/logs/browser")
async def receive_browser_logs(batch: LogBatch):
    """
    Receive a batch of console logs from the browser and append them to
    the rotating browser log file.

    Args:
        batch: Parsed request body containing the browser log entries.

    Returns:
        dict: ``{"status": "ok", "count": <number of entries received>}``.

    Raises:
        HTTPException: 500 if the batch cannot be processed.
    """
    # Map browser console levels onto python logging methods. Accept both
    # "warn" (console.warn) and the full name "warning" for robustness;
    # anything unrecognized (e.g. "log", "info") falls back to info.
    level_methods = {
        'error': browser_logger.error,
        'warn': browser_logger.warning,
        'warning': browser_logger.warning,
        'debug': browser_logger.debug,
    }
    try:
        for log in batch.logs:
            emit = level_methods.get(log.level.lower(), browser_logger.info)
            emit(f"[{log.timestamp}] {log.message}")
        return {"status": "ok", "count": len(batch.logs)}
    except Exception as e:
        # Fall back to the root logger if something breaks deeply (the
        # browser logger's own file handler may be the broken piece).
        # Lazy %-style args avoid formatting work unless the record is emitted.
        logging.error("Failed to process browser logs: %s", e)
        raise HTTPException(status_code=500, detail="Internal processing error") from e