auto-backup: 2025-12-11 20:35:05 (68 files: +19 ~23 -25)
Generated by DSS Git Backup Hook
@@ -7,6 +7,7 @@ Handles model-specific API calls and tool execution
import asyncio
import json
import os
+import subprocess
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional

@@ -39,19 +40,68 @@ class AIProvider(ABC):
class ClaudeProvider(AIProvider):
    """Anthropic Claude provider."""

    # SoFi LLM Proxy configuration
    PROXY_BASE_URL = "https://internal.sofitest.com/llm-proxy"
    API_KEY_HELPER = os.path.expanduser("~/.local/bin/llm-proxy-keys")

    def __init__(self):
        self.api_key = os.getenv("ANTHROPIC_API_KEY")
        self.base_url = os.getenv("ANTHROPIC_BASE_URL", self.PROXY_BASE_URL)
        self.default_model = "claude-sonnet-4-5-20250929"
        self._proxy_key = None

    def _get_proxy_key(self) -> Optional[str]:
        """Get API key from SoFi LLM proxy helper script"""
        if self._proxy_key:
            return self._proxy_key

        try:
            if os.path.exists(self.API_KEY_HELPER):
                result = subprocess.run(
                    [self.API_KEY_HELPER],
                    capture_output=True,
                    text=True,
                    timeout=10
                )
                if result.returncode == 0:
                    # Extract the key from output (last line with sk- prefix)
                    for line in result.stdout.strip().split('\n'):
                        if line.startswith('sk-'):
                            self._proxy_key = line.strip()
                    return self._proxy_key
        except Exception as e:
            print(f"Error getting proxy key: {e}")
        return None

    def is_available(self) -> bool:
        """Check if Claude is available."""
        try:
            from anthropic import Anthropic

-            return bool(self.api_key)
+            # Available if SDK is installed (proxy may have keys)
+            return True
        except ImportError:
            return False

    def _create_client(self):
        """Create Anthropic client configured for SoFi proxy"""
        from anthropic import Anthropic
        import httpx

        # Create httpx client that skips SSL verification (for corporate proxy)
        http_client = httpx.Client(verify=False)

        # Get API key: prefer env var, then proxy helper
        api_key = self.api_key or self._get_proxy_key()

        if not api_key:
            raise ValueError("No API key available. Set ANTHROPIC_API_KEY or ensure llm-proxy-keys is installed.")

        return Anthropic(
            api_key=api_key,
            base_url=self.base_url,
            http_client=http_client
        )

    async def chat(
        self,
        message: str,
@@ -67,7 +117,7 @@ class ClaudeProvider(AIProvider):
        if not self.is_available():
            return {
                "success": False,
-                "response": "Claude not available. Install anthropic SDK or set ANTHROPIC_API_KEY.",
+                "response": "Claude not available. Install anthropic SDK.",
                "model": "error",
                "tools_used": [],
                "stop_reason": "error",
@@ -75,7 +125,17 @@ class ClaudeProvider(AIProvider):

        from anthropic import Anthropic

-        client = Anthropic(api_key=self.api_key)
+        # Create client with SoFi proxy settings
+        try:
+            client = self._create_client()
+        except ValueError as e:
+            return {
+                "success": False,
+                "response": str(e),
+                "model": "error",
+                "tools_used": [],
+                "stop_reason": "error"
+            }

        # Build messages
        messages = []
@@ -99,8 +159,20 @@ class ClaudeProvider(AIProvider):
        if tools:
            api_params["tools"] = tools

-        # Initial call
-        response = await asyncio.to_thread(client.messages.create, **api_params)
+        # Make API call via SoFi proxy
+        try:
+            response = await asyncio.to_thread(
+                client.messages.create,
+                **api_params
+            )
+        except Exception as e:
+            return {
+                "success": False,
+                "response": f"Claude API error: {str(e)}",
+                "model": "error",
+                "tools_used": [],
+                "stop_reason": "error"
+            }

        # Handle tool use loop
        tools_used = []
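The net effect of the ClaudeProvider hunks above: the provider can run without an explicit ANTHROPIC_API_KEY as long as the llm-proxy-keys helper is on disk, and API failures come back as structured error dicts instead of raised exceptions. A minimal caller sketch under those assumptions; the import path is a placeholder and only the parts of the interface visible in this diff (message parameter, result keys) are taken from the commit:

# Sketch only: exercises ClaudeProvider as changed in this commit.
import asyncio

from providers import ClaudeProvider  # hypothetical module path; not shown in this diff


async def main():
    provider = ClaudeProvider()
    if not provider.is_available():
        raise SystemExit("anthropic SDK not installed")
    # Only "message" appears in the visible chat() signature; other kwargs omitted.
    result = await provider.chat(message="List the color tokens in this project.")
    print(result["success"], result.get("stop_reason"))
    print(result["response"])


asyncio.run(main())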
@@ -494,8 +494,7 @@ async def health():
        if str(project_root) not in sys.path:
            sys.path.insert(0, str(project_root))

-        from dss.mcp.handler import get_mcp_handler
-
+        from dss.mcp_server.handler import get_mcp_handler
        handler = get_mcp_handler()
        mcp_ok = handler is not None
    except Exception as e:
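A condensed sketch of the health check this hunk touches, assuming a FastAPI route (uvicorn appears at startup, so this is plausible but not confirmed by the diff); only the renamed import and the mcp_ok flag come from the commit:

# Sketch only: the surrounding endpoint body is not shown in this diff.
from fastapi import FastAPI  # assumed framework

app = FastAPI()


@app.get("/health")
async def health():
    mcp_ok = False
    try:
        # Renamed in this commit: dss.mcp.handler -> dss.mcp_server.handler
        from dss.mcp_server.handler import get_mcp_handler
        handler = get_mcp_handler()
        mcp_ok = handler is not None
    except Exception:
        pass
    return {"status": "ok", "mcp": mcp_ok}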
@@ -2425,10 +2424,16 @@ async def claude_chat(request_data: ClaudeChatRequest):
            "model": "error",
        }

-    # Import MCP handler
-    from dss_mcp.handler import MCPContext, get_mcp_handler
-
-    mcp_handler = get_mcp_handler()
+    # Import MCP handler (may fail if database not migrated)
+    mcp_handler = None
+    MCPContext = None
+    try:
+        from dss_mcp.handler import get_mcp_handler, MCPContext as _MCPContext
+        MCPContext = _MCPContext
+        mcp_handler = get_mcp_handler()
+    except Exception as e:
+        # MCP handler not available, proceed without tools
+        enable_tools = False

    # Build system prompt with design system context
    system_prompt = """You are a design system assistant with access to DSS (Design System Server) tools.
@@ -2449,7 +2454,7 @@ RULES:
- Always provide actionable insights from tool data"""

    # Add project context if available
-    if project_id:
+    if project_id and mcp_handler:
        try:
            project_context = await mcp_handler.get_project_context(project_id, user_id)
            if project_context:
@@ -2462,6 +2467,8 @@ CURRENT PROJECT CONTEXT:
- Integrations: {', '.join(project_context.integrations.keys()) if project_context.integrations else 'None configured'}"""
        except:
            system_prompt += f"\n\nProject ID: {project_id} (context not loaded)"
+    elif project_id:
+        system_prompt += f"\n\nProject ID: {project_id}"

    # Add user context
    if context:
@@ -2477,11 +2484,16 @@ CURRENT PROJECT CONTEXT:

    # Get tools if enabled
    tools = None
-    if enable_tools and project_id:
+    if enable_tools and project_id and mcp_handler:
        tools = mcp_handler.get_tools_for_claude()

-    # Create MCP context
-    mcp_context = MCPContext(project_id=project_id, user_id=user_id)
+    # Create MCP context (or None if MCP not available)
+    mcp_context = None
+    if MCPContext is not None:
+        mcp_context = MCPContext(
+            project_id=project_id,
+            user_id=user_id
+        )

    # Call AI provider with all context
    result = await provider.chat(
@@ -3087,7 +3099,8 @@ def kill_port(port: int, wait: float = 0.5) -> None:
if __name__ == "__main__":
    import uvicorn

-    port = int(os.getenv("PORT", "3456"))
+    # DSS Ports: API=6220, Admin=6221, MCP=6222, Storybook=6226
+    port = int(os.getenv("DSS_API_PORT", "6220"))
    host = os.getenv("HOST", "0.0.0.0")

    # Kill any existing process on the port (twice to handle respawning)
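With the hunk above, the API reads its port from DSS_API_PORT (default 6220) rather than PORT. A startup sketch under that assumption; the app module path is a placeholder and kill_port is omitted since its body is not shown here:

# Sketch only: illustrates the DSS_API_PORT convention introduced in this commit.
# Typical invocation: DSS_API_PORT=6220 python server.py
import os

import uvicorn

if __name__ == "__main__":
    # DSS Ports: API=6220, Admin=6221, MCP=6222, Storybook=6226
    port = int(os.getenv("DSS_API_PORT", "6220"))
    host = os.getenv("HOST", "0.0.0.0")
    # "server:app" is a placeholder module path; the real one is not in this diff.
    uvicorn.run("server:app", host=host, port=port)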