Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed: $(date)
🤖 Clean migration with full functionality preserved
"""
Translation Dictionary Integration for MCP

Provides tools for managing translation dictionaries, theme configuration,
and code generation for design system tokens.

This module wraps the dss.translations Python core to expose these
capabilities through MCP tools.
"""

import asyncio
import json
from typing import Dict, Any, Optional, List
from pathlib import Path
from datetime import datetime

from mcp import types

from .base import BaseIntegration
from ..context.project_context import get_context_manager


# =============================================================================
# MCP Tool Definitions
# =============================================================================

TRANSLATION_TOOLS = [
    # Category 1: Dictionary Management (5 tools)
    types.Tool(
        name="translation_list_dictionaries",
        description="List all available translation dictionaries for a project. Returns dictionary types (figma, css, heroui, custom, etc.), mapping counts, and validation status.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "include_stats": {
                    "type": "boolean",
                    "description": "Include mapping statistics (default: true)",
                    "default": True
                }
            },
            "required": ["project_id"]
        }
    ),
    types.Tool(
        name="translation_get_dictionary",
        description="Get detailed information about a specific translation dictionary including all token mappings, component mappings, and custom props.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "source": {
                    "type": "string",
                    "description": "Dictionary source type",
                    "enum": ["figma", "css", "scss", "heroui", "shadcn", "tailwind", "json", "custom"]
                },
                "include_unmapped": {
                    "type": "boolean",
                    "description": "Include list of unmapped source tokens (default: true)",
                    "default": True
                }
            },
            "required": ["project_id", "source"]
        }
    ),
    types.Tool(
        name="translation_create_dictionary",
        description="Create a new translation dictionary for a project. Maps external tokens (Figma, CSS, etc.) to DSS canonical tokens.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "source": {
                    "type": "string",
                    "description": "Source type for the dictionary",
                    "enum": ["figma", "css", "scss", "heroui", "shadcn", "tailwind", "json", "custom"]
                },
                "token_mappings": {
                    "type": "object",
                    "description": "Token mappings: source_token -> DSS canonical path",
                    "additionalProperties": {
                        "type": "string"
                    }
                },
                "component_mappings": {
                    "type": "object",
                    "description": "Component mappings: source_component -> DSS component",
                    "additionalProperties": {
                        "type": "string"
                    }
                },
                "custom_props": {
                    "type": "object",
                    "description": "Custom properties (must use DSS namespace like 'color.brand.myproject.primary')",
                    "additionalProperties": {}
                },
                "notes": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "Human-readable notes"
                }
            },
            "required": ["project_id", "source"]
        }
    ),
    types.Tool(
        name="translation_update_dictionary",
        description="Update an existing translation dictionary. Add or modify token mappings, component mappings, or custom props.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "source": {
                    "type": "string",
                    "description": "Dictionary source type to update",
                    "enum": ["figma", "css", "scss", "heroui", "shadcn", "tailwind", "json", "custom"]
                },
                "token_mappings": {
                    "type": "object",
                    "description": "Token mappings to add/update",
                    "additionalProperties": {"type": "string"}
                },
                "component_mappings": {
                    "type": "object",
                    "description": "Component mappings to add/update",
                    "additionalProperties": {"type": "string"}
                },
                "custom_props": {
                    "type": "object",
                    "description": "Custom props to add/update",
                    "additionalProperties": {}
                },
                "remove_tokens": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "Source tokens to remove from mappings"
                },
                "notes": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "Notes to append"
                }
            },
            "required": ["project_id", "source"]
        }
    ),
    types.Tool(
        name="translation_validate_dictionary",
        description="Validate a translation dictionary. Checks schema compliance, DSS token path validity, and detects conflicts.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "source": {
                    "type": "string",
                    "description": "Dictionary source type to validate",
                    "enum": ["figma", "css", "scss", "heroui", "shadcn", "tailwind", "json", "custom"]
                },
                "strict": {
                    "type": "boolean",
                    "description": "Strict mode - unknown tokens are errors (default: false)",
                    "default": False
                }
            },
            "required": ["project_id", "source"]
        }
    ),

    # Category 2: Theme Configuration (4 tools)
    types.Tool(
        name="theme_get_config",
        description="Get project theme configuration including base theme, loaded dictionaries, and custom props summary.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                }
            },
            "required": ["project_id"]
        }
    ),
    types.Tool(
        name="theme_resolve",
        description="Resolve complete project theme by merging base theme with translation dictionaries and custom props. Returns fully resolved tokens with provenance.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "base_theme": {
                    "type": "string",
                    "description": "Base theme to use",
                    "enum": ["light", "dark"],
                    "default": "light"
                },
                "include_provenance": {
                    "type": "boolean",
                    "description": "Include token resolution provenance chain (default: false)",
                    "default": False
                }
            },
            "required": ["project_id"]
        }
    ),
    types.Tool(
        name="theme_add_custom_prop",
        description="Add a custom property to the project's custom.json translation dictionary. Custom props extend DSS with project-specific tokens.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "prop_name": {
                    "type": "string",
                    "description": "Property name using DSS namespace (e.g., 'color.brand.acme.primary')"
                },
                "prop_value": {
                    "description": "Property value (string, number, or object)"
                },
                "description": {
                    "type": "string",
                    "description": "Optional description of the custom prop"
                }
            },
            "required": ["project_id", "prop_name", "prop_value"]
        }
    ),
    types.Tool(
        name="theme_get_canonical_tokens",
        description="Get the DSS canonical token structure. Useful for understanding available tokens and valid mapping targets.",
        inputSchema={
            "type": "object",
            "properties": {
                "category": {
                    "type": "string",
                    "description": "Filter by category (optional)",
                    "enum": ["color", "spacing", "typography", "border", "shadow", "motion", "zIndex", "opacity", "breakpoint"]
                },
                "include_aliases": {
                    "type": "boolean",
                    "description": "Include token aliases (default: true)",
                    "default": True
                },
                "include_components": {
                    "type": "boolean",
                    "description": "Include canonical components (default: false)",
                    "default": False
                }
            },
            "required": []
        }
    ),

    # Category 3: Code Generation (3 tools)
    types.Tool(
        name="codegen_export_css",
        description="Generate CSS custom properties from resolved project theme. Outputs :root variables and optional utility classes.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "base_theme": {
                    "type": "string",
                    "description": "Base theme to use",
                    "enum": ["light", "dark"],
                    "default": "light"
                },
                "selector": {
                    "type": "string",
                    "description": "CSS selector for variables (default: ':root')",
                    "default": ":root"
                },
                "prefix": {
                    "type": "string",
                    "description": "CSS variable prefix (default: 'dss')",
                    "default": "dss"
                },
                "include_comments": {
                    "type": "boolean",
                    "description": "Include provenance comments (default: true)",
                    "default": True
                },
                "output_path": {
                    "type": "string",
                    "description": "Optional: Write to file instead of returning content"
                }
            },
            "required": ["project_id"]
        }
    ),
    types.Tool(
        name="codegen_export_scss",
        description="Generate SCSS variables from resolved project theme. Outputs $variables and optional mixins.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "base_theme": {
                    "type": "string",
                    "description": "Base theme to use",
                    "enum": ["light", "dark"],
                    "default": "light"
                },
                "prefix": {
                    "type": "string",
                    "description": "SCSS variable prefix (default: 'dss')",
                    "default": "dss"
                },
                "generate_map": {
                    "type": "boolean",
                    "description": "Generate SCSS map in addition to variables (default: true)",
                    "default": True
                },
                "output_path": {
                    "type": "string",
                    "description": "Optional: Write to file instead of returning content"
                }
            },
            "required": ["project_id"]
        }
    ),
    types.Tool(
        name="codegen_export_json",
        description="Export resolved theme as JSON. Useful for design tool integrations and token documentation.",
        inputSchema={
            "type": "object",
            "properties": {
                "project_id": {
                    "type": "string",
                    "description": "Project ID"
                },
                "base_theme": {
                    "type": "string",
                    "description": "Base theme to use",
                    "enum": ["light", "dark"],
                    "default": "light"
                },
                "format": {
                    "type": "string",
                    "description": "JSON structure format",
                    "enum": ["flat", "nested", "style-dictionary"],
                    "default": "flat"
                },
                "include_metadata": {
                    "type": "boolean",
                    "description": "Include resolution metadata (default: true)",
                    "default": True
                },
                "output_path": {
                    "type": "string",
                    "description": "Optional: Write to file instead of returning content"
                }
            },
            "required": ["project_id"]
        }
    ),
]
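
# Illustrative only: the kind of arguments payload an MCP client might send for
# translation_create_dictionary, matching the inputSchema above. The project ID,
# token names, and values are hypothetical, not taken from a real project.
#
#     {
#         "project_id": "acme-web",
#         "source": "figma",
#         "token_mappings": {"Primary/500": "color.primary.500"},
#         "custom_props": {"color.brand.acme.primary": "#0055ff"},
#         "notes": ["Initial Figma import"]
#     }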


# =============================================================================
# Integration Class
# =============================================================================

class TranslationIntegration(BaseIntegration):
    """Translation dictionary integration wrapper for DSS tools"""

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize Translation integration.

        Args:
            config: Optional configuration
        """
        super().__init__("translations", config or {})
        self.context_manager = get_context_manager()

    async def _get_project_path(self, project_id: str) -> Path:
        """
        Get project path from context manager.

        Args:
            project_id: Project ID

        Returns:
            Project path as Path object
        """
        context = await self.context_manager.get_context(project_id)
        if not context or not context.path:
            raise ValueError(f"Project not found: {project_id}")
        return Path(context.path)

    # =========================================================================
    # Category 1: Dictionary Management
    # =========================================================================

    async def list_dictionaries(
        self,
        project_id: str,
        include_stats: bool = True
    ) -> Dict[str, Any]:
        """
        List all translation dictionaries for project.

        Args:
            project_id: Project ID
            include_stats: Include statistics

        Returns:
            Dictionary list with metadata
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader

            project_path = await self._get_project_path(project_id)
            loader = TranslationDictionaryLoader(str(project_path))

            # Get available dictionaries
            available_files = []
            if loader.translations_dir.exists():
                available_files = [f.stem for f in loader.translations_dir.glob("*.json")]

            result = {
                "project_id": project_id,
                "dictionaries": [],
                "has_translations": bool(available_files),
                "translations_dir": str(loader.get_translations_dir())
            }

            if include_stats and available_files:
                # Load registry to get statistics
                registry = await loader.load_all()
                for source in available_files:
                    dict_info = {
                        "source": source,
                        "file": f"{source}.json"
                    }
                    if source in registry.dictionaries:
                        d = registry.dictionaries[source]
                        dict_info["token_count"] = len(d.mappings.tokens)
                        dict_info["component_count"] = len(d.mappings.components)
                        dict_info["custom_prop_count"] = len(d.custom_props)
                        dict_info["unmapped_count"] = len(d.unmapped)
                    result["dictionaries"].append(dict_info)

                result["conflicts"] = registry.conflicts
            else:
                result["dictionaries"] = [{"source": s} for s in available_files]

            return result

        except Exception as e:
            return {
                "error": f"Failed to list dictionaries: {str(e)}",
                "project_id": project_id
            }

    async def get_dictionary(
        self,
        project_id: str,
        source: str,
        include_unmapped: bool = True
    ) -> Dict[str, Any]:
        """
        Get translation dictionary details.

        Args:
            project_id: Project ID
            source: Dictionary source type
            include_unmapped: Include unmapped tokens

        Returns:
            Dictionary details
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader

            project_path = await self._get_project_path(project_id)
            loader = TranslationDictionaryLoader(str(project_path))

            dictionary = await loader.load_dictionary(source)
            if not dictionary:
                available = [f.stem for f in loader.translations_dir.glob("*.json")] if loader.translations_dir.exists() else []
                return {
                    "error": f"Dictionary not found: {source}",
                    "project_id": project_id,
                    "available": available
                }

            result = {
                "project_id": project_id,
                "source": source,
                "uuid": str(dictionary.uuid),
                "version": dictionary.version,
                "created_at": dictionary.created_at.isoformat() if hasattr(dictionary.created_at, 'isoformat') else str(dictionary.created_at),
                "updated_at": dictionary.updated_at.isoformat() if hasattr(dictionary.updated_at, 'isoformat') else str(dictionary.updated_at),
                "mappings": {
                    "tokens": dictionary.mappings.tokens,
                    "components": dictionary.mappings.components,
                    "patterns": dictionary.mappings.patterns
                },
                "custom_props": dictionary.custom_props,
                "notes": dictionary.notes or []
            }

            if include_unmapped:
                result["unmapped"] = dictionary.unmapped

            return result

        except Exception as e:
            return {
                "error": f"Failed to get dictionary: {str(e)}",
                "project_id": project_id,
                "source": source
            }

    async def create_dictionary(
        self,
        project_id: str,
        source: str,
        token_mappings: Optional[Dict[str, str]] = None,
        component_mappings: Optional[Dict[str, str]] = None,
        custom_props: Optional[Dict[str, Any]] = None,
        notes: Optional[List[str]] = None
    ) -> Dict[str, Any]:
        """
        Create new translation dictionary.

        Args:
            project_id: Project ID
            source: Source type
            token_mappings: Token mappings
            component_mappings: Component mappings
            custom_props: Custom properties
            notes: Notes

        Returns:
            Creation result
        """
        try:
            from dss.translations.writer import TranslationDictionaryWriter
            from dss.translations.validator import TranslationValidator

            project_path = await self._get_project_path(project_id)
            writer = TranslationDictionaryWriter(str(project_path))

            # Validate before creating
            validator = TranslationValidator()
            test_data = {
                "$schema": "dss-translation-v1",
                "project": project_id,
                "source": source,
                "mappings": {
                    "tokens": token_mappings or {},
                    "components": component_mappings or {}
                },
                "custom_props": custom_props or {}
            }

            validation_result = validator.validate_dictionary(test_data)
            if not validation_result.is_valid:
                return {
                    "error": "Validation failed",
                    "errors": [str(e) for e in validation_result.errors],
                    "warnings": [str(w) for w in validation_result.warnings]
                }

            # Create the dictionary
            dictionary = await writer.create(
                source=source,
                project=project_id,
                token_mappings=token_mappings,
                component_mappings=component_mappings,
                custom_props=custom_props,
                notes=notes
            )

            return {
                "project_id": project_id,
                "source": source,
                "uuid": str(dictionary.uuid),
                "created": True,
                "file_path": str(writer.translations_dir / f"{source}.json"),
                "token_count": len(dictionary.mappings.tokens),
                "component_count": len(dictionary.mappings.components),
                "custom_prop_count": len(dictionary.custom_props),
                "warnings": [str(w) for w in validation_result.warnings] if validation_result.warnings else []
            }

        except Exception as e:
            return {
                "error": f"Failed to create dictionary: {str(e)}",
                "project_id": project_id,
                "source": source
            }

    async def update_dictionary(
        self,
        project_id: str,
        source: str,
        token_mappings: Optional[Dict[str, str]] = None,
        component_mappings: Optional[Dict[str, str]] = None,
        custom_props: Optional[Dict[str, Any]] = None,
        remove_tokens: Optional[List[str]] = None,
        notes: Optional[List[str]] = None
    ) -> Dict[str, Any]:
        """
        Update existing translation dictionary.

        Args:
            project_id: Project ID
            source: Source type
            token_mappings: Token mappings to add/update
            component_mappings: Component mappings to add/update
            custom_props: Custom props to add/update
            remove_tokens: Tokens to remove
            notes: Notes to append

        Returns:
            Update result
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader
            from dss.translations.writer import TranslationDictionaryWriter

            project_path = await self._get_project_path(project_id)
            loader = TranslationDictionaryLoader(str(project_path))
            writer = TranslationDictionaryWriter(str(project_path))

            # Load existing dictionary
            existing = await loader.load_dictionary(source)
            if not existing:
                return {
                    "error": f"Dictionary not found: {source}",
                    "project_id": project_id
                }

            # Merge updates
            updated_tokens = dict(existing.mappings.tokens)
            if token_mappings:
                updated_tokens.update(token_mappings)
            if remove_tokens:
                for token in remove_tokens:
                    updated_tokens.pop(token, None)

            updated_components = dict(existing.mappings.components)
            if component_mappings:
                updated_components.update(component_mappings)

            updated_custom = dict(existing.custom_props)
            if custom_props:
                updated_custom.update(custom_props)

            updated_notes = list(existing.notes or [])
            if notes:
                updated_notes.extend(notes)

            # Write updated dictionary
            dictionary = await writer.create(
                source=source,
                project=project_id,
                token_mappings=updated_tokens,
                component_mappings=updated_components,
                custom_props=updated_custom,
                notes=updated_notes
            )

            return {
                "project_id": project_id,
                "source": source,
                "uuid": str(dictionary.uuid),
                "updated": True,
                "file_path": str(writer.translations_dir / f"{source}.json"),
                "token_count": len(dictionary.mappings.tokens),
                "component_count": len(dictionary.mappings.components),
                "custom_prop_count": len(dictionary.custom_props),
                "notes_count": len(dictionary.notes or [])
            }

        except Exception as e:
            return {
                "error": f"Failed to update dictionary: {str(e)}",
                "project_id": project_id,
                "source": source
            }

    async def validate_dictionary(
        self,
        project_id: str,
        source: str,
        strict: bool = False
    ) -> Dict[str, Any]:
        """
        Validate a translation dictionary.

        Args:
            project_id: Project ID
            source: Source type
            strict: Strict validation mode

        Returns:
            Validation result
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader
            from dss.translations.validator import TranslationValidator

            project_path = await self._get_project_path(project_id)
            loader = TranslationDictionaryLoader(str(project_path))

            dictionary = await loader.load_dictionary(source)
            if not dictionary:
                return {
                    "error": f"Dictionary not found: {source}",
                    "project_id": project_id,
                    "valid": False
                }

            validator = TranslationValidator()

            # Convert dictionary to dict for validation
            dict_data = {
                "$schema": "dss-translation-v1",
                "uuid": str(dictionary.uuid),
                "project": project_id,
                "source": source,
                "version": dictionary.version,
                "created_at": dictionary.created_at.isoformat() if hasattr(dictionary.created_at, 'isoformat') else str(dictionary.created_at),
                "updated_at": dictionary.updated_at.isoformat() if hasattr(dictionary.updated_at, 'isoformat') else str(dictionary.updated_at),
                "mappings": {
                    "tokens": dictionary.mappings.tokens,
                    "components": dictionary.mappings.components,
                    "patterns": dictionary.mappings.patterns
                },
                "custom_props": dictionary.custom_props,
                "unmapped": dictionary.unmapped,
                "notes": dictionary.notes or []
            }

            result = validator.validate_dictionary(dict_data, strict=strict)

            return {
                "project_id": project_id,
                "source": source,
                "valid": result.is_valid,
                "errors": [str(e) for e in (result.errors or [])],
                "warnings": [str(w) for w in (result.warnings or [])]
            }

        except Exception as e:
            return {
                "error": f"Failed to validate dictionary: {str(e)}",
                "project_id": project_id,
                "source": source,
                "valid": False
            }

    # =========================================================================
    # Category 2: Theme Configuration
    # =========================================================================

    async def get_config(self, project_id: str) -> Dict[str, Any]:
        """
        Get project theme configuration.

        Args:
            project_id: Project ID

        Returns:
            Theme configuration
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader

            project_path = await self._get_project_path(project_id)
            loader = TranslationDictionaryLoader(str(project_path))

            # Load registry to get full picture
            registry = await loader.load_all()

            return {
                "project_id": project_id,
                "base_themes_available": ["light", "dark"],
                "translation_dictionaries": list(registry.dictionaries.keys()),
                "total_token_mappings": len(registry.combined_token_map),
                "total_component_mappings": len(registry.combined_component_map),
                "total_custom_props": len(registry.all_custom_props),
                "conflicts": registry.conflicts,
                "has_config": loader.has_translations()
            }

        except Exception as e:
            return {
                "error": f"Failed to get config: {str(e)}",
                "project_id": project_id
            }

    async def resolve_theme(
        self,
        project_id: str,
        base_theme: str = "light",
        include_provenance: bool = False
    ) -> Dict[str, Any]:
        """
        Resolve complete project theme.

        Args:
            project_id: Project ID
            base_theme: Base theme (light or dark)
            include_provenance: Include provenance information

        Returns:
            Resolved theme with tokens
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader
            from dss.translations.merger import ThemeMerger

            project_path = await self._get_project_path(project_id)

            # Load translation registry
            loader = TranslationDictionaryLoader(str(project_path))
            registry = await loader.load_all()

            # Create merger and resolve
            merger = ThemeMerger(registry)
            resolved = await merger.merge(
                base_theme=base_theme,
                project_name=project_id
            )

            # Format tokens for output
            tokens = {}
            for dss_path, resolved_token in resolved.tokens.items():
                token_data = {
                    "value": str(resolved_token.value),
                    "source_token": resolved_token.source_token,
                    "is_custom": resolved_token.is_custom
                }
                if include_provenance and hasattr(resolved_token, 'provenance'):
                    token_data["provenance"] = resolved_token.provenance
                tokens[dss_path] = token_data

            custom_props = {}
            for dss_path, resolved_token in resolved.custom_props.items():
                prop_data = {
                    "value": str(resolved_token.value)
                }
                if include_provenance and hasattr(resolved_token, 'provenance'):
                    prop_data["provenance"] = resolved_token.provenance
                custom_props[dss_path] = prop_data

            return {
                "project_id": project_id,
                "name": resolved.name,
                "base_theme": resolved.base_theme,
                "version": resolved.version,
                "resolved_at": resolved.resolved_at.isoformat() if hasattr(resolved.resolved_at, 'isoformat') else str(resolved.resolved_at),
                "translations_applied": resolved.translations_applied,
                "token_count": len(tokens),
                "custom_prop_count": len(custom_props),
                "tokens": tokens,
                "custom_props": custom_props
            }

        except Exception as e:
            return {
                "error": f"Failed to resolve theme: {str(e)}",
                "project_id": project_id
            }
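
    # Shape of a successful resolve_theme() result, sketched from the code
    # above (values are illustrative, keys abbreviated):
    #
    #     {
    #         "project_id": "acme-web",
    #         "base_theme": "light",
    #         "translations_applied": ["figma", "custom"],
    #         "token_count": 2,
    #         "tokens": {
    #             "color.primary.500": {
    #                 "value": "#0055ff",
    #                 "source_token": "Primary/500",
    #                 "is_custom": False
    #             },
    #             ...
    #         },
    #         "custom_props": {...}
    #     }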

    async def add_custom_prop(
        self,
        project_id: str,
        prop_name: str,
        prop_value: Any,
        description: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Add custom property to custom.json.

        Args:
            project_id: Project ID
            prop_name: Property name (DSS namespace)
            prop_value: Property value
            description: Optional description

        Returns:
            Update result
        """
        try:
            from dss.translations.loader import TranslationDictionaryLoader
            from dss.translations.writer import TranslationDictionaryWriter

            project_path = await self._get_project_path(project_id)
            loader = TranslationDictionaryLoader(str(project_path))
            writer = TranslationDictionaryWriter(str(project_path))

            # Load or create custom dictionary
            custom_dict = await loader.load_dictionary("custom")
            if custom_dict:
                custom_props = dict(custom_dict.custom_props)
            else:
                custom_props = {}

            # Add new property
            custom_props[prop_name] = prop_value

            # Write updated custom dictionary
            dictionary = await writer.create(
                source="custom",
                project=project_id,
                custom_props=custom_props,
                notes=[f"Added {prop_name}"] if not description else [description]
            )

            return {
                "project_id": project_id,
                "prop_name": prop_name,
                "prop_value": prop_value,
                "added": True,
                "custom_prop_count": len(dictionary.custom_props)
            }

        except Exception as e:
            return {
                "error": f"Failed to add custom prop: {str(e)}",
                "project_id": project_id,
                "prop_name": prop_name
            }

    async def get_canonical_tokens(
        self,
        category: Optional[str] = None,
        include_aliases: bool = True,
        include_components: bool = False
    ) -> Dict[str, Any]:
        """
        Get DSS canonical token structure.

        Args:
            category: Filter by category
            include_aliases: Include aliases
            include_components: Include components

        Returns:
            Canonical token structure
        """
        try:
            from dss.translations.canonical import (
                DSS_CANONICAL_TOKENS,
                DSS_TOKEN_ALIASES,
                DSS_CANONICAL_COMPONENTS,
                get_canonical_token_categories
            )

            result = {
                "total_tokens": len(DSS_CANONICAL_TOKENS)
            }

            if category:
                # Filter by category
                categories = get_canonical_token_categories()
                result["category"] = category
                result["tokens"] = categories.get(category, [])
                result["token_count"] = len(categories.get(category, []))
            else:
                # Return all organized by category
                categories = get_canonical_token_categories()
                result["tokens_by_category"] = {k: list(v) for k, v in categories.items()}

            if include_aliases:
                result["aliases"] = DSS_TOKEN_ALIASES

            if include_components:
                result["components"] = list(DSS_CANONICAL_COMPONENTS)
                result["component_count"] = len(DSS_CANONICAL_COMPONENTS)

            return result

        except Exception as e:
            return {
                "error": f"Failed to get canonical tokens: {str(e)}"
            }

    # =========================================================================
    # Category 3: Code Generation
    # =========================================================================

    async def export_css(
        self,
        project_id: str,
        base_theme: str = "light",
        selector: str = ":root",
        prefix: str = "dss",
        include_comments: bool = True,
        output_path: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Generate CSS variables from resolved theme.

        Args:
            project_id: Project ID
            base_theme: Base theme
            selector: CSS selector
            prefix: CSS variable prefix
            include_comments: Include comments
            output_path: Optional output file path

        Returns:
            CSS export result
        """
        try:
            # Resolve theme first
            resolved_result = await self.resolve_theme(
                project_id,
                base_theme,
                include_provenance=include_comments
            )

            if "error" in resolved_result:
                return resolved_result

            # Generate CSS
            css_lines = []
            if include_comments:
                css_lines.append(f"/* DSS Theme: {resolved_result['name']} */")
                css_lines.append(f"/* Base: {base_theme} | Generated: {resolved_result['resolved_at']} */")
                css_lines.append(f"/* Translations: {', '.join(resolved_result['translations_applied'])} */")
                css_lines.append("")

            css_lines.append(f"{selector} {{")

            # Core tokens
            for dss_path, token_data in resolved_result["tokens"].items():
                var_name = f"--{prefix}-{dss_path.replace('.', '-')}"
                value = token_data["value"]

                if include_comments and token_data.get("source_token"):
                    css_lines.append(f" /* Source: {token_data['source_token']} */")

                css_lines.append(f" {var_name}: {value};")

            # Custom props
            if resolved_result["custom_props"]:
                css_lines.append("")
                css_lines.append(" /* Custom Properties */")
                for dss_path, prop_data in resolved_result["custom_props"].items():
                    var_name = f"--{prefix}-{dss_path.replace('.', '-')}"
                    css_lines.append(f" {var_name}: {prop_data['value']};")

            css_lines.append("}")

            css_content = "\n".join(css_lines)

            if output_path:
                project_path = await self._get_project_path(project_id)
                full_path = (project_path / output_path).resolve()

                # Validate path is within project directory (prevent traversal)
                try:
                    full_path.relative_to(project_path)
                except ValueError:
                    return {
                        "error": "Output path must be within project directory",
                        "project_id": project_id
                    }

                full_path.parent.mkdir(parents=True, exist_ok=True)
                # Use asyncio.to_thread to avoid blocking event loop
                await asyncio.to_thread(full_path.write_text, css_content)

                return {
                    "project_id": project_id,
                    "output_path": str(full_path),
                    "written": True,
                    "token_count": resolved_result["token_count"],
                    "custom_prop_count": resolved_result["custom_prop_count"]
                }

            return {
                "project_id": project_id,
                "content": css_content,
                "token_count": resolved_result["token_count"],
                "custom_prop_count": resolved_result["custom_prop_count"]
            }

        except Exception as e:
            return {
                "error": f"Failed to export CSS: {str(e)}",
                "project_id": project_id
            }
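
    # For orientation: with the default prefix "dss", a token path such as
    # "color.primary.500" becomes the CSS custom property
    # "--dss-color-primary-500", so the generated output is roughly
    # (illustrative values):
    #
    #     :root {
    #      --dss-color-primary-500: #0055ff;
    #      --dss-spacing-md: 16px;
    #     }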

    async def export_scss(
        self,
        project_id: str,
        base_theme: str = "light",
        prefix: str = "dss",
        generate_map: bool = True,
        output_path: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Generate SCSS variables from resolved theme.

        Args:
            project_id: Project ID
            base_theme: Base theme
            prefix: SCSS variable prefix
            generate_map: Generate SCSS map
            output_path: Optional output file path

        Returns:
            SCSS export result
        """
        try:
            # Resolve theme first
            resolved_result = await self.resolve_theme(project_id, base_theme, include_provenance=False)

            if "error" in resolved_result:
                return resolved_result

            # Generate SCSS
            scss_lines = []
            scss_lines.append(f"// DSS Theme: {resolved_result['name']}")
            scss_lines.append(f"// Base: {base_theme} | Generated: {resolved_result['resolved_at']}")
            scss_lines.append(f"// Translations: {', '.join(resolved_result['translations_applied'])}")
            scss_lines.append("")

            # Variables
            for dss_path, token_data in resolved_result["tokens"].items():
                var_name = f"${prefix}-{dss_path.replace('.', '-')}"
                value = token_data["value"]
                scss_lines.append(f"{var_name}: {value};")

            # Custom props
            if resolved_result["custom_props"]:
                scss_lines.append("")
                for dss_path, prop_data in resolved_result["custom_props"].items():
                    var_name = f"${prefix}-{dss_path.replace('.', '-')}"
                    scss_lines.append(f"{var_name}: {prop_data['value']};")

            # Generate map if requested
            if generate_map:
                scss_lines.append("")
                scss_lines.append(f"${prefix}-tokens: (")

                # Add all tokens to map
                token_list = list(resolved_result["tokens"].items())
                for i, (dss_path, token_data) in enumerate(token_list):
                    key = dss_path.replace('.', '-')
                    value = token_data["value"]
                    comma = "," if i < len(token_list) - 1 else ""
                    scss_lines.append(f' "{key}": {value}{comma}')

                # Add custom props to map
                custom_list = list(resolved_result["custom_props"].items())
                if custom_list and token_list:
                    scss_lines[-1] = scss_lines[-1] + ","
                for i, (dss_path, prop_data) in enumerate(custom_list):
                    key = dss_path.replace('.', '-')
                    value = prop_data["value"]
                    comma = "," if i < len(custom_list) - 1 else ""
                    scss_lines.append(f' "{key}": {value}{comma}')

                scss_lines.append(");")

            scss_content = "\n".join(scss_lines)

            if output_path:
                project_path = await self._get_project_path(project_id)
                full_path = (project_path / output_path).resolve()

                # Validate path is within project directory (prevent traversal)
                try:
                    full_path.relative_to(project_path)
                except ValueError:
                    return {
                        "error": "Output path must be within project directory",
                        "project_id": project_id
                    }

                full_path.parent.mkdir(parents=True, exist_ok=True)
                # Use asyncio.to_thread to avoid blocking event loop
                await asyncio.to_thread(full_path.write_text, scss_content)

                return {
                    "project_id": project_id,
                    "output_path": str(full_path),
                    "written": True,
                    "token_count": resolved_result["token_count"],
                    "custom_prop_count": resolved_result["custom_prop_count"]
                }

            return {
                "project_id": project_id,
                "content": scss_content,
                "token_count": resolved_result["token_count"],
                "custom_prop_count": resolved_result["custom_prop_count"]
            }

        except Exception as e:
            return {
                "error": f"Failed to export SCSS: {str(e)}",
                "project_id": project_id
            }

    async def export_json(
        self,
        project_id: str,
        base_theme: str = "light",
        format: str = "flat",
        include_metadata: bool = True,
        output_path: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Export resolved theme as JSON.

        Args:
            project_id: Project ID
            base_theme: Base theme
            format: JSON format (flat, nested, style-dictionary)
            include_metadata: Include metadata
            output_path: Optional output file path

        Returns:
            JSON export result
        """
        try:
            # Resolve theme first
            resolved_result = await self.resolve_theme(project_id, base_theme, include_provenance=False)

            if "error" in resolved_result:
                return resolved_result

            # Build JSON structure based on format
            if format == "nested":
                # Nested format: organize by category
                json_data = self._build_nested_tokens(resolved_result)
            elif format == "style-dictionary":
                # Style Dictionary format
                json_data = self._build_style_dictionary_tokens(resolved_result)
            else:
                # Flat format (default)
                json_data = {
                    "tokens": resolved_result["tokens"],
                    "customProps": resolved_result["custom_props"]
                }

            if include_metadata:
                json_data["metadata"] = {
                    "project_id": project_id,
                    "base_theme": base_theme,
                    "generated_at": resolved_result["resolved_at"],
                    "token_count": resolved_result["token_count"],
                    "custom_prop_count": resolved_result["custom_prop_count"],
                    "translations": resolved_result["translations_applied"]
                }

            json_content = json.dumps(json_data, indent=2)

            if output_path:
                project_path = await self._get_project_path(project_id)
                full_path = (project_path / output_path).resolve()

                # Validate path is within project directory (prevent traversal)
                try:
                    full_path.relative_to(project_path)
                except ValueError:
                    return {
                        "error": "Output path must be within project directory",
                        "project_id": project_id
                    }

                full_path.parent.mkdir(parents=True, exist_ok=True)
                # Use asyncio.to_thread to avoid blocking event loop
                await asyncio.to_thread(full_path.write_text, json_content)

                return {
                    "project_id": project_id,
                    "output_path": str(full_path),
                    "written": True,
                    "token_count": resolved_result["token_count"],
                    "custom_prop_count": resolved_result["custom_prop_count"]
                }

            return {
                "project_id": project_id,
                "content": json_data,
                "token_count": resolved_result["token_count"],
                "custom_prop_count": resolved_result["custom_prop_count"]
            }

        except Exception as e:
            return {
                "error": f"Failed to export JSON: {str(e)}",
                "project_id": project_id
            }

    def _build_nested_tokens(self, resolved_result: Dict[str, Any]) -> Dict[str, Any]:
        """Build nested token structure from flat tokens."""
        nested = {}

        for dss_path, token_data in resolved_result["tokens"].items():
            parts = dss_path.split('.')
            current = nested
            for part in parts[:-1]:
                if part not in current:
                    current[part] = {}
                current = current[part]
            current[parts[-1]] = token_data["value"]

        # Add custom props
        if "customProps" not in nested:
            nested["customProps"] = {}
        for dss_path, prop_data in resolved_result["custom_props"].items():
            parts = dss_path.split('.')
            current = nested["customProps"]
            for part in parts[:-1]:
                if part not in current:
                    current[part] = {}
                current = current[part]
            current[parts[-1]] = prop_data["value"]

        return nested
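
    # Example of the flat -> nested transform above (illustrative):
    #   {"color.primary.500": {"value": "#0055ff"}}  becomes
    #   {"color": {"primary": {"500": "#0055ff"}}}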

    def _build_style_dictionary_tokens(self, resolved_result: Dict[str, Any]) -> Dict[str, Any]:
        """Build Style Dictionary format from flat tokens."""
        style_dict = {}

        for dss_path, token_data in resolved_result["tokens"].items():
            parts = dss_path.split('.')
            current = style_dict
            for part in parts[:-1]:
                if part not in current:
                    current[part] = {}
                current = current[part]
            current[parts[-1]] = {
                "value": token_data["value"],
                "type": self._infer_token_type(dss_path),
                "description": f"DSS token {dss_path}"
            }

        # Add custom props
        if "custom" not in style_dict:
            style_dict["custom"] = {}
        for dss_path, prop_data in resolved_result["custom_props"].items():
            parts = dss_path.split('.')
            current = style_dict["custom"]
            for part in parts[:-1]:
                if part not in current:
                    current[part] = {}
                current = current[part]
            current[parts[-1]] = {
                "value": prop_data["value"],
                "type": "custom",
                "description": f"Custom token {dss_path}"
            }

        return style_dict
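
    # In the Style Dictionary form each leaf becomes a {"value", "type",
    # "description"} object, e.g. (illustrative):
    #   {"color": {"primary": {"500": {
    #       "value": "#0055ff",
    #       "type": "color",
    #       "description": "DSS token color.primary.500"}}}}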

    def _infer_token_type(self, token_path: str) -> str:
        """Infer token type from path."""
        if token_path.startswith("color"):
            return "color"
        elif token_path.startswith("spacing"):
            return "dimension"
        elif token_path.startswith("typography"):
            return "typography"
        elif token_path.startswith("border"):
            return "border"
        elif token_path.startswith("shadow"):
            return "shadow"
        else:
            return "string"


# =============================================================================
# MCP Tool Executor
# =============================================================================

class TranslationTools:
    """MCP tool executor for translation integration"""

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize Translation tools.

        Args:
            config: Optional configuration
        """
        self.translations = TranslationIntegration(config)

    async def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """
        Execute translation tool.

        Args:
            tool_name: Name of tool to execute
            arguments: Tool arguments

        Returns:
            Tool execution result
        """
        handlers = {
            # Category 1
            "translation_list_dictionaries": self.translations.list_dictionaries,
            "translation_get_dictionary": self.translations.get_dictionary,
            "translation_create_dictionary": self.translations.create_dictionary,
            "translation_update_dictionary": self.translations.update_dictionary,
            "translation_validate_dictionary": self.translations.validate_dictionary,
            # Category 2
            "theme_get_config": self.translations.get_config,
            "theme_resolve": self.translations.resolve_theme,
            "theme_add_custom_prop": self.translations.add_custom_prop,
            "theme_get_canonical_tokens": self.translations.get_canonical_tokens,
            # Category 3
            "codegen_export_css": self.translations.export_css,
            "codegen_export_scss": self.translations.export_scss,
            "codegen_export_json": self.translations.export_json,
        }

        handler = handlers.get(tool_name)
        if not handler:
            return {"error": f"Unknown translation tool: {tool_name}"}

        try:
            # Remove internal prefixes and execute
            clean_args = {k: v for k, v in arguments.items() if not k.startswith("_")}
            return await handler(**clean_args)
        except Exception as e:
            return {"error": f"Tool execution failed: {str(e)}", "tool": tool_name}
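
# Minimal usage sketch (illustrative; assumes the package is installed so the
# relative imports above resolve, and that the call runs inside an async
# context such as the MCP server's event loop):
#
#     tools = TranslationTools()
#     result = await tools.execute_tool(
#         "theme_get_canonical_tokens", {"category": "color"}
#     )
#     # result["tokens"] -> canonical DSS color tokens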