Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed: $(date)
🤖 Clean migration with full functionality preserved
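The DSS_BASE_PATH convention above can be consumed from any of the Python services. A minimal sketch, assuming the variable is provided by the .env file described above; the fallback value and the resolve_dss_path helper are illustrative, not part of the codebase:

import os
from pathlib import Path

# DSS_BASE_PATH comes from the .env file; the fallback mirrors the path listed above.
DSS_BASE_PATH = Path(os.environ.get("DSS_BASE_PATH", "/home/overbits/dss"))

def resolve_dss_path(relative: str) -> Path:
    """Resolve a project-relative path against the DSS base directory (hypothetical helper)."""
    return (DSS_BASE_PATH / relative).resolve()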
Python · 288 lines · 8.9 KiB
"""
|
|
Translation Dictionary Writer
|
|
|
|
Writes and updates translation dictionary files.
|
|
"""
|
|
|
|
import json
|
|
from datetime import datetime, timezone
|
|
from pathlib import Path
|
|
from typing import Any, Dict, List, Optional, Union
|
|
|
|
from .models import TranslationDictionary, TranslationMappings, TranslationSource
|
|
|
|
|
|
class TranslationDictionaryWriter:
|
|
"""
|
|
Writes translation dictionaries to project .dss/translations/ directory.
|
|
|
|
Usage:
|
|
writer = TranslationDictionaryWriter("/path/to/project")
|
|
|
|
# Create new dictionary
|
|
await writer.create(
|
|
source=TranslationSource.CSS,
|
|
project="my-project",
|
|
token_mappings={"--brand-blue": "color.primary.500"}
|
|
)
|
|
|
|
# Add mapping to existing dictionary
|
|
await writer.add_mapping(
|
|
source=TranslationSource.CSS,
|
|
source_token="--brand-green",
|
|
dss_token="color.success.500"
|
|
)
|
|
"""
|
|
|
|
DEFAULT_DIR = ".dss/translations"
|
|
|
|
def __init__(
|
|
self,
|
|
project_path: Union[str, Path],
|
|
translations_dir: Optional[str] = None,
|
|
):
|
|
"""
|
|
Initialize writer.
|
|
|
|
Args:
|
|
project_path: Root path to project
|
|
translations_dir: Custom translations directory
|
|
"""
|
|
self.project_path = Path(project_path).resolve()
|
|
translations_subdir = translations_dir or self.DEFAULT_DIR
|
|
self.translations_dir = self._validate_safe_path(self.project_path / translations_subdir)
|
|
|
|
def _validate_safe_path(self, path: Path) -> Path:
|
|
"""
|
|
Validate that path is within project directory (prevent path traversal).
|
|
|
|
Args:
|
|
path: Path to validate
|
|
|
|
Returns:
|
|
Validated path
|
|
|
|
Raises:
|
|
ValueError: If path is outside project directory
|
|
"""
|
|
resolved = path.resolve()
|
|
try:
|
|
resolved.relative_to(self.project_path)
|
|
return resolved
|
|
except ValueError:
|
|
raise ValueError(f"Path {path} is outside project directory {self.project_path}")
|
|
|
|
async def create(
|
|
self,
|
|
source: Union[str, TranslationSource],
|
|
project: str,
|
|
token_mappings: Optional[Dict[str, str]] = None,
|
|
component_mappings: Optional[Dict[str, str]] = None,
|
|
custom_props: Optional[Dict[str, Any]] = None,
|
|
notes: Optional[List[str]] = None,
|
|
) -> TranslationDictionary:
|
|
"""
|
|
Create a new translation dictionary.
|
|
|
|
Args:
|
|
source: Source type
|
|
project: Project identifier
|
|
token_mappings: Initial token mappings
|
|
component_mappings: Initial component mappings
|
|
custom_props: Initial custom props
|
|
notes: Optional notes
|
|
|
|
Returns:
|
|
Created TranslationDictionary
|
|
"""
|
|
if isinstance(source, str):
|
|
source = TranslationSource(source)
|
|
|
|
# Ensure directory exists
|
|
self.translations_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
# Create dictionary
|
|
dictionary = TranslationDictionary(
|
|
project=project,
|
|
source=source,
|
|
mappings=TranslationMappings(
|
|
tokens=token_mappings or {},
|
|
components=component_mappings or {},
|
|
),
|
|
custom_props=custom_props or {},
|
|
notes=notes or [],
|
|
)
|
|
|
|
# Write to file
|
|
file_path = self.translations_dir / f"{source.value}.json"
|
|
await self._write_file(file_path, dictionary)
|
|
|
|
return dictionary
|
|
|
|
async def update(
|
|
self,
|
|
source: Union[str, TranslationSource],
|
|
token_mappings: Optional[Dict[str, str]] = None,
|
|
component_mappings: Optional[Dict[str, str]] = None,
|
|
custom_props: Optional[Dict[str, Any]] = None,
|
|
notes: Optional[List[str]] = None,
|
|
) -> TranslationDictionary:
|
|
"""
|
|
Update an existing translation dictionary.
|
|
|
|
Args:
|
|
source: Source type
|
|
token_mappings: Token mappings to add/update
|
|
component_mappings: Component mappings to add/update
|
|
custom_props: Custom props to add/update
|
|
notes: Notes to append
|
|
|
|
Returns:
|
|
Updated TranslationDictionary
|
|
"""
|
|
if isinstance(source, str):
|
|
source = TranslationSource(source)
|
|
|
|
file_path = self.translations_dir / f"{source.value}.json"
|
|
if not file_path.exists():
|
|
raise FileNotFoundError(f"Dictionary not found: {file_path}. Use create() first.")
|
|
|
|
# Load existing
|
|
with open(file_path, "r", encoding="utf-8") as f:
|
|
data = json.load(f)
|
|
|
|
dictionary = TranslationDictionary(**data)
|
|
|
|
# Update mappings
|
|
if token_mappings:
|
|
dictionary.mappings.tokens.update(token_mappings)
|
|
if component_mappings:
|
|
dictionary.mappings.components.update(component_mappings)
|
|
if custom_props:
|
|
dictionary.custom_props.update(custom_props)
|
|
if notes:
|
|
dictionary.notes.extend(notes)
|
|
|
|
dictionary.updated_at = datetime.now(timezone.utc)
|
|
|
|
# Write back
|
|
await self._write_file(file_path, dictionary)
|
|
|
|
return dictionary
|
|
|
|
async def add_mapping(
|
|
self, source: Union[str, TranslationSource], source_token: str, dss_token: str
|
|
) -> None:
|
|
"""
|
|
Add a single token mapping to a dictionary.
|
|
|
|
Args:
|
|
source: Source type
|
|
source_token: Source token name
|
|
dss_token: DSS canonical path
|
|
"""
|
|
await self.update(source=source, token_mappings={source_token: dss_token})
|
|
|
|
async def add_custom_prop(
|
|
self, source: Union[str, TranslationSource], prop_name: str, prop_value: Any
|
|
) -> None:
|
|
"""
|
|
Add a custom prop to a dictionary.
|
|
|
|
Args:
|
|
source: Source type
|
|
prop_name: Property name (must use DSS namespace)
|
|
prop_value: Property value
|
|
"""
|
|
# Validate namespace
|
|
if "." not in prop_name:
|
|
raise ValueError(f"Custom prop must use dot-notation namespace: {prop_name}")
|
|
|
|
await self.update(source=source, custom_props={prop_name: prop_value})
|
|
|
|
async def remove_mapping(self, source: Union[str, TranslationSource], source_token: str) -> None:
|
|
"""
|
|
Remove a token mapping from a dictionary.
|
|
|
|
Args:
|
|
source: Source type
|
|
source_token: Source token to remove
|
|
"""
|
|
if isinstance(source, str):
|
|
source = TranslationSource(source)
|
|
|
|
file_path = self.translations_dir / f"{source.value}.json"
|
|
if not file_path.exists():
|
|
return
|
|
|
|
with open(file_path, "r", encoding="utf-8") as f:
|
|
data = json.load(f)
|
|
|
|
dictionary = TranslationDictionary(**data)
|
|
|
|
if source_token in dictionary.mappings.tokens:
|
|
del dictionary.mappings.tokens[source_token]
|
|
dictionary.updated_at = datetime.now(timezone.utc)
|
|
await self._write_file(file_path, dictionary)
|
|
|
|
async def mark_unmapped(
|
|
self, source: Union[str, TranslationSource], unmapped_tokens: List[str]
|
|
) -> None:
|
|
"""
|
|
Add tokens to unmapped list.
|
|
|
|
Args:
|
|
source: Source type
|
|
unmapped_tokens: List of tokens that couldn't be mapped
|
|
"""
|
|
if isinstance(source, str):
|
|
source = TranslationSource(source)
|
|
|
|
file_path = self.translations_dir / f"{source.value}.json"
|
|
if not file_path.exists():
|
|
return
|
|
|
|
with open(file_path, "r", encoding="utf-8") as f:
|
|
data = json.load(f)
|
|
|
|
dictionary = TranslationDictionary(**data)
|
|
|
|
# Add unique unmapped tokens
|
|
existing = set(dictionary.unmapped)
|
|
for token in unmapped_tokens:
|
|
if token not in existing:
|
|
dictionary.unmapped.append(token)
|
|
|
|
dictionary.updated_at = datetime.now(timezone.utc)
|
|
await self._write_file(file_path, dictionary)
|
|
|
|
async def _write_file(self, file_path: Path, dictionary: TranslationDictionary) -> None:
|
|
"""Write dictionary to JSON file."""
|
|
data = dictionary.model_dump(by_alias=True, mode="json")
|
|
|
|
# Convert datetime to ISO format
|
|
data["created_at"] = dictionary.created_at.isoformat()
|
|
data["updated_at"] = dictionary.updated_at.isoformat()
|
|
|
|
with open(file_path, "w", encoding="utf-8") as f:
|
|
json.dump(data, f, indent=2, ensure_ascii=False)
|
|
|
|
def delete(self, source: Union[str, TranslationSource]) -> bool:
|
|
"""
|
|
Delete a translation dictionary file.
|
|
|
|
Args:
|
|
source: Source type
|
|
|
|
Returns:
|
|
True if deleted, False if not found
|
|
"""
|
|
if isinstance(source, str):
|
|
source = TranslationSource(source)
|
|
|
|
file_path = self.translations_dir / f"{source.value}.json"
|
|
if file_path.exists():
|
|
file_path.unlink()
|
|
return True
|
|
return False
|
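The writer's public methods are coroutines, so callers need an event loop. A minimal usage sketch based on the class docstring above; the project path and the --legacy-gray token are illustrative, and it assumes TranslationSource exposes a CSS member as shown in that docstring:

import asyncio

async def main() -> None:
    writer = TranslationDictionaryWriter("/path/to/project")

    # Create the CSS dictionary, then extend it and record a token that could not be mapped.
    await writer.create(
        source=TranslationSource.CSS,
        project="my-project",
        token_mappings={"--brand-blue": "color.primary.500"},
    )
    await writer.add_mapping(
        source=TranslationSource.CSS,
        source_token="--brand-green",
        dss_token="color.success.500",
    )
    await writer.mark_unmapped(TranslationSource.CSS, ["--legacy-gray"])

asyncio.run(main())

Given DEFAULT_DIR and the f"{source.value}.json" naming, this writes to <project>/.dss/translations/<source>.json — for example css.json, if the CSS member's value is "css".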