Initial commit: Clean DSS implementation
Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm
Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)
Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability
Migration completed: $(date)
🤖 Clean migration with full functionality preserved

demo/tools/analyze/__init__.py (new file, 40 lines added)
@@ -0,0 +1,40 @@
"""
DSS Code Analysis Module

Provides tools for analyzing React projects, detecting style patterns,
building dependency graphs, and identifying quick-win improvements.
"""

from .base import (
    ProjectAnalysis,
    StylePattern,
    QuickWin,
    QuickWinType,
    QuickWinPriority,
    Location,
    ComponentInfo,
    StyleFile,
)
from .scanner import ProjectScanner
from .react import ReactAnalyzer
from .styles import StyleAnalyzer
from .graph import DependencyGraph
from .quick_wins import QuickWinFinder

__all__ = [
    # Data classes
    "ProjectAnalysis",
    "StylePattern",
    "QuickWin",
    "QuickWinType",
    "QuickWinPriority",
    "Location",
    "ComponentInfo",
    "StyleFile",
    # Analyzers
    "ProjectScanner",
    "ReactAnalyzer",
    "StyleAnalyzer",
    "DependencyGraph",
    "QuickWinFinder",
]
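
A minimal usage sketch of the package's public API (not part of the commit; it assumes the `analyze` package is importable, e.g. with demo/tools on PYTHONPATH, that the sibling styles module referenced above is present, and a hypothetical project at ./my-react-app):

import asyncio

from analyze import ProjectScanner

async def main() -> None:
    scanner = ProjectScanner("./my-react-app")  # hypothetical project path
    analysis = await scanner.scan()
    print(analysis.summary())

asyncio.run(main())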

demo/tools/analyze/base.py (new file, 298 lines added)
@@ -0,0 +1,298 @@
"""
Base classes and data structures for code analysis.
"""

from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from typing import List, Dict, Any, Optional, Set
from pathlib import Path


class QuickWinType(str, Enum):
    """Types of quick-win improvements."""
    INLINE_STYLE = "inline_style"  # Inline styles that can be extracted
    DUPLICATE_VALUE = "duplicate_value"  # Duplicate color/spacing values
    UNUSED_STYLE = "unused_style"  # Unused CSS/SCSS
    HARDCODED_VALUE = "hardcoded_value"  # Hardcoded values that should be tokens
    NAMING_INCONSISTENCY = "naming"  # Inconsistent naming patterns
    DEPRECATED_PATTERN = "deprecated"  # Deprecated styling patterns
    ACCESSIBILITY = "accessibility"  # A11y improvements
    PERFORMANCE = "performance"  # Performance improvements


class QuickWinPriority(str, Enum):
    """Priority levels for quick-wins."""
    CRITICAL = "critical"  # Must fix - breaking issues
    HIGH = "high"  # Should fix - significant improvement
    MEDIUM = "medium"  # Nice to fix - moderate improvement
    LOW = "low"  # Optional - minor improvement


class StylingApproach(str, Enum):
    """Detected styling approaches in a project."""
    CSS_MODULES = "css-modules"
    STYLED_COMPONENTS = "styled-components"
    EMOTION = "emotion"
    TAILWIND = "tailwind"
    INLINE_STYLES = "inline-styles"
    CSS_IN_JS = "css-in-js"
    SASS_SCSS = "sass-scss"
    LESS = "less"
    VANILLA_CSS = "vanilla-css"
    CSS_VARIABLES = "css-variables"


class Framework(str, Enum):
    """Detected UI frameworks."""
    REACT = "react"
    NEXT = "next"
    VUE = "vue"
    NUXT = "nuxt"
    ANGULAR = "angular"
    SVELTE = "svelte"
    SOLID = "solid"
    UNKNOWN = "unknown"


@dataclass
class Location:
    """Represents a location in source code."""
    file_path: str
    line: int
    column: int = 0
    end_line: Optional[int] = None
    end_column: Optional[int] = None

    def __str__(self) -> str:
        return f"{self.file_path}:{self.line}"

    def to_dict(self) -> Dict[str, Any]:
        return {
            "file": self.file_path,
            "line": self.line,
            "column": self.column,
            "end_line": self.end_line,
            "end_column": self.end_column,
        }


@dataclass
class StyleFile:
    """Represents a style file in the project."""
    path: str
    type: str  # css, scss, less, styled, etc.
    size_bytes: int = 0
    line_count: int = 0
    variable_count: int = 0
    selector_count: int = 0
    imports: List[str] = field(default_factory=list)
    imported_by: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        return {
            "path": self.path,
            "type": self.type,
            "size_bytes": self.size_bytes,
            "line_count": self.line_count,
            "variable_count": self.variable_count,
            "selector_count": self.selector_count,
            "imports": self.imports,
            "imported_by": self.imported_by,
        }


@dataclass
class ComponentInfo:
    """Information about a React component."""
    name: str
    path: str
    type: str = "functional"  # functional, class, forwardRef, memo
    props: List[str] = field(default_factory=list)
    has_styles: bool = False
    style_files: List[str] = field(default_factory=list)
    inline_style_count: int = 0
    imports: List[str] = field(default_factory=list)
    exports: List[str] = field(default_factory=list)
    children: List[str] = field(default_factory=list)  # Child components used
    line_count: int = 0

    def to_dict(self) -> Dict[str, Any]:
        return {
            "name": self.name,
            "path": self.path,
            "type": self.type,
            "props": self.props,
            "has_styles": self.has_styles,
            "style_files": self.style_files,
            "inline_style_count": self.inline_style_count,
            "imports": self.imports,
            "exports": self.exports,
            "children": self.children,
            "line_count": self.line_count,
        }


@dataclass
class StylePattern:
    """A detected style pattern in code."""
    type: StylingApproach
    locations: List[Location] = field(default_factory=list)
    count: int = 0
    examples: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        return {
            "type": self.type.value,
            "count": self.count,
            "locations": [loc.to_dict() for loc in self.locations[:10]],
            "examples": self.examples[:5],
        }


@dataclass
class TokenCandidate:
    """A value that could be extracted as a design token."""
    value: str  # The actual value (e.g., "#3B82F6")
    suggested_name: str  # Suggested token name
    category: str  # colors, spacing, typography, etc.
    occurrences: int = 1  # How many times it appears
    locations: List[Location] = field(default_factory=list)
    confidence: float = 0.0  # 0-1 confidence score

    def to_dict(self) -> Dict[str, Any]:
        return {
            "value": self.value,
            "suggested_name": self.suggested_name,
            "category": self.category,
            "occurrences": self.occurrences,
            "locations": [loc.to_dict() for loc in self.locations[:5]],
            "confidence": self.confidence,
        }


@dataclass
class QuickWin:
    """A quick improvement opportunity."""
    type: QuickWinType
    priority: QuickWinPriority
    title: str
    description: str
    location: Optional[Location] = None
    affected_files: List[str] = field(default_factory=list)
    estimated_impact: str = ""  # e.g., "Remove 50 lines of duplicate code"
    fix_suggestion: str = ""  # Suggested fix
    auto_fixable: bool = False  # Can be auto-fixed

    def to_dict(self) -> Dict[str, Any]:
        return {
            "type": self.type.value,
            "priority": self.priority.value,
            "title": self.title,
            "description": self.description,
            "location": self.location.to_dict() if self.location else None,
            "affected_files": self.affected_files,
            "estimated_impact": self.estimated_impact,
            "fix_suggestion": self.fix_suggestion,
            "auto_fixable": self.auto_fixable,
        }


@dataclass
class ProjectAnalysis:
    """Complete analysis result for a project."""
    # Basic info
    project_path: str
    analyzed_at: datetime = field(default_factory=datetime.now)

    # Framework detection
    framework: Framework = Framework.UNKNOWN
    framework_version: str = ""

    # Styling detection
    styling_approaches: List[StylePattern] = field(default_factory=list)
    primary_styling: Optional[StylingApproach] = None

    # Components
    components: List[ComponentInfo] = field(default_factory=list)
    component_count: int = 0

    # Style files
    style_files: List[StyleFile] = field(default_factory=list)
    style_file_count: int = 0

    # Issues and opportunities
    inline_style_locations: List[Location] = field(default_factory=list)
    token_candidates: List[TokenCandidate] = field(default_factory=list)
    quick_wins: List[QuickWin] = field(default_factory=list)

    # Dependency graph
    dependency_graph: Dict[str, List[str]] = field(default_factory=dict)

    # Statistics
    stats: Dict[str, Any] = field(default_factory=dict)

    def __post_init__(self):
        if not self.stats:
            self.stats = {
                "total_files_scanned": 0,
                "total_lines": 0,
                "component_count": 0,
                "style_file_count": 0,
                "inline_style_count": 0,
                "token_candidates": 0,
                "quick_wins_count": 0,
            }

    def to_dict(self) -> Dict[str, Any]:
        return {
            "project_path": self.project_path,
            "analyzed_at": self.analyzed_at.isoformat(),
            "framework": self.framework.value,
            "framework_version": self.framework_version,
            "styling_approaches": [sp.to_dict() for sp in self.styling_approaches],
            "primary_styling": self.primary_styling.value if self.primary_styling else None,
            "component_count": self.component_count,
            "style_file_count": self.style_file_count,
            "inline_style_count": len(self.inline_style_locations),
            "token_candidates_count": len(self.token_candidates),
            "quick_wins_count": len(self.quick_wins),
            "stats": self.stats,
        }

    def summary(self) -> str:
        """Generate human-readable summary."""
        lines = [
            f"Project Analysis: {self.project_path}",
            "=" * 50,
            f"Framework: {self.framework.value} {self.framework_version}",
            f"Components: {self.component_count}",
            f"Style files: {self.style_file_count}",
            "",
            "Styling Approaches:",
        ]

        for sp in self.styling_approaches:
            lines.append(f"  • {sp.type.value}: {sp.count} occurrences")

        lines.extend([
            "",
            f"Inline styles found: {len(self.inline_style_locations)}",
            f"Token candidates: {len(self.token_candidates)}",
            f"Quick wins: {len(self.quick_wins)}",
            "",
            "Quick Wins by Priority:",
        ])

        by_priority = {}
        for qw in self.quick_wins:
            if qw.priority not in by_priority:
                by_priority[qw.priority] = []
            by_priority[qw.priority].append(qw)

        for priority in [QuickWinPriority.CRITICAL, QuickWinPriority.HIGH,
                         QuickWinPriority.MEDIUM, QuickWinPriority.LOW]:
            if priority in by_priority:
                lines.append(f"  [{priority.value.upper()}] {len(by_priority[priority])} items")

        return "\n".join(lines)
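
The data classes above are plain containers whose to_dict() methods produce JSON-safe dicts (str-backed enums serialize via .value, datetimes via isoformat()). A short illustrative sketch with made-up values:

from analyze.base import Location, QuickWin, QuickWinType, QuickWinPriority

win = QuickWin(
    type=QuickWinType.HARDCODED_VALUE,
    priority=QuickWinPriority.MEDIUM,
    title="Extract '#3B82F6' as token",
    description="Illustrative example value.",
    location=Location("src/Button.tsx", 42),  # hypothetical file and line
)
assert win.to_dict()["priority"] == "medium"  # enum serialized as a plain string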

demo/tools/analyze/graph.py (new file, 419 lines added)
@@ -0,0 +1,419 @@
"""
Dependency Graph Builder

Builds component and style dependency graphs for visualization
and analysis of project structure.
"""

import re
import json
from pathlib import Path
from typing import List, Dict, Any, Optional, Set, Tuple
from dataclasses import dataclass, field
from collections import defaultdict


@dataclass
class GraphNode:
    """A node in the dependency graph."""
    id: str
    name: str
    type: str  # 'component', 'style', 'util', 'hook'
    path: str
    size: int = 0  # file size or importance metric
    children: List[str] = field(default_factory=list)
    parents: List[str] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        return {
            'id': self.id,
            'name': self.name,
            'type': self.type,
            'path': self.path,
            'size': self.size,
            'children': self.children,
            'parents': self.parents,
            'metadata': self.metadata,
        }


@dataclass
class GraphEdge:
    """An edge in the dependency graph."""
    source: str
    target: str
    type: str  # 'import', 'uses', 'styles'
    weight: int = 1

    def to_dict(self) -> Dict[str, Any]:
        return {
            'source': self.source,
            'target': self.target,
            'type': self.type,
            'weight': self.weight,
        }


class DependencyGraph:
    """
    Builds and analyzes dependency graphs for a project.

    Tracks:
    - Component imports/exports
    - Style file dependencies
    - Component usage relationships
    """

    def __init__(self, root_path: str):
        self.root = Path(root_path).resolve()
        self.nodes: Dict[str, GraphNode] = {}
        self.edges: List[GraphEdge] = []

    async def build(self, depth: int = 3) -> Dict[str, Any]:
        """
        Build the full dependency graph.

        Args:
            depth: Maximum depth for traversing dependencies

        Returns:
            Graph representation with nodes and edges
        """
        # Clear existing graph
        self.nodes.clear()
        self.edges.clear()

        # Find all relevant files
        await self._scan_files()

        # Build edges from imports
        await self._build_import_edges()

        # Build edges from component usage
        await self._build_usage_edges()

        return self.to_dict()

    async def _scan_files(self) -> None:
        """Scan project files and create nodes."""
        skip_dirs = {'node_modules', '.git', 'dist', 'build', '.next'}

        # Component files
        for ext in ['*.jsx', '*.tsx']:
            for file_path in self.root.rglob(ext):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                rel_path = str(file_path.relative_to(self.root))
                node_id = self._path_to_id(rel_path)

                self.nodes[node_id] = GraphNode(
                    id=node_id,
                    name=file_path.stem,
                    type='component',
                    path=rel_path,
                    size=file_path.stat().st_size,
                )

        # Style files
        for ext in ['*.css', '*.scss', '*.sass', '*.less']:
            for file_path in self.root.rglob(ext):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                rel_path = str(file_path.relative_to(self.root))
                node_id = self._path_to_id(rel_path)

                self.nodes[node_id] = GraphNode(
                    id=node_id,
                    name=file_path.stem,
                    type='style',
                    path=rel_path,
                    size=file_path.stat().st_size,
                )

        # Utility/Hook files
        for ext in ['*.js', '*.ts']:
            for file_path in self.root.rglob(ext):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                name = file_path.stem.lower()
                rel_path = str(file_path.relative_to(self.root))
                node_id = self._path_to_id(rel_path)

                # Classify file type
                if 'hook' in name or name.startswith('use'):
                    node_type = 'hook'
                elif any(x in name for x in ['util', 'helper', 'lib']):
                    node_type = 'util'
                else:
                    continue  # Skip other JS/TS files

                self.nodes[node_id] = GraphNode(
                    id=node_id,
                    name=file_path.stem,
                    type=node_type,
                    path=rel_path,
                    size=file_path.stat().st_size,
                )

    async def _build_import_edges(self) -> None:
        """Build edges from import statements."""
        import_pattern = re.compile(
            r'import\s+(?:\{[^}]+\}|\*\s+as\s+\w+|\w+)?\s*(?:,\s*\{[^}]+\})?\s*from\s+["\']([^"\']+)["\']',
            re.MULTILINE
        )

        for node_id, node in self.nodes.items():
            if node.type not in ['component', 'hook', 'util']:
                continue

            file_path = self.root / node.path
            if not file_path.exists():
                continue

            try:
                content = file_path.read_text(encoding='utf-8', errors='ignore')

                for match in import_pattern.finditer(content):
                    import_path = match.group(1)

                    # Resolve relative imports
                    target_id = self._resolve_import(node.path, import_path)

                    if target_id and target_id in self.nodes:
                        # Add edge
                        self.edges.append(GraphEdge(
                            source=node_id,
                            target=target_id,
                            type='import',
                        ))

                        # Update parent/child relationships
                        node.children.append(target_id)
                        self.nodes[target_id].parents.append(node_id)

            except Exception:
                continue

    async def _build_usage_edges(self) -> None:
        """Build edges from component usage in JSX."""
        # Pattern to find JSX component usage
        jsx_pattern = re.compile(r'<([A-Z][A-Za-z0-9]*)')

        # Build name -> id mapping for components
        name_to_id = {}
        for node_id, node in self.nodes.items():
            if node.type == 'component':
                name_to_id[node.name] = node_id

        for node_id, node in self.nodes.items():
            if node.type != 'component':
                continue

            file_path = self.root / node.path
            if not file_path.exists():
                continue

            try:
                content = file_path.read_text(encoding='utf-8', errors='ignore')

                used_components = set()
                for match in jsx_pattern.finditer(content):
                    comp_name = match.group(1)
                    if comp_name in name_to_id and name_to_id[comp_name] != node_id:
                        used_components.add(name_to_id[comp_name])

                for target_id in used_components:
                    self.edges.append(GraphEdge(
                        source=node_id,
                        target=target_id,
                        type='uses',
                    ))

            except Exception:
                continue

    def _path_to_id(self, path: str) -> str:
        """Convert file path to node ID."""
        # Remove extension and normalize
        path = re.sub(r'\.(jsx?|tsx?|css|scss|sass|less)$', '', path)
        return path.replace('/', '_').replace('\\', '_').replace('.', '_')

    def _resolve_import(self, source_path: str, import_path: str) -> Optional[str]:
        """Resolve import path to node ID."""
        if not import_path.startswith('.'):
            return None  # Skip node_modules imports

        source_dir = Path(source_path).parent

        # Handle various import patterns
        if import_path.startswith('./'):
            resolved = source_dir / import_path[2:]
        elif import_path.startswith('../'):
            resolved = source_dir / import_path
        else:
            resolved = source_dir / import_path

        # Try to resolve with extensions
        extensions = ['.tsx', '.ts', '.jsx', '.js', '.css', '.scss', '/index.tsx', '/index.ts', '/index.jsx', '/index.js']

        resolved_str = str(resolved)
        for ext in extensions:
            test_id = self._path_to_id(resolved_str + ext)
            if test_id in self.nodes:
                return test_id

        # Try without additional extension (if path already has one)
        test_id = self._path_to_id(resolved_str)
        if test_id in self.nodes:
            return test_id

        return None

    def to_dict(self) -> Dict[str, Any]:
        """Convert graph to dictionary for serialization."""
        return {
            'nodes': [node.to_dict() for node in self.nodes.values()],
            'edges': [edge.to_dict() for edge in self.edges],
            'stats': {
                'total_nodes': len(self.nodes),
                'total_edges': len(self.edges),
                'components': len([n for n in self.nodes.values() if n.type == 'component']),
                'styles': len([n for n in self.nodes.values() if n.type == 'style']),
                'hooks': len([n for n in self.nodes.values() if n.type == 'hook']),
                'utils': len([n for n in self.nodes.values() if n.type == 'util']),
            }
        }

    def to_json(self, pretty: bool = True) -> str:
        """Convert graph to JSON string."""
        return json.dumps(self.to_dict(), indent=2 if pretty else None)

    def get_component_tree(self) -> Dict[str, List[str]]:
        """Get simplified component dependency tree."""
        tree = {}
        for node_id, node in self.nodes.items():
            if node.type == 'component':
                tree[node.name] = [
                    self.nodes[child_id].name
                    for child_id in node.children
                    if child_id in self.nodes and self.nodes[child_id].type == 'component'
                ]
        return tree

    def find_orphans(self) -> List[str]:
        """Find components with no parents (not imported anywhere)."""
        orphans = []
        for node_id, node in self.nodes.items():
            if node.type == 'component' and not node.parents:
                # Exclude entry points (index, App, etc.)
                if node.name.lower() not in ['app', 'index', 'main', 'root']:
                    orphans.append(node.path)
        return orphans

    def find_hubs(self, min_connections: int = 5) -> List[Dict[str, Any]]:
        """Find highly connected nodes (potential refactoring targets)."""
        hubs = []
        for node_id, node in self.nodes.items():
            connections = len(node.children) + len(node.parents)
            if connections >= min_connections:
                hubs.append({
                    'name': node.name,
                    'path': node.path,
                    'type': node.type,
                    'imports': len(node.children),
                    'imported_by': len(node.parents),
                    'total_connections': connections,
                })

        hubs.sort(key=lambda x: x['total_connections'], reverse=True)
        return hubs

    def find_circular_dependencies(self) -> List[List[str]]:
        """Find circular dependency chains."""
        cycles = []
        visited = set()
        rec_stack = set()

        def dfs(node_id: str, path: List[str]) -> None:
            visited.add(node_id)
            rec_stack.add(node_id)
            path.append(node_id)

            for child_id in self.nodes.get(node_id, GraphNode('', '', '', '')).children:
                if child_id not in visited:
                    dfs(child_id, path.copy())
                elif child_id in rec_stack:
                    # Found cycle
                    cycle_start = path.index(child_id)
                    cycle = path[cycle_start:] + [child_id]
                    cycles.append([self.nodes[n].name for n in cycle])

            rec_stack.remove(node_id)

        for node_id in self.nodes:
            if node_id not in visited:
                dfs(node_id, [])

        return cycles

    def get_subgraph(self, node_id: str, depth: int = 2) -> Dict[str, Any]:
        """Get subgraph centered on a specific node."""
        if node_id not in self.nodes:
            return {'nodes': [], 'edges': []}

        # BFS to find nodes within depth
        included_nodes = {node_id}
        frontier = {node_id}

        for _ in range(depth):
            new_frontier = set()
            for nid in frontier:
                node = self.nodes.get(nid)
                if node:
                    new_frontier.update(node.children)
                    new_frontier.update(node.parents)
            included_nodes.update(new_frontier)
            frontier = new_frontier

        # Filter nodes and edges
        subgraph_nodes = [
            self.nodes[nid].to_dict()
            for nid in included_nodes
            if nid in self.nodes
        ]

        subgraph_edges = [
            edge.to_dict()
            for edge in self.edges
            if edge.source in included_nodes and edge.target in included_nodes
        ]

        return {
            'nodes': subgraph_nodes,
            'edges': subgraph_edges,
            'center': node_id,
            'depth': depth,
        }

    def get_style_dependencies(self) -> Dict[str, List[str]]:
        """Get mapping of components to their style dependencies."""
        style_deps = {}

        for node_id, node in self.nodes.items():
            if node.type != 'component':
                continue

            style_children = [
                self.nodes[child_id].path
                for child_id in node.children
                if child_id in self.nodes and self.nodes[child_id].type == 'style'
            ]

            if style_children:
                style_deps[node.path] = style_children

        return style_deps
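
A usage sketch for DependencyGraph (same hypothetical project path as above; build() is async, so it runs under asyncio):

import asyncio

from analyze.graph import DependencyGraph

async def main() -> None:
    graph = DependencyGraph("./my-react-app")  # hypothetical path
    await graph.build()
    print("orphans:", graph.find_orphans())  # components nothing imports
    print("hubs:", graph.find_hubs(min_connections=8))
    for cycle in graph.find_circular_dependencies():
        print(" -> ".join(cycle))

asyncio.run(main())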

demo/tools/analyze/quick_wins.py (new file, 418 lines added)
@@ -0,0 +1,418 @@
"""
Quick-Win Finder

Identifies easy improvement opportunities in a codebase:
- Inline styles that can be extracted
- Duplicate values that should be tokens
- Unused styles
- Naming inconsistencies
- Accessibility issues
"""

import re
from pathlib import Path
from typing import List, Dict, Any, Optional
from dataclasses import dataclass

from .base import (
    QuickWin,
    QuickWinType,
    QuickWinPriority,
    Location,
    ProjectAnalysis,
)
from .styles import StyleAnalyzer
from .react import ReactAnalyzer


class QuickWinFinder:
    """
    Finds quick improvement opportunities in a project.

    Categories:
    - INLINE_STYLE: Inline styles that can be extracted to CSS/tokens
    - DUPLICATE_VALUE: Repeated values that should be tokens
    - UNUSED_STYLE: CSS that's defined but not used
    - HARDCODED_VALUE: Magic numbers/colors that should be tokens
    - NAMING_INCONSISTENCY: Inconsistent naming patterns
    - DEPRECATED_PATTERN: Outdated styling approaches
    - ACCESSIBILITY: A11y improvements
    """

    def __init__(self, root_path: str):
        self.root = Path(root_path).resolve()
        self.style_analyzer = StyleAnalyzer(root_path)
        self.react_analyzer = ReactAnalyzer(root_path)

    async def find_all(self) -> List[QuickWin]:
        """
        Find all quick-win opportunities.

        Returns:
            List of QuickWin objects sorted by priority
        """
        quick_wins = []

        # Find inline styles
        inline_wins = await self._find_inline_style_wins()
        quick_wins.extend(inline_wins)

        # Find duplicate values
        duplicate_wins = await self._find_duplicate_value_wins()
        quick_wins.extend(duplicate_wins)

        # Find unused styles
        unused_wins = await self._find_unused_style_wins()
        quick_wins.extend(unused_wins)

        # Find hardcoded values
        hardcoded_wins = await self._find_hardcoded_value_wins()
        quick_wins.extend(hardcoded_wins)

        # Find naming inconsistencies
        naming_wins = await self._find_naming_inconsistency_wins()
        quick_wins.extend(naming_wins)

        # Find accessibility issues
        a11y_wins = await self._find_accessibility_wins()
        quick_wins.extend(a11y_wins)

        # Sort by priority
        priority_order = {
            QuickWinPriority.CRITICAL: 0,
            QuickWinPriority.HIGH: 1,
            QuickWinPriority.MEDIUM: 2,
            QuickWinPriority.LOW: 3,
        }
        quick_wins.sort(key=lambda x: priority_order[x.priority])

        return quick_wins

    async def _find_inline_style_wins(self) -> List[QuickWin]:
        """Find inline styles that should be extracted."""
        wins = []

        inline_styles = await self.react_analyzer.find_inline_styles()

        if not inline_styles:
            return wins

        # Group by file
        by_file = {}
        for style in inline_styles:
            file_path = style['file']
            if file_path not in by_file:
                by_file[file_path] = []
            by_file[file_path].append(style)

        # Create quick-wins for files with multiple inline styles
        for file_path, styles in by_file.items():
            if len(styles) >= 3:  # Only flag if 3+ inline styles
                wins.append(QuickWin(
                    type=QuickWinType.INLINE_STYLE,
                    priority=QuickWinPriority.HIGH,
                    title=f"Extract {len(styles)} inline styles",
                    description=f"File {file_path} has {len(styles)} inline style declarations that could be extracted to CSS classes or design tokens.",
                    location=Location(file_path, styles[0]['line']),
                    affected_files=[file_path],
                    estimated_impact="Reduce inline styles, improve maintainability",
                    fix_suggestion="Extract repeated style properties to CSS classes or design tokens. Use className instead of style prop.",
                    auto_fixable=True,
                ))

        # Create summary if many files have inline styles
        total_inline = len(inline_styles)
        if total_inline >= 10:
            wins.insert(0, QuickWin(
                type=QuickWinType.INLINE_STYLE,
                priority=QuickWinPriority.HIGH,
                title=f"Project has {total_inline} inline styles",
                description=f"Found {total_inline} inline style declarations across {len(by_file)} files. Consider migrating to CSS classes or design tokens.",
                affected_files=list(by_file.keys())[:10],
                estimated_impact="Improve code maintainability and bundle size",
                fix_suggestion="Run 'dss migrate inline-styles' to preview migration options.",
                auto_fixable=True,
            ))

        return wins

    async def _find_duplicate_value_wins(self) -> List[QuickWin]:
        """Find duplicate values that should be tokens."""
        wins = []

        analysis = await self.style_analyzer.analyze()
        duplicates = analysis.get('duplicates', [])

        # Find high-occurrence duplicates
        for dup in duplicates[:10]:  # Top 10 duplicates
            if dup['count'] >= 5:  # Only if used 5+ times
                priority = QuickWinPriority.HIGH if dup['count'] >= 10 else QuickWinPriority.MEDIUM

                wins.append(QuickWin(
                    type=QuickWinType.DUPLICATE_VALUE,
                    priority=priority,
                    title=f"Duplicate value '{dup['value']}' used {dup['count']} times",
                    description=f"The value '{dup['value']}' appears {dup['count']} times across {len(dup['files'])} files. This should be a design token.",
                    affected_files=dup['files'],
                    estimated_impact="Create single source of truth, easier theme updates",
                    fix_suggestion="Create token for this value and replace all occurrences.",
                    auto_fixable=True,
                ))

        return wins

    async def _find_unused_style_wins(self) -> List[QuickWin]:
        """Find unused CSS styles."""
        wins = []

        unused = await self.style_analyzer.find_unused_styles()

        if len(unused) >= 5:
            wins.append(QuickWin(
                type=QuickWinType.UNUSED_STYLE,
                priority=QuickWinPriority.MEDIUM,
                title=f"Found {len(unused)} potentially unused CSS classes",
                description="These CSS classes are defined but don't appear to be used in the codebase. Review and remove if confirmed unused.",
                affected_files=list(set(u['file'] for u in unused))[:10],
                estimated_impact="Reduce CSS bundle size by removing dead code",
                fix_suggestion="Review each class and remove if unused. Some may be dynamically generated.",
                auto_fixable=False,  # Needs human review
            ))

        return wins

    async def _find_hardcoded_value_wins(self) -> List[QuickWin]:
        """Find hardcoded magic values."""
        wins = []

        analysis = await self.style_analyzer.analyze()
        candidates = analysis.get('token_candidates', [])

        # Find high-confidence candidates
        high_confidence = [c for c in candidates if c.confidence >= 0.7]

        if high_confidence:
            wins.append(QuickWin(
                type=QuickWinType.HARDCODED_VALUE,
                priority=QuickWinPriority.MEDIUM,
                title=f"Found {len(high_confidence)} values that should be tokens",
                description="These hardcoded values appear multiple times and should be extracted as design tokens for consistency.",
                estimated_impact="Improve theme consistency and make updates easier",
                fix_suggestion="Use 'dss extract-tokens' to create tokens from these values.",
                auto_fixable=True,
            ))

        # Add specific wins for top candidates
        for candidate in high_confidence[:5]:
            wins.append(QuickWin(
                type=QuickWinType.HARDCODED_VALUE,
                priority=QuickWinPriority.LOW,
                title=f"Extract '{candidate.value}' as token",
                description=f"Value '{candidate.value}' appears {candidate.occurrences} times. Suggested token: {candidate.suggested_name}",
                location=candidate.locations[0] if candidate.locations else None,
                affected_files=[loc.file_path for loc in candidate.locations[:5]],
                estimated_impact="Single source of truth for this value",
                fix_suggestion=f"Create token '{candidate.suggested_name}' with value '{candidate.value}'",
                auto_fixable=True,
            ))

        return wins

    async def _find_naming_inconsistency_wins(self) -> List[QuickWin]:
        """Find naming inconsistencies."""
        wins = []

        naming = await self.style_analyzer.analyze_naming_consistency()

        if naming.get('inconsistencies'):
            primary = naming.get('primary_pattern', 'unknown')
            inconsistent_count = len(naming['inconsistencies'])

            wins.append(QuickWin(
                type=QuickWinType.NAMING_INCONSISTENCY,
                priority=QuickWinPriority.LOW,
                title=f"Found {inconsistent_count} naming inconsistencies",
                description=f"The project primarily uses {primary} naming, but {inconsistent_count} classes use different conventions.",
                affected_files=list(set(i['file'] for i in naming['inconsistencies']))[:10],
                estimated_impact="Improve code consistency and readability",
                fix_suggestion=f"Standardize all class names to use {primary} convention.",
                auto_fixable=True,
            ))

        return wins

    async def _find_accessibility_wins(self) -> List[QuickWin]:
        """Find accessibility issues."""
        wins = []
        skip_dirs = {'node_modules', '.git', 'dist', 'build'}

        a11y_issues = []

        for ext in ['*.jsx', '*.tsx']:
            for file_path in self.root.rglob(ext):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')
                    rel_path = str(file_path.relative_to(self.root))

                    # Check for images without alt text: match all <img> tags,
                    # then keep those lacking an alt attribute
                    img_no_alt = [tag for tag in re.findall(r'<img\b[^>]*>', content)
                                  if 'alt=' not in tag]
                    for tag in img_no_alt[:3]:
                        line = content[:content.find(tag)].count('\n') + 1
                        a11y_issues.append({
                            'type': 'img-no-alt',
                            'file': rel_path,
                            'line': line,
                        })

                    # Check for buttons without accessible text
                    icon_only_buttons = re.findall(
                        r'<button[^>]*>\s*<(?:svg|Icon|img)[^>]*/?>\s*</button>',
                        content,
                        re.IGNORECASE
                    )
                    if icon_only_buttons:
                        a11y_issues.append({
                            'type': 'icon-button-no-label',
                            'file': rel_path,
                        })

                    # Check for click handlers on non-interactive elements
                    div_onclick = re.findall(r'<div[^>]+onClick', content)
                    if div_onclick:
                        a11y_issues.append({
                            'type': 'div-click-handler',
                            'file': rel_path,
                            'count': len(div_onclick),
                        })

                except Exception:
                    continue

        # Group issues by type
        if a11y_issues:
            img_issues = [i for i in a11y_issues if i['type'] == 'img-no-alt']
            if img_issues:
                wins.append(QuickWin(
                    type=QuickWinType.ACCESSIBILITY,
                    priority=QuickWinPriority.HIGH,
                    title=f"Found {len(img_issues)} images without alt text",
                    description="Images should have alt attributes for screen readers. Empty alt='' is acceptable for decorative images.",
                    affected_files=list(set(i['file'] for i in img_issues))[:10],
                    estimated_impact="Improve accessibility for screen reader users",
                    fix_suggestion="Add descriptive alt text to images or alt='' for decorative images.",
                    auto_fixable=False,
                ))

            div_issues = [i for i in a11y_issues if i['type'] == 'div-click-handler']
            if div_issues:
                wins.append(QuickWin(
                    type=QuickWinType.ACCESSIBILITY,
                    priority=QuickWinPriority.MEDIUM,
                    title="Found click handlers on div elements",
                    description="Using onClick on div elements makes them inaccessible to keyboard users. Use button or add proper ARIA attributes.",
                    affected_files=list(set(i['file'] for i in div_issues))[:10],
                    estimated_impact="Improve keyboard navigation accessibility",
                    fix_suggestion="Replace <div onClick> with <button> or add role='button' and tabIndex={0}.",
                    auto_fixable=True,
                ))

        return wins

    async def get_summary(self) -> Dict[str, Any]:
        """Get summary of all quick-wins."""
        wins = await self.find_all()

        by_type = {}
        by_priority = {}

        for win in wins:
            type_key = win.type.value
            priority_key = win.priority.value

            if type_key not in by_type:
                by_type[type_key] = 0
            by_type[type_key] += 1

            if priority_key not in by_priority:
                by_priority[priority_key] = 0
            by_priority[priority_key] += 1

        return {
            'total': len(wins),
            'by_type': by_type,
            'by_priority': by_priority,
            'auto_fixable': len([w for w in wins if w.auto_fixable]),
            'top_wins': [w.to_dict() for w in wins[:10]],
        }

    async def get_actionable_report(self) -> str:
        """Generate human-readable report of quick-wins."""
        wins = await self.find_all()

        if not wins:
            return "No quick-wins found. Your codebase looks clean!"

        lines = [
            "QUICK-WIN OPPORTUNITIES",
            "=" * 50,
            "",
        ]

        # Group by priority
        by_priority = {
            QuickWinPriority.CRITICAL: [],
            QuickWinPriority.HIGH: [],
            QuickWinPriority.MEDIUM: [],
            QuickWinPriority.LOW: [],
        }

        for win in wins:
            by_priority[win.priority].append(win)

        # Report by priority
        priority_labels = {
            QuickWinPriority.CRITICAL: "CRITICAL",
            QuickWinPriority.HIGH: "HIGH PRIORITY",
            QuickWinPriority.MEDIUM: "MEDIUM PRIORITY",
            QuickWinPriority.LOW: "LOW PRIORITY",
        }

        for priority, label in priority_labels.items():
            priority_wins = by_priority[priority]
            if not priority_wins:
                continue

            lines.extend([
                f"\n[{label}] ({len(priority_wins)} items)",
                "-" * 40,
            ])

            for i, win in enumerate(priority_wins[:5], 1):
                lines.extend([
                    f"\n{i}. {win.title}",
                    f"   {win.description[:100]}...",
                    f"   Impact: {win.estimated_impact}",
                ])
                if win.auto_fixable:
                    lines.append("   [Auto-fixable]")

            if len(priority_wins) > 5:
                lines.append(f"\n   ... and {len(priority_wins) - 5} more")

        # Summary
        lines.extend([
            "",
            "=" * 50,
            "SUMMARY",
            f"Total quick-wins: {len(wins)}",
            f"Auto-fixable: {len([w for w in wins if w.auto_fixable])}",
            "",
            "Run 'dss fix --preview' to see suggested changes.",
        ])

        return "\n".join(lines)
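
A sketch of driving QuickWinFinder end to end (assumes the sibling styles module from this commit is importable, since the finder instantiates StyleAnalyzer, and the same hypothetical project path):

import asyncio

from analyze.quick_wins import QuickWinFinder

async def main() -> None:
    finder = QuickWinFinder("./my-react-app")  # hypothetical path
    summary = await finder.get_summary()
    print(summary["total"], "wins,", summary["auto_fixable"], "auto-fixable")
    print(await finder.get_actionable_report())

asyncio.run(main())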

demo/tools/analyze/react.py (new file, 441 lines added)
@@ -0,0 +1,441 @@
"""
React Project Analyzer

Analyzes React codebases to extract component information,
detect patterns, and identify style usage.
"""

import re
from pathlib import Path
from typing import List, Dict, Any, Optional, Set, Tuple
from dataclasses import dataclass, field

from .base import (
    ComponentInfo,
    Location,
    StylePattern,
    StylingApproach,
)


# Patterns for React component detection
FUNCTIONAL_COMPONENT = re.compile(
    r'(?:export\s+)?(?:const|let|var|function)\s+([A-Z][A-Za-z0-9]*)\s*(?::\s*(?:React\.)?FC)?'
    r'\s*(?:=\s*(?:\([^)]*\)|[a-zA-Z_]\w*)\s*=>|\()',
    re.MULTILINE
)

CLASS_COMPONENT = re.compile(
    r'class\s+([A-Z][A-Za-z0-9]*)\s+extends\s+(?:React\.)?(?:Component|PureComponent)',
    re.MULTILINE
)

FORWARD_REF = re.compile(
    r'(?:export\s+)?(?:const|let)\s+([A-Z][A-Za-z0-9]*)\s*=\s*(?:React\.)?forwardRef',
    re.MULTILINE
)

MEMO_COMPONENT = re.compile(
    r'(?:export\s+)?(?:const|let)\s+([A-Z][A-Za-z0-9]*)\s*=\s*(?:React\.)?memo\(',
    re.MULTILINE
)

# Import patterns
IMPORT_PATTERN = re.compile(
    r'import\s+(?:\{[^}]+\}|\*\s+as\s+\w+|\w+)\s+from\s+["\']([^"\']+)["\']',
    re.MULTILINE
)

STYLE_IMPORT = re.compile(
    r'import\s+(?:(\w+)\s+from\s+)?["\']([^"\']+\.(?:css|scss|sass|less|styl))["\']',
    re.MULTILINE
)

# Inline style patterns
INLINE_STYLE_OBJECT = re.compile(
    r'style\s*=\s*\{\s*\{([^}]+)\}\s*\}',
    re.MULTILINE | re.DOTALL
)

INLINE_STYLE_VAR = re.compile(
    r'style\s*=\s*\{(\w+)\}',
    re.MULTILINE
)

# Props extraction
PROPS_DESTRUCTURE = re.compile(
    r'\(\s*\{\s*([^}]+)\s*\}\s*(?::\s*[^)]+)?\)',
    re.MULTILINE
)

PROPS_INTERFACE = re.compile(
    r'interface\s+\w*Props\s*\{([^}]+)\}',
    re.MULTILINE | re.DOTALL
)

PROPS_TYPE = re.compile(
    r'type\s+\w*Props\s*=\s*\{([^}]+)\}',
    re.MULTILINE | re.DOTALL
)


class ReactAnalyzer:
    """
    Analyzes React projects for component structure and style usage.
    """

    def __init__(self, root_path: str):
        self.root = Path(root_path).resolve()

    async def analyze(
        self,
        component_files: Optional[List[Path]] = None
    ) -> List[ComponentInfo]:
        """
        Analyze React components in the project.

        Args:
            component_files: Optional list of files to analyze.
                If None, scans the project.

        Returns:
            List of ComponentInfo for each detected component.
        """
        if component_files is None:
            component_files = self._find_component_files()

        components = []

        for file_path in component_files:
            try:
                file_components = await self._analyze_file(file_path)
                components.extend(file_components)
            except Exception:
                # Log error but continue
                continue

        return components

    def _find_component_files(self) -> List[Path]:
        """Find all potential React component files."""
        skip_dirs = {'node_modules', '.git', 'dist', 'build', '.next'}
        component_files = []

        for ext in ['*.jsx', '*.tsx']:
            for path in self.root.rglob(ext):
                if not any(skip in path.parts for skip in skip_dirs):
                    component_files.append(path)

        # Also check .js/.ts files that look like components
        for ext in ['*.js', '*.ts']:
            for path in self.root.rglob(ext):
                if any(skip in path.parts for skip in skip_dirs):
                    continue
                # Skip config and utility files
                if any(x in path.name.lower() for x in ['config', 'util', 'helper', 'hook', 'context']):
                    continue
                # Check if PascalCase (likely component)
                if path.stem[0].isupper():
                    component_files.append(path)

        return component_files

    async def _analyze_file(self, file_path: Path) -> List[ComponentInfo]:
        """Analyze a single file for React components."""
        content = file_path.read_text(encoding='utf-8', errors='ignore')
        components = []

        # Find all components in the file
        component_matches = []

        # Functional components
        for match in FUNCTIONAL_COMPONENT.finditer(content):
            name = match.group(1)
            if self._is_valid_component_name(name):
                component_matches.append((name, 'functional', match.start()))

        # Class components
        for match in CLASS_COMPONENT.finditer(content):
            name = match.group(1)
            component_matches.append((name, 'class', match.start()))

        # forwardRef components
        for match in FORWARD_REF.finditer(content):
            name = match.group(1)
            component_matches.append((name, 'forwardRef', match.start()))

        # memo components
        for match in MEMO_COMPONENT.finditer(content):
            name = match.group(1)
            component_matches.append((name, 'memo', match.start()))

        # Dedupe by name (keep first occurrence)
        seen_names = set()
        unique_matches = []
        for name, comp_type, pos in component_matches:
            if name not in seen_names:
                seen_names.add(name)
                unique_matches.append((name, comp_type, pos))

        # Extract imports (shared across all components in file)
        imports = self._extract_imports(content)
        style_files = self._extract_style_imports(content)
        inline_styles = self._find_inline_styles(content)

        # Create ComponentInfo for each
        for name, comp_type, pos in unique_matches:
            # Extract props for this component
            props = self._extract_props(content, name)

            # Find child components used
            children = self._find_child_components(content, seen_names)

            # Check if component has styles
            has_styles = bool(style_files) or bool(inline_styles)

            components.append(ComponentInfo(
                name=name,
                path=str(file_path.relative_to(self.root)),
                type=comp_type,
                props=props,
                has_styles=has_styles,
                style_files=style_files,
                inline_style_count=len(inline_styles),
                imports=imports,
                exports=self._find_exports(content, name),
                children=children,
                line_count=content.count('\n') + 1,
            ))

        return components

    def _is_valid_component_name(self, name: str) -> bool:
        """Check if a name is a valid React component name."""
        # Must be PascalCase
        if not name[0].isupper():
            return False

        # Filter out common non-component patterns
        invalid_names = {
            'React', 'Component', 'PureComponent', 'Fragment',
            'Suspense', 'Provider', 'Consumer', 'Context',
            'Error', 'ErrorBoundary', 'Wrapper', 'Container',
            'Props', 'State', 'Type', 'Interface',
        }

        return name not in invalid_names

    def _extract_imports(self, content: str) -> List[str]:
        """Extract import paths from file."""
        imports = []
        for match in IMPORT_PATTERN.finditer(content):
            import_path = match.group(1)
            # Skip node_modules style imports for brevity
            if not import_path.startswith('.') and '/' not in import_path:
                continue
            imports.append(import_path)
        return imports

    def _extract_style_imports(self, content: str) -> List[str]:
        """Extract style file imports."""
        style_files = []
        for match in STYLE_IMPORT.finditer(content):
            style_path = match.group(2)
            style_files.append(style_path)
        return style_files

    def _find_inline_styles(self, content: str) -> List[Location]:
        """Find inline style usage locations."""
        locations = []

        # style={{ ... }}
        for match in INLINE_STYLE_OBJECT.finditer(content):
            line = content[:match.start()].count('\n') + 1
            locations.append(Location(
                file_path="",  # Will be set by caller
                line=line,
            ))

        return locations

    def _extract_props(self, content: str, component_name: str) -> List[str]:
        """Extract props for a component."""
        props = set()

        # Look for destructured props
        for match in PROPS_DESTRUCTURE.finditer(content):
            props_str = match.group(1)
            # Extract prop names from destructuring
            for prop in re.findall(r'(\w+)(?:\s*[=:])?', props_str):
                if prop and not prop[0].isupper():  # Skip types
                    props.add(prop)

        # Look for Props interface/type
        for pattern in [PROPS_INTERFACE, PROPS_TYPE]:
            for match in pattern.finditer(content):
                props_str = match.group(1)
                # Extract prop names
                for line in props_str.split('\n'):
                    prop_match = re.match(r'\s*(\w+)\s*[?:]', line)
                    if prop_match:
                        props.add(prop_match.group(1))

        return list(props)

    def _find_child_components(
        self,
        content: str,
        current_components: Set[str]
    ) -> List[str]:
        """Find child components used in JSX."""
        children = set()

        # Find JSX elements that look like components (PascalCase)
        jsx_pattern = re.compile(r'<([A-Z][A-Za-z0-9]*)')
        for match in jsx_pattern.finditer(content):
            component_name = match.group(1)
            # Skip current file's components and React built-ins
            if component_name not in current_components:
                if component_name not in {'Fragment', 'Suspense', 'Provider'}:
                    children.add(component_name)

        return list(children)

    def _find_exports(self, content: str, component_name: str) -> List[str]:
        """Find export type for component."""
        exports = []

        # Default export (direct, or with inline function/const declaration)
        if (re.search(rf'export\s+default\s+{component_name}\b', content)
                or re.search(rf'export\s+default\s+(?:function|const)\s+{component_name}\b', content)):
            exports.append('default')

        # Named export (inline declaration, or export list)
        if (re.search(rf'export\s+(?:const|function|class)\s+{component_name}\b', content)
                or re.search(r'export\s*\{[^}]*\b' + re.escape(component_name) + r'\b[^}]*\}', content)):
            exports.append('named')

        return exports

    async def find_inline_styles(self, path: Optional[str] = None) -> List[Dict[str, Any]]:
        """
        Find all inline style usage in the project.

        Returns list of inline style occurrences with:
        - file path
        - line number
        - style content
        - component name (if detectable)
        """
        search_path = Path(path) if path else self.root
        results = []

        for ext in ['*.jsx', '*.tsx', '*.js', '*.ts']:
            for file_path in search_path.rglob(ext):
                if any(skip in file_path.parts for skip in
                       {'node_modules', '.git', 'dist', 'build'}):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')

                    # Find style={{ ... }}
                    for match in INLINE_STYLE_OBJECT.finditer(content):
                        line = content[:match.start()].count('\n') + 1
                        style_content = match.group(1).strip()

                        results.append({
                            'file': str(file_path.relative_to(self.root)),
                            'line': line,
                            'content': style_content[:200],
                            'type': 'object',
                        })

                    # Find style={variable}
                    for match in INLINE_STYLE_VAR.finditer(content):
                        line = content[:match.start()].count('\n') + 1
                        var_name = match.group(1)

                        results.append({
                            'file': str(file_path.relative_to(self.root)),
                            'line': line,
                            'content': f'style={{{var_name}}}',
                            'type': 'variable',
                            'variable': var_name,
                        })

                except Exception:
                    continue

        return results

    async def get_component_tree(self) -> Dict[str, List[str]]:
        """
        Build component dependency tree.

        Returns dict mapping component names to their child components.
        """
        components = await self.analyze()

        tree = {}
        for comp in components:
            tree[comp.name] = comp.children

        return tree

    async def find_style_patterns(self) -> Dict[str, List[Dict]]:
        """
        Find different styling patterns used across the project.

        Returns dict with pattern types and their occurrences.
        """
        patterns = {
            'inline_styles': [],
            'css_modules': [],
            'styled_components': [],
            'emotion': [],
            'tailwind': [],
            'css_classes': [],
        }

        component_files = self._find_component_files()

        for file_path in component_files:
            try:
                content = file_path.read_text(encoding='utf-8', errors='ignore')
                rel_path = str(file_path.relative_to(self.root))

                # CSS Modules
                if re.search(r'import\s+\w+\s+from\s+["\'].*\.module\.', content):
                    patterns['css_modules'].append({'file': rel_path})

                # styled-components
                if re.search(r'styled\.|from\s+["\']styled-components', content):
                    patterns['styled_components'].append({'file': rel_path})

                # Emotion
                if re.search(r'@emotion|css`', content):
                    patterns['emotion'].append({'file': rel_path})

                # Tailwind (className with utility classes)
                if re.search(r'className\s*=\s*["\'][^"\']*(?:flex|grid|p-\d|m-\d|bg-)', content):
                    patterns['tailwind'].append({'file': rel_path})

                # Regular CSS classes
                if re.search(r'className\s*=\s*["\'][a-zA-Z]', content):
                    patterns['css_classes'].append({'file': rel_path})

                # Inline styles
                for match in INLINE_STYLE_OBJECT.finditer(content):
                    line = content[:match.start()].count('\n') + 1
                    patterns['inline_styles'].append({
                        'file': rel_path,
                        'line': line,
                    })

            except Exception:
                continue

        return patterns
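
A sketch of the ReactAnalyzer API on its own (same hypothetical project path as the earlier examples):

import asyncio

from analyze.react import ReactAnalyzer

async def main() -> None:
    analyzer = ReactAnalyzer("./my-react-app")  # hypothetical path
    for comp in await analyzer.analyze():
        print(f"{comp.name} ({comp.type}): {comp.inline_style_count} inline styles")
    patterns = await analyzer.find_style_patterns()
    print("tailwind files:", len(patterns['tailwind']))

asyncio.run(main())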
502
demo/tools/analyze/scanner.py
Normal file
502
demo/tools/analyze/scanner.py
Normal file
@@ -0,0 +1,502 @@
"""
Project Scanner

Scans file system to discover project structure, frameworks, and style files.
"""

import json
import re
import time
from pathlib import Path
from typing import List, Dict, Any, Optional, Set, Tuple
from dataclasses import dataclass, field

from .base import (
    Framework,
    StylingApproach,
    StyleFile,
    ProjectAnalysis,
)


# Directories to skip during scanning
SKIP_DIRS = {
    'node_modules',
    '.git',
    '.next',
    '.nuxt',
    'dist',
    'build',
    'out',
    '.cache',
    'coverage',
    '__pycache__',
    '.venv',
    'venv',
    '.turbo',
    '.vercel',
}

# File extensions to scan
SCAN_EXTENSIONS = {
    # JavaScript/TypeScript
    '.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs',
    # Styles
    '.css', '.scss', '.sass', '.less', '.styl',
    # Config
    '.json',
}


@dataclass
class ScanResult:
    """Result of file system scan."""
    files: List[Path] = field(default_factory=list)
    style_files: List[Path] = field(default_factory=list)
    component_files: List[Path] = field(default_factory=list)
    config_files: Dict[str, Path] = field(default_factory=dict)
    total_lines: int = 0


class ProjectScanner:
    """
    Scans a project directory to identify:
    - Framework (React, Next, Vue, etc.)
    - Styling approach (CSS modules, styled-components, Tailwind, etc.)
    - Component files
    - Style files

    Results are cached in memory for the session.
    """

    # Class-level cache: path -> (timestamp, analysis)
    _cache: Dict[str, Tuple[float, ProjectAnalysis]] = {}
    _cache_ttl: float = 60.0  # Cache for 60 seconds

    def __init__(self, root_path: str, use_cache: bool = True):
        self.root = Path(root_path).resolve()
        self.use_cache = use_cache
        if not self.root.exists():
            raise FileNotFoundError(f"Project path not found: {root_path}")
    async def scan(self) -> ProjectAnalysis:
        """
        Perform full project scan.

        Returns:
            ProjectAnalysis with detected framework, styles, and files
        """
        # Check cache if enabled
        if self.use_cache:
            cache_key = str(self.root)
            if cache_key in self._cache:
                timestamp, cached_analysis = self._cache[cache_key]
                if time.time() - timestamp < self._cache_ttl:
                    return cached_analysis

        # Scan file system
        scan_result = self._scan_files()

        # Detect framework
        framework, version = self._detect_framework(scan_result.config_files)

        # Detect styling approaches
        styling = self._detect_styling(scan_result)

        # Collect style files
        style_files = self._analyze_style_files(scan_result.style_files)

        # Build analysis result
        analysis = ProjectAnalysis(
            project_path=str(self.root),
            framework=framework,
            framework_version=version,
            style_files=style_files,
            style_file_count=len(style_files),
            stats={
                "total_files_scanned": len(scan_result.files),
                "total_lines": scan_result.total_lines,
                "component_files": len(scan_result.component_files),
                "style_files": len(scan_result.style_files),
            }
        )

        # Determine primary styling approach: the one with the most occurrences
        if styling:
            analysis.styling_approaches = styling
            analysis.primary_styling = max(styling, key=lambda x: x.count).type

        # Cache result if enabled
        if self.use_cache:
            self._cache[str(self.root)] = (time.time(), analysis)

        return analysis
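    # Usage sketch (illustrative, not part of the original file): scan() is a
    # coroutine, so it must run inside an event loop. The path is hypothetical.
    #
    #     import asyncio
    #
    #     async def main():
    #         scanner = ProjectScanner("/path/to/react-app")
    #         analysis = await scanner.scan()
    #         print(analysis.framework, analysis.primary_styling)
    #
    #     asyncio.run(main())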
    def _scan_files(self) -> ScanResult:
        """Scan directory for relevant files."""
        result = ScanResult()

        for path in self.root.rglob("*"):
            # Skip directories in skip list
            if any(skip in path.parts for skip in SKIP_DIRS):
                continue

            if not path.is_file():
                continue

            suffix = path.suffix.lower()
            if suffix not in SCAN_EXTENSIONS:
                continue

            result.files.append(path)

            # Categorize files
            if suffix in {'.css', '.scss', '.sass', '.less', '.styl'}:
                result.style_files.append(path)
            elif suffix in {'.jsx', '.tsx'}:
                result.component_files.append(path)
            elif suffix in {'.js', '.ts'}:
                # Check whether it's a config file ("config" in the name, or an
                # "rc"-style name such as .eslintrc.js) or a component
                name = path.name.lower()
                if 'config' in name or re.search(r'(^|\.)\w*rc(\.|$)', name):
                    result.config_files[name] = path
                elif self._looks_like_component(path):
                    result.component_files.append(path)

            # Count lines (approximate for large files)
            try:
                content = path.read_text(encoding='utf-8', errors='ignore')
                result.total_lines += content.count('\n') + 1
            except Exception:
                pass

        # Look for specific config files
        config_names = [
            'package.json',
            'tsconfig.json',
            'tailwind.config.js',
            'tailwind.config.ts',
            'next.config.js',
            'next.config.mjs',
            'vite.config.js',
            'vite.config.ts',
            'nuxt.config.js',
            'nuxt.config.ts',
            '.eslintrc.json',
            '.eslintrc.js',
        ]

        for name in config_names:
            config_path = self.root / name
            if config_path.exists():
                result.config_files[name] = config_path

        return result

    def _looks_like_component(self, path: Path) -> bool:
        """Check if a JS/TS file looks like a React component."""
        name = path.stem
        # PascalCase is a strong indicator
        if name[0].isupper() and not name.isupper():
            return True
        # Common component patterns
        if any(x in name.lower() for x in ['component', 'page', 'view', 'screen']):
            return True
        return False
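    # Illustrative trace of the heuristic above (file names are hypothetical):
    #
    #     "Button.tsx"   -> True   (PascalCase stem)
    #     "API.ts"       -> False  (all-uppercase stem is excluded)
    #     "user-page.ts" -> True   ("page" appears in the lowercased stem)
    #     "utils.ts"     -> False  (no indicator matches)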
    def _detect_framework(
        self,
        config_files: Dict[str, Path]
    ) -> Tuple[Framework, str]:
        """Detect the UI framework and version."""
        # Check package.json for dependencies
        pkg_json = config_files.get('package.json')
        if not pkg_json:
            return Framework.UNKNOWN, ""

        try:
            pkg = json.loads(pkg_json.read_text())
            deps = {
                **pkg.get('dependencies', {}),
                **pkg.get('devDependencies', {}),
            }

            # Check for Next.js first (it includes React)
            if 'next' in deps:
                return Framework.NEXT, deps.get('next', '').lstrip('^~')

            # Check for Nuxt (Vue-based)
            if 'nuxt' in deps:
                return Framework.NUXT, deps.get('nuxt', '').lstrip('^~')

            # Check for other frameworks
            if 'react' in deps:
                return Framework.REACT, deps.get('react', '').lstrip('^~')

            if 'vue' in deps:
                return Framework.VUE, deps.get('vue', '').lstrip('^~')

            if '@angular/core' in deps:
                return Framework.ANGULAR, deps.get('@angular/core', '').lstrip('^~')

            if 'svelte' in deps:
                return Framework.SVELTE, deps.get('svelte', '').lstrip('^~')

            if 'solid-js' in deps:
                return Framework.SOLID, deps.get('solid-js', '').lstrip('^~')

        except (json.JSONDecodeError, KeyError):
            pass

        return Framework.UNKNOWN, ""
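    # Worked example (hypothetical package.json): given
    #
    #     {"dependencies": {"next": "^14.2.3", "react": "^18.3.1"}}
    #
    # the checks above return (Framework.NEXT, "14.2.3"): Next.js wins because
    # it is tested before React, and lstrip('^~') drops the semver range prefix.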
    def _detect_styling(self, scan_result: ScanResult) -> List:
        """Detect styling approaches used in the project."""
        from .base import StylePattern, Location

        patterns: Dict[StylingApproach, StylePattern] = {}

        # Check config files for styling indicators
        pkg_json = scan_result.config_files.get('package.json')
        if pkg_json:
            try:
                pkg = json.loads(pkg_json.read_text())
                deps = {
                    **pkg.get('dependencies', {}),
                    **pkg.get('devDependencies', {}),
                }

                # Tailwind
                if 'tailwindcss' in deps:
                    patterns[StylingApproach.TAILWIND] = StylePattern(
                        type=StylingApproach.TAILWIND,
                        count=1,
                        examples=["tailwindcss in dependencies"]
                    )

                # styled-components
                if 'styled-components' in deps:
                    patterns[StylingApproach.STYLED_COMPONENTS] = StylePattern(
                        type=StylingApproach.STYLED_COMPONENTS,
                        count=1,
                        examples=["styled-components in dependencies"]
                    )

                # Emotion
                if '@emotion/react' in deps or '@emotion/styled' in deps:
                    patterns[StylingApproach.EMOTION] = StylePattern(
                        type=StylingApproach.EMOTION,
                        count=1,
                        examples=["@emotion in dependencies"]
                    )

                # SASS/SCSS
                if 'sass' in deps or 'node-sass' in deps:
                    patterns[StylingApproach.SASS_SCSS] = StylePattern(
                        type=StylingApproach.SASS_SCSS,
                        count=1,
                        examples=["sass in dependencies"]
                    )

            except (json.JSONDecodeError, KeyError):
                pass

        # Check tailwind config
        if ('tailwind.config.js' in scan_result.config_files
                or 'tailwind.config.ts' in scan_result.config_files):
            if StylingApproach.TAILWIND not in patterns:
                patterns[StylingApproach.TAILWIND] = StylePattern(
                    type=StylingApproach.TAILWIND,
                    count=1,
                    examples=["tailwind.config found"]
                )

        # Scan component files for styling patterns
        for comp_file in scan_result.component_files[:100]:  # Limit for performance
            try:
                content = comp_file.read_text(encoding='utf-8', errors='ignore')
                self._detect_patterns_in_file(
                    content, str(comp_file), patterns
                )
            except Exception:
                pass

        # Check style files
        for style_file in scan_result.style_files:
            suffix = style_file.suffix.lower()

            if suffix == '.css':
                # Check for CSS modules
                if '.module.css' in style_file.name.lower():
                    approach = StylingApproach.CSS_MODULES
                else:
                    approach = StylingApproach.VANILLA_CSS

                if approach not in patterns:
                    patterns[approach] = StylePattern(type=approach)
                patterns[approach].count += 1
                patterns[approach].locations.append(
                    Location(str(style_file), 1)
                )

            elif suffix in {'.scss', '.sass'}:
                if StylingApproach.SASS_SCSS not in patterns:
                    patterns[StylingApproach.SASS_SCSS] = StylePattern(
                        type=StylingApproach.SASS_SCSS
                    )
                patterns[StylingApproach.SASS_SCSS].count += 1

        return list(patterns.values())
    def _detect_patterns_in_file(
        self,
        content: str,
        file_path: str,
        patterns: Dict[StylingApproach, Any]
    ) -> None:
        """Detect styling patterns in a single file."""
        from .base import StylePattern, Location

        # CSS Modules import
        css_module_pattern = re.compile(
            r"import\s+\w+\s+from\s+['\"].*\.module\.(css|scss|sass)['\"]"
        )
        for match in css_module_pattern.finditer(content):
            if StylingApproach.CSS_MODULES not in patterns:
                patterns[StylingApproach.CSS_MODULES] = StylePattern(
                    type=StylingApproach.CSS_MODULES
                )
            patterns[StylingApproach.CSS_MODULES].count += 1
            line_num = content[:match.start()].count('\n') + 1
            patterns[StylingApproach.CSS_MODULES].locations.append(
                Location(file_path, line_num)
            )

        # styled-components
        styled_pattern = re.compile(
            r"(styled\.|styled\()|(from\s+['\"]styled-components['\"])"
        )
        for match in styled_pattern.finditer(content):
            if StylingApproach.STYLED_COMPONENTS not in patterns:
                patterns[StylingApproach.STYLED_COMPONENTS] = StylePattern(
                    type=StylingApproach.STYLED_COMPONENTS
                )
            patterns[StylingApproach.STYLED_COMPONENTS].count += 1

        # Emotion
        emotion_pattern = re.compile(
            r"(css`|@emotion|from\s+['\"]@emotion)"
        )
        for match in emotion_pattern.finditer(content):
            if StylingApproach.EMOTION not in patterns:
                patterns[StylingApproach.EMOTION] = StylePattern(
                    type=StylingApproach.EMOTION
                )
            patterns[StylingApproach.EMOTION].count += 1

        # Inline styles
        inline_pattern = re.compile(
            r'style\s*=\s*\{\s*\{[^}]+\}\s*\}'
        )
        for match in inline_pattern.finditer(content):
            if StylingApproach.INLINE_STYLES not in patterns:
                patterns[StylingApproach.INLINE_STYLES] = StylePattern(
                    type=StylingApproach.INLINE_STYLES
                )
            patterns[StylingApproach.INLINE_STYLES].count += 1
            line_num = content[:match.start()].count('\n') + 1
            patterns[StylingApproach.INLINE_STYLES].locations.append(
                Location(file_path, line_num)
            )
            patterns[StylingApproach.INLINE_STYLES].examples.append(
                match.group(0)[:100]
            )

        # Tailwind classes
        tailwind_pattern = re.compile(
            r'className\s*=\s*["\'][^"\']*(?:flex|grid|p-|m-|bg-|text-|border-)[^"\']*["\']'
        )
        for match in tailwind_pattern.finditer(content):
            if StylingApproach.TAILWIND not in patterns:
                patterns[StylingApproach.TAILWIND] = StylePattern(
                    type=StylingApproach.TAILWIND
                )
            patterns[StylingApproach.TAILWIND].count += 1
    def _analyze_style_files(self, style_paths: List[Path]) -> List[StyleFile]:
        """Analyze style files for metadata."""
        style_files = []

        for path in style_paths:
            try:
                content = path.read_text(encoding='utf-8', errors='ignore')

                # Determine type
                suffix = path.suffix.lower()
                if '.module.' in path.name.lower():
                    file_type = 'css-module'
                elif suffix == '.scss':
                    file_type = 'scss'
                elif suffix == '.sass':
                    file_type = 'sass'
                elif suffix == '.less':
                    file_type = 'less'
                else:
                    file_type = 'css'

                # Count variables
                var_count = 0
                if file_type in {'css', 'css-module'}:
                    var_count = len(re.findall(r'--[\w-]+\s*:', content))
                elif file_type in {'scss', 'sass'}:
                    var_count = len(re.findall(r'\$[\w-]+\s*:', content))

                # Count selectors (approximate)
                selector_count = len(re.findall(r'[.#][\w-]+\s*\{', content))

                # Find imports
                imports = re.findall(r'@import\s+["\']([^"\']+)["\']', content)

                style_files.append(StyleFile(
                    path=str(path.relative_to(self.root)),
                    type=file_type,
                    size_bytes=path.stat().st_size,
                    line_count=content.count('\n') + 1,
                    variable_count=var_count,
                    selector_count=selector_count,
                    imports=imports,
                ))

            except Exception:
                continue

        return style_files

    def get_file_tree(self, max_depth: int = 3) -> Dict[str, Any]:
        """Get project file tree structure."""
        def build_tree(path: Path, depth: int) -> Dict[str, Any]:
            if depth > max_depth:
                return {"...": "truncated"}

            result = {}
            try:
                for item in sorted(path.iterdir()):
                    if item.name in SKIP_DIRS:
                        continue

                    if item.is_dir():
                        result[item.name + "/"] = build_tree(item, depth + 1)
                    elif item.suffix in SCAN_EXTENSIONS:
                        result[item.name] = item.stat().st_size

            except PermissionError:
                pass

            return result

        return build_tree(self.root, 0)
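    # Shape sketch (illustrative, hypothetical file names): directories map to
    # nested dicts, files map to their size in bytes:
    #
    #     {"src/": {"App.tsx": 1834, "styles/": {"main.css": 512}},
    #      "package.json": 611}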
527
demo/tools/analyze/styles.py
Normal file
@@ -0,0 +1,527 @@
"""
Style Pattern Analyzer

Detects and analyzes style patterns in code to identify:
- Hardcoded values that should be tokens
- Duplicate values across files
- Inconsistent naming patterns
- Unused styles
"""

import re
from pathlib import Path
from typing import List, Dict, Any, Optional, Set, Tuple
from collections import defaultdict
from dataclasses import dataclass, field

from .base import (
    Location,
    TokenCandidate,
    StylePattern,
    StylingApproach,
)


# Color patterns
HEX_COLOR = re.compile(r'#(?:[0-9a-fA-F]{3}){1,2}\b')
RGB_COLOR = re.compile(r'rgba?\s*\(\s*\d+\s*,\s*\d+\s*,\s*\d+(?:\s*,\s*[\d.]+)?\s*\)')
HSL_COLOR = re.compile(r'hsla?\s*\(\s*\d+\s*,\s*[\d.]+%\s*,\s*[\d.]+%(?:\s*,\s*[\d.]+)?\s*\)')
OKLCH_COLOR = re.compile(r'oklch\s*\([^)]+\)')

# Dimension patterns
PX_VALUE = re.compile(r'\b(\d+(?:\.\d+)?)\s*px\b')
REM_VALUE = re.compile(r'\b(\d+(?:\.\d+)?)\s*rem\b')
EM_VALUE = re.compile(r'\b(\d+(?:\.\d+)?)\s*em\b')
# Note: no trailing \b after '%', since '%' is a non-word character and a word
# boundary there would reject values followed by ';' or whitespace
PERCENT_VALUE = re.compile(r'\b(\d+(?:\.\d+)?)\s*%')

# Font patterns
FONT_SIZE = re.compile(r'font-size\s*:\s*([^;]+)')
FONT_FAMILY = re.compile(r'font-family\s*:\s*([^;]+)')
FONT_WEIGHT = re.compile(r'font-weight\s*:\s*(\d+|normal|bold|lighter|bolder)')
LINE_HEIGHT = re.compile(r'line-height\s*:\s*([^;]+)')

# Spacing patterns
MARGIN_PADDING = re.compile(r'(?:margin|padding)(?:-(?:top|right|bottom|left))?\s*:\s*([^;]+)')
GAP = re.compile(r'gap\s*:\s*([^;]+)')

# Border patterns
BORDER_RADIUS = re.compile(r'border-radius\s*:\s*([^;]+)')
BORDER_WIDTH = re.compile(r'border(?:-(?:top|right|bottom|left))?-width\s*:\s*([^;]+)')

# Shadow patterns
BOX_SHADOW = re.compile(r'box-shadow\s*:\s*([^;]+)')

# Z-index
Z_INDEX = re.compile(r'z-index\s*:\s*(\d+)')
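# Illustrative matches for the patterns above (sample CSS, not from the project):
#
#     color: #1f2937;        -> HEX_COLOR matches "#1f2937"
#     margin: 16px 8px;      -> PX_VALUE captures "16" and "8"
#     font-weight: bold;     -> FONT_WEIGHT captures "bold"
#     z-index: 100;          -> Z_INDEX captures "100"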
@dataclass
class ValueOccurrence:
    """Tracks where a value appears."""
    value: str
    file: str
    line: int
    property: str  # CSS property name
    context: str   # Surrounding code


class StyleAnalyzer:
    """
    Analyzes style files and inline styles to find:
    - Hardcoded values that should be tokens
    - Duplicate values
    - Inconsistent patterns
    """

    def __init__(self, root_path: str):
        self.root = Path(root_path).resolve()
        self.values: Dict[str, List[ValueOccurrence]] = defaultdict(list)
        self.color_values: Dict[str, List[ValueOccurrence]] = defaultdict(list)
        self.spacing_values: Dict[str, List[ValueOccurrence]] = defaultdict(list)
        self.font_values: Dict[str, List[ValueOccurrence]] = defaultdict(list)

    async def analyze(
        self,
        include_inline: bool = True,
        include_css: bool = True
    ) -> Dict[str, Any]:
        """
        Analyze all styles in the project.

        Returns:
            Dict with analysis results including duplicates and candidates
        """
        # Reset collectors
        self.values.clear()
        self.color_values.clear()
        self.spacing_values.clear()
        self.font_values.clear()

        # Scan CSS/SCSS files
        if include_css:
            await self._scan_style_files()

        # Scan inline styles in JS/TS files
        if include_inline:
            await self._scan_inline_styles()

        # Analyze results
        duplicates = self._find_duplicates()
        candidates = self._generate_token_candidates()

        return {
            'total_values_found': sum(len(v) for v in self.values.values()),
            'unique_colors': len(self.color_values),
            'unique_spacing': len(self.spacing_values),
            'duplicates': duplicates,
            'token_candidates': candidates,
        }
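    # Usage sketch (illustrative; the project path is hypothetical, and the
    # call must be awaited from async code):
    #
    #     analyzer = StyleAnalyzer("/path/to/react-app")
    #     report = await analyzer.analyze(include_inline=True, include_css=True)
    #     print(report['unique_colors'], len(report['token_candidates']))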
    async def _scan_style_files(self) -> None:
        """Scan CSS and SCSS files for values."""
        skip_dirs = {'node_modules', '.git', 'dist', 'build'}

        for pattern in ['**/*.css', '**/*.scss', '**/*.sass', '**/*.less']:
            for file_path in self.root.rglob(pattern):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')
                    rel_path = str(file_path.relative_to(self.root))
                    self._extract_values_from_css(content, rel_path)
                except Exception:
                    continue

    async def _scan_inline_styles(self) -> None:
        """Scan JS/TS files for inline style values."""
        skip_dirs = {'node_modules', '.git', 'dist', 'build'}

        for pattern in ['**/*.jsx', '**/*.tsx', '**/*.js', '**/*.ts']:
            for file_path in self.root.rglob(pattern):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')
                    rel_path = str(file_path.relative_to(self.root))
                    self._extract_values_from_jsx(content, rel_path)
                except Exception:
                    continue

    def _extract_values_from_css(self, content: str, file_path: str) -> None:
        """Extract style values from CSS content."""
        lines = content.split('\n')

        for line_num, line in enumerate(lines, 1):
            # Skip comments and empty lines
            stripped = line.strip()
            if not stripped or stripped.startswith('//') or stripped.startswith('/*'):
                continue

            # Extract colors
            for pattern in [HEX_COLOR, RGB_COLOR, HSL_COLOR, OKLCH_COLOR]:
                for match in pattern.finditer(line):
                    value = match.group(0).lower()
                    self._record_color(value, file_path, line_num, stripped)

            # Extract dimensions
            for match in PX_VALUE.finditer(line):
                value = f"{match.group(1)}px"
                self._record_spacing(value, file_path, line_num, stripped)

            for match in REM_VALUE.finditer(line):
                value = f"{match.group(1)}rem"
                self._record_spacing(value, file_path, line_num, stripped)

            # Extract font properties
            for match in FONT_SIZE.finditer(line):
                value = match.group(1).strip()
                self._record_font(value, file_path, line_num, 'font-size', stripped)

            for match in FONT_WEIGHT.finditer(line):
                value = match.group(1).strip()
                self._record_font(value, file_path, line_num, 'font-weight', stripped)

            # Extract z-index
            for match in Z_INDEX.finditer(line):
                value = match.group(1)
                self._record_value(f"z-{value}", file_path, line_num, 'z-index', stripped)
    def _extract_values_from_jsx(self, content: str, file_path: str) -> None:
        """Extract style values from JSX inline styles."""
        # Find style={{ ... }} blocks
        style_pattern = re.compile(r'style\s*=\s*\{\s*\{([^}]+)\}\s*\}', re.DOTALL)

        for match in style_pattern.finditer(content):
            style_content = match.group(1)
            line_num = content[:match.start()].count('\n') + 1

            # Parse the style object: look for property: value patterns
            prop_pattern = re.compile(r'(\w+)\s*:\s*["\']?([^,\n"\']+)["\']?')

            for prop_match in prop_pattern.finditer(style_content):
                prop_name = prop_match.group(1)
                prop_value = prop_match.group(2).strip()

                # Check for colors
                if any(c in prop_name.lower() for c in ['color', 'background']):
                    if HEX_COLOR.search(prop_value) or RGB_COLOR.search(prop_value):
                        self._record_color(prop_value.lower(), file_path, line_num, style_content[:100])

                # Check for dimensions
                if PX_VALUE.search(prop_value):
                    self._record_spacing(prop_value, file_path, line_num, style_content[:100])

                if 'fontSize' in prop_name or 'fontWeight' in prop_name:
                    self._record_font(prop_value, file_path, line_num, prop_name, style_content[:100])
    def _record_color(self, value: str, file: str, line: int, context: str) -> None:
        """Record a color value occurrence."""
        normalized = self._normalize_color(value)
        occurrence = ValueOccurrence(
            value=value,
            file=file,
            line=line,
            property='color',
            context=context,
        )
        self.color_values[normalized].append(occurrence)
        self.values[normalized].append(occurrence)

    def _record_spacing(self, value: str, file: str, line: int, context: str) -> None:
        """Record a spacing/dimension value occurrence."""
        occurrence = ValueOccurrence(
            value=value,
            file=file,
            line=line,
            property='spacing',
            context=context,
        )
        self.spacing_values[value].append(occurrence)
        self.values[value].append(occurrence)

    def _record_font(self, value: str, file: str, line: int, prop: str, context: str) -> None:
        """Record a font-related value occurrence."""
        occurrence = ValueOccurrence(
            value=value,
            file=file,
            line=line,
            property=prop,
            context=context,
        )
        self.font_values[value].append(occurrence)
        self.values[value].append(occurrence)

    def _record_value(self, value: str, file: str, line: int, prop: str, context: str) -> None:
        """Record a generic value occurrence."""
        self.values[value].append(ValueOccurrence(
            value=value,
            file=file,
            line=line,
            property=prop,
            context=context,
        ))

    def _normalize_color(self, color: str) -> str:
        """Normalize color value for comparison."""
        color = color.lower().strip()
        # Expand 3-digit hex to 6-digit
        if re.match(r'^#[0-9a-f]{3}$', color):
            color = f"#{color[1]*2}{color[2]*2}{color[3]*2}"
        return color
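    # Worked example of the normalization above: "#FA3" is lowercased to "#fa3",
    # then each digit is doubled, giving "#ffaa33", so "#FA3" and "#ffaa33"
    # count as the same color when duplicates are tallied.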
    def _find_duplicates(self) -> List[Dict[str, Any]]:
        """Find values that appear multiple times."""
        duplicates = []

        for value, occurrences in self.values.items():
            if len(occurrences) >= 2:
                # Get unique files
                files = list(set(o.file for o in occurrences))

                duplicates.append({
                    'value': value,
                    'count': len(occurrences),
                    'files': files[:5],  # Limit to 5 files
                    'category': occurrences[0].property,
                    'locations': [
                        {'file': o.file, 'line': o.line}
                        for o in occurrences[:5]
                    ],
                })

        # Sort by count (most duplicated first)
        duplicates.sort(key=lambda x: x['count'], reverse=True)

        return duplicates[:50]  # Return top 50

    def _generate_token_candidates(self) -> List[TokenCandidate]:
        """Generate token suggestions for repeated values."""
        candidates = []

        # Color candidates
        for value, occurrences in self.color_values.items():
            if len(occurrences) >= 2:
                suggested_name = self._suggest_color_name(value)
                candidates.append(TokenCandidate(
                    value=value,
                    suggested_name=suggested_name,
                    category='colors',
                    occurrences=len(occurrences),
                    locations=[
                        Location(o.file, o.line) for o in occurrences[:5]
                    ],
                    confidence=min(0.9, 0.3 + (len(occurrences) * 0.1)),
                ))

        # Spacing candidates
        for value, occurrences in self.spacing_values.items():
            if len(occurrences) >= 3:  # Higher threshold for spacing
                suggested_name = self._suggest_spacing_name(value)
                candidates.append(TokenCandidate(
                    value=value,
                    suggested_name=suggested_name,
                    category='spacing',
                    occurrences=len(occurrences),
                    locations=[
                        Location(o.file, o.line) for o in occurrences[:5]
                    ],
                    confidence=min(0.8, 0.2 + (len(occurrences) * 0.05)),
                ))

        # Sort by confidence
        candidates.sort(key=lambda x: x.confidence, reverse=True)

        return candidates[:30]  # Return top 30
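    # Worked example of the confidence formulas above: a color seen 4 times
    # scores min(0.9, 0.3 + 4 * 0.1) = 0.7; a spacing value seen 10 times also
    # lands at min(0.8, 0.2 + 10 * 0.05) = 0.7. Colors reach the 0.9 cap only
    # at 6 or more occurrences.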
    def _suggest_color_name(self, color: str) -> str:
        """Suggest a token name for a color value."""
        # Common color mappings
        common_colors = {
            '#ffffff': 'color.white',
            '#000000': 'color.black',
            '#f3f4f6': 'color.neutral.100',
            '#e5e7eb': 'color.neutral.200',
            '#d1d5db': 'color.neutral.300',
            '#9ca3af': 'color.neutral.400',
            '#6b7280': 'color.neutral.500',
            '#4b5563': 'color.neutral.600',
            '#374151': 'color.neutral.700',
            '#1f2937': 'color.neutral.800',
            '#111827': 'color.neutral.900',
        }

        if color in common_colors:
            return common_colors[color]

        # Detect color family by hue (simplified)
        if color.startswith('#'):
            return f"color.custom.{color[1:7]}"

        return "color.custom.value"

    def _suggest_spacing_name(self, value: str) -> str:
        """Suggest a token name for a spacing value."""
        # Common spacing values
        spacing_map = {
            '0px': 'spacing.0',
            '4px': 'spacing.xs',
            '8px': 'spacing.sm',
            '12px': 'spacing.md',
            '16px': 'spacing.lg',
            '20px': 'spacing.lg',
            '24px': 'spacing.xl',
            '32px': 'spacing.2xl',
            '48px': 'spacing.3xl',
            '64px': 'spacing.4xl',
            '0.25rem': 'spacing.xs',
            '0.5rem': 'spacing.sm',
            '0.75rem': 'spacing.md',
            '1rem': 'spacing.lg',
            '1.5rem': 'spacing.xl',
            '2rem': 'spacing.2xl',
        }

        if value in spacing_map:
            return spacing_map[value]

        return f"spacing.custom.{value.replace('px', '').replace('rem', 'r')}"
    async def find_unused_styles(self) -> List[Dict[str, Any]]:
        """
        Find CSS classes/selectors that are not used in the codebase.

        Returns list of potentially unused styles.
        """
        # Collect all CSS class definitions
        css_classes = set()
        class_locations = {}

        skip_dirs = {'node_modules', '.git', 'dist', 'build'}

        for pattern in ['**/*.css', '**/*.scss']:
            for file_path in self.root.rglob(pattern):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')
                    rel_path = str(file_path.relative_to(self.root))

                    # Find class definitions
                    for match in re.finditer(r'\.([a-zA-Z_][\w-]*)\s*[{,]', content):
                        class_name = match.group(1)
                        css_classes.add(class_name)
                        class_locations[class_name] = rel_path

                except Exception:
                    continue

        # Collect all class usage in JS/JSX/TS/TSX
        used_classes = set()

        for pattern in ['**/*.jsx', '**/*.tsx', '**/*.js', '**/*.ts']:
            for file_path in self.root.rglob(pattern):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')

                    # Find className usage
                    for match in re.finditer(r'className\s*=\s*["\']([^"\']+)["\']', content):
                        classes = match.group(1).split()
                        used_classes.update(classes)

                    # Find styles.xxx usage (CSS modules)
                    for match in re.finditer(r'styles\.(\w+)', content):
                        used_classes.add(match.group(1))

                except Exception:
                    continue

        # Find unused
        unused = css_classes - used_classes

        return [
            {
                'class': cls,
                'file': class_locations.get(cls, 'unknown'),
            }
            for cls in sorted(unused)
        ][:50]  # Limit results
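    # Caveat (editorial note, not from the original source): because usage is
    # collected from string literals only, dynamically built class names such
    # as className={`btn-${variant}`} are invisible to this check, so their
    # definitions will be reported as unused false positives.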
    async def analyze_naming_consistency(self) -> Dict[str, Any]:
        """
        Analyze naming consistency across style files.

        Returns analysis of naming patterns and inconsistencies.
        """
        patterns = {
            'kebab-case': [],   # my-class-name
            'camelCase': [],    # myClassName
            'snake_case': [],   # my_class_name
            'BEM': [],          # block__element--modifier
        }

        skip_dirs = {'node_modules', '.git', 'dist', 'build'}

        for pattern in ['**/*.css', '**/*.scss']:
            for file_path in self.root.rglob(pattern):
                if any(skip in file_path.parts for skip in skip_dirs):
                    continue

                try:
                    content = file_path.read_text(encoding='utf-8', errors='ignore')
                    rel_path = str(file_path.relative_to(self.root))

                    # Find class names
                    for match in re.finditer(r'\.([a-zA-Z_][\w-]*)', content):
                        name = match.group(1)
                        line = content[:match.start()].count('\n') + 1

                        # Classify naming pattern
                        if '__' in name or '--' in name:
                            patterns['BEM'].append({'name': name, 'file': rel_path, 'line': line})
                        elif '_' in name:
                            patterns['snake_case'].append({'name': name, 'file': rel_path, 'line': line})
                        elif '-' in name:
                            patterns['kebab-case'].append({'name': name, 'file': rel_path, 'line': line})
                        elif name != name.lower():
                            patterns['camelCase'].append({'name': name, 'file': rel_path, 'line': line})

                except Exception:
                    continue

        # Calculate primary pattern
        pattern_counts = {k: len(v) for k, v in patterns.items()}
        primary = max(pattern_counts, key=pattern_counts.get) if any(pattern_counts.values()) else None

        # Find inconsistencies (patterns different from primary)
        inconsistencies = []
        if primary:
            for pattern_type, items in patterns.items():
                if pattern_type != primary and items:
                    inconsistencies.extend(items[:10])

        return {
            'pattern_counts': pattern_counts,
            'primary_pattern': primary,
            'inconsistencies': inconsistencies[:20],
        }
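    # Shape sketch of the result (illustrative counts and names): for a mostly
    # kebab-case codebase with a few BEM names, the method returns roughly
    #
    #     {'pattern_counts': {'kebab-case': 240, 'camelCase': 0,
    #                         'snake_case': 3, 'BEM': 12},
    #      'primary_pattern': 'kebab-case',
    #      'inconsistencies': [{'name': 'card__title', 'file': 'app.css', 'line': 8}, ...]}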