Unify MCP across clients; remove legacy plugin server
Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled

This commit is contained in:
DSS
2025-12-12 14:33:18 -03:00
parent 1d53ec341d
commit ec09a0a662
60 changed files with 3451 additions and 4668 deletions

View File

@@ -1,113 +1,187 @@
"""This module provides tools for analyzing a project."""
"""High-level project analysis orchestration used by CLI and MCP tooling."""
from __future__ import annotations
import asyncio
import json
import logging
import subprocess
from dataclasses import asdict, is_dataclass
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Dict
from typing import Any, Dict, List, Optional, Tuple
from dss.analyze.base import ProjectAnalysis
log = logging.getLogger(__name__)
# Path to the node.js parser script.
# This assumes the script is located in the same directory as this file.
parser_script_path = Path(__file__).parent / "parser.js"
from .base import ProjectAnalysis
from .graph import DependencyGraph
from .quick_wins import QuickWinFinder
from .react import ReactAnalyzer
from .scanner import ProjectScanner
from .styles import StyleAnalyzer
def analyze_project(
path: str,
output_graph: bool = False,
prune: bool = False,
visualize: bool = False,
) -> ProjectAnalysis:
"""
Analyzes a project, including all its components and their dependencies.
def _safe_serialize(obj: Any) -> Any:
if obj is None or isinstance(obj, (str, int, float, bool)):
return obj
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, Enum):
return obj.value
if isinstance(obj, Path):
return str(obj)
if isinstance(obj, dict):
return {str(k): _safe_serialize(v) for k, v in obj.items()}
if isinstance(obj, (list, tuple, set)):
return [_safe_serialize(v) for v in obj]
if hasattr(obj, "to_dict") and callable(obj.to_dict):
return _safe_serialize(obj.to_dict())
if is_dataclass(obj):
return _safe_serialize(asdict(obj))
return str(obj)
Args:
path: The path to the project to analyze.
output_graph: Whether to output the dependency graph.
prune: Whether to prune the dependency graph.
visualize: Whether to visualize the dependency graph.
Returns:
A ProjectAnalysis object containing the analysis results.
"""
project_path = Path(path).resolve()
log.info(f"Analyzing project at {project_path}...")
async def _build_analysis(
    project_root: Path,
) -> Tuple[ProjectAnalysis, DependencyGraph, Dict[str, Any], List[Any]]:
    """
    Scan *project_root* and run all analyzers concurrently.

    Args:
        project_root: Resolved root directory of the project.

    Returns:
        Tuple of (populated ProjectAnalysis, DependencyGraph, raw
        style-analysis dict, list of quick wins).
    """
    # Fresh scan every time; callers control their own caching.
    scanner = ProjectScanner(str(project_root), use_cache=False)
    analysis = await scanner.scan()

    react = ReactAnalyzer(str(project_root))
    style = StyleAnalyzer(str(project_root))
    graph = DependencyGraph(str(project_root))
    quick_wins_finder = QuickWinFinder(str(project_root))

    # The four analyzers are independent, so run them concurrently.
    components_task = react.analyze()
    style_task = style.analyze()
    graph_task = graph.build()
    quick_wins_task = quick_wins_finder.find_all()
    components, style_result, _graph_dict, quick_wins = await asyncio.gather(
        components_task, style_task, graph_task, quick_wins_task
    )

    # Fold the analyzer outputs back into the scan result.
    analysis.components = components
    analysis.component_count = len(components)
    analysis.token_candidates = style_result.get("token_candidates", [])  # type: ignore[assignment]
    analysis.stats["token_candidates"] = len(analysis.token_candidates)
    analysis.quick_wins = quick_wins
    analysis.stats["quick_wins_count"] = len(quick_wins)
    return analysis, graph, style_result, quick_wins
def analyze_project(path: str) -> ProjectAnalysis:
    """Run the full async analysis pipeline synchronously.

    Args:
        path: Project directory; ``~`` is expanded and the path resolved.

    Returns:
        The populated ProjectAnalysis (graph/style/quick-win extras from
        the pipeline are discarded).

    Raises:
        FileNotFoundError: if *path* does not exist.
        NotADirectoryError: if *path* exists but is not a directory.
    """
    project_root = Path(path).expanduser().resolve()
    # Validate up front so callers get a clear error instead of analyzer noise.
    if not project_root.exists():
        raise FileNotFoundError(f"Project path not found: {project_root}")
    if not project_root.is_dir():
        raise NotADirectoryError(f"Project path is not a directory: {project_root}")
    analysis = asyncio.run(_build_analysis(project_root))[0]
    return analysis
def run_project_analysis(project_path: str, output_file: Optional[str] = None) -> Dict[str, Any]:
    """
    Run full analysis and write a portable graph JSON file.

    The payload is written to *output_file* when given, otherwise to
    ``<project>/.dss/analysis_graph.json``.

    Args:
        project_path: Root directory of the project to analyze.
        output_file: Optional explicit destination for the JSON file.

    Returns:
        A JSON-serializable dict with both the graph and a summary analysis
        payload.

    Raises:
        FileNotFoundError: if *project_path* does not exist.
        NotADirectoryError: if *project_path* is not a directory.
    """
    project_root = Path(project_path).expanduser().resolve()
    if not project_root.exists():
        raise FileNotFoundError(f"Project path not found: {project_root}")
    if not project_root.is_dir():
        raise NotADirectoryError(f"Project path is not a directory: {project_root}")
    analysis, graph, style_result, quick_wins = asyncio.run(_build_analysis(project_root))
    graph_dict = graph.to_dict()
    insights = {
        "orphans": graph.find_orphans(),
        "hubs": graph.find_hubs(),
        "cycles": graph.find_circular_dependencies(),
    }
    # token_candidates can be large; keep only the scalar summary fields here.
    style_summary = {k: v for k, v in style_result.items() if k != "token_candidates"}
    result: Dict[str, Any] = {
        "project_path": str(project_root),
        "generated_at": datetime.now().isoformat(),
        # Keep a stable, graph-friendly top-level shape.
        "nodes": graph_dict.get("nodes", []),
        "edges": graph_dict.get("edges", []),
        "links": graph_dict.get("edges", []),  # legacy alias
        "stats": graph_dict.get("stats", {}),
        # Extended payloads.
        "analysis": _safe_serialize(analysis),
        "style_summary": _safe_serialize(style_summary),
        "quick_wins": _safe_serialize(quick_wins),
        "graph_insights": _safe_serialize(insights),
    }
    if output_file:
        output_path = Path(output_file).expanduser()
    else:
        output_path = project_root / ".dss" / "analysis_graph.json"
    output_path.parent.mkdir(parents=True, exist_ok=True)
    output_path.write_text(json.dumps(result, indent=2), encoding="utf-8")
    return result
def export_project_context(project_path: str) -> Dict[str, Any]:
    """
    Export a lightweight, AI-friendly project context as a JSON-serializable dict.

    This intentionally avoids embedding full source files: components, token
    candidates, and quick wins are truncated previews sized for prompt use.

    Args:
        project_path: Root directory of the project to analyze.

    Returns:
        A dict summarizing framework, stats, components, styling, quick wins,
        and dependency-graph insights.

    Raises:
        FileNotFoundError: if *project_path* does not exist.
        NotADirectoryError: if *project_path* is not a directory.
    """
    project_root = Path(project_path).expanduser().resolve()
    if not project_root.exists():
        raise FileNotFoundError(f"Project path not found: {project_root}")
    if not project_root.is_dir():
        raise NotADirectoryError(f"Project path is not a directory: {project_root}")
    analysis, graph, style_result, quick_wins = asyncio.run(_build_analysis(project_root))

    graph_dict = graph.to_dict()
    hubs = graph.find_hubs()
    cycles = graph.find_circular_dependencies()
    orphans = graph.find_orphans()

    # Keep this small enough for prompt injection.
    components_preview = [
        {
            "name": c.name,
            "path": c.path,
            "type": c.type,
            "has_styles": c.has_styles,
            "props": c.props[:10],
        }
        for c in analysis.components[:50]
    ]
    token_candidates = style_result.get("token_candidates", [])
    token_candidates_preview = [_safe_serialize(c) for c in token_candidates[:25]]
    quick_wins_preview = [_safe_serialize(w) for w in quick_wins[:25]]

    return {
        "project_path": str(project_root),
        "generated_at": datetime.now().isoformat(),
        "framework": analysis.framework.value,
        "framework_version": analysis.framework_version,
        "primary_styling": analysis.primary_styling.value if analysis.primary_styling else None,
        "stats": _safe_serialize(analysis.stats),
        "components": components_preview,
        "style_summary": _safe_serialize({k: v for k, v in style_result.items() if k != "token_candidates"}),
        "token_candidates": token_candidates_preview,
        "quick_wins": quick_wins_preview,
        "dependency_graph": {
            "stats": graph_dict.get("stats", {}),
            "orphans": orphans[:50],
            "hubs": hubs[:25],
            "cycles": cycles[:10],
        },
    }