feat: Enterprise DSS architecture implementation
Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled
Some checks failed
DSS Project Analysis / dss-context-update (push) Has been cancelled
Complete implementation of enterprise design system validation: Phase 1 - @dss/rules npm package: - CLI with validate and init commands - 16 rules across 5 categories (colors, spacing, typography, components, a11y) - dss-ignore support (inline and next-line) - Break-glass [dss-skip] for emergency merges - CI workflow templates (Gitea, GitHub, GitLab) Phase 2 - Metrics dashboard: - FastAPI metrics API with SQLite storage - Portfolio-wide metrics aggregation - Project drill-down with file:line:column violations - Trend charts and history tracking Phase 3 - Local analysis cache: - LocalAnalysisCache for offline-capable validation - Mode detection (LOCAL/REMOTE/CI) - Stale cache warnings with recommendations Phase 4 - Project onboarding: - dss-init command for project setup - Creates ds.config.json, .dss/ folder structure - Updates .gitignore and package.json scripts - Optional CI workflow setup Architecture decisions: - No commit-back: CI uploads to dashboard, not git - Three-tier: Dashboard (read-only) → CI (authoritative) → Local (advisory) - Pull-based rules via npm for version control 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
424
admin-ui/src/components/PortfolioDashboard.tsx
Normal file
424
admin-ui/src/components/PortfolioDashboard.tsx
Normal file
@@ -0,0 +1,424 @@
|
|||||||
|
import { JSX } from 'preact';
|
||||||
|
import { useState, useEffect } from 'preact/hooks';
|
||||||
|
import { Card, CardHeader, CardContent } from './base/Card';
|
||||||
|
import { Badge } from './base/Badge';
|
||||||
|
import { Button } from './base/Button';
|
||||||
|
import { Spinner } from './base/Spinner';
|
||||||
|
|
||||||
|
/**
 * Per-project summary row as returned by GET /api/metrics/portfolio.
 * Field names are snake_case to mirror the Python API models (metrics.py
 * ProjectMetricsSummary).
 */
interface ProjectMetrics {
  project: string;        // project identifier, e.g. "org/repo"
  total_files: number;
  passed_files: number;
  failed_files: number;
  total_errors: number;
  total_warnings: number;
  rules_version: string;  // rules package version the project validated against
  last_updated: string;   // timestamp string, rendered via formatTimeAgo()
  adoption_score: number; // 0-100; badge colour thresholds are >=80 / >=60
}

/** Portfolio-wide aggregation: response shape of GET /api/metrics/portfolio. */
interface PortfolioData {
  total_projects: number;
  projects_passing: number;
  projects_failing: number;
  total_errors: number;
  total_warnings: number;
  average_adoption_score: number;
  projects: ProjectMetrics[]; // one row per reporting project
}

/** One violation occurrence; rendered as `file:line:column` in the details panel. */
interface ViolationLocation {
  file: string;
  rule: string;
  line: number;
  column: number;
  severity: string; // 'error' | anything-else (treated as warning when badging)
}

/** One day of the trend series (GET /api/metrics/trends). */
interface TrendDataPoint {
  date: string;      // date string; only chars from index 5 on are shown (MM-DD)
  errors: number;
  warnings: number;
  pass_rate: number; // percentage, displayed as-is
}

// All metrics endpoints hang off this prefix (see the FastAPI router prefix
// in apps/api/metrics.py).
const API_BASE = '/api/metrics';
|
||||||
|
|
||||||
|
/**
 * Top-level portfolio view: design-system adoption metrics aggregated
 * across every project reporting to the metrics API.
 *
 * Data flow: on mount, fetches the 30-day portfolio summary and trend
 * series; clicking a row in the projects table lazily loads that
 * project's details into the slide-up ProjectDetailsPanel.
 */
export function PortfolioDashboard(): JSX.Element {
  // Portfolio fetch lifecycle state (loading/error drive the early returns).
  const [loading, setLoading] = useState(true);
  const [portfolio, setPortfolio] = useState<PortfolioData | null>(null);
  // Currently drilled-into project; null hides the details panel.
  const [selectedProject, setSelectedProject] = useState<string | null>(null);
  // Raw /projects/:name response. NOTE(review): untyped — consider a proper
  // interface once that endpoint's schema is stable.
  const [projectDetails, setProjectDetails] = useState<any>(null);
  const [trends, setTrends] = useState<TrendDataPoint[]>([]);
  const [error, setError] = useState<string | null>(null);

  // Initial load only; the Refresh/Retry buttons re-invoke loadPortfolio manually.
  useEffect(() => {
    loadPortfolio();
    loadTrends();
  }, []);

  /** Fetch the aggregated portfolio summary; drives the loading/error UI. */
  async function loadPortfolio() {
    setLoading(true);
    setError(null);
    try {
      const response = await fetch(`${API_BASE}/portfolio?days=30`);
      if (!response.ok) throw new Error('Failed to load portfolio data');
      const data = await response.json();
      setPortfolio(data);
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Unknown error');
    } finally {
      setLoading(false);
    }
  }

  /**
   * Fetch the 30-day trend series. Failures are logged but deliberately
   * non-fatal — the trend card simply stays hidden (trends.length === 0).
   */
  async function loadTrends() {
    try {
      const response = await fetch(`${API_BASE}/trends?days=30`);
      if (!response.ok) return;
      const data = await response.json();
      setTrends(data.data || []);
    } catch (err) {
      console.error('Failed to load trends:', err);
    }
  }

  /**
   * Load the drill-down detail for one project (selected by table click).
   * Selection is set immediately so the row highlights before data arrives.
   */
  async function loadProjectDetails(projectName: string) {
    setSelectedProject(projectName);
    try {
      const response = await fetch(`${API_BASE}/projects/${encodeURIComponent(projectName)}`);
      if (!response.ok) throw new Error('Failed to load project details');
      const data = await response.json();
      setProjectDetails(data);
    } catch (err) {
      console.error('Failed to load project details:', err);
      setProjectDetails(null);
    }
  }

  // --- Early-return states: loading spinner, fetch error, empty portfolio ---

  if (loading) {
    return (
      <div className="portfolio-dashboard portfolio-loading">
        <Spinner size="lg" />
        <span>Loading portfolio metrics...</span>
      </div>
    );
  }

  if (error) {
    return (
      <div className="portfolio-dashboard portfolio-error">
        <Card variant="bordered" padding="lg">
          <CardContent>
            <div className="error-message">
              <Badge variant="error">Error</Badge>
              <p>{error}</p>
              <Button variant="outline" onClick={loadPortfolio}>Retry</Button>
            </div>
          </CardContent>
        </Card>
      </div>
    );
  }

  if (!portfolio) {
    return (
      <div className="portfolio-dashboard">
        <Card variant="bordered" padding="lg">
          <CardContent>
            <p className="text-muted">No portfolio data available. Run DSS validation in your CI pipelines to collect metrics.</p>
          </CardContent>
        </Card>
      </div>
    );
  }

  return (
    <div className="portfolio-dashboard">
      <div className="portfolio-header">
        <h1>Design System Portfolio</h1>
        <p className="subtitle">Adoption metrics across {portfolio.total_projects} projects</p>
        <Button variant="ghost" size="sm" onClick={loadPortfolio}>Refresh</Button>
      </div>

      {/* Portfolio Summary */}
      <div className="metrics-grid portfolio-metrics">
        <MetricCard
          label="Total Projects"
          value={portfolio.total_projects}
        />
        <MetricCard
          label="Passing"
          value={portfolio.projects_passing}
          variant="success"
        />
        <MetricCard
          label="Failing"
          value={portfolio.projects_failing}
          variant={portfolio.projects_failing > 0 ? 'error' : 'default'}
        />
        <MetricCard
          label="Adoption Score"
          value={`${portfolio.average_adoption_score}%`}
          variant={portfolio.average_adoption_score >= 80 ? 'success' : portfolio.average_adoption_score >= 60 ? 'warning' : 'error'}
        />
        <MetricCard
          label="Total Errors"
          value={portfolio.total_errors}
          variant={portfolio.total_errors > 0 ? 'error' : 'success'}
        />
        <MetricCard
          label="Total Warnings"
          value={portfolio.total_warnings}
          variant={portfolio.total_warnings > 0 ? 'warning' : 'default'}
        />
      </div>

      {/* Trend Chart (simple text-based for now) — last 7 points only;
          ● = errors (capped at 10), ○ = warnings (1 dot per 10, capped at 10) */}
      {trends.length > 0 && (
        <Card variant="bordered" padding="md">
          <CardHeader title="30-Day Trend" subtitle="Errors and warnings over time" />
          <CardContent>
            <div className="trend-chart">
              {trends.slice(-7).map((point, idx) => (
                <div key={idx} className="trend-bar">
                  <div className="trend-date">{point.date.slice(5)}</div>
                  <div className="trend-values">
                    <span className="trend-errors" title={`${point.errors} errors`}>
                      {point.errors > 0 && '●'.repeat(Math.min(point.errors, 10))}
                    </span>
                    <span className="trend-warnings" title={`${point.warnings} warnings`}>
                      {point.warnings > 0 && '○'.repeat(Math.min(Math.ceil(point.warnings / 10), 10))}
                    </span>
                  </div>
                  <div className="trend-rate">{point.pass_rate}%</div>
                </div>
              ))}
            </div>
          </CardContent>
        </Card>
      )}

      {/* Projects Table */}
      <Card variant="bordered" padding="md">
        <CardHeader
          title="Projects"
          subtitle="Click a project to see violation details"
        />
        <CardContent>
          <div className="projects-table">
            <div className="table-header">
              <span className="col-project">Project</span>
              <span className="col-score">Score</span>
              <span className="col-files">Files</span>
              <span className="col-errors">Errors</span>
              <span className="col-warnings">Warnings</span>
              <span className="col-version">Rules</span>
              <span className="col-updated">Updated</span>
            </div>
            {portfolio.projects.map(project => (
              <div
                key={project.project}
                className={`table-row ${selectedProject === project.project ? 'selected' : ''}`}
                onClick={() => loadProjectDetails(project.project)}
              >
                <span className="col-project">{formatProjectName(project.project)}</span>
                <span className="col-score">
                  <Badge
                    variant={project.adoption_score >= 80 ? 'success' : project.adoption_score >= 60 ? 'warning' : 'error'}
                    size="sm"
                  >
                    {project.adoption_score.toFixed(0)}%
                  </Badge>
                </span>
                <span className="col-files">{project.passed_files}/{project.total_files}</span>
                <span className="col-errors">
                  {project.total_errors > 0 ? (
                    <Badge variant="error" size="sm">{project.total_errors}</Badge>
                  ) : (
                    <span className="text-success">0</span>
                  )}
                </span>
                <span className="col-warnings">
                  {project.total_warnings > 0 ? (
                    <Badge variant="warning" size="sm">{project.total_warnings}</Badge>
                  ) : (
                    <span className="text-muted">0</span>
                  )}
                </span>
                <span className="col-version">{project.rules_version}</span>
                <span className="col-updated">{formatTimeAgo(project.last_updated)}</span>
              </div>
            ))}
          </div>
        </CardContent>
      </Card>

      {/* Project Details Panel — only rendered once the detail fetch succeeds */}
      {selectedProject && projectDetails && (
        <ProjectDetailsPanel
          project={selectedProject}
          details={projectDetails}
          onClose={() => setSelectedProject(null)}
        />
      )}
    </div>
  );
}
|
||||||
|
|
||||||
|
interface MetricCardProps {
|
||||||
|
label: string;
|
||||||
|
value: string | number;
|
||||||
|
variant?: 'default' | 'success' | 'warning' | 'error';
|
||||||
|
}
|
||||||
|
|
||||||
|
function MetricCard({ label, value, variant = 'default' }: MetricCardProps): JSX.Element {
|
||||||
|
return (
|
||||||
|
<Card variant="bordered" padding="md">
|
||||||
|
<div className={`metric-display metric-${variant}`}>
|
||||||
|
<span className="metric-label">{label}</span>
|
||||||
|
<span className="metric-value">{value}</span>
|
||||||
|
</div>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Props for the drill-down panel shown below the projects table. */
interface ProjectDetailsPanelProps {
  project: string;
  // Raw /api/metrics/projects/:name response. NOTE(review): untyped; the
  // fields read below (latest, violations_by_rule, violation_locations,
  // history) are accessed optionally, so missing keys render as empty.
  details: any;
  onClose: () => void;
}

/**
 * Tabbed detail view for one project: latest-run overview, the full
 * violation list (file:line:column), and recent upload history.
 */
function ProjectDetailsPanel({ project, details, onClose }: ProjectDetailsPanelProps): JSX.Element {
  // Which tab body is rendered; defaults to the overview.
  const [activeTab, setActiveTab] = useState<'overview' | 'violations' | 'history'>('overview');

  return (
    <Card variant="elevated" padding="lg" className="project-details-panel">
      <CardHeader
        title={formatProjectName(project)}
        subtitle="Detailed metrics and violations"
        action={<Button variant="ghost" size="sm" onClick={onClose}>×</Button>}
      />
      <CardContent>
        {/* Tabs */}
        <div className="tabs">
          <button
            className={`tab ${activeTab === 'overview' ? 'active' : ''}`}
            onClick={() => setActiveTab('overview')}
          >
            Overview
          </button>
          <button
            className={`tab ${activeTab === 'violations' ? 'active' : ''}`}
            onClick={() => setActiveTab('violations')}
          >
            Violations
          </button>
          <button
            className={`tab ${activeTab === 'history' ? 'active' : ''}`}
            onClick={() => setActiveTab('history')}
          >
            History
          </button>
        </div>

        {/* Tab Content — overview requires details.latest (most recent upload) */}
        {activeTab === 'overview' && details.latest && (
          <div className="tab-content">
            <div className="detail-grid">
              <div className="detail-item">
                <span className="detail-label">Branch</span>
                <span className="detail-value">{details.latest.branch}</span>
              </div>
              <div className="detail-item">
                <span className="detail-label">Commit</span>
                {/* short SHA, same 7-char form as the history tab */}
                <span className="detail-value">{details.latest.commit?.slice(0, 7)}</span>
              </div>
              <div className="detail-item">
                <span className="detail-label">Rules Version</span>
                <span className="detail-value">{details.latest.rules_version}</span>
              </div>
              <div className="detail-item">
                <span className="detail-label">Adoption Score</span>
                <span className="detail-value">{details.latest.adoption_score?.toFixed(1)}%</span>
              </div>
            </div>

            {/* Violations by Rule — aggregate counts keyed by rule name */}
            {details.violations_by_rule && Object.keys(details.violations_by_rule).length > 0 && (
              <div className="violations-breakdown">
                <h4>Violations by Rule</h4>
                {Object.entries(details.violations_by_rule).map(([rule, count]) => (
                  <div key={rule} className="rule-row">
                    <span className="rule-name">{rule}</span>
                    <Badge variant="error" size="sm">{count as number}</Badge>
                  </div>
                ))}
              </div>
            )}
          </div>
        )}

        {activeTab === 'violations' && (
          <div className="tab-content violations-list">
            {details.violation_locations?.length === 0 ? (
              <p className="text-muted">No violations found</p>
            ) : (
              details.violation_locations?.map((v: ViolationLocation, idx: number) => (
                <div key={idx} className="violation-item">
                  <div className="violation-location">
                    <code>{v.file}:{v.line}:{v.column}</code>
                  </div>
                  <div className="violation-rule">
                    {/* anything not 'error' badges as a warning */}
                    <Badge variant={v.severity === 'error' ? 'error' : 'warning'} size="sm">
                      {v.rule}
                    </Badge>
                  </div>
                </div>
              ))
            )}
          </div>
        )}

        {activeTab === 'history' && (
          <div className="tab-content history-list">
            {details.history?.map((h: any, idx: number) => (
              <div key={idx} className="history-item">
                <span className="history-commit">{h.commit?.slice(0, 7)}</span>
                <span className="history-branch">{h.branch}</span>
                <span className="history-errors">
                  {h.errors > 0 ? <Badge variant="error" size="sm">{h.errors}</Badge> : '0'}
                </span>
                <span className="history-time">{formatTimeAgo(h.timestamp)}</span>
              </div>
            ))}
          </div>
        )}
      </CardContent>
    </Card>
  );
}
|
||||||
|
|
||||||
|
// Utility functions
|
||||||
|
function formatProjectName(name: string): string {
|
||||||
|
// Format "org/repo" or just "repo"
|
||||||
|
const parts = name.split('/');
|
||||||
|
return parts[parts.length - 1];
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatTimeAgo(timestamp: string): string {
|
||||||
|
if (!timestamp) return 'Unknown';
|
||||||
|
const date = new Date(timestamp);
|
||||||
|
const now = new Date();
|
||||||
|
const diffMs = now.getTime() - date.getTime();
|
||||||
|
const diffMins = Math.floor(diffMs / 60000);
|
||||||
|
|
||||||
|
if (diffMins < 1) return 'Just now';
|
||||||
|
if (diffMins < 60) return `${diffMins}m ago`;
|
||||||
|
const diffHours = Math.floor(diffMins / 60);
|
||||||
|
if (diffHours < 24) return `${diffHours}h ago`;
|
||||||
|
const diffDays = Math.floor(diffHours / 24);
|
||||||
|
return `${diffDays}d ago`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default export alongside the named export above (callers may use either).
export default PortfolioDashboard;
|
||||||
360
admin-ui/src/styles/portfolio.css
Normal file
360
admin-ui/src/styles/portfolio.css
Normal file
@@ -0,0 +1,360 @@
|
|||||||
|
/* Portfolio Dashboard Styles
 * Companion stylesheet for PortfolioDashboard.tsx — layout for the metrics
 * grid, text-based trend chart, projects table, and details panel.
 * Colours/spacing come from the design-token CSS variables (--color-*,
 * --spacing-*, --font-*). */

.portfolio-dashboard {
  padding: var(--spacing-lg);
  max-width: 1400px;
  margin: 0 auto; /* center the dashboard column */
}

.portfolio-header {
  display: flex;
  align-items: center;
  gap: var(--spacing-md);
  margin-bottom: var(--spacing-xl);
}

.portfolio-header h1 {
  font-size: var(--font-size-2xl);
  font-weight: 600;
  margin: 0;
}

.portfolio-header .subtitle {
  color: var(--color-text-muted);
  margin: 0;
  flex: 1; /* pushes the Refresh button to the right edge */
}

/* Full-height centered states for the loading spinner / error card */
.portfolio-loading,
.portfolio-error {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  min-height: 400px;
  gap: var(--spacing-md);
}

.portfolio-metrics {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
  gap: var(--spacing-md);
  margin-bottom: var(--spacing-xl);
}

.metric-display {
  display: flex;
  flex-direction: column;
  align-items: center;
  text-align: center;
}

.metric-label {
  font-size: var(--font-size-sm);
  color: var(--color-text-muted);
  margin-bottom: var(--spacing-xs);
}

.metric-value {
  font-size: var(--font-size-2xl);
  font-weight: 700;
}

/* Variant tinting — matched by MetricCard's `metric-<variant>` class */
.metric-success .metric-value {
  color: var(--color-success);
}

.metric-warning .metric-value {
  color: var(--color-warning);
}

.metric-error .metric-value {
  color: var(--color-error);
}

/* Trend Chart — one .trend-bar per day, dots rendered as text glyphs */
.trend-chart {
  display: flex;
  gap: var(--spacing-sm);
  overflow-x: auto;
  padding: var(--spacing-sm) 0;
}

.trend-bar {
  display: flex;
  flex-direction: column;
  align-items: center;
  min-width: 80px;
  padding: var(--spacing-xs);
  border-radius: var(--radius-sm);
  background: var(--color-bg-secondary);
}

.trend-date {
  font-size: var(--font-size-xs);
  color: var(--color-text-muted);
  margin-bottom: var(--spacing-xs);
}

.trend-values {
  display: flex;
  flex-direction: column;
  align-items: center;
  min-height: 40px; /* keep bars a uniform height even with no dots */
}

.trend-errors {
  color: var(--color-error);
  font-size: 10px;
  letter-spacing: -2px; /* pack the ● glyphs into a compact strip */
}

.trend-warnings {
  color: var(--color-warning);
  font-size: 8px;
  letter-spacing: -2px; /* same packing for the ○ glyphs */
}

.trend-rate {
  font-size: var(--font-size-xs);
  font-weight: 600;
  color: var(--color-text);
}

/* Projects Table — CSS-grid rows; columns must match the TSX col-* spans */
.projects-table {
  display: flex;
  flex-direction: column;
  gap: 2px;
}

.table-header,
.table-row {
  display: grid;
  grid-template-columns: 2fr 80px 80px 80px 80px 80px 100px;
  gap: var(--spacing-sm);
  padding: var(--spacing-sm) var(--spacing-md);
  align-items: center;
}

.table-header {
  font-size: var(--font-size-xs);
  font-weight: 600;
  color: var(--color-text-muted);
  text-transform: uppercase;
  border-bottom: 1px solid var(--color-border);
}

.table-row {
  border-radius: var(--radius-sm);
  cursor: pointer; /* rows are clickable (open the details panel) */
  transition: background-color 0.15s ease;
}

.table-row:hover {
  background: var(--color-bg-secondary);
}

.table-row.selected {
  background: var(--color-bg-tertiary);
  border-left: 3px solid var(--color-primary);
}

.col-project {
  font-weight: 500;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.col-score,
.col-files,
.col-errors,
.col-warnings,
.col-version,
.col-updated {
  text-align: center;
  font-size: var(--font-size-sm);
}

.col-updated {
  color: var(--color-text-muted);
}

/* Project Details Panel */
.project-details-panel {
  margin-top: var(--spacing-lg);
  animation: slideUp 0.2s ease;
}

@keyframes slideUp {
  from {
    opacity: 0;
    transform: translateY(10px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}

.tabs {
  display: flex;
  gap: var(--spacing-xs);
  border-bottom: 1px solid var(--color-border);
  margin-bottom: var(--spacing-md);
}

.tab {
  padding: var(--spacing-sm) var(--spacing-md);
  border: none;
  background: none;
  cursor: pointer;
  font-size: var(--font-size-sm);
  color: var(--color-text-muted);
  border-bottom: 2px solid transparent; /* reserves space for the active underline */
  transition: all 0.15s ease;
}

.tab:hover {
  color: var(--color-text);
}

.tab.active {
  color: var(--color-primary);
  border-bottom-color: var(--color-primary);
}

.tab-content {
  padding: var(--spacing-md) 0;
}

.detail-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
  gap: var(--spacing-md);
  margin-bottom: var(--spacing-lg);
}

.detail-item {
  display: flex;
  flex-direction: column;
  gap: var(--spacing-xs);
}

.detail-label {
  font-size: var(--font-size-xs);
  color: var(--color-text-muted);
  text-transform: uppercase;
}

.detail-value {
  font-weight: 500;
}

.violations-breakdown {
  margin-top: var(--spacing-lg);
}

.violations-breakdown h4 {
  font-size: var(--font-size-sm);
  font-weight: 600;
  margin-bottom: var(--spacing-sm);
}

.rule-row {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: var(--spacing-xs) 0;
  border-bottom: 1px solid var(--color-border-light);
}

.rule-name {
  font-family: var(--font-mono);
  font-size: var(--font-size-sm);
}

/* Violations List — scrolls independently inside the panel */
.violations-list {
  max-height: 400px;
  overflow-y: auto;
}

.violation-item {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: var(--spacing-sm);
  border-bottom: 1px solid var(--color-border-light);
}

.violation-location code {
  font-size: var(--font-size-sm);
  color: var(--color-text);
}

/* History List */
.history-list {
  max-height: 300px;
  overflow-y: auto;
}

.history-item {
  display: grid;
  grid-template-columns: 80px 100px 60px 1fr; /* commit | branch | errors | time */
  gap: var(--spacing-sm);
  padding: var(--spacing-sm) 0;
  border-bottom: 1px solid var(--color-border-light);
  font-size: var(--font-size-sm);
}

.history-commit {
  font-family: var(--font-mono);
}

.history-branch {
  color: var(--color-text-muted);
  overflow: hidden;
  text-overflow: ellipsis;
}

.history-time {
  text-align: right;
  color: var(--color-text-muted);
}

/* Text utilities */
.text-success {
  color: var(--color-success);
}

.text-warning {
  color: var(--color-warning);
}

.text-error {
  color: var(--color-error);
}

.text-muted {
  color: var(--color-text-muted);
}

/* Responsive — narrow screens drop low-priority columns and tighten grids */
@media (max-width: 768px) {
  .table-header,
  .table-row {
    grid-template-columns: 1fr 60px 60px 60px;
  }

  .col-version,
  .col-updated,
  .col-warnings {
    display: none;
  }

  .portfolio-metrics {
    grid-template-columns: repeat(2, 1fr);
  }
}
|
||||||
@@ -7,8 +7,10 @@ import { Input, Select } from '../components/base/Input';
|
|||||||
import { Spinner } from '../components/base/Spinner';
|
import { Spinner } from '../components/base/Spinner';
|
||||||
import { endpoints } from '../api/client';
|
import { endpoints } from '../api/client';
|
||||||
import { currentProject } from '../state/project';
|
import { currentProject } from '../state/project';
|
||||||
|
import { PortfolioDashboard } from '../components/PortfolioDashboard';
|
||||||
import type { TokenDrift, Component, FigmaExtractResult } from '../api/types';
|
import type { TokenDrift, Component, FigmaExtractResult } from '../api/types';
|
||||||
import './Workdesk.css';
|
import './Workdesk.css';
|
||||||
|
import '../styles/portfolio.css';
|
||||||
|
|
||||||
interface UIWorkdeskProps {
|
interface UIWorkdeskProps {
|
||||||
activeTool: string | null;
|
activeTool: string | null;
|
||||||
@@ -26,6 +28,7 @@ export default function UIWorkdesk({ activeTool }: UIWorkdeskProps) {
|
|||||||
'code-generator': <CodeGeneratorTool />,
|
'code-generator': <CodeGeneratorTool />,
|
||||||
'quick-wins': <QuickWinsTool />,
|
'quick-wins': <QuickWinsTool />,
|
||||||
'token-drift': <TokenDriftTool />,
|
'token-drift': <TokenDriftTool />,
|
||||||
|
'portfolio': <PortfolioDashboard />,
|
||||||
};
|
};
|
||||||
|
|
||||||
return toolViews[activeTool] || <ToolPlaceholder name={activeTool} />;
|
return toolViews[activeTool] || <ToolPlaceholder name={activeTool} />;
|
||||||
|
|||||||
663
apps/api/metrics.py
Normal file
663
apps/api/metrics.py
Normal file
@@ -0,0 +1,663 @@
|
|||||||
|
"""
|
||||||
|
DSS Metrics API Module.
|
||||||
|
|
||||||
|
Handles metrics collection from CI pipelines and provides dashboard data
|
||||||
|
for UI designers to view portfolio-wide design system adoption.
|
||||||
|
|
||||||
|
Enterprise Architecture:
|
||||||
|
- Tier 1 (Dashboard): Read-only aggregated metrics for UI designers
|
||||||
|
- Receives uploads from Tier 2 (CI/CD pipelines)
|
||||||
|
- No write operations from dashboard - only CI uploads
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sqlite3
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Header, HTTPException, Query
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
# Router for metrics endpoints
|
||||||
|
router = APIRouter(prefix="/api/metrics", tags=["metrics"])
|
||||||
|
|
||||||
|
# Database path
|
||||||
|
DB_PATH = Path(os.getenv("DSS_DB_PATH", Path.home() / ".dss" / "metrics.db"))
|
||||||
|
|
||||||
|
|
||||||
|
# === Pydantic Models ===
|
||||||
|
|
||||||
|
|
||||||
|
class ViolationLocation(BaseModel):
    """Location of a rule violation in source code.

    Mirrors the shape the admin UI renders as ``file:line:column`` in the
    project details panel.
    """

    rule: str  # rule identifier that was violated
    line: int  # line number as reported by the validator — presumably 1-based; confirm against the CLI
    column: Optional[int] = None  # column within the line, when the validator provides one
    file: Optional[str] = None  # source path — may be omitted when nested under FileMetrics, which carries the path
|
||||||
|
|
||||||
|
|
||||||
|
class FileMetrics(BaseModel):
    """Per-file validation result included in a CI metrics upload."""

    file: str  # path of the validated file
    errors: int  # count of error-severity violations in this file
    warnings: int  # count of warning-severity violations
    # Detailed locations; pydantic copies field defaults per instance, so the
    # shared [] literal is safe here (unlike a plain-function default).
    violations: List[ViolationLocation] = []
|
||||||
|
|
||||||
|
|
||||||
|
class MetricsUpload(BaseModel):
    """Metrics payload uploaded from CI.

    One upload corresponds to one validated commit on one branch.
    Aggregated counters live in ``metrics``; optional per-file detail in
    ``fileResults`` (camelCase to match the JS-side uploader).
    """

    project: str  # project identifier, e.g. "org/repo"
    branch: str  # branch that was validated
    commit: str  # commit SHA being validated
    timestamp: Optional[str] = None  # client-supplied timestamp; presumably server time is used when absent — verify against the upload handler
    metrics: Dict[str, Any]  # aggregated counters — schema defined by the CI uploader, not validated here
    fileResults: Optional[List[FileMetrics]] = []  # per-file breakdown; may be omitted entirely
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectMetricsSummary(BaseModel):
    """Summary metrics for a single project.

    Field names intentionally match the TypeScript ``ProjectMetrics``
    interface consumed by the admin UI portfolio table.
    """

    project: str  # project identifier
    total_files: int
    passed_files: int
    failed_files: int
    total_errors: int
    total_warnings: int
    rules_version: str  # rules package version the project validated against
    last_updated: str  # timestamp of the most recent upload
    adoption_score: float  # 0-100; the UI colour-codes at the 80/60 thresholds
|
||||||
|
|
||||||
|
|
||||||
|
class PortfolioMetrics(BaseModel):
    """Portfolio-wide metrics aggregation.

    Response model for the portfolio endpoint; matches the TypeScript
    ``PortfolioData`` interface in the admin UI.
    """

    total_projects: int
    projects_passing: int
    projects_failing: int
    total_errors: int
    total_warnings: int
    average_adoption_score: float  # mean of per-project adoption scores
    projects: List[ProjectMetricsSummary]  # one summary row per reporting project
|
||||||
|
|
||||||
|
|
||||||
|
# === Database Setup ===
|
||||||
|
|
||||||
|
|
||||||
|
def init_db(db_path: Optional[Path] = None) -> None:
    """Create the metrics schema (tables and indexes) if it does not exist.

    Idempotent: every statement uses IF NOT EXISTS, so calling this on an
    already-initialized database is a no-op.

    Args:
        db_path: Database file to initialize. Defaults to the module-level
            DB_PATH; the parameter exists mainly so tests can target a
            throwaway database.
    """
    path = Path(db_path) if db_path is not None else DB_PATH
    path.parent.mkdir(parents=True, exist_ok=True)

    conn = sqlite3.connect(str(path))
    try:
        # executescript runs the whole DDL batch and commits it; keeping all
        # statements together makes the schema readable in one place.
        conn.executescript(
            """
            -- One row per CI upload; raw_data keeps the full payload for audit.
            CREATE TABLE IF NOT EXISTS metrics_uploads (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                project TEXT NOT NULL,
                branch TEXT NOT NULL,
                commit_sha TEXT NOT NULL,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
                total_files INTEGER DEFAULT 0,
                passed_files INTEGER DEFAULT 0,
                failed_files INTEGER DEFAULT 0,
                total_errors INTEGER DEFAULT 0,
                total_warnings INTEGER DEFAULT 0,
                rules_version TEXT,
                raw_data JSON
            );

            -- Per-violation detail (file:line:column) for drill-down views.
            CREATE TABLE IF NOT EXISTS violations (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                upload_id INTEGER NOT NULL,
                project TEXT NOT NULL,
                file_path TEXT NOT NULL,
                rule TEXT NOT NULL,
                line INTEGER,
                column_num INTEGER,
                severity TEXT,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (upload_id) REFERENCES metrics_uploads(id)
            );

            -- Component usage tracking for UI designers.
            CREATE TABLE IF NOT EXISTS component_usage (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                project TEXT NOT NULL,
                component_name TEXT NOT NULL,
                file_path TEXT NOT NULL,
                line INTEGER,
                import_source TEXT,
                is_ds_component BOOLEAN DEFAULT 0,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
            );

            -- Indexes for the common project/time lookups.
            CREATE INDEX IF NOT EXISTS idx_uploads_project ON metrics_uploads(project);
            CREATE INDEX IF NOT EXISTS idx_uploads_timestamp ON metrics_uploads(timestamp);
            CREATE INDEX IF NOT EXISTS idx_violations_project ON violations(project);
            CREATE INDEX IF NOT EXISTS idx_component_project ON component_usage(project);
            """
        )
    finally:
        # Close even when DDL fails so a bad schema doesn't leak the handle
        # (the original version leaked the connection on any execute() error).
        conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
# Initialize eagerly at import time so the schema exists before any request
# handler can touch the database.
init_db()
|
||||||
|
|
||||||
|
|
||||||
|
# === Helper Functions ===
|
||||||
|
|
||||||
|
|
||||||
|
def get_db():
    """Open and return a new SQLite connection to the metrics database.

    Callers are responsible for closing the connection, typically in a
    ``finally`` block.
    """
    return sqlite3.connect(str(DB_PATH))
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_adoption_score(passed: int, total: int, errors: int) -> float:
    """Calculate a design-system adoption score in the range 0-100.

    The score is the percentage of passing files minus a penalty of 2 points
    per error (capped at 50), floored at 0.

    Args:
        passed: Number of files that passed validation.
        total: Total number of files analyzed.
        errors: Total error count across all files.

    Returns:
        Score between 0.0 and 100.0. A project with no analyzed files
        scores 100.0 (nothing can be in violation).
    """
    if total == 0:
        return 100.0
    base_score = (passed / total) * 100
    # Cap the penalty so errors alone can't drag a mostly-passing project to 0.
    penalty = min(errors * 2, 50)
    # 0.0 (not the int 0) keeps the return type consistently float.
    return max(0.0, base_score - penalty)
|
||||||
|
|
||||||
|
|
||||||
|
# === API Endpoints ===
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/upload")
|
||||||
|
async def upload_metrics(
|
||||||
|
payload: MetricsUpload,
|
||||||
|
authorization: Optional[str] = Header(None),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Upload metrics from CI pipeline.
|
||||||
|
|
||||||
|
This is the only write endpoint - called by CI after validation runs.
|
||||||
|
Authentication via DSS_API_TOKEN in Authorization header.
|
||||||
|
"""
|
||||||
|
# Validate token (if configured)
|
||||||
|
expected_token = os.getenv("DSS_API_TOKEN")
|
||||||
|
if expected_token:
|
||||||
|
if not authorization:
|
||||||
|
raise HTTPException(status_code=401, detail="Authorization required")
|
||||||
|
token = authorization.replace("Bearer ", "")
|
||||||
|
if token != expected_token:
|
||||||
|
raise HTTPException(status_code=403, detail="Invalid token")
|
||||||
|
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Extract metrics
|
||||||
|
metrics = payload.metrics
|
||||||
|
total_files = metrics.get("totalFiles", 0)
|
||||||
|
passed_files = metrics.get("passedFiles", 0)
|
||||||
|
failed_files = metrics.get("failedFiles", 0)
|
||||||
|
total_errors = metrics.get("totalErrors", 0)
|
||||||
|
total_warnings = metrics.get("totalWarnings", 0)
|
||||||
|
rules_version = metrics.get("rulesVersion", "unknown")
|
||||||
|
|
||||||
|
# Insert main metrics record
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO metrics_uploads
|
||||||
|
(project, branch, commit_sha, total_files, passed_files, failed_files,
|
||||||
|
total_errors, total_warnings, rules_version, raw_data)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
payload.project,
|
||||||
|
payload.branch,
|
||||||
|
payload.commit,
|
||||||
|
total_files,
|
||||||
|
passed_files,
|
||||||
|
failed_files,
|
||||||
|
total_errors,
|
||||||
|
total_warnings,
|
||||||
|
rules_version,
|
||||||
|
json.dumps(payload.model_dump()),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
upload_id = cursor.lastrowid
|
||||||
|
|
||||||
|
# Insert violations with file locations
|
||||||
|
if payload.fileResults:
|
||||||
|
for file_result in payload.fileResults:
|
||||||
|
for violation in file_result.violations:
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO violations
|
||||||
|
(upload_id, project, file_path, rule, line, column_num, severity)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
(
|
||||||
|
upload_id,
|
||||||
|
payload.project,
|
||||||
|
file_result.file,
|
||||||
|
violation.rule,
|
||||||
|
violation.line,
|
||||||
|
violation.column,
|
||||||
|
"error" if "error" in violation.rule.lower() else "warning",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "success",
|
||||||
|
"upload_id": upload_id,
|
||||||
|
"project": payload.project,
|
||||||
|
"metrics": {
|
||||||
|
"files": total_files,
|
||||||
|
"errors": total_errors,
|
||||||
|
"warnings": total_warnings,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.rollback()
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to store metrics: {str(e)}")
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/portfolio")
|
||||||
|
async def get_portfolio_metrics(
|
||||||
|
days: int = Query(default=30, description="Number of days to include"),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Get portfolio-wide metrics aggregation.
|
||||||
|
|
||||||
|
Returns summary for all projects - designed for UI designer dashboard.
|
||||||
|
"""
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get latest metrics for each project
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT
|
||||||
|
project,
|
||||||
|
total_files,
|
||||||
|
passed_files,
|
||||||
|
failed_files,
|
||||||
|
total_errors,
|
||||||
|
total_warnings,
|
||||||
|
rules_version,
|
||||||
|
MAX(timestamp) as last_updated
|
||||||
|
FROM metrics_uploads
|
||||||
|
WHERE timestamp > datetime('now', ?)
|
||||||
|
GROUP BY project
|
||||||
|
ORDER BY last_updated DESC
|
||||||
|
""",
|
||||||
|
(f"-{days} days",),
|
||||||
|
)
|
||||||
|
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
|
||||||
|
projects = []
|
||||||
|
total_errors = 0
|
||||||
|
total_warnings = 0
|
||||||
|
projects_passing = 0
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
(
|
||||||
|
project,
|
||||||
|
total_files,
|
||||||
|
passed_files,
|
||||||
|
failed_files,
|
||||||
|
errors,
|
||||||
|
warnings,
|
||||||
|
rules_version,
|
||||||
|
last_updated,
|
||||||
|
) = row
|
||||||
|
|
||||||
|
adoption_score = calculate_adoption_score(passed_files, total_files, errors)
|
||||||
|
|
||||||
|
projects.append(
|
||||||
|
ProjectMetricsSummary(
|
||||||
|
project=project,
|
||||||
|
total_files=total_files,
|
||||||
|
passed_files=passed_files,
|
||||||
|
failed_files=failed_files,
|
||||||
|
total_errors=errors,
|
||||||
|
total_warnings=warnings,
|
||||||
|
rules_version=rules_version or "unknown",
|
||||||
|
last_updated=last_updated,
|
||||||
|
adoption_score=adoption_score,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
total_errors += errors
|
||||||
|
total_warnings += warnings
|
||||||
|
if errors == 0:
|
||||||
|
projects_passing += 1
|
||||||
|
|
||||||
|
avg_score = (
|
||||||
|
sum(p.adoption_score for p in projects) / len(projects) if projects else 0
|
||||||
|
)
|
||||||
|
|
||||||
|
return PortfolioMetrics(
|
||||||
|
total_projects=len(projects),
|
||||||
|
projects_passing=projects_passing,
|
||||||
|
projects_failing=len(projects) - projects_passing,
|
||||||
|
total_errors=total_errors,
|
||||||
|
total_warnings=total_warnings,
|
||||||
|
average_adoption_score=round(avg_score, 1),
|
||||||
|
projects=projects,
|
||||||
|
)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/projects/{project_name}")
|
||||||
|
async def get_project_metrics(
|
||||||
|
project_name: str,
|
||||||
|
limit: int = Query(default=10, description="Number of recent builds"),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Get detailed metrics for a specific project.
|
||||||
|
|
||||||
|
Includes historical data and violation breakdown.
|
||||||
|
"""
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get recent builds
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT
|
||||||
|
id, branch, commit_sha, timestamp,
|
||||||
|
total_files, passed_files, failed_files,
|
||||||
|
total_errors, total_warnings, rules_version
|
||||||
|
FROM metrics_uploads
|
||||||
|
WHERE project = ?
|
||||||
|
ORDER BY timestamp DESC
|
||||||
|
LIMIT ?
|
||||||
|
""",
|
||||||
|
(project_name, limit),
|
||||||
|
)
|
||||||
|
|
||||||
|
builds = cursor.fetchall()
|
||||||
|
|
||||||
|
if not builds:
|
||||||
|
raise HTTPException(status_code=404, detail="Project not found")
|
||||||
|
|
||||||
|
# Get violation breakdown for latest build
|
||||||
|
latest_id = builds[0][0]
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT rule, COUNT(*) as count
|
||||||
|
FROM violations
|
||||||
|
WHERE upload_id = ?
|
||||||
|
GROUP BY rule
|
||||||
|
ORDER BY count DESC
|
||||||
|
""",
|
||||||
|
(latest_id,),
|
||||||
|
)
|
||||||
|
|
||||||
|
violations_by_rule = dict(cursor.fetchall())
|
||||||
|
|
||||||
|
# Get file locations for violations (for UI designer "where is this used?")
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT file_path, rule, line, column_num
|
||||||
|
FROM violations
|
||||||
|
WHERE upload_id = ?
|
||||||
|
ORDER BY file_path, line
|
||||||
|
""",
|
||||||
|
(latest_id,),
|
||||||
|
)
|
||||||
|
|
||||||
|
violation_locations = [
|
||||||
|
{
|
||||||
|
"file": row[0],
|
||||||
|
"rule": row[1],
|
||||||
|
"line": row[2],
|
||||||
|
"column": row[3],
|
||||||
|
}
|
||||||
|
for row in cursor.fetchall()
|
||||||
|
]
|
||||||
|
|
||||||
|
return {
|
||||||
|
"project": project_name,
|
||||||
|
"latest": {
|
||||||
|
"branch": builds[0][1],
|
||||||
|
"commit": builds[0][2],
|
||||||
|
"timestamp": builds[0][3],
|
||||||
|
"total_files": builds[0][4],
|
||||||
|
"passed_files": builds[0][5],
|
||||||
|
"failed_files": builds[0][6],
|
||||||
|
"total_errors": builds[0][7],
|
||||||
|
"total_warnings": builds[0][8],
|
||||||
|
"rules_version": builds[0][9],
|
||||||
|
"adoption_score": calculate_adoption_score(
|
||||||
|
builds[0][5], builds[0][4], builds[0][7]
|
||||||
|
),
|
||||||
|
},
|
||||||
|
"violations_by_rule": violations_by_rule,
|
||||||
|
"violation_locations": violation_locations,
|
||||||
|
"history": [
|
||||||
|
{
|
||||||
|
"branch": b[1],
|
||||||
|
"commit": b[2],
|
||||||
|
"timestamp": b[3],
|
||||||
|
"errors": b[7],
|
||||||
|
"warnings": b[8],
|
||||||
|
}
|
||||||
|
for b in builds
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/projects/{project_name}/violations")
|
||||||
|
async def get_project_violations(
|
||||||
|
project_name: str,
|
||||||
|
rule: Optional[str] = Query(default=None, description="Filter by rule"),
|
||||||
|
file_pattern: Optional[str] = Query(default=None, description="Filter by file pattern"),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Get detailed violation locations for a project.
|
||||||
|
|
||||||
|
Designed for UI designers to answer "Where is Button component used?"
|
||||||
|
"""
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get latest upload for project
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT id FROM metrics_uploads
|
||||||
|
WHERE project = ?
|
||||||
|
ORDER BY timestamp DESC
|
||||||
|
LIMIT 1
|
||||||
|
""",
|
||||||
|
(project_name,),
|
||||||
|
)
|
||||||
|
|
||||||
|
row = cursor.fetchone()
|
||||||
|
if not row:
|
||||||
|
raise HTTPException(status_code=404, detail="Project not found")
|
||||||
|
|
||||||
|
upload_id = row[0]
|
||||||
|
|
||||||
|
# Build query with optional filters
|
||||||
|
query = """
|
||||||
|
SELECT file_path, rule, line, column_num, severity
|
||||||
|
FROM violations
|
||||||
|
WHERE upload_id = ?
|
||||||
|
"""
|
||||||
|
params = [upload_id]
|
||||||
|
|
||||||
|
if rule:
|
||||||
|
query += " AND rule LIKE ?"
|
||||||
|
params.append(f"%{rule}%")
|
||||||
|
|
||||||
|
if file_pattern:
|
||||||
|
query += " AND file_path LIKE ?"
|
||||||
|
params.append(f"%{file_pattern}%")
|
||||||
|
|
||||||
|
query += " ORDER BY file_path, line"
|
||||||
|
|
||||||
|
cursor.execute(query, params)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"project": project_name,
|
||||||
|
"violations": [
|
||||||
|
{
|
||||||
|
"file": row[0],
|
||||||
|
"rule": row[1],
|
||||||
|
"line": row[2],
|
||||||
|
"column": row[3],
|
||||||
|
"severity": row[4],
|
||||||
|
}
|
||||||
|
for row in cursor.fetchall()
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/trends")
|
||||||
|
async def get_trends(
|
||||||
|
project: Optional[str] = Query(default=None, description="Filter by project"),
|
||||||
|
days: int = Query(default=30, description="Number of days"),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Get trend data for charts.
|
||||||
|
|
||||||
|
Shows error/warning counts over time for portfolio or specific project.
|
||||||
|
"""
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
if project:
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT
|
||||||
|
DATE(timestamp) as date,
|
||||||
|
SUM(total_errors) as errors,
|
||||||
|
SUM(total_warnings) as warnings,
|
||||||
|
AVG(passed_files * 100.0 / NULLIF(total_files, 0)) as pass_rate
|
||||||
|
FROM metrics_uploads
|
||||||
|
WHERE project = ? AND timestamp > datetime('now', ?)
|
||||||
|
GROUP BY DATE(timestamp)
|
||||||
|
ORDER BY date
|
||||||
|
""",
|
||||||
|
(project, f"-{days} days"),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT
|
||||||
|
DATE(timestamp) as date,
|
||||||
|
SUM(total_errors) as errors,
|
||||||
|
SUM(total_warnings) as warnings,
|
||||||
|
AVG(passed_files * 100.0 / NULLIF(total_files, 0)) as pass_rate
|
||||||
|
FROM metrics_uploads
|
||||||
|
WHERE timestamp > datetime('now', ?)
|
||||||
|
GROUP BY DATE(timestamp)
|
||||||
|
ORDER BY date
|
||||||
|
""",
|
||||||
|
(f"-{days} days",),
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"project": project or "portfolio",
|
||||||
|
"days": days,
|
||||||
|
"data": [
|
||||||
|
{
|
||||||
|
"date": row[0],
|
||||||
|
"errors": row[1] or 0,
|
||||||
|
"warnings": row[2] or 0,
|
||||||
|
"pass_rate": round(row[3] or 0, 1),
|
||||||
|
}
|
||||||
|
for row in cursor.fetchall()
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/rules/usage")
|
||||||
|
async def get_rules_usage(
|
||||||
|
days: int = Query(default=30, description="Number of days"),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Get rule violation statistics across all projects.
|
||||||
|
|
||||||
|
Shows which rules are violated most often - useful for identifying
|
||||||
|
common patterns and potential training needs.
|
||||||
|
"""
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT
|
||||||
|
rule,
|
||||||
|
COUNT(*) as total_violations,
|
||||||
|
COUNT(DISTINCT project) as affected_projects
|
||||||
|
FROM violations v
|
||||||
|
JOIN metrics_uploads m ON v.upload_id = m.id
|
||||||
|
WHERE m.timestamp > datetime('now', ?)
|
||||||
|
GROUP BY rule
|
||||||
|
ORDER BY total_violations DESC
|
||||||
|
""",
|
||||||
|
(f"-{days} days",),
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"days": days,
|
||||||
|
"rules": [
|
||||||
|
{
|
||||||
|
"rule": row[0],
|
||||||
|
"total_violations": row[1],
|
||||||
|
"affected_projects": row[2],
|
||||||
|
}
|
||||||
|
for row in cursor.fetchall()
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
finally:
|
||||||
|
conn.close()
|
||||||
@@ -32,6 +32,7 @@ from fastapi.staticfiles import StaticFiles
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from apps.api.browser_logger import router as browser_log_router
|
from apps.api.browser_logger import router as browser_log_router
|
||||||
|
from apps.api.metrics import router as metrics_router
|
||||||
from dss import settings
|
from dss import settings
|
||||||
|
|
||||||
# Load environment variables from .env file FIRST (before any other imports)
|
# Load environment variables from .env file FIRST (before any other imports)
|
||||||
@@ -313,6 +314,9 @@ app.add_middleware(
|
|||||||
# Include browser logger router for console log forwarding
|
# Include browser logger router for console log forwarding
|
||||||
app.include_router(browser_log_router)
|
app.include_router(browser_log_router)
|
||||||
|
|
||||||
|
# Include metrics router for CI pipeline uploads and dashboard
|
||||||
|
app.include_router(metrics_router)
|
||||||
|
|
||||||
# Mount Admin UI static files
|
# Mount Admin UI static files
|
||||||
UI_DIR = Path(__file__).parent.parent.parent / "admin-ui"
|
UI_DIR = Path(__file__).parent.parent.parent / "admin-ui"
|
||||||
if UI_DIR.exists():
|
if UI_DIR.exists():
|
||||||
|
|||||||
@@ -2,11 +2,21 @@
|
|||||||
DSS Core Module - Configuration and Context Management.
|
DSS Core Module - Configuration and Context Management.
|
||||||
|
|
||||||
Extended with Context Compiler for design system context resolution.
|
Extended with Context Compiler for design system context resolution.
|
||||||
|
|
||||||
|
Enterprise Architecture:
|
||||||
|
- LOCAL mode: Uses LocalAnalysisCache for fast, offline-capable validation
|
||||||
|
- REMOTE mode: Full analysis via API
|
||||||
|
- CI mode: Authoritative enforcement, uploads metrics to dashboard
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from .compiler import EMERGENCY_SKIN, ContextCompiler
|
from .compiler import EMERGENCY_SKIN, ContextCompiler
|
||||||
from .config import DSSConfig, DSSMode
|
from .config import DSSConfig, DSSMode
|
||||||
from .context import DSSContext
|
from .context import DSSContext
|
||||||
|
from .local_cache import (
|
||||||
|
LocalAnalysisCache,
|
||||||
|
LocalCacheValidator,
|
||||||
|
get_project_cache,
|
||||||
|
)
|
||||||
from .mcp_extensions import (
|
from .mcp_extensions import (
|
||||||
COMPILER,
|
COMPILER,
|
||||||
get_active_context,
|
get_active_context,
|
||||||
@@ -23,6 +33,9 @@ __all__ = [
|
|||||||
"DSSContext",
|
"DSSContext",
|
||||||
"ContextCompiler",
|
"ContextCompiler",
|
||||||
"EMERGENCY_SKIN",
|
"EMERGENCY_SKIN",
|
||||||
|
"LocalAnalysisCache",
|
||||||
|
"LocalCacheValidator",
|
||||||
|
"get_project_cache",
|
||||||
"get_active_context",
|
"get_active_context",
|
||||||
"resolve_token",
|
"resolve_token",
|
||||||
"validate_manifest",
|
"validate_manifest",
|
||||||
|
|||||||
@@ -3,8 +3,14 @@ DSS Configuration Module
|
|||||||
========================
|
========================
|
||||||
|
|
||||||
Handles configuration management for the Design System Server (DSS) Claude Plugin.
|
Handles configuration management for the Design System Server (DSS) Claude Plugin.
|
||||||
Supports local/remote mode detection, persistent configuration storage, and
|
Supports local/remote/CI mode detection, persistent configuration storage, and
|
||||||
environment variable overrides.
|
environment variable overrides.
|
||||||
|
|
||||||
|
Enterprise Architecture:
|
||||||
|
- LOCAL: Developer workstation, reads from .dss/ cache, advisory validation
|
||||||
|
- REMOTE: Headless/server mode, full analysis, metrics upload
|
||||||
|
- CI: CI/CD pipeline, authoritative enforcement, blocking validation
|
||||||
|
- AUTO: Detect environment automatically (CI env vars -> CI, else LOCAL with cache)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
@@ -13,6 +19,7 @@ import os
|
|||||||
import uuid
|
import uuid
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
from pydantic import BaseModel, Field, ValidationError
|
from pydantic import BaseModel, Field, ValidationError
|
||||||
@@ -24,14 +31,28 @@ CONFIG_DIR = Path.home() / ".dss"
|
|||||||
CONFIG_FILE = CONFIG_DIR / "config.json"
|
CONFIG_FILE = CONFIG_DIR / "config.json"
|
||||||
DEFAULT_REMOTE_URL = "https://dss.overbits.luz.uy"
|
DEFAULT_REMOTE_URL = "https://dss.overbits.luz.uy"
|
||||||
DEFAULT_LOCAL_URL = "http://localhost:6006"
|
DEFAULT_LOCAL_URL = "http://localhost:6006"
|
||||||
|
DEFAULT_DASHBOARD_URL = "https://dss.overbits.luz.uy/api/metrics"

# Environment variables whose presence indicates a CI/CD pipeline; used by
# the CI-mode auto-detection.
CI_ENV_VARS = [
    "CI",
    "GITEA_ACTIONS",
    "GITHUB_ACTIONS",
    "GITLAB_CI",
    "JENKINS_URL",
    "CIRCLECI",
    "TRAVIS",
    "BUILDKITE",
]
|
||||||
|
|
||||||
|
|
||||||
class DSSMode(str, Enum):
    """Operation modes for the DSS plugin."""

    LOCAL = "local"  # Developer workstation - advisory, uses cache
    REMOTE = "remote"  # Headless server - full analysis
    CI = "ci"  # CI/CD pipeline - authoritative enforcement
    AUTO = "auto"  # Auto-detect based on environment
|
||||||
|
|
||||||
|
|
||||||
class DSSConfig(BaseModel):
|
class DSSConfig(BaseModel):
|
||||||
@@ -42,15 +63,21 @@ class DSSConfig(BaseModel):
|
|||||||
mode (DSSMode): The configured operation mode (default: AUTO).
|
mode (DSSMode): The configured operation mode (default: AUTO).
|
||||||
remote_url (str): URL for the remote DSS API.
|
remote_url (str): URL for the remote DSS API.
|
||||||
local_url (str): URL for the local DSS API (usually localhost).
|
local_url (str): URL for the local DSS API (usually localhost).
|
||||||
|
dashboard_url (str): URL for metrics dashboard API.
|
||||||
session_id (str): Unique identifier for this client instance.
|
session_id (str): Unique identifier for this client instance.
|
||||||
|
project_path (str): Current project path (for local analysis).
|
||||||
|
rules_version (str): Pinned @dss/rules version for this project.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
mode: DSSMode = Field(default=DSSMode.AUTO, description="Operation mode preference")
|
mode: DSSMode = Field(default=DSSMode.AUTO, description="Operation mode preference")
|
||||||
remote_url: str = Field(default=DEFAULT_REMOTE_URL, description="Remote API endpoint")
|
remote_url: str = Field(default=DEFAULT_REMOTE_URL, description="Remote API endpoint")
|
||||||
local_url: str = Field(default=DEFAULT_LOCAL_URL, description="Local API endpoint")
|
local_url: str = Field(default=DEFAULT_LOCAL_URL, description="Local API endpoint")
|
||||||
|
dashboard_url: str = Field(default=DEFAULT_DASHBOARD_URL, description="Metrics dashboard API")
|
||||||
session_id: str = Field(
|
session_id: str = Field(
|
||||||
default_factory=lambda: str(uuid.uuid4()), description="Persistent session ID"
|
default_factory=lambda: str(uuid.uuid4()), description="Persistent session ID"
|
||||||
)
|
)
|
||||||
|
project_path: Optional[str] = Field(default=None, description="Current project path")
|
||||||
|
rules_version: Optional[str] = Field(default=None, description="Pinned @dss/rules version")
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
validate_assignment = True
|
validate_assignment = True
|
||||||
@@ -101,38 +128,75 @@ class DSSConfig(BaseModel):
|
|||||||
Determine the actual runtime mode based on priority rules.
|
Determine the actual runtime mode based on priority rules.
|
||||||
|
|
||||||
Priority:
|
Priority:
|
||||||
1. DSS_MODE environment variable
|
1. DSS_MODE environment variable (explicit override)
|
||||||
2. Configured 'mode' (if not AUTO)
|
2. CI environment detection (GITEA_ACTIONS, CI, GITHUB_ACTIONS, etc.)
|
||||||
3. Auto-detection (ping local health endpoint)
|
3. Configured 'mode' (if not AUTO)
|
||||||
4. Fallback to REMOTE
|
4. Auto-detection (check for .dss/ folder, ping local health)
|
||||||
|
5. Fallback to LOCAL (developer-first)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
DSSMode: The resolved active mode (LOCAL or REMOTE).
|
DSSMode: The resolved active mode (LOCAL, REMOTE, or CI).
|
||||||
"""
|
"""
|
||||||
# 1. Check Environment Variable
|
# 1. Check Environment Variable (explicit override)
|
||||||
env_mode = os.getenv("DSS_MODE")
|
env_mode = os.getenv("DSS_MODE")
|
||||||
if env_mode:
|
if env_mode:
|
||||||
try:
|
try:
|
||||||
# Normalize string to enum
|
resolved = DSSMode(env_mode.lower())
|
||||||
return DSSMode(env_mode.lower())
|
logger.info(f"Mode set via DSS_MODE env var: {resolved.value}")
|
||||||
|
return resolved
|
||||||
except ValueError:
|
except ValueError:
|
||||||
logger.warning(f"Invalid DSS_MODE env var '{env_mode}', ignoring.")
|
logger.warning(f"Invalid DSS_MODE env var '{env_mode}', ignoring.")
|
||||||
|
|
||||||
# 2. Check Configuration (if explicit)
|
# 2. Check CI environment variables
|
||||||
|
if self._is_ci_environment():
|
||||||
|
logger.info("CI environment detected. Using CI mode (authoritative enforcement).")
|
||||||
|
return DSSMode.CI
|
||||||
|
|
||||||
|
# 3. Check Configuration (if explicit, not AUTO)
|
||||||
if self.mode != DSSMode.AUTO:
|
if self.mode != DSSMode.AUTO:
|
||||||
|
logger.info(f"Using configured mode: {self.mode.value}")
|
||||||
return self.mode
|
return self.mode
|
||||||
|
|
||||||
# 3. Auto-detect
|
# 4. Auto-detect based on environment
|
||||||
logger.info("Auto-detecting DSS mode...")
|
logger.info("Auto-detecting DSS mode...")
|
||||||
is_local_healthy = await self._check_local_health()
|
|
||||||
|
|
||||||
if is_local_healthy:
|
# Check for local .dss/ folder (indicates project setup)
|
||||||
logger.info(f"Local server detected at {self.local_url}. Switching to LOCAL mode.")
|
if self._has_local_dss_folder():
|
||||||
|
logger.info("Found .dss/ folder. Using LOCAL mode with cache.")
|
||||||
return DSSMode.LOCAL
|
return DSSMode.LOCAL
|
||||||
else:
|
|
||||||
logger.info("Local server unreachable. Fallback to REMOTE mode.")
|
# Check if local server is running
|
||||||
# 4. Fallback
|
is_local_healthy = await self._check_local_health()
|
||||||
return DSSMode.REMOTE
|
if is_local_healthy:
|
||||||
|
logger.info(f"Local server detected at {self.local_url}. Using LOCAL mode.")
|
||||||
|
return DSSMode.LOCAL
|
||||||
|
|
||||||
|
# 5. Fallback to LOCAL (developer-first, will use stale cache if available)
|
||||||
|
logger.info("Fallback to LOCAL mode (offline-capable with cache).")
|
||||||
|
return DSSMode.LOCAL
|
||||||
|
|
||||||
|
def _is_ci_environment(self) -> bool:
    """Check whether we are running inside a CI/CD pipeline.

    Detection is purely environmental: the first well-known CI variable in
    CI_ENV_VARS that is set to a non-empty value wins.
    """
    detected = next((name for name in CI_ENV_VARS if os.getenv(name)), None)
    if detected is None:
        return False
    logger.debug(f"CI detected via {detected} env var")
    return True
|
||||||
|
|
||||||
|
def _has_local_dss_folder(self) -> bool:
|
||||||
|
"""Check if current directory or project has .dss/ folder."""
|
||||||
|
# Check current working directory
|
||||||
|
cwd_dss = Path.cwd() / ".dss"
|
||||||
|
if cwd_dss.exists() and cwd_dss.is_dir():
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check configured project path
|
||||||
|
if self.project_path:
|
||||||
|
project_dss = Path(self.project_path) / ".dss"
|
||||||
|
if project_dss.exists() and project_dss.is_dir():
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
async def _check_local_health(self) -> bool:
|
async def _check_local_health(self) -> bool:
|
||||||
"""
|
"""
|
||||||
@@ -161,3 +225,46 @@ class DSSConfig(BaseModel):
|
|||||||
if active_mode == DSSMode.LOCAL:
|
if active_mode == DSSMode.LOCAL:
|
||||||
return self.local_url
|
return self.local_url
|
||||||
return self.remote_url
|
return self.remote_url
|
||||||
|
|
||||||
|
def get_mode_behavior(self, active_mode: DSSMode) -> dict:
    """
    Get behavior configuration for the active mode.

    Returns dict with:
    - blocking: Whether validation errors block operations
    - upload_metrics: Whether to upload metrics to dashboard
    - use_cache: Whether to use local .dss/ cache
    - cache_ttl: Cache time-to-live in seconds
    - show_stale_warning: Whether to warn when cached data may be stale
    """
    # Two behavior profiles cover all modes: advisory (cache-backed, never
    # blocks) and enforcing (blocking, uploads metrics, no cache).
    advisory = {
        "blocking": False,  # Advisory only
        "upload_metrics": False,
        "use_cache": True,
        "cache_ttl": 3600,  # 1 hour
        "show_stale_warning": True,
    }
    enforcing = {
        "blocking": True,
        "upload_metrics": True,
        "use_cache": False,
        "cache_ttl": 0,
        "show_stale_warning": False,
    }
    behaviors = {
        DSSMode.LOCAL: advisory,
        DSSMode.REMOTE: dict(enforcing),
        DSSMode.CI: dict(enforcing),  # Authoritative enforcement
        # AUTO resolves to another mode before this is consulted; advisory
        # settings are the safe default if it ever reaches here.
        DSSMode.AUTO: dict(advisory),
    }
    return behaviors.get(active_mode, behaviors[DSSMode.LOCAL])
|
||||||
|
|||||||
@@ -4,6 +4,11 @@ DSS Context Module
|
|||||||
|
|
||||||
Singleton context manager for the DSS Plugin.
|
Singleton context manager for the DSS Plugin.
|
||||||
Handles configuration loading, mode detection, and strategy instantiation.
|
Handles configuration loading, mode detection, and strategy instantiation.
|
||||||
|
|
||||||
|
Enterprise Architecture:
|
||||||
|
- LOCAL: Uses LocalAnalysisCache for fast, offline-capable validation
|
||||||
|
- REMOTE: Full analysis via API
|
||||||
|
- CI: Authoritative enforcement, uploads metrics to dashboard
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
@@ -11,6 +16,7 @@ import logging
|
|||||||
from typing import Any, Dict, Optional
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
from .config import DSSConfig, DSSMode
|
from .config import DSSConfig, DSSMode
|
||||||
|
from .local_cache import LocalAnalysisCache, LocalCacheValidator, get_project_cache
|
||||||
|
|
||||||
# Logger setup
|
# Logger setup
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -44,6 +50,8 @@ class DSSContext:
|
|||||||
self._capabilities: Dict[str, bool] = {}
|
self._capabilities: Dict[str, bool] = {}
|
||||||
self._strategy_cache: Dict[str, Strategy] = {}
|
self._strategy_cache: Dict[str, Strategy] = {}
|
||||||
self.session_id: Optional[str] = None
|
self.session_id: Optional[str] = None
|
||||||
|
self._local_cache: Optional[LocalAnalysisCache] = None
|
||||||
|
self._cache_validator: Optional[LocalCacheValidator] = None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def get_instance(cls) -> "DSSContext":
|
async def get_instance(cls) -> "DSSContext":
|
||||||
@@ -91,7 +99,11 @@ class DSSContext:
|
|||||||
f"DSSContext initialized. Mode: {self.active_mode.value}, Session: {self.session_id}"
|
f"DSSContext initialized. Mode: {self.active_mode.value}, Session: {self.session_id}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# 3. Cache Capabilities
|
# 3. Initialize local cache for LOCAL mode
|
||||||
|
if self.active_mode == DSSMode.LOCAL:
|
||||||
|
self._init_local_cache()
|
||||||
|
|
||||||
|
# 4. Cache Capabilities
|
||||||
self._cache_capabilities()
|
self._cache_capabilities()
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -100,6 +112,27 @@ class DSSContext:
|
|||||||
self.active_mode = DSSMode.REMOTE
|
self.active_mode = DSSMode.REMOTE
|
||||||
self._capabilities = {"limited": True}
|
self._capabilities = {"limited": True}
|
||||||
|
|
||||||
|
def _init_local_cache(self) -> None:
|
||||||
|
"""Initialize local cache for LOCAL mode."""
|
||||||
|
try:
|
||||||
|
project_path = self.config.project_path if self.config else None
|
||||||
|
self._local_cache = get_project_cache(project_path)
|
||||||
|
self._cache_validator = LocalCacheValidator(self._local_cache)
|
||||||
|
|
||||||
|
# Log cache status
|
||||||
|
status = self._local_cache.get_cache_status()
|
||||||
|
if status.get("exists"):
|
||||||
|
if status.get("is_stale"):
|
||||||
|
logger.warning(f"Local cache is stale: {status.get('recommendation')}")
|
||||||
|
else:
|
||||||
|
logger.info(f"Local cache ready. Rules version: {status.get('rules_version')}")
|
||||||
|
else:
|
||||||
|
logger.info("No local cache found. Run `npx dss-rules validate` to populate.")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to initialize local cache: {e}")
|
||||||
|
self._local_cache = None
|
||||||
|
self._cache_validator = None
|
||||||
|
|
||||||
def _cache_capabilities(self) -> None:
|
def _cache_capabilities(self) -> None:
|
||||||
"""Determines what the plugin can do based on the active mode."""
|
"""Determines what the plugin can do based on the active mode."""
|
||||||
# Base capabilities
|
# Base capabilities
|
||||||
@@ -192,3 +225,88 @@ class DSSContext:
|
|||||||
# Cache and return
|
# Cache and return
|
||||||
self._strategy_cache[strategy_type] = strategy_instance
|
self._strategy_cache[strategy_type] = strategy_instance
|
||||||
return strategy_instance
|
return strategy_instance
|
||||||
|
|
||||||
|
# === Local Cache Access Methods ===
|
||||||
|
|
||||||
|
    def get_local_cache(self) -> Optional[LocalAnalysisCache]:
        """
        Get the local analysis cache instance.

        Returns:
            LocalAnalysisCache instance or None if not in LOCAL mode.
        """
        # _init_local_cache() only runs when active_mode == DSSMode.LOCAL, and
        # it resets this attribute to None on failure — so any non-LOCAL mode
        # (or a failed init) observes None here.
        return self._local_cache
|
||||||
|
|
||||||
|
    def get_cache_validator(self) -> Optional[LocalCacheValidator]:
        """
        Get the local cache validator instance.

        Returns:
            LocalCacheValidator instance or None if not in LOCAL mode.
        """
        # Populated together with _local_cache in _init_local_cache(); both
        # are None outside LOCAL mode or after a failed cache init.
        return self._cache_validator
|
||||||
|
|
||||||
|
def get_cache_status(self) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Get current cache status.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Cache status dict with freshness info and recommendations.
|
||||||
|
"""
|
||||||
|
if self._local_cache is None:
|
||||||
|
return {
|
||||||
|
"available": False,
|
||||||
|
"mode": self.active_mode.value,
|
||||||
|
"message": f"Local cache not available in {self.active_mode.value} mode"
|
||||||
|
}
|
||||||
|
|
||||||
|
status = self._local_cache.get_cache_status()
|
||||||
|
status["available"] = True
|
||||||
|
status["mode"] = self.active_mode.value
|
||||||
|
return status
|
||||||
|
|
||||||
|
def validate_file_local(self, file_path: str) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Validate a file using local cache (LOCAL mode only).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to file to validate.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Validation result dict.
|
||||||
|
"""
|
||||||
|
if self._cache_validator is None:
|
||||||
|
return {
|
||||||
|
"file": file_path,
|
||||||
|
"error": "Local cache not available",
|
||||||
|
"mode": self.active_mode.value
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._cache_validator.validate_file(file_path)
|
||||||
|
|
||||||
|
def get_validation_summary(self) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Get summary of validation state from local cache.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Summary dict with counts and status.
|
||||||
|
"""
|
||||||
|
if self._cache_validator is None:
|
||||||
|
return {
|
||||||
|
"error": "Local cache not available",
|
||||||
|
"mode": self.active_mode.value
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._cache_validator.get_summary()
|
||||||
|
|
||||||
|
def get_mode_behavior(self) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Get behavior configuration for current mode.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with blocking, upload_metrics, use_cache flags.
|
||||||
|
"""
|
||||||
|
if self.config is None:
|
||||||
|
return {"blocking": False, "upload_metrics": False, "use_cache": False}
|
||||||
|
|
||||||
|
return self.config.get_mode_behavior(self.active_mode)
|
||||||
|
|||||||
402
dss-claude-plugin/core/local_cache.py
Normal file
402
dss-claude-plugin/core/local_cache.py
Normal file
@@ -0,0 +1,402 @@
|
|||||||
|
"""
|
||||||
|
DSS Local Analysis Cache Module.
|
||||||
|
|
||||||
|
Handles reading and writing to the local .dss/ folder for developer workstation mode.
|
||||||
|
Provides offline-capable validation using cached analysis results.
|
||||||
|
|
||||||
|
Enterprise Architecture:
|
||||||
|
- LOCAL mode reads from .dss/cache/ for fast, offline-capable feedback
|
||||||
|
- Cache is populated by `dss-rules validate` or periodic sync
|
||||||
|
- Stale cache shows warnings but doesn't block (advisory mode)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)

# Cache file names within .dss/cache/
ANALYSIS_CACHE_FILE = "analysis_cache.json"
RULES_CACHE_FILE = "rules_cache.json"
VIOLATIONS_CACHE_FILE = "violations_cache.json"
# Metadata lives at the top of .dss/ (not inside cache/)
METADATA_FILE = "metadata.json"

# Default cache TTL in seconds (1 hour) — below this age the cache is "fresh"
DEFAULT_CACHE_TTL = 3600

# Stale cache threshold (24 hours - show warning but still use)
STALE_THRESHOLD = 86400


class LocalAnalysisCache:
    """
    Manages local .dss/ folder cache for developer workstations.

    Provides:
    - Fast, offline-capable validation results
    - Cached rule definitions from @dss/rules
    - Violation history for incremental feedback

    On-disk layout:
        <project>/.dss/metadata.json   - timestamps, counts, rules version
        <project>/.dss/cache/*.json    - analysis / rules / violations data
        <project>/.dss/.gitignore      - keeps the cache out of git
    """

    def __init__(self, project_path: Optional[str] = None):
        """
        Initialize cache with project path.

        Args:
            project_path: Path to project root. Defaults to current directory.
        """
        self.project_path = Path(project_path) if project_path else Path.cwd()
        self.dss_dir = self.project_path / ".dss"
        self.cache_dir = self.dss_dir / "cache"
        self._ensure_structure()

    def _ensure_structure(self) -> None:
        """Ensure .dss/ folder structure exists. Best-effort: never raises."""
        try:
            self.dss_dir.mkdir(parents=True, exist_ok=True)
            self.cache_dir.mkdir(parents=True, exist_ok=True)

            # Create .gitignore if it doesn't exist so the cache is never
            # committed (matches the "no commit-back" architecture decision).
            gitignore_path = self.dss_dir / ".gitignore"
            if not gitignore_path.exists():
                gitignore_path.write_text("# DSS local cache - do not commit\n*\n!.gitignore\n")
                logger.debug(f"Created .gitignore in {self.dss_dir}")
        except Exception as e:
            logger.warning(f"Failed to create .dss/ structure: {e}")

    def get_cache_status(self) -> Dict[str, Any]:
        """
        Get current cache status including freshness.

        Returns:
            Dict with cache status, age, and recommendation. "is_fresh" means
            younger than DEFAULT_CACHE_TTL; "is_stale" means older than
            STALE_THRESHOLD; ages in between are usable-but-refresh.
        """
        metadata = self._read_metadata()

        if not metadata:
            return {
                "exists": False,
                "age_seconds": None,
                "is_fresh": False,
                "is_stale": True,
                "recommendation": "Run `npx dss-rules validate` to populate cache"
            }

        last_updated = metadata.get("last_updated")
        if not last_updated:
            return {
                "exists": True,
                "age_seconds": None,
                "is_fresh": False,
                "is_stale": True,
                "recommendation": "Cache missing timestamp, run validation"
            }

        try:
            # Accept both "+00:00" and trailing-"Z" ISO-8601 timestamps.
            last_dt = datetime.fromisoformat(last_updated.replace("Z", "+00:00"))
            now = datetime.now(timezone.utc)
            age_seconds = (now - last_dt).total_seconds()

            is_fresh = age_seconds < DEFAULT_CACHE_TTL
            is_stale = age_seconds > STALE_THRESHOLD

            if is_fresh:
                recommendation = "Cache is fresh"
            elif is_stale:
                recommendation = f"Cache is {int(age_seconds / 3600)}h old. Run `npx dss-rules validate` to refresh"
            else:
                recommendation = "Cache is usable but consider refreshing"

            return {
                "exists": True,
                "age_seconds": int(age_seconds),
                "is_fresh": is_fresh,
                "is_stale": is_stale,
                "last_updated": last_updated,
                "rules_version": metadata.get("rules_version"),
                "recommendation": recommendation
            }
        except Exception as e:
            logger.warning(f"Failed to parse cache timestamp: {e}")
            return {
                "exists": True,
                "age_seconds": None,
                "is_fresh": False,
                "is_stale": True,
                "recommendation": "Cache timestamp invalid, run validation"
            }

    def get_analysis_results(self) -> Optional[Dict[str, Any]]:
        """
        Get cached analysis results.

        Returns:
            Analysis results dict or None if not cached.
        """
        return self._read_cache_file(ANALYSIS_CACHE_FILE)

    def get_violations(self, file_path: Optional[str] = None) -> List[Dict[str, Any]]:
        """
        Get cached violations, optionally filtered by file.

        Args:
            file_path: Optional file path to filter violations.

        Returns:
            List of violation dicts (empty when nothing is cached).
        """
        violations = self._read_cache_file(VIOLATIONS_CACHE_FILE)
        if not violations:
            return []

        violation_list = violations.get("violations", [])

        if file_path:
            # Normalize path for comparison: match either when the stored
            # path ends with the given (possibly relative) path, or when the
            # absolute resolved path appears inside the stored path.
            norm_path = str(Path(file_path).resolve())
            return [v for v in violation_list if v.get("file", "").endswith(file_path) or norm_path in v.get("file", "")]

        return violation_list

    def get_rules(self) -> Optional[Dict[str, Any]]:
        """
        Get cached rule definitions.

        Returns:
            Rules dict or None if not cached.
        """
        return self._read_cache_file(RULES_CACHE_FILE)

    def save_analysis_results(self, results: Dict[str, Any]) -> bool:
        """
        Save analysis results to cache.

        Args:
            results: Analysis results from validation.

        Returns:
            True if saved successfully.
        """
        success = self._write_cache_file(ANALYSIS_CACHE_FILE, results)
        if success:
            self._update_metadata({"last_analysis": datetime.now(timezone.utc).isoformat()})
        return success

    def save_violations(self, violations: List[Dict[str, Any]], metadata: Optional[Dict[str, Any]] = None) -> bool:
        """
        Save violations to cache.

        Args:
            violations: List of violation dicts.
            metadata: Optional metadata (rules_version, commit, etc.)

        Returns:
            True if saved successfully.
        """
        data = {
            "violations": violations,
            "count": len(violations),
            "saved_at": datetime.now(timezone.utc).isoformat(),
            **(metadata or {})
        }
        success = self._write_cache_file(VIOLATIONS_CACHE_FILE, data)
        if success:
            meta_update = {
                "last_updated": datetime.now(timezone.utc).isoformat(),
                "violation_count": len(violations),
            }
            # Fix: only touch rules_version when the caller actually supplied
            # one — previously a save without metadata clobbered the stored
            # version with None.
            if metadata and metadata.get("rules_version") is not None:
                meta_update["rules_version"] = metadata["rules_version"]
            self._update_metadata(meta_update)
        return success

    def save_rules(self, rules: Dict[str, Any], version: str) -> bool:
        """
        Save rule definitions to cache.

        Args:
            rules: Rule definitions dict.
            version: Rules package version.

        Returns:
            True if saved successfully.
        """
        data = {
            "rules": rules,
            "version": version,
            "cached_at": datetime.now(timezone.utc).isoformat()
        }
        success = self._write_cache_file(RULES_CACHE_FILE, data)
        if success:
            self._update_metadata({"rules_version": version})
        return success

    def clear_cache(self) -> bool:
        """
        Clear all cached data.

        Returns:
            True if cleared successfully.
        """
        try:
            for file in [ANALYSIS_CACHE_FILE, VIOLATIONS_CACHE_FILE, RULES_CACHE_FILE]:
                cache_file = self.cache_dir / file
                if cache_file.exists():
                    cache_file.unlink()

            # Reset metadata (keeps only the clear timestamp)
            self._write_metadata({"cleared_at": datetime.now(timezone.utc).isoformat()})
            logger.info("Cache cleared")
            return True
        except Exception as e:
            logger.error(f"Failed to clear cache: {e}")
            return False

    def _read_cache_file(self, filename: str) -> Optional[Dict[str, Any]]:
        """Read a JSON cache file; None when missing or unreadable."""
        cache_file = self.cache_dir / filename
        if not cache_file.exists():
            return None

        try:
            return json.loads(cache_file.read_text(encoding="utf-8"))
        except Exception as e:
            # Fix: log the actual filename (was a hard-coded "(unknown)"
            # placeholder) and drop the redundant
            # (json.JSONDecodeError, Exception) tuple — Exception already
            # covers JSONDecodeError.
            logger.warning(f"Failed to read cache file {filename}: {e}")
            return None

    def _write_cache_file(self, filename: str, data: Dict[str, Any]) -> bool:
        """Write a JSON cache file; returns False (and logs) on failure."""
        cache_file = self.cache_dir / filename
        try:
            cache_file.write_text(json.dumps(data, indent=2), encoding="utf-8")
            return True
        except Exception as e:
            # Fix: include the filename in the log message (was "(unknown)").
            logger.error(f"Failed to write cache file {filename}: {e}")
            return False

    def _read_metadata(self) -> Optional[Dict[str, Any]]:
        """Read metadata file; None when missing or unparsable."""
        metadata_file = self.dss_dir / METADATA_FILE
        if not metadata_file.exists():
            return None

        try:
            return json.loads(metadata_file.read_text(encoding="utf-8"))
        except Exception:
            return None

    def _write_metadata(self, data: Dict[str, Any]) -> bool:
        """Write metadata file, replacing its previous contents."""
        metadata_file = self.dss_dir / METADATA_FILE
        try:
            metadata_file.write_text(json.dumps(data, indent=2), encoding="utf-8")
            return True
        except Exception as e:
            logger.error(f"Failed to write metadata: {e}")
            return False

    def _update_metadata(self, updates: Dict[str, Any]) -> bool:
        """Merge new values into the metadata file (read-modify-write)."""
        existing = self._read_metadata() or {}
        existing.update(updates)
        return self._write_metadata(existing)
|
||||||
|
|
||||||
|
|
||||||
|
class LocalCacheValidator:
    """
    Validator that uses local cache for offline-capable feedback.

    Used in LOCAL mode to provide fast, advisory validation without
    requiring network access to the dashboard. All data is read from the
    wrapped LocalAnalysisCache; nothing is written.
    """

    def __init__(self, cache: LocalAnalysisCache):
        """
        Initialize validator with cache.

        Args:
            cache: LocalAnalysisCache instance.
        """
        self.cache = cache

    def validate_file(self, file_path: str) -> Dict[str, Any]:
        """
        Validate a single file using cached violations.

        Args:
            file_path: Path to file to validate.

        Returns:
            Validation result dict; carries a "warning" key whenever the
            underlying cache is stale.
        """
        freshness = self.cache.get_cache_status()
        file_violations = self.cache.get_violations(file_path)

        error_total = sum(1 for v in file_violations if v.get("severity") == "error")
        warning_total = sum(1 for v in file_violations if v.get("severity") == "warning")
        result = {
            "file": file_path,
            "violations": file_violations,
            "error_count": error_total,
            "warning_count": warning_total,
            "cache_status": freshness,
            "mode": "local_cache"
        }

        if freshness.get("is_stale"):
            result["warning"] = freshness["recommendation"]
        return result

    def get_file_status(self, file_path: str) -> str:
        """
        Get simple status for a file.

        Returns:
            'pass', 'fail', or 'unknown' (no cached data for the file).
        """
        file_violations = self.cache.get_violations(file_path)
        if not file_violations:
            # No cached data for this file
            return "unknown"
        has_errors = any(v.get("severity") == "error" for v in file_violations)
        return "fail" if has_errors else "pass"

    def get_summary(self) -> Dict[str, Any]:
        """
        Get summary of cached validation state.

        Returns:
            Summary dict with counts and status across all cached violations.
        """
        freshness = self.cache.get_cache_status()
        analysis = self.cache.get_analysis_results()
        every_violation = self.cache.get_violations()

        severities = [v.get("severity") for v in every_violation]
        return {
            "cache_status": freshness,
            "total_violations": len(every_violation),
            "error_count": severities.count("error"),
            "warning_count": severities.count("warning"),
            "rules_version": freshness.get("rules_version"),
            "last_updated": freshness.get("last_updated"),
            "analysis": analysis
        }
|
||||||
|
|
||||||
|
|
||||||
|
def get_project_cache(project_path: Optional[str] = None) -> LocalAnalysisCache:
    """
    Factory function to get cache for a project.

    Args:
        project_path: Path to project root. Defaults to the current working
            directory (see LocalAnalysisCache.__init__).

    Returns:
        LocalAnalysisCache instance.
    """
    # NOTE(review): constructs a fresh cache on every call — no memoization
    # despite the "get" name; each instance re-runs _ensure_structure().
    return LocalAnalysisCache(project_path)
|
||||||
@@ -1 +1 @@
|
|||||||
1765446683593
|
1765455715015
|
||||||
361
packages/dss-rules/bin/cli.js
Normal file
361
packages/dss-rules/bin/cli.js
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* DSS Rules CLI
|
||||||
|
*
|
||||||
|
* Command-line tool for validating files against DSS design system rules.
|
||||||
|
* Used by CI pipelines, pre-commit hooks, and local development.
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const { glob } = require('glob');
|
||||||
|
const rules = require('../lib/index');
|
||||||
|
|
||||||
|
// ANSI colors
|
||||||
|
const c = {
  red: '\x1b[31m',    // errors / failed status
  yellow: '\x1b[33m', // warnings
  green: '\x1b[32m',  // success
  blue: '\x1b[34m',   // NOTE(review): unused in the visible commands
  cyan: '\x1b[36m',   // CI-mode log prefix
  dim: '\x1b[2m',     // de-emphasized detail
  reset: '\x1b[0m',   // must terminate every colored span
  bold: '\x1b[1m'     // headings and final status lines
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse command line arguments
|
||||||
|
*/
|
||||||
|
/**
 * Parse command line arguments into an options object.
 * Unknown `-`/`--` flags are silently dropped; bare tokens become files.
 * `--baseline` / `--fetch-baseline` consume the following token when present.
 */
function parseArgs(args) {
  const options = {
    files: [],
    json: false,
    baseline: null,
    strict: false,
    quiet: false,
    help: false,
    selfTest: false,
    version: false,
    ciMode: false,
    fetchBaseline: null
  };

  // Flag tables instead of an if/else chain: token -> option key.
  const boolFlags = new Map([
    ['--json', 'json'],
    ['--strict', 'strict'],
    ['--quiet', 'quiet'],
    ['-q', 'quiet'],
    ['--help', 'help'],
    ['-h', 'help'],
    ['--self-test', 'selfTest'],
    ['--version', 'version'],
    ['-v', 'version'],
    ['--ci', 'ciMode']
  ]);
  const valueFlags = new Map([
    ['--baseline', 'baseline'],
    ['--fetch-baseline', 'fetchBaseline']
  ]);

  for (let i = 0; i < args.length; i++) {
    const token = args[i];
    if (boolFlags.has(token)) {
      options[boolFlags.get(token)] = true;
    } else if (valueFlags.has(token) && args[i + 1]) {
      options[valueFlags.get(token)] = args[++i];
    } else if (!token.startsWith('-')) {
      options.files.push(token);
    }
  }

  return options;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print help message
|
||||||
|
*/
|
||||||
|
function printHelp() {
  // Static usage text. Keep the flag list in sync with parseArgs() and the
  // init script — this literal is the only user-facing documentation of them.
  console.log(`
${c.bold}@dss/rules${c.reset} - Design System Rules Validator

${c.bold}Usage:${c.reset}
  dss-rules <command> [options]

${c.bold}Commands:${c.reset}
  init        Initialize DSS in a new project
  validate    Validate files against design system rules (default)

${c.bold}Validate Options:${c.reset}
  -h, --help              Show this help message
  -v, --version           Show version
  --json                  Output results as JSON
  --quiet, -q             Minimal output (errors only)
  --strict                Treat warnings as errors
  --ci                    CI mode (auto-detects baseline, version drift warnings)
  --baseline <file>       Compare against baseline JSON to show only new violations
  --fetch-baseline <url>  Fetch baseline from dashboard API
  --self-test             Verify rules package installation

${c.bold}Init Options:${c.reset}
  --force, -f             Overwrite existing configuration
  --ci <platform>         Set up CI workflow (gitea, github, gitlab)
  --yes, -y               Skip interactive prompts, use defaults

${c.bold}Examples:${c.reset}
  dss-rules init                      # Initialize new project
  dss-rules init --ci github          # Initialize with GitHub Actions
  dss-rules validate src/**/*.tsx     # Validate files
  dss-rules --ci --strict src/        # CI mode validation
  dss-rules --json src/ > report.json # JSON output

${c.bold}Ignore Comments:${c.reset}
  // dss-ignore              Ignore current line
  // dss-ignore-next-line    Ignore next line
  /* dss-ignore */           Block ignore (CSS)

${c.bold}Skip CI Validation:${c.reset}
  git commit -m "fix: hotfix [dss-skip]"

${c.bold}Exit Codes:${c.reset}
  0  All checks passed
  1  Validation errors found
  2  Configuration error
`);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print version info
|
||||||
|
*/
|
||||||
|
function printVersion() {
  // Version plus a one-line rule inventory from the packaged CI config.
  console.log(`@dss/rules v${rules.getVersion()}`);
  const config = rules.getCIConfig();
  console.log(`  ${config.blockingRules.length} blocking rules`);
  console.log(`  ${config.advisoryRules.length} advisory rules`);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run self-test
|
||||||
|
*/
|
||||||
|
function selfTest() {
  // Verifies the installed rules package: every category must load.
  // Terminates the process itself: exit 0 on success, exit 2 on failure.
  console.log(`${c.bold}Running @dss/rules self-test...${c.reset}\n`);

  const allRules = rules.loadRules();
  let passed = true;
  let totalRules = 0;

  for (const [category, ruleSet] of Object.entries(allRules)) {
    if (!ruleSet) {
      // A falsy ruleSet means the category file failed to load/parse.
      console.log(`${c.red}✗${c.reset} ${category}: Failed to load`);
      passed = false;
      continue;
    }
    const count = ruleSet.rules?.length || 0;
    totalRules += count;
    console.log(`${c.green}✓${c.reset} ${category}: ${count} rules (v${ruleSet.version})`);
  }

  const config = rules.getCIConfig();
  console.log(`\n${c.bold}Summary:${c.reset}`);
  console.log(`  Package version: ${config.version}`);
  console.log(`  Total rules: ${totalRules}`);
  console.log(`  Blocking (error): ${config.blockingRules.length}`);
  console.log(`  Advisory (warning): ${config.advisoryRules.length}`);

  if (passed) {
    console.log(`\n${c.green}${c.bold}Self-test passed!${c.reset}`);
    process.exit(0);
  } else {
    console.log(`\n${c.red}${c.bold}Self-test failed!${c.reset}`);
    process.exit(2);
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Expand glob patterns to file list
|
||||||
|
*/
|
||||||
|
/**
 * Expand glob patterns and paths into a de-duplicated list of files.
 * - Patterns containing '*' are resolved with glob (files only).
 * - Existing directories are searched recursively for common frontend
 *   source extensions.
 * - Existing plain files are taken as-is; non-existent paths are dropped.
 */
async function expandGlobs(patterns) {
  // A Set keeps first-occurrence order and de-duplicates as we go.
  const collected = new Set();

  for (const entry of patterns) {
    if (entry.includes('*')) {
      for (const match of await glob(entry, { nodir: true })) {
        collected.add(match);
      }
      continue;
    }
    if (!fs.existsSync(entry)) {
      continue; // silently skip missing paths, matching CLI behavior
    }
    if (fs.statSync(entry).isDirectory()) {
      const nested = await glob(`${entry}/**/*.{js,jsx,ts,tsx,css,scss,vue,svelte}`, { nodir: true });
      nested.forEach((file) => collected.add(file));
    } else {
      collected.add(entry);
    }
  }

  return [...collected];
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print validation results
|
||||||
|
*/
|
||||||
|
function printResults(results, options) {
  // Human-readable report writer. Only prints — exit codes are decided by
  // the caller. In --quiet mode, per-file passes, warnings and the ignored
  // count are suppressed; errors and the summary always print.
  if (!options.quiet) {
    console.log(`\n${c.bold}=== DSS Rules Validation ===${c.reset}`);
    console.log(`${c.dim}Rules version: ${results.rulesVersion}${c.reset}\n`);
  }

  for (const fileResult of results.fileResults) {
    if (fileResult.errors.length === 0 && fileResult.warnings.length === 0) {
      if (!options.quiet) {
        console.log(`${c.green}✓${c.reset} ${fileResult.file}`);
      }
      continue;
    }

    // ⚠ when the file still passed (warnings only), ✗ when it failed.
    const icon = fileResult.passed ? c.yellow + '⚠' : c.red + '✗';
    console.log(`${icon}${c.reset} ${fileResult.file}`);

    for (const error of fileResult.errors) {
      console.log(`  ${c.red}ERROR${c.reset} [${error.rule}] ${error.line}:${error.column}`);
      console.log(`    ${error.message}`);
      console.log(`    ${c.dim}Found: ${c.yellow}${error.match}${c.reset}`);
    }

    for (const warning of fileResult.warnings) {
      if (!options.quiet) {
        console.log(`  ${c.yellow}WARN${c.reset} [${warning.rule}] ${warning.line}:${warning.column}`);
        console.log(`    ${warning.message}`);
      }
    }

    if (fileResult.ignored.length > 0 && !options.quiet) {
      console.log(`  ${c.dim}(${fileResult.ignored.length} ignored)${c.reset}`);
    }
  }

  // Summary
  console.log(`\n${c.bold}=== Summary ===${c.reset}`);
  console.log(`Files: ${results.passedFiles}/${results.totalFiles} passed`);
  console.log(`Errors: ${c.red}${results.totalErrors}${c.reset}`);
  console.log(`Warnings: ${c.yellow}${results.totalWarnings}${c.reset}`);
  if (results.totalIgnored > 0) {
    console.log(`Ignored: ${c.dim}${results.totalIgnored}${c.reset}`);
  }

  // New violations if baseline comparison (these fields only exist after
  // compareWithBaseline() has run).
  if (results.newErrors !== undefined) {
    console.log(`\n${c.bold}New violations:${c.reset} ${results.newErrors.length} errors, ${results.newWarnings.length} warnings`);
    console.log(`${c.dim}Existing:${c.reset} ${results.existingErrors.length} errors, ${results.existingWarnings.length} warnings`);
  }

  // Final status: errors always fail; warnings fail only under --strict.
  if (results.totalErrors > 0) {
    console.log(`\n${c.red}${c.bold}Validation failed!${c.reset}`);
  } else if (results.totalWarnings > 0 && options.strict) {
    console.log(`\n${c.yellow}${c.bold}Validation failed (strict mode)!${c.reset}`);
  } else {
    console.log(`\n${c.green}${c.bold}Validation passed!${c.reset}`);
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check for version drift and warn
|
||||||
|
*/
|
||||||
|
/**
 * In CI mode, announce which @dss/rules version is running so CI logs
 * record it. No-op outside CI mode; never throws.
 */
async function checkVersionDrift(options) {
  if (!options.ciMode) {
    return;
  }

  try {
    // Placeholder for a real npm-registry drift check: for now we only
    // report the version that is actually loaded.
    const currentVersion = rules.getVersion();
    console.log(`${c.cyan}[CI]${c.reset} Using @dss/rules v${currentVersion}`);
  } catch (e) {
    // Best-effort only — version reporting must never fail the run.
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load baseline for comparison
|
||||||
|
*/
|
||||||
|
/**
 * Load a baseline report for comparison.
 * Returns the parsed JSON, or null when no path was given, the file does
 * not exist, or it cannot be read/parsed (a warning is printed then).
 */
function loadBaseline(baselinePath) {
  if (!baselinePath) {
    return null;
  }

  try {
    return fs.existsSync(baselinePath)
      ? JSON.parse(fs.readFileSync(baselinePath, 'utf-8'))
      : null;
  } catch (e) {
    console.error(`${c.yellow}Warning: Could not load baseline: ${e.message}${c.reset}`);
    return null;
  }
}
|
||||||
|
|
||||||
|
/**
 * CLI entry point. Dispatches the `init` subcommand to its own script;
 * everything else runs `validate` (the default): parse flags, expand file
 * globs, validate, optionally diff against a baseline, print (or emit JSON),
 * and exit 0 (pass), 1 (violations), or 2 (usage error).
 */
async function main() {
  const argv = process.argv.slice(2);

  // `init` is handled by a dedicated script.
  if (argv[0] === 'init') {
    require('./init');
    return;
  }

  // `validate` is the default command; strip it when given explicitly.
  const rest = argv[0] === 'validate' ? argv.slice(1) : argv;
  const options = parseArgs(rest);

  if (options.help) {
    printHelp();
    process.exit(0);
  }

  if (options.version) {
    printVersion();
    process.exit(0);
  }

  if (options.selfTest) {
    selfTest();
    return;
  }

  if (options.files.length === 0) {
    console.error(`${c.red}Error: No files specified${c.reset}`);
    console.log('Run with --help for usage information');
    process.exit(2);
  }

  // Announce the rules version when running under CI.
  await checkVersionDrift(options);

  const files = await expandGlobs(options.files);
  if (files.length === 0) {
    console.error(`${c.yellow}Warning: No files matched the patterns${c.reset}`);
    process.exit(0);
  }

  let results = rules.validateFiles(files);

  // With a baseline supplied, report relative to it instead of absolutes.
  const baseline = loadBaseline(options.baseline);
  if (baseline) {
    results = rules.compareWithBaseline(results, baseline);
  }

  if (options.json) {
    console.log(JSON.stringify(results, null, 2));
  } else {
    printResults(results, options);
  }

  // Exit codes: errors always fail; warnings fail only under --strict.
  if (results.totalErrors > 0) {
    process.exit(1);
  }
  if (options.strict && results.totalWarnings > 0) {
    process.exit(1);
  }
  process.exit(0);
}
|
||||||
|
|
||||||
|
// Top-level error trap: any unexpected failure exits with code 2.
main().catch((error) => {
  console.error(`${c.red}Error: ${error.message}${c.reset}`);
  process.exit(2);
});
|
||||||
489
packages/dss-rules/bin/init.js
Normal file
489
packages/dss-rules/bin/init.js
Normal file
@@ -0,0 +1,489 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* DSS Project Initialization CLI
|
||||||
|
*
|
||||||
|
* Sets up a new project for DSS validation:
|
||||||
|
* - Creates ds.config.json
|
||||||
|
* - Sets up .dss/ folder with .gitignore
|
||||||
|
* - Configures package.json scripts
|
||||||
|
* - Optionally sets up CI workflow
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* npx @dss/rules init
|
||||||
|
* npx @dss/rules init --ci gitea
|
||||||
|
* npx @dss/rules init --force
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const readline = require('readline');
|
||||||
|
|
||||||
|
// Version of this package; shown in the init banner and recorded in
// .dss/metadata.json for stale-cache detection.
const PACKAGE_VERSION = require('../package.json').version;

// Template paths
// Directory shipping the CI workflow templates bundled with the package.
const TEMPLATES_DIR = path.join(__dirname, '..', 'templates');

// Default config template
// Baseline ds.config.json written by `init`; project id/name are filled in
// later from prompts or flags.
// NOTE(review): callers copy this with a shallow spread before mutating, so
// the nested `project`/`extends` objects are shared with this template —
// verify before reusing it elsewhere in the same process.
const DEFAULT_CONFIG = {
  "$schema": "https://dss.overbits.luz.uy/schemas/ds.config.json",
  "project": {
    "id": "",
    "name": "",
    "description": "Design system validation for this project"
  },
  "extends": {
    "skin": "classic"
  },
  "validation": {
    "rules": ["colors", "spacing", "typography", "components", "accessibility"],
    "severity": {
      "colors": "error",
      "spacing": "warning",
      "typography": "warning",
      "components": "error",
      "accessibility": "warning"
    }
  },
  "overrides": {
    "tokens": {}
  }
};

// CI platform configurations
// Supported platforms: which packaged template to use, plus where the
// generated workflow is written relative to the project root.
const CI_PLATFORMS = {
  gitea: {
    name: 'Gitea Actions',
    template: 'gitea-workflow.yml',
    destDir: '.gitea/workflows',
    destFile: 'dss-validate.yml'
  },
  github: {
    name: 'GitHub Actions',
    template: 'github-workflow.yml',
    destDir: '.github/workflows',
    destFile: 'dss-validate.yml'
  },
  gitlab: {
    name: 'GitLab CI',
    template: 'gitlab-ci.yml',
    destDir: '', // GitLab expects its config at the repository root
    destFile: '.gitlab-ci.yml'
  }
};
|
||||||
|
|
||||||
|
/**
 * Entry point for `dss-rules init`.
 *
 * Steps: write ds.config.json, create the .dss/ cache folder, update the
 * project .gitignore, add npm scripts, and optionally write a CI workflow.
 * Exits 1 when the project is already initialized and --force was not given.
 */
async function main() {
  const args = process.argv.slice(2);
  const options = parseArgs(args);

  console.log('\n🎨 DSS Project Initialization\n');
  console.log(`Version: ${PACKAGE_VERSION}`);
  console.log('─'.repeat(40) + '\n');

  const projectRoot = process.cwd();

  // Refuse to clobber an existing setup unless --force was passed.
  const configPath = path.join(projectRoot, 'ds.config.json');
  if (fs.existsSync(configPath) && !options.force) {
    console.log('⚠️ Project already initialized (ds.config.json exists)');
    console.log(' Use --force to reinitialize\n');
    process.exit(1);
  }

  // Interactive mode if not all options provided
  const config = options.interactive
    ? await interactiveSetup(projectRoot)
    : await autoSetup(projectRoot, options);

  // 1. Create ds.config.json
  console.log('📝 Creating ds.config.json...');
  fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
  console.log(' ✓ Configuration file created\n');

  // 2. Create .dss/ folder structure
  console.log('📁 Setting up .dss/ folder...');
  setupDssFolder(projectRoot);
  console.log(' ✓ Local cache folder ready\n');

  // 3. Update .gitignore
  console.log('📋 Updating .gitignore...');
  updateGitignore(projectRoot);
  console.log(' ✓ .gitignore updated\n');

  // 4. Add npm scripts
  console.log('📦 Adding npm scripts...');
  addNpmScripts(projectRoot);
  console.log(' ✓ Package.json updated\n');

  // 5. Setup CI if requested
  if (options.ci) {
    const platform = CI_PLATFORMS[options.ci];
    console.log(`🔧 Setting up ${platform?.name || options.ci} CI...`);
    setupCI(projectRoot, options.ci);
    // BUG FIX: previously "✓ CI workflow created" was printed even when the
    // platform was unknown and setupCI wrote nothing (it only warns).
    if (platform) {
      console.log(' ✓ CI workflow created\n');
    }
  }

  // Success message
  console.log('─'.repeat(40));
  console.log('\n✅ DSS initialization complete!\n');
  console.log('Next steps:');
  console.log(' 1. Review ds.config.json and customize rules');
  console.log(' 2. Run: npx dss-rules validate');
  console.log(' 3. Fix any violations found\n');

  if (!options.ci) {
    console.log('💡 Tip: Set up CI validation with:');
    console.log(' npx @dss/rules init --ci gitea\n');
  }
}
|
||||||
|
|
||||||
|
/**
 * Parse `init` CLI flags into an options object.
 * Value-taking flags (--ci, --id, --name) consume the following argument;
 * a bare `--ci` falls back to 'gitea'. Unknown arguments are ignored.
 */
function parseArgs(args) {
  const options = {
    force: false,
    ci: null,
    interactive: true,
    projectId: null,
    projectName: null
  };

  for (let i = 0; i < args.length; i++) {
    switch (args[i]) {
      case '--force':
      case '-f':
        options.force = true;
        break;
      case '--ci':
        options.ci = args[++i] || 'gitea';
        break;
      case '--yes':
      case '-y':
        options.interactive = false;
        break;
      case '--id':
        options.projectId = args[++i];
        break;
      case '--name':
        options.projectName = args[++i];
        break;
      case '--help':
      case '-h':
        showHelp();
        process.exit(0);
    }
  }

  return options;
}
|
||||||
|
|
||||||
|
/**
 * Print CLI usage for `dss-rules init` to stdout.
 * The text below is user-facing output; keep it in sync with parseArgs.
 */
function showHelp() {
  console.log(`
DSS Project Initialization

Usage:
  npx @dss/rules init [options]

Options:
  --force, -f        Overwrite existing configuration
  --ci <platform>    Set up CI workflow (gitea, github, gitlab)
  --yes, -y          Skip interactive prompts, use defaults
  --id <id>          Project ID (default: directory name)
  --name <name>      Project display name
  --help, -h         Show this help message

Examples:
  npx @dss/rules init
  npx @dss/rules init --ci gitea
  npx @dss/rules init -y --ci github
  npx @dss/rules init --id my-app --name "My Application"
`);
}
|
||||||
|
|
||||||
|
/**
 * Prompt on stdin for project id, display name, and base skin, then build
 * the project config. Empty answers fall back to the bracketed defaults.
 *
 * @param {string} projectRoot - Absolute path of the project being set up.
 * @returns {Promise<Object>} a fresh config object (deep copy of the template)
 */
async function interactiveSetup(projectRoot) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });

  const question = (prompt) => new Promise(resolve => rl.question(prompt, resolve));

  try {
    const dirName = path.basename(projectRoot);

    const projectId = await question(`Project ID [${dirName}]: `) || dirName;
    const projectName = await question(`Project Name [${projectId}]: `) || projectId;
    const skin = await question('Base skin [classic]: ') || 'classic';

    rl.close();

    // BUG FIX: a shallow spread ({ ...DEFAULT_CONFIG }) shared the nested
    // `project`/`extends` objects with the template, so the assignments
    // below mutated DEFAULT_CONFIG itself. Deep-clone instead.
    const config = structuredClone(DEFAULT_CONFIG);
    config.project.id = projectId;
    config.project.name = projectName;
    config.extends.skin = skin;

    return config;
  } catch (e) {
    // Always release stdin before propagating the failure.
    rl.close();
    throw e;
  }
}
|
||||||
|
|
||||||
|
/**
 * Build the project config non-interactively from CLI flags, defaulting the
 * project id to the directory name and the display name to the id.
 *
 * @param {string} projectRoot - Absolute path of the project being set up.
 * @param {Object} options - Parsed CLI options (projectId/projectName may be null).
 * @returns {Promise<Object>} a fresh config object (deep copy of the template)
 */
async function autoSetup(projectRoot, options) {
  const dirName = path.basename(projectRoot);

  // BUG FIX: deep-clone so the assignments below cannot mutate the shared
  // DEFAULT_CONFIG template (a shallow spread shared its nested objects).
  const config = structuredClone(DEFAULT_CONFIG);
  config.project.id = options.projectId || dirName;
  config.project.name = options.projectName || config.project.id;

  return config;
}
|
||||||
|
|
||||||
|
/**
 * Create the .dss/ working directory: a cache subfolder, a .gitignore that
 * keeps everything except itself out of version control, and a metadata
 * file recording when (and with which rules version) the project was
 * initialized, for stale-cache detection.
 */
function setupDssFolder(projectRoot) {
  const dssDir = path.join(projectRoot, '.dss');
  const cacheDir = path.join(dssDir, 'cache');

  // Ensure both directories exist.
  for (const dir of [dssDir, cacheDir]) {
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir, { recursive: true });
    }
  }

  // Ignore everything inside .dss/ except the .gitignore itself.
  const gitignoreBody = '# DSS local cache - do not commit\n*\n!.gitignore\n';
  fs.writeFileSync(path.join(dssDir, '.gitignore'), gitignoreBody);

  // Record provenance for later stale-cache warnings.
  const metadata = {
    initialized_at: new Date().toISOString(),
    rules_version: PACKAGE_VERSION,
    last_updated: null
  };
  fs.writeFileSync(path.join(dssDir, 'metadata.json'), JSON.stringify(metadata, null, 2));
}
|
||||||
|
|
||||||
|
/**
 * Ensure the project's root .gitignore excludes the .dss/ cache folder
 * (while keeping .dss/.gitignore tracked). Idempotent: if '.dss/' already
 * appears anywhere in the file, it is left untouched.
 */
function updateGitignore(projectRoot) {
  const gitignorePath = path.join(projectRoot, '.gitignore');

  let existing = '';
  if (fs.existsSync(gitignorePath)) {
    existing = fs.readFileSync(gitignorePath, 'utf-8');

    // Check if already configured
    if (existing.includes('.dss/')) {
      console.log(' (already configured)');
      return;
    }
  }

  // Append the DSS section (blank separator line first).
  const section = ['', '# DSS local analysis cache', '.dss/', '!.dss/.gitignore'].join('\n');
  fs.writeFileSync(gitignorePath, existing + section + '\n');
}
|
||||||
|
|
||||||
|
/**
 * Add dss:* convenience scripts to the project's package.json without
 * overwriting any scripts the project already defines. Projects with no
 * package.json are skipped; parse/write failures are reported, not thrown.
 */
function addNpmScripts(projectRoot) {
  const packagePath = path.join(projectRoot, 'package.json');

  if (!fs.existsSync(packagePath)) {
    console.log(' (no package.json found, skipping)');
    return;
  }

  try {
    const pkg = JSON.parse(fs.readFileSync(packagePath, 'utf-8'));
    pkg.scripts = pkg.scripts || {};

    // Only fill in scripts the project has not customized already.
    const defaults = {
      'dss:validate': 'dss-rules validate',
      'dss:validate:ci': 'dss-rules validate --ci --strict --json > .dss/results.json',
      'dss:baseline': 'dss-rules validate --baseline'
    };
    for (const [name, command] of Object.entries(defaults)) {
      if (!pkg.scripts[name]) {
        pkg.scripts[name] = command;
      }
    }

    fs.writeFileSync(packagePath, JSON.stringify(pkg, null, 2) + '\n');
  } catch (e) {
    console.log(` ⚠️ Failed to update package.json: ${e.message}`);
  }
}
|
||||||
|
|
||||||
|
/**
 * Write a CI validation workflow for the requested platform. Falls back to
 * a built-in template when the packaged template file is missing. Unknown
 * platforms produce a warning and write nothing.
 */
function setupCI(projectRoot, platform) {
  const ciConfig = CI_PLATFORMS[platform];

  if (!ciConfig) {
    console.log(` ⚠️ Unknown CI platform: ${platform}`);
    console.log(` Supported: ${Object.keys(CI_PLATFORMS).join(', ')}`);
    return;
  }

  // Prefer the packaged template; use the inline default when it is absent.
  const templatePath = path.join(TEMPLATES_DIR, ciConfig.template);
  const content = fs.existsSync(templatePath)
    ? fs.readFileSync(templatePath, 'utf-8')
    : getDefaultCITemplate(platform);

  writeCIFile(projectRoot, ciConfig, content);
}
|
||||||
|
|
||||||
|
/**
 * Write workflow `content` to ciConfig.destDir/ciConfig.destFile under the
 * project root, creating the destination directory when needed. An empty
 * destDir (e.g. GitLab) writes directly at the project root.
 */
function writeCIFile(projectRoot, ciConfig, content) {
  const destDir = path.join(projectRoot, ciConfig.destDir);

  if (ciConfig.destDir && !fs.existsSync(destDir)) {
    fs.mkdirSync(destDir, { recursive: true });
  }

  fs.writeFileSync(path.join(destDir, ciConfig.destFile), content);
}
|
||||||
|
|
||||||
|
/**
 * Return an inline CI workflow template for `platform` ('github', 'gitlab',
 * or anything else → the Gitea-style default). Used as a fallback when the
 * packaged template file under templates/ is not found on disk.
 *
 * All three templates: install dependencies, honor a break-glass [dss-skip]
 * marker in the latest commit message, run `npm run dss:validate:ci`, then
 * POST .dss/results.json to the metrics dashboard using the
 * DSS_DASHBOARD_URL / DSS_API_TOKEN secrets.
 *
 * `\${...}` sequences are escaped so they land literally in the emitted
 * YAML (shell/Actions expansion happens in CI, not here).
 * NOTE(review): YAML indentation below was reconstructed from a collapsed
 * rendering — verify against the packaged templates.
 */
function getDefaultCITemplate(platform) {
  if (platform === 'github') {
    return `# DSS Design System Validation
name: DSS Validate

on:
  push:
    branches: [main, master, develop]
  pull_request:
    branches: [main, master]

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Check for [dss-skip]
        id: skip-check
        run: |
          if git log -1 --pretty=%B | grep -q '\\[dss-skip\\]'; then
            echo "skip=true" >> $GITHUB_OUTPUT
            echo "⚠️ DSS validation skipped via [dss-skip] commit message"
          else
            echo "skip=false" >> $GITHUB_OUTPUT
          fi

      - name: Run DSS validation
        if: steps.skip-check.outputs.skip != 'true'
        run: npm run dss:validate:ci

      - name: Upload metrics to dashboard
        if: steps.skip-check.outputs.skip != 'true'
        run: |
          curl -X POST "\${DSS_DASHBOARD_URL}/api/metrics/upload" \\
            -H "Content-Type: application/json" \\
            -H "Authorization: Bearer \${DSS_API_TOKEN}" \\
            -d @.dss/results.json
        env:
          DSS_DASHBOARD_URL: \${{ secrets.DSS_DASHBOARD_URL }}
          DSS_API_TOKEN: \${{ secrets.DSS_API_TOKEN }}
`;
  }

  if (platform === 'gitlab') {
    return `# DSS Design System Validation
stages:
  - validate

dss-validate:
  stage: validate
  image: node:20
  script:
    - npm ci
    - |
      if git log -1 --pretty=%B | grep -q '\\[dss-skip\\]'; then
        echo "⚠️ DSS validation skipped via [dss-skip] commit message"
        exit 0
      fi
    - npm run dss:validate:ci
    - |
      curl -X POST "\${DSS_DASHBOARD_URL}/api/metrics/upload" \\
        -H "Content-Type: application/json" \\
        -H "Authorization: Bearer \${DSS_API_TOKEN}" \\
        -d @.dss/results.json
  only:
    - main
    - master
    - develop
    - merge_requests
`;
  }

  // Default to gitea template (most similar to the one in templates/)
  return `# DSS Design System Validation
name: DSS Validate

on:
  push:
    branches: [main, master, develop]
  pull_request:
    branches: [main, master]

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install dependencies
        run: npm ci

      - name: Check for [dss-skip]
        id: skip-check
        run: |
          if git log -1 --pretty=%B | grep -q '\\[dss-skip\\]'; then
            echo "skip=true" >> \$GITHUB_OUTPUT
            echo "::warning::DSS validation skipped via [dss-skip] commit message"
          else
            echo "skip=false" >> \$GITHUB_OUTPUT
          fi

      - name: Run DSS validation
        if: steps.skip-check.outputs.skip != 'true'
        run: npm run dss:validate:ci

      - name: Upload metrics to dashboard
        if: steps.skip-check.outputs.skip != 'true' && always()
        run: |
          curl -X POST "\${DSS_DASHBOARD_URL}/api/metrics/upload" \\
            -H "Content-Type: application/json" \\
            -H "Authorization: Bearer \${DSS_API_TOKEN}" \\
            -d @.dss/results.json
        env:
          DSS_DASHBOARD_URL: \${{ secrets.DSS_DASHBOARD_URL }}
          DSS_API_TOKEN: \${{ secrets.DSS_API_TOKEN }}
`;
}
|
||||||
|
|
||||||
|
// Run main
|
||||||
|
// Run main
// Any unexpected failure surfaces its message and exits non-zero.
main().catch((error) => {
  console.error('\n❌ Initialization failed:', error.message);
  process.exit(1);
});
|
||||||
@@ -1,19 +1,23 @@
|
|||||||
/**
|
/**
|
||||||
* @dss/rules - Design System Rules Package
|
* @dss/rules - Design System Rules Package
|
||||||
*
|
*
|
||||||
* Provides versioned rule definitions for enterprise design system enforcement.
|
* Versioned rule definitions for enterprise design system enforcement.
|
||||||
* Pull-based distribution via npm for consistent rule versions across 60+ projects.
|
* Pull-based distribution via npm for consistent rule versions across projects.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
|
||||||
// Rule categories
|
|
||||||
const CATEGORIES = ['colors', 'spacing', 'typography', 'components', 'accessibility'];
|
const CATEGORIES = ['colors', 'spacing', 'typography', 'components', 'accessibility'];
|
||||||
|
// Match dss-ignore in various comment styles
// - // dss-ignore (JS/TS line comment)
// - /* dss-ignore */ (CSS/JS block comment)
// - # dss-ignore (Python/YAML/Shell comment)
// The optional "-next-line" suffix is also matched so callers can suppress
// the following line instead of the comment's own line.
const IGNORE_PATTERN = /\/\/\s*dss-ignore(-next-line)?|\/\*\s*dss-ignore(-next-line)?\s*\*\/|#\s*dss-ignore(-next-line)?/;
// Break-glass marker: a commit message containing [dss-skip] bypasses CI validation.
const SKIP_COMMIT_PATTERN = /\[dss-skip\]/;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Load all rules from the rules directory
|
* Load all rules from the rules directory
|
||||||
* @returns {Object} Rules organized by category
|
|
||||||
*/
|
*/
|
||||||
function loadRules() {
|
function loadRules() {
|
||||||
const rules = {};
|
const rules = {};
|
||||||
@@ -23,22 +27,18 @@ function loadRules() {
|
|||||||
const rulePath = path.join(rulesDir, `${category}.json`);
|
const rulePath = path.join(rulesDir, `${category}.json`);
|
||||||
if (fs.existsSync(rulePath)) {
|
if (fs.existsSync(rulePath)) {
|
||||||
try {
|
try {
|
||||||
const content = fs.readFileSync(rulePath, 'utf-8');
|
rules[category] = JSON.parse(fs.readFileSync(rulePath, 'utf-8'));
|
||||||
rules[category] = JSON.parse(content);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Failed to load rules for ${category}:`, error.message);
|
console.error(`Failed to load rules for ${category}:`, error.message);
|
||||||
rules[category] = null;
|
rules[category] = null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return rules;
|
return rules;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get rules for a specific category
|
* Get rules for a specific category
|
||||||
* @param {string} category - Rule category (colors, spacing, etc.)
|
|
||||||
* @returns {Object|null} Rule definitions or null if not found
|
|
||||||
*/
|
*/
|
||||||
function getRulesByCategory(category) {
|
function getRulesByCategory(category) {
|
||||||
const rules = loadRules();
|
const rules = loadRules();
|
||||||
@@ -47,112 +47,244 @@ function getRulesByCategory(category) {
|
|||||||
|
|
||||||
/**
 * Get all rule IDs across all categories, as "category/rule-id" strings.
 * Categories that failed to load (null rule sets) are skipped.
 */
function getAllRuleIds() {
  const loaded = loadRules();
  const ids = [];

  for (const [category, ruleSet] of Object.entries(loaded)) {
    const categoryRules = ruleSet?.rules ?? [];
    for (const rule of categoryRules) {
      ids.push(`${category}/${rule.id}`);
    }
  }

  return ids;
}
|
||||||
|
|
||||||
/**
 * Get a specific rule by full ID (category/rule-id).
 * @returns {Object|null} the rule definition, or null when the category or
 *   rule is unknown.
 */
function getRule(ruleId) {
  const [category, id] = ruleId.split('/');
  const ruleSet = getRulesByCategory(category);

  const candidates = ruleSet?.rules;
  if (!candidates) return null;

  const found = candidates.find((rule) => rule.id === id);
  return found ?? null;
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validate a value against rule patterns
|
* Get rule severity
|
||||||
* @param {string} ruleId - Full rule ID
|
|
||||||
* @param {string} value - Value to validate
|
|
||||||
* @returns {Object} Validation result {valid, violations}
|
|
||||||
*/
|
|
||||||
function validateValue(ruleId, value) {
|
|
||||||
const rule = getRule(ruleId);
|
|
||||||
if (!rule) {
|
|
||||||
return { valid: true, violations: [], error: `Rule not found: ${ruleId}` };
|
|
||||||
}
|
|
||||||
|
|
||||||
const violations = [];
|
|
||||||
|
|
||||||
// Check forbidden patterns
|
|
||||||
if (rule.patterns?.forbidden) {
|
|
||||||
for (const pattern of rule.patterns.forbidden) {
|
|
||||||
const regex = new RegExp(pattern, 'gi');
|
|
||||||
const matches = value.match(regex);
|
|
||||||
if (matches) {
|
|
||||||
violations.push({
|
|
||||||
rule: ruleId,
|
|
||||||
pattern,
|
|
||||||
matches,
|
|
||||||
severity: rule.severity || 'warning',
|
|
||||||
message: `Found forbidden pattern: ${matches.join(', ')}`
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
valid: violations.length === 0,
|
|
||||||
violations
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get required tokens from all rule sets
|
|
||||||
* @returns {Object} Required tokens organized by category
|
|
||||||
*/
|
|
||||||
function getRequiredTokens() {
|
|
||||||
const rules = loadRules();
|
|
||||||
const required = {};
|
|
||||||
|
|
||||||
for (const [category, ruleSet] of Object.entries(rules)) {
|
|
||||||
if (ruleSet?.tokens?.required) {
|
|
||||||
required[category] = ruleSet.tokens.required;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return required;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get severity level for a rule
|
|
||||||
* @param {string} ruleId - Full rule ID
|
|
||||||
* @returns {string} Severity level (error, warning, info)
|
|
||||||
*/
|
*/
|
||||||
/**
 * Resolve the effective severity for a rule: the rule's own severity wins,
 * then the category's default, then 'warning' (also used for unknown rules).
 */
function getRuleSeverity(ruleId) {
  const rule = getRule(ruleId);
  if (!rule) return 'warning';

  // Rule-specific severity overrides category default
  if (rule.severity) {
    return rule.severity;
  }

  // Fall back to category default
  const [category] = ruleId.split('/');
  const categorySet = getRulesByCategory(category);
  return categorySet?.severity || 'warning';
}
|
||||||
|
|
||||||
|
/**
 * Decide whether a 1-based line number is suppressed by a dss-ignore
 * comment: an inline marker on the line itself, a dss-ignore-next-line on
 * the line above, or a standalone dss-ignore comment occupying the whole
 * line above. Out-of-range line numbers are never ignored.
 */
function isLineIgnored(lines, lineNumber) {
  const inRange = lineNumber > 0 && lineNumber <= lines.length;
  if (!inRange) return false;

  const current = lines[lineNumber - 1];
  const previous = lineNumber > 1 ? lines[lineNumber - 2] : '';

  // Matches /* dss-ignore */, // dss-ignore, or # dss-ignore alone on a line.
  const standalone = /^\s*(\/\*\s*dss-ignore\s*\*\/|\/\/\s*dss-ignore|#\s*dss-ignore)\s*$/;

  return (
    IGNORE_PATTERN.test(current) ||
    /dss-ignore-next-line/.test(previous) ||
    standalone.test(previous)
  );
}
|
||||||
|
|
||||||
|
/**
 * Validate raw file content against every rule category applicable to the
 * file's extension, honoring dss-ignore suppression comments.
 *
 * @param {string} content - Full text of the file.
 * @param {string} filePath - Path used for extension detection and reporting.
 * @param {Object} [options] - Reserved for future use.
 * @returns {Object} { file, errors, warnings, info, ignored, passed } where
 *   each violation carries rule, name, file, line, column, match, message.
 */
function validateContent(content, filePath, options = {}) {
  const results = {
    file: filePath,
    errors: [],
    warnings: [],
    info: [],
    ignored: [],
    passed: true
  };

  const lines = content.split('\n');
  const ext = path.extname(filePath).toLowerCase();
  const applicableCategories = getApplicableCategories(ext);

  for (const category of applicableCategories) {
    const ruleSet = getRulesByCategory(category);
    if (!ruleSet?.rules) continue;

    for (const rule of ruleSet.rules) {
      // Skip if file matches exception patterns
      if (rule.exceptions?.some(exc => {
        // Handle glob-like patterns more carefully
        // *.test.* should only match filenames like "foo.test.js", not paths containing "test"
        if (exc.startsWith('**/')) {
          // Directory pattern: **/fixtures/** -> match any path containing /fixtures/
          const dirName = exc.replace(/^\*\*\//, '').replace(/\/\*\*$/, '');
          return filePath.includes(`/${dirName}/`);
        } else if (exc.includes('/')) {
          // Path pattern
          const pattern = exc.replace(/\*\*/g, '.*').replace(/\*/g, '[^/]*');
          return new RegExp(pattern).test(filePath);
        } else if (exc.startsWith('*.') || exc.endsWith('.*')) {
          // Filename extension pattern: *.test.* matches only the basename
          const basename = path.basename(filePath);
          const pattern = '^' + exc.replace(/\./g, '\\.').replace(/\*/g, '.*') + '$';
          return new RegExp(pattern).test(basename);
        } else {
          // Simple value exception (like "transparent", "inherit")
          return false; // These are value exceptions, not file exceptions
        }
      })) continue;

      // Check forbidden patterns
      if (rule.patterns?.forbidden) {
        for (const pattern of rule.patterns.forbidden) {
          try {
            const regex = new RegExp(pattern, 'gm');
            let match;
            while ((match = regex.exec(content)) !== null) {
              // BUG FIX: a pattern that can match the empty string (e.g. "x*"
              // from a rule file) never advances lastIndex, which previously
              // hung this loop forever. Manually bump past zero-length matches.
              if (match.index === regex.lastIndex) {
                regex.lastIndex++;
              }

              // 1-based line; column counted from the preceding newline.
              const lineNumber = content.substring(0, match.index).split('\n').length;
              const column = match.index - content.lastIndexOf('\n', match.index - 1);

              // Check if this line is ignored
              if (isLineIgnored(lines, lineNumber)) {
                results.ignored.push({
                  rule: `${category}/${rule.id}`,
                  line: lineNumber,
                  column,
                  match: match[0]
                });
                continue;
              }

              const violation = {
                rule: `${category}/${rule.id}`,
                name: rule.name,
                file: filePath,
                line: lineNumber,
                column,
                match: match[0],
                message: rule.description || `Violation of ${rule.name}`
              };

              // Rule severity wins over the category default.
              const severity = rule.severity || ruleSet.severity || 'warning';
              if (severity === 'error') {
                results.errors.push(violation);
                results.passed = false;
              } else if (severity === 'warning') {
                results.warnings.push(violation);
              } else {
                results.info.push(violation);
              }
            }
          } catch (e) {
            // Invalid regex in a rule file must not crash validation; skip it.
          }
        }
      }
    }
  }

  return results;
}
|
||||||
|
|
||||||
|
/**
 * Validate a single file from disk. A missing file is reported as a failed
 * result carrying one error, rather than thrown.
 */
function validateFile(filePath, options = {}) {
  const missing = !fs.existsSync(filePath);
  if (missing) {
    // Synthesize a failed result so batch validation can keep going.
    return {
      file: filePath,
      errors: [{ message: `File not found: ${filePath}` }],
      warnings: [],
      info: [],
      ignored: [],
      passed: false
    };
  }

  return validateContent(fs.readFileSync(filePath, 'utf-8'), filePath, options);
}
|
||||||
|
|
||||||
|
/**
 * Map a file extension to the rule categories worth running on it.
 * Extensions serving two roles (e.g. .tsx is both script and markup)
 * receive the union of their groups, deduplicated in first-seen order.
 */
function getApplicableCategories(ext) {
  const groups = [
    { exts: ['.css', '.scss', '.sass', '.less', '.styl'],
      cats: ['colors', 'spacing', 'typography', 'accessibility'] },
    { exts: ['.js', '.jsx', '.ts', '.tsx', '.vue', '.svelte'],
      cats: ['colors', 'spacing', 'components'] },
    { exts: ['.html', '.htm', '.vue', '.svelte', '.jsx', '.tsx'],
      cats: ['accessibility', 'components'] }
  ];

  const selected = new Set();
  for (const { exts, cats } of groups) {
    if (exts.includes(ext)) {
      for (const cat of cats) selected.add(cat);
    }
  }
  return [...selected];
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate multiple files
|
||||||
|
*/
|
||||||
|
function validateFiles(files, options = {}) {
|
||||||
|
const results = {
|
||||||
|
totalFiles: files.length,
|
||||||
|
passedFiles: 0,
|
||||||
|
failedFiles: 0,
|
||||||
|
totalErrors: 0,
|
||||||
|
totalWarnings: 0,
|
||||||
|
totalIgnored: 0,
|
||||||
|
fileResults: [],
|
||||||
|
rulesVersion: getVersion()
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const fileResult = validateFile(file, options);
|
||||||
|
results.fileResults.push(fileResult);
|
||||||
|
|
||||||
|
if (fileResult.passed) {
|
||||||
|
results.passedFiles++;
|
||||||
|
} else {
|
||||||
|
results.failedFiles++;
|
||||||
|
}
|
||||||
|
results.totalErrors += fileResult.errors.length;
|
||||||
|
results.totalWarnings += fileResult.warnings.length;
|
||||||
|
results.totalIgnored += fileResult.ignored.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get required tokens from all rule sets
|
||||||
|
*/
|
||||||
|
function getRequiredTokens() {
|
||||||
|
const rules = loadRules();
|
||||||
|
const required = {};
|
||||||
|
for (const [category, ruleSet] of Object.entries(rules)) {
|
||||||
|
if (ruleSet?.tokens?.required) {
|
||||||
|
required[category] = ruleSet.tokens.required;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return required;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get package version
|
* Get package version
|
||||||
* @returns {string} Package version
|
|
||||||
*/
|
*/
|
||||||
function getVersion() {
|
function getVersion() {
|
||||||
const packagePath = path.join(__dirname, '..', 'package.json');
|
const packagePath = path.join(__dirname, '..', 'package.json');
|
||||||
@@ -161,38 +293,95 @@ function getVersion() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Export configuration for CI/CD integration
|
* Check if commit message contains skip flag
|
||||||
* @returns {Object} Configuration object for CI pipelines
|
*/
|
||||||
|
function shouldSkipValidation(commitMessage) {
|
||||||
|
return SKIP_COMMIT_PATTERN.test(commitMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get CI configuration
|
||||||
*/
|
*/
|
||||||
function getCIConfig() {
|
function getCIConfig() {
|
||||||
return {
|
return {
|
||||||
version: getVersion(),
|
version: getVersion(),
|
||||||
categories: CATEGORIES,
|
categories: CATEGORIES,
|
||||||
errorSeverities: ['error'],
|
|
||||||
warningSeverities: ['warning'],
|
|
||||||
blockingRules: getAllRuleIds().filter(id => getRuleSeverity(id) === 'error'),
|
blockingRules: getAllRuleIds().filter(id => getRuleSeverity(id) === 'error'),
|
||||||
advisoryRules: getAllRuleIds().filter(id => getRuleSeverity(id) !== 'error')
|
advisoryRules: getAllRuleIds().filter(id => getRuleSeverity(id) !== 'error'),
|
||||||
|
skipPattern: '[dss-skip]'
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compare against baseline to find new violations only
|
||||||
|
*/
|
||||||
|
function compareWithBaseline(current, baseline) {
|
||||||
|
if (!baseline) return current;
|
||||||
|
|
||||||
|
const baselineViolations = new Set(
|
||||||
|
baseline.fileResults?.flatMap(f =>
|
||||||
|
[...f.errors, ...f.warnings].map(v => `${v.file}:${v.rule}:${v.line}`)
|
||||||
|
) || []
|
||||||
|
);
|
||||||
|
|
||||||
|
const newResults = {
|
||||||
|
...current,
|
||||||
|
newErrors: [],
|
||||||
|
newWarnings: [],
|
||||||
|
existingErrors: [],
|
||||||
|
existingWarnings: []
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const fileResult of current.fileResults) {
|
||||||
|
for (const error of fileResult.errors) {
|
||||||
|
const key = `${error.file}:${error.rule}:${error.line}`;
|
||||||
|
if (baselineViolations.has(key)) {
|
||||||
|
newResults.existingErrors.push(error);
|
||||||
|
} else {
|
||||||
|
newResults.newErrors.push(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const warning of fileResult.warnings) {
|
||||||
|
const key = `${warning.file}:${warning.rule}:${warning.line}`;
|
||||||
|
if (baselineViolations.has(key)) {
|
||||||
|
newResults.existingWarnings.push(warning);
|
||||||
|
} else {
|
||||||
|
newResults.newWarnings.push(warning);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return newResults;
|
||||||
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
// Rule loading
|
// Rule loading
|
||||||
loadRules,
|
loadRules,
|
||||||
getRulesByCategory,
|
getRulesByCategory,
|
||||||
getAllRuleIds,
|
getAllRuleIds,
|
||||||
getRule,
|
getRule,
|
||||||
|
getRuleSeverity,
|
||||||
|
|
||||||
// Validation
|
// Validation
|
||||||
validateValue,
|
validateContent,
|
||||||
getRuleSeverity,
|
validateFile,
|
||||||
|
validateFiles,
|
||||||
|
isLineIgnored,
|
||||||
|
getApplicableCategories,
|
||||||
|
|
||||||
|
// Baseline comparison
|
||||||
|
compareWithBaseline,
|
||||||
|
|
||||||
|
// CI helpers
|
||||||
|
getCIConfig,
|
||||||
|
shouldSkipValidation,
|
||||||
|
|
||||||
// Token helpers
|
// Token helpers
|
||||||
getRequiredTokens,
|
getRequiredTokens,
|
||||||
|
|
||||||
// Metadata
|
// Metadata
|
||||||
getVersion,
|
getVersion,
|
||||||
getCIConfig,
|
CATEGORIES,
|
||||||
|
IGNORE_PATTERN,
|
||||||
// Constants
|
SKIP_COMMIT_PATTERN
|
||||||
CATEGORIES
|
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,17 +1,23 @@
|
|||||||
{
|
{
|
||||||
"name": "@dss/rules",
|
"name": "@dss/rules",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"description": "DSS Design System Rules - Versioned rule definitions for enterprise design system enforcement",
|
"description": "DSS Design System Rules - Versioned rule definitions for enterprise enforcement",
|
||||||
"main": "lib/index.js",
|
"main": "lib/index.js",
|
||||||
"types": "lib/index.d.ts",
|
"types": "lib/index.d.ts",
|
||||||
|
"bin": {
|
||||||
|
"dss-rules": "bin/cli.js",
|
||||||
|
"dss-init": "bin/init.js"
|
||||||
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"lib",
|
"lib",
|
||||||
|
"bin",
|
||||||
"rules",
|
"rules",
|
||||||
"schemas"
|
"schemas",
|
||||||
|
"templates"
|
||||||
],
|
],
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsc",
|
||||||
"test": "node lib/validate.js --self-test",
|
"test": "node bin/cli.js --self-test",
|
||||||
"prepublishOnly": "npm run build && npm test"
|
"prepublishOnly": "npm run build && npm test"
|
||||||
},
|
},
|
||||||
"keywords": [
|
"keywords": [
|
||||||
@@ -19,16 +25,14 @@
|
|||||||
"dss",
|
"dss",
|
||||||
"rules",
|
"rules",
|
||||||
"tokens",
|
"tokens",
|
||||||
"enterprise"
|
"enterprise",
|
||||||
|
"linting"
|
||||||
],
|
],
|
||||||
"author": "DSS Team",
|
"author": "DSS Team",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"typescript": "^5.0.0"
|
"typescript": "^5.0.0"
|
||||||
},
|
},
|
||||||
"peerDependencies": {
|
|
||||||
"ajv": "^8.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=18.0.0"
|
"node": ">=18.0.0"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -3,22 +3,20 @@
|
|||||||
"id": "accessibility",
|
"id": "accessibility",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"name": "Accessibility Rules",
|
"name": "Accessibility Rules",
|
||||||
"description": "WCAG 2.1 AA compliance rules for accessible design",
|
"description": "WCAG 2.1 AA compliance rules (token-based, not computed)",
|
||||||
"category": "accessibility",
|
"category": "accessibility",
|
||||||
"severity": "error",
|
"severity": "error",
|
||||||
"rules": [
|
"rules": [
|
||||||
{
|
{
|
||||||
"id": "images-have-alt",
|
"id": "images-have-alt",
|
||||||
"name": "Images Must Have Alt Text",
|
"name": "Images Must Have Alt Text",
|
||||||
"description": "All img elements must have meaningful alt text or be marked decorative",
|
"description": "All img elements must have alt attribute",
|
||||||
"severity": "error",
|
"severity": "error",
|
||||||
"wcag": "1.1.1",
|
"wcag": "1.1.1",
|
||||||
"validation": {
|
"validation": {
|
||||||
"type": "attribute-required",
|
"type": "attribute-required",
|
||||||
"element": "img",
|
"element": "img",
|
||||||
"attribute": "alt",
|
"attribute": "alt"
|
||||||
"allowEmpty": true,
|
|
||||||
"emptyMeansDecorative": true
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -29,8 +27,7 @@
|
|||||||
"wcag": "4.1.2",
|
"wcag": "4.1.2",
|
||||||
"validation": {
|
"validation": {
|
||||||
"type": "accessible-name",
|
"type": "accessible-name",
|
||||||
"elements": ["button", "[role=button]"],
|
"elements": ["button", "[role=button]"]
|
||||||
"sources": ["text content", "aria-label", "aria-labelledby"]
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -41,71 +38,38 @@
|
|||||||
"wcag": "1.3.1",
|
"wcag": "1.3.1",
|
||||||
"validation": {
|
"validation": {
|
||||||
"type": "label-association",
|
"type": "label-association",
|
||||||
"elements": ["input", "select", "textarea"],
|
"elements": ["input", "select", "textarea"]
|
||||||
"methods": ["for/id", "aria-labelledby", "aria-label", "wrapper"]
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "focus-visible",
|
"id": "no-focus-outline-none",
|
||||||
"name": "Focus Must Be Visible",
|
"name": "Do Not Remove Focus Outline",
|
||||||
"description": "Interactive elements must have visible focus indicators",
|
"description": "Never use outline: none on focusable elements",
|
||||||
"severity": "error",
|
"severity": "error",
|
||||||
"wcag": "2.4.7",
|
"wcag": "2.4.7",
|
||||||
"validation": {
|
"patterns": {
|
||||||
"type": "focus-style",
|
"forbidden": [
|
||||||
"minContrastRatio": 3.0,
|
"outline:\\s*none",
|
||||||
"forbiddenPatterns": ["outline: none", "outline: 0", ":focus { outline: none }"]
|
"outline:\\s*0(?![0-9])",
|
||||||
|
":focus\\s*\\{[^}]*outline:\\s*none"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "color-not-only",
|
|
||||||
"name": "Color Not Only Indicator",
|
|
||||||
"description": "Information must not be conveyed by color alone",
|
|
||||||
"severity": "warning",
|
|
||||||
"wcag": "1.4.1",
|
|
||||||
"guidelines": [
|
|
||||||
"Error states need icon + color + text",
|
|
||||||
"Links in text need underline or other indicator",
|
|
||||||
"Status indicators need icon or pattern"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "touch-target-size",
|
"id": "touch-target-size",
|
||||||
"name": "Minimum Touch Target Size",
|
"name": "Minimum Touch Target Size",
|
||||||
"description": "Interactive elements must be at least 44x44 CSS pixels",
|
"description": "Interactive elements should be at least 44x44 CSS pixels",
|
||||||
"severity": "warning",
|
"severity": "warning",
|
||||||
"wcag": "2.5.5",
|
"wcag": "2.5.5",
|
||||||
"validation": {
|
"guidelines": [
|
||||||
"type": "size-check",
|
"Use Button component which ensures minimum size",
|
||||||
"minWidth": 44,
|
"Ensure clickable areas have sufficient padding"
|
||||||
"minHeight": 44,
|
]
|
||||||
"elements": ["button", "a", "[role=button]", "input[type=checkbox]", "input[type=radio]"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "keyboard-navigation",
|
|
||||||
"name": "Keyboard Navigation",
|
|
||||||
"description": "All functionality must be accessible via keyboard",
|
|
||||||
"severity": "error",
|
|
||||||
"wcag": "2.1.1",
|
|
||||||
"validation": {
|
|
||||||
"type": "keyboard-accessible",
|
|
||||||
"requirements": [
|
|
||||||
"All interactive elements focusable",
|
|
||||||
"No keyboard traps",
|
|
||||||
"Logical tab order",
|
|
||||||
"Skip links for navigation"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"compliance": {
|
"compliance": {
|
||||||
"level": "AA",
|
"level": "AA",
|
||||||
"standards": ["WCAG 2.1"],
|
"standards": ["WCAG 2.1"],
|
||||||
"testingTools": [
|
"note": "Computed checks (contrast ratio) require runtime analysis"
|
||||||
"axe-core",
|
|
||||||
"pa11y",
|
|
||||||
"lighthouse"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,11 +10,11 @@
|
|||||||
{
|
{
|
||||||
"id": "no-hardcoded-colors",
|
"id": "no-hardcoded-colors",
|
||||||
"name": "No Hardcoded Colors",
|
"name": "No Hardcoded Colors",
|
||||||
"description": "All colors must use design tokens, not hardcoded hex/rgb values",
|
"description": "Colors must use design tokens, not hardcoded hex/rgb values",
|
||||||
"severity": "error",
|
"severity": "error",
|
||||||
"patterns": {
|
"patterns": {
|
||||||
"forbidden": [
|
"forbidden": [
|
||||||
"#[0-9a-fA-F]{3,8}",
|
"#[0-9a-fA-F]{3,8}(?![0-9a-fA-F])",
|
||||||
"rgb\\([^)]+\\)",
|
"rgb\\([^)]+\\)",
|
||||||
"rgba\\([^)]+\\)",
|
"rgba\\([^)]+\\)",
|
||||||
"hsl\\([^)]+\\)",
|
"hsl\\([^)]+\\)",
|
||||||
@@ -27,11 +27,7 @@
|
|||||||
"theme\\.[a-z]+"
|
"theme\\.[a-z]+"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"exceptions": [
|
"exceptions": ["*.test.*", "*.spec.*", "**/fixtures/**", "transparent", "inherit", "currentColor"]
|
||||||
"*.test.*",
|
|
||||||
"*.spec.*",
|
|
||||||
"**/fixtures/**"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "semantic-color-naming",
|
"id": "semantic-color-naming",
|
||||||
@@ -56,20 +52,7 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"tokens": {
|
"tokens": {
|
||||||
"required": [
|
"required": ["colors.primary", "colors.secondary", "colors.background", "colors.foreground", "colors.border", "colors.error", "colors.success", "colors.warning"],
|
||||||
"colors.primary",
|
"optional": ["colors.muted", "colors.accent", "colors.info"]
|
||||||
"colors.secondary",
|
|
||||||
"colors.background",
|
|
||||||
"colors.foreground",
|
|
||||||
"colors.border",
|
|
||||||
"colors.error",
|
|
||||||
"colors.success",
|
|
||||||
"colors.warning"
|
|
||||||
],
|
|
||||||
"optional": [
|
|
||||||
"colors.muted",
|
|
||||||
"colors.accent",
|
|
||||||
"colors.info"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,7 +10,7 @@
|
|||||||
{
|
{
|
||||||
"id": "use-design-system-components",
|
"id": "use-design-system-components",
|
||||||
"name": "Use Design System Components",
|
"name": "Use Design System Components",
|
||||||
"description": "Prefer design system components over custom implementations",
|
"description": "Prefer design system components over native HTML or custom implementations",
|
||||||
"severity": "error",
|
"severity": "error",
|
||||||
"components": {
|
"components": {
|
||||||
"required": {
|
"required": {
|
||||||
@@ -44,17 +44,12 @@
|
|||||||
"severity": "error",
|
"severity": "error",
|
||||||
"validation": {
|
"validation": {
|
||||||
"Button": {
|
"Button": {
|
||||||
"requiredProps": ["variant", "size"],
|
"requiredProps": ["variant"],
|
||||||
"conditionalProps": {
|
"conditionalProps": { "loading": ["loadingText"], "icon": ["aria-label"] }
|
||||||
"loading": ["loadingText"],
|
|
||||||
"icon": ["aria-label"]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"Input": {
|
"Input": {
|
||||||
"requiredProps": ["label", "name"],
|
"requiredProps": ["label", "name"],
|
||||||
"conditionalProps": {
|
"conditionalProps": { "error": ["errorMessage"] }
|
||||||
"error": ["errorMessage"]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"Modal": {
|
"Modal": {
|
||||||
"requiredProps": ["title", "onClose"],
|
"requiredProps": ["title", "onClose"],
|
||||||
@@ -62,52 +57,15 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "component-composition",
|
|
||||||
"name": "Component Composition Patterns",
|
|
||||||
"description": "Follow recommended composition patterns for complex UIs",
|
|
||||||
"severity": "info",
|
|
||||||
"patterns": {
|
|
||||||
"forms": {
|
|
||||||
"structure": ["Form", "FormField", "Input/Select", "Button"],
|
|
||||||
"guidelines": [
|
|
||||||
"Wrap inputs in FormField for consistent labeling",
|
|
||||||
"Use Form component for validation handling",
|
|
||||||
"Place submit button inside Form"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"lists": {
|
|
||||||
"structure": ["List", "ListItem"],
|
|
||||||
"guidelines": [
|
|
||||||
"Use semantic list components for accessibility",
|
|
||||||
"Implement virtualization for 50+ items"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"navigation": {
|
|
||||||
"structure": ["Nav", "NavItem", "NavLink"],
|
|
||||||
"guidelines": [
|
|
||||||
"Use Nav component for main navigation",
|
|
||||||
"Implement active state handling"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "no-inline-styles",
|
"id": "no-inline-styles",
|
||||||
"name": "No Inline Styles on Components",
|
"name": "No Inline Styles on Components",
|
||||||
"description": "Components should use className/variant props, not style attribute",
|
"description": "Components should use className/variant props, not style attribute",
|
||||||
"severity": "warning",
|
"severity": "warning",
|
||||||
"patterns": {
|
"patterns": {
|
||||||
"forbidden": [
|
"forbidden": ["style={{", "style={"]
|
||||||
"style={{",
|
},
|
||||||
"style={{"
|
"exceptions": ["dynamic positioning", "animations", "calculated values"]
|
||||||
],
|
|
||||||
"exceptions": [
|
|
||||||
"dynamic positioning",
|
|
||||||
"animations",
|
|
||||||
"calculated values"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"adoption": {
|
"adoption": {
|
||||||
@@ -116,10 +74,6 @@
|
|||||||
"target": 80,
|
"target": 80,
|
||||||
"excellent": 95
|
"excellent": 95
|
||||||
},
|
},
|
||||||
"metrics": [
|
"metrics": ["percentage_using_ds_components", "custom_component_count", "token_compliance_rate"]
|
||||||
"percentage_using_ds_components",
|
|
||||||
"custom_component_count",
|
|
||||||
"token_compliance_rate"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,7 +10,7 @@
|
|||||||
{
|
{
|
||||||
"id": "no-arbitrary-spacing",
|
"id": "no-arbitrary-spacing",
|
||||||
"name": "No Arbitrary Spacing Values",
|
"name": "No Arbitrary Spacing Values",
|
||||||
"description": "Spacing must use token scale (4px increments), not arbitrary values",
|
"description": "Spacing must use token scale, not arbitrary pixel values",
|
||||||
"severity": "warning",
|
"severity": "warning",
|
||||||
"patterns": {
|
"patterns": {
|
||||||
"forbidden": [
|
"forbidden": [
|
||||||
@@ -24,12 +24,7 @@
|
|||||||
"spacing\\.[a-z0-9]+"
|
"spacing\\.[a-z0-9]+"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"exceptions": [
|
"exceptions": ["0", "0px", "auto", "inherit"]
|
||||||
"0",
|
|
||||||
"0px",
|
|
||||||
"auto",
|
|
||||||
"inherit"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "spacing-scale",
|
"id": "spacing-scale",
|
||||||
@@ -40,29 +35,10 @@
|
|||||||
"type": "scale-check",
|
"type": "scale-check",
|
||||||
"allowedValues": [0, 4, 8, 12, 16, 20, 24, 32, 40, 48, 64, 80, 96, 128]
|
"allowedValues": [0, 4, 8, 12, 16, 20, 24, 32, 40, 48, 64, 80, 96, 128]
|
||||||
}
|
}
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "consistent-component-spacing",
|
|
||||||
"name": "Component Internal Spacing",
|
|
||||||
"description": "Components should use consistent internal spacing patterns",
|
|
||||||
"severity": "info",
|
|
||||||
"guidelines": [
|
|
||||||
"Use spacing.xs (4px) for tight groupings",
|
|
||||||
"Use spacing.sm (8px) for related elements",
|
|
||||||
"Use spacing.md (16px) for section separation",
|
|
||||||
"Use spacing.lg (24px) for major sections",
|
|
||||||
"Use spacing.xl (32px+) for page-level separation"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"tokens": {
|
"tokens": {
|
||||||
"required": [
|
"required": ["spacing.xs", "spacing.sm", "spacing.md", "spacing.lg", "spacing.xl"],
|
||||||
"spacing.xs",
|
|
||||||
"spacing.sm",
|
|
||||||
"spacing.md",
|
|
||||||
"spacing.lg",
|
|
||||||
"spacing.xl"
|
|
||||||
],
|
|
||||||
"scale": {
|
"scale": {
|
||||||
"xs": "4px",
|
"xs": "4px",
|
||||||
"sm": "8px",
|
"sm": "8px",
|
||||||
|
|||||||
@@ -10,15 +10,16 @@
|
|||||||
{
|
{
|
||||||
"id": "use-typography-scale",
|
"id": "use-typography-scale",
|
||||||
"name": "Use Typography Scale",
|
"name": "Use Typography Scale",
|
||||||
"description": "Font sizes must use the defined typography scale",
|
"description": "Font sizes must use the defined typography scale tokens",
|
||||||
"severity": "error",
|
"severity": "error",
|
||||||
"patterns": {
|
"patterns": {
|
||||||
"forbidden": [
|
"forbidden": [
|
||||||
"font-size:\\s*[0-9]+px",
|
"font-size:\\s*[0-9]+px",
|
||||||
"fontSize:\\s*[0-9]+"
|
"fontSize:\\s*[0-9]+",
|
||||||
|
"fontSize:\\s*'[0-9]+px'"
|
||||||
],
|
],
|
||||||
"allowed": [
|
"allowed": [
|
||||||
"var\\(--font-size-[a-z]+\\)",
|
"var\\(--font-size-[a-z0-9]+\\)",
|
||||||
"\\$font-size-[a-z]+",
|
"\\$font-size-[a-z]+",
|
||||||
"typography\\.[a-z]+"
|
"typography\\.[a-z]+"
|
||||||
]
|
]
|
||||||
@@ -36,26 +37,25 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "line-height-consistency",
|
"id": "no-font-family-override",
|
||||||
"name": "Consistent Line Heights",
|
"name": "No Font Family Override",
|
||||||
"description": "Line heights should match the typography scale",
|
"description": "Font families should use design system tokens",
|
||||||
"severity": "info",
|
"severity": "warning",
|
||||||
"guidelines": [
|
"patterns": {
|
||||||
"Use lineHeight.tight (1.25) for headings",
|
"forbidden": [
|
||||||
"Use lineHeight.normal (1.5) for body text",
|
"font-family:\\s*['\"][^'\"]+['\"]",
|
||||||
"Use lineHeight.relaxed (1.75) for long-form content"
|
"fontFamily:\\s*['\"][^'\"]+['\"]"
|
||||||
]
|
],
|
||||||
|
"allowed": [
|
||||||
|
"var\\(--font-[a-z]+\\)",
|
||||||
|
"\\$font-[a-z]+",
|
||||||
|
"fonts\\.[a-z]+"
|
||||||
|
]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"tokens": {
|
"tokens": {
|
||||||
"required": [
|
"required": ["typography.h1", "typography.h2", "typography.h3", "typography.body", "typography.small"],
|
||||||
"typography.h1",
|
|
||||||
"typography.h2",
|
|
||||||
"typography.h3",
|
|
||||||
"typography.body",
|
|
||||||
"typography.small",
|
|
||||||
"typography.caption"
|
|
||||||
],
|
|
||||||
"scale": {
|
"scale": {
|
||||||
"xs": "12px",
|
"xs": "12px",
|
||||||
"sm": "14px",
|
"sm": "14px",
|
||||||
@@ -64,12 +64,7 @@
|
|||||||
"xl": "20px",
|
"xl": "20px",
|
||||||
"2xl": "24px",
|
"2xl": "24px",
|
||||||
"3xl": "30px",
|
"3xl": "30px",
|
||||||
"4xl": "36px",
|
"4xl": "36px"
|
||||||
"5xl": "48px"
|
|
||||||
},
|
|
||||||
"fontFamilies": {
|
|
||||||
"sans": "Inter, system-ui, sans-serif",
|
|
||||||
"mono": "JetBrains Mono, monospace"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
118
packages/dss-rules/schemas/ds.config.schema.json
Normal file
118
packages/dss-rules/schemas/ds.config.schema.json
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||||
|
"$id": "https://dss.overbits.luz.uy/schemas/ds.config.schema.json",
|
||||||
|
"title": "DSS Project Configuration",
|
||||||
|
"description": "Configuration schema for DSS-enabled projects",
|
||||||
|
"type": "object",
|
||||||
|
"required": ["name", "rules"],
|
||||||
|
"properties": {
|
||||||
|
"$schema": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Project name"
|
||||||
|
},
|
||||||
|
"version": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Project version"
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
"type": "object",
|
||||||
|
"required": ["package"],
|
||||||
|
"properties": {
|
||||||
|
"package": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Rules package name (e.g., @dss/rules)"
|
||||||
|
},
|
||||||
|
"version": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Semver version constraint"
|
||||||
|
},
|
||||||
|
"overrides": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "Rule-specific overrides",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"severity": {
|
||||||
|
"enum": ["error", "warning", "info", "off"]
|
||||||
|
},
|
||||||
|
"enabled": {
|
||||||
|
"type": "boolean"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"analysis": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"include": {
|
||||||
|
"type": "array",
|
||||||
|
"items": { "type": "string" },
|
||||||
|
"description": "Glob patterns for files to analyze"
|
||||||
|
},
|
||||||
|
"exclude": {
|
||||||
|
"type": "array",
|
||||||
|
"items": { "type": "string" },
|
||||||
|
"description": "Glob patterns for files to exclude"
|
||||||
|
},
|
||||||
|
"output": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Output path for analysis graph"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"metrics": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"upload": {
|
||||||
|
"type": "boolean",
|
||||||
|
"description": "Whether to upload metrics to dashboard"
|
||||||
|
},
|
||||||
|
"dashboardUrl": {
|
||||||
|
"type": "string",
|
||||||
|
"format": "uri",
|
||||||
|
"description": "Dashboard API endpoint"
|
||||||
|
},
|
||||||
|
"projectId": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Project identifier in dashboard"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ci": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"blocking": {
|
||||||
|
"type": "boolean",
|
||||||
|
"description": "Whether errors block the pipeline"
|
||||||
|
},
|
||||||
|
"skipPattern": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Pattern in commit message to skip validation"
|
||||||
|
},
|
||||||
|
"baselineBranch": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Branch to compare against for new violations"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tokens": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "Token configuration",
|
||||||
|
"properties": {
|
||||||
|
"source": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Path to token definitions"
|
||||||
|
},
|
||||||
|
"format": {
|
||||||
|
"enum": ["css", "scss", "json", "js"],
|
||||||
|
"description": "Token file format"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -7,74 +7,44 @@
|
|||||||
"required": ["id", "version", "name", "category", "rules"],
|
"required": ["id", "version", "name", "category", "rules"],
|
||||||
"properties": {
|
"properties": {
|
||||||
"$schema": {
|
"$schema": {
|
||||||
"type": "string",
|
"type": "string"
|
||||||
"description": "Reference to this schema"
|
|
||||||
},
|
},
|
||||||
"id": {
|
"id": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"pattern": "^[a-z][a-z0-9-]*$",
|
"pattern": "^[a-z][a-z0-9-]*$"
|
||||||
"description": "Unique identifier for this rule set"
|
|
||||||
},
|
},
|
||||||
"version": {
|
"version": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"pattern": "^[0-9]+\\.[0-9]+\\.[0-9]+$",
|
"pattern": "^[0-9]+\\.[0-9]+\\.[0-9]+$"
|
||||||
"description": "Semantic version of this rule set"
|
|
||||||
},
|
},
|
||||||
"name": {
|
"name": {
|
||||||
"type": "string",
|
"type": "string"
|
||||||
"description": "Human-readable name"
|
|
||||||
},
|
},
|
||||||
"description": {
|
"description": {
|
||||||
"type": "string",
|
"type": "string"
|
||||||
"description": "Detailed description of the rule set"
|
|
||||||
},
|
},
|
||||||
"category": {
|
"category": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"enum": ["tokens", "components", "accessibility", "patterns", "naming"],
|
"enum": ["tokens", "components", "accessibility", "patterns", "naming"]
|
||||||
"description": "Category this rule set belongs to"
|
|
||||||
},
|
},
|
||||||
"severity": {
|
"severity": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"enum": ["error", "warning", "info"],
|
"enum": ["error", "warning", "info"],
|
||||||
"default": "warning",
|
"default": "warning"
|
||||||
"description": "Default severity for rules in this set"
|
|
||||||
},
|
},
|
||||||
"rules": {
|
"rules": {
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"items": {
|
"items": {
|
||||||
"$ref": "#/definitions/Rule"
|
"$ref": "#/definitions/Rule"
|
||||||
},
|
}
|
||||||
"description": "Individual rules in this set"
|
|
||||||
},
|
},
|
||||||
"tokens": {
|
"tokens": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"description": "Token requirements and definitions",
|
|
||||||
"properties": {
|
"properties": {
|
||||||
"required": {
|
"required": { "type": "array", "items": { "type": "string" } },
|
||||||
"type": "array",
|
"optional": { "type": "array", "items": { "type": "string" } },
|
||||||
"items": { "type": "string" }
|
"scale": { "type": "object", "additionalProperties": { "type": "string" } }
|
||||||
},
|
|
||||||
"optional": {
|
|
||||||
"type": "array",
|
|
||||||
"items": { "type": "string" }
|
|
||||||
},
|
|
||||||
"scale": {
|
|
||||||
"type": "object",
|
|
||||||
"additionalProperties": { "type": "string" }
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"components": {
|
|
||||||
"type": "object",
|
|
||||||
"description": "Component requirements"
|
|
||||||
},
|
|
||||||
"compliance": {
|
|
||||||
"type": "object",
|
|
||||||
"description": "Compliance metadata"
|
|
||||||
},
|
|
||||||
"adoption": {
|
|
||||||
"type": "object",
|
|
||||||
"description": "Adoption threshold definitions"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"definitions": {
|
"definitions": {
|
||||||
@@ -82,61 +52,21 @@
|
|||||||
"type": "object",
|
"type": "object",
|
||||||
"required": ["id", "name"],
|
"required": ["id", "name"],
|
||||||
"properties": {
|
"properties": {
|
||||||
"id": {
|
"id": { "type": "string", "pattern": "^[a-z][a-z0-9-]*$" },
|
||||||
"type": "string",
|
"name": { "type": "string" },
|
||||||
"pattern": "^[a-z][a-z0-9-]*$",
|
"description": { "type": "string" },
|
||||||
"description": "Unique rule identifier"
|
"severity": { "type": "string", "enum": ["error", "warning", "info"] },
|
||||||
},
|
"wcag": { "type": "string" },
|
||||||
"name": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Human-readable rule name"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "What this rule checks for"
|
|
||||||
},
|
|
||||||
"severity": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": ["error", "warning", "info"],
|
|
||||||
"description": "Rule severity (overrides set default)"
|
|
||||||
},
|
|
||||||
"wcag": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "WCAG criterion reference if applicable"
|
|
||||||
},
|
|
||||||
"patterns": {
|
"patterns": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"forbidden": {
|
"forbidden": { "type": "array", "items": { "type": "string" } },
|
||||||
"type": "array",
|
"allowed": { "type": "array", "items": { "type": "string" } }
|
||||||
"items": { "type": "string" },
|
|
||||||
"description": "Regex patterns that violate this rule"
|
|
||||||
},
|
|
||||||
"allowed": {
|
|
||||||
"type": "array",
|
|
||||||
"items": { "type": "string" },
|
|
||||||
"description": "Regex patterns that satisfy this rule"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"validation": {
|
"validation": { "type": "object" },
|
||||||
"type": "object",
|
"exceptions": { "type": "array", "items": { "type": "string" } },
|
||||||
"description": "Validation configuration"
|
"guidelines": { "type": "array", "items": { "type": "string" } }
|
||||||
},
|
|
||||||
"exceptions": {
|
|
||||||
"type": "array",
|
|
||||||
"items": { "type": "string" },
|
|
||||||
"description": "File patterns or values to exclude"
|
|
||||||
},
|
|
||||||
"guidelines": {
|
|
||||||
"type": "array",
|
|
||||||
"items": { "type": "string" },
|
|
||||||
"description": "Human-readable guidelines for this rule"
|
|
||||||
},
|
|
||||||
"components": {
|
|
||||||
"type": "object",
|
|
||||||
"description": "Component-specific rule configuration"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
23
packages/dss-rules/templates/ds.config.json
Normal file
23
packages/dss-rules/templates/ds.config.json
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://dss.overbits.luz.uy/schemas/ds.config.schema.json",
|
||||||
|
"name": "{{PROJECT_NAME}}",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"rules": {
|
||||||
|
"package": "@dss/rules",
|
||||||
|
"version": "^1.0.0"
|
||||||
|
},
|
||||||
|
"analysis": {
|
||||||
|
"include": ["src/**/*.{ts,tsx,js,jsx,css,scss}"],
|
||||||
|
"exclude": ["**/node_modules/**", "**/*.test.*", "**/*.spec.*"],
|
||||||
|
"output": ".dss/analysis_graph.json"
|
||||||
|
},
|
||||||
|
"metrics": {
|
||||||
|
"upload": true,
|
||||||
|
"dashboardUrl": "https://dss.overbits.luz.uy/api/metrics"
|
||||||
|
},
|
||||||
|
"ci": {
|
||||||
|
"blocking": true,
|
||||||
|
"skipPattern": "[dss-skip]",
|
||||||
|
"baselineBranch": "main"
|
||||||
|
}
|
||||||
|
}
|
||||||
1
packages/dss-rules/templates/dss-folder/.gitkeep
Normal file
1
packages/dss-rules/templates/dss-folder/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# This folder is created by DSS initialization
|
||||||
122
packages/dss-rules/templates/gitea-workflow.yml
Normal file
122
packages/dss-rules/templates/gitea-workflow.yml
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
name: DSS Design System Validation
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: ['*']
|
||||||
|
pull_request:
|
||||||
|
branches: [main, develop]
|
||||||
|
|
||||||
|
env:
|
||||||
|
DSS_MODE: ci
|
||||||
|
DSS_DASHBOARD_URL: ${{ vars.DSS_DASHBOARD_URL || 'https://dss.overbits.luz.uy/api/metrics' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
dss-validate:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: 'npm'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Check for [dss-skip] in commit message
|
||||||
|
id: skip-check
|
||||||
|
run: |
|
||||||
|
COMMIT_MSG=$(git log -1 --pretty=%B)
|
||||||
|
if echo "$COMMIT_MSG" | grep -q "\[dss-skip\]"; then
|
||||||
|
echo "skip=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "::warning::DSS validation skipped via [dss-skip] flag"
|
||||||
|
else
|
||||||
|
echo "skip=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Run DSS Rules Validation
|
||||||
|
if: steps.skip-check.outputs.skip != 'true'
|
||||||
|
id: validate
|
||||||
|
run: |
|
||||||
|
# Run validation and capture output
|
||||||
|
npx dss-rules --ci --json src/ > dss-report.json 2>&1 || true
|
||||||
|
|
||||||
|
# Check results
|
||||||
|
ERRORS=$(jq '.totalErrors' dss-report.json)
|
||||||
|
WARNINGS=$(jq '.totalWarnings' dss-report.json)
|
||||||
|
|
||||||
|
echo "errors=$ERRORS" >> $GITHUB_OUTPUT
|
||||||
|
echo "warnings=$WARNINGS" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Print summary
|
||||||
|
echo "## DSS Validation Results" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Errors: $ERRORS" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Warnings: $WARNINGS" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
if [ "$ERRORS" -gt 0 ]; then
|
||||||
|
echo "::error::DSS validation failed with $ERRORS errors"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Check for version drift
|
||||||
|
if: steps.skip-check.outputs.skip != 'true'
|
||||||
|
run: |
|
||||||
|
CURRENT_VERSION=$(npm list @dss/rules --json 2>/dev/null | jq -r '.dependencies["@dss/rules"].version // "unknown"')
|
||||||
|
LATEST_VERSION=$(npm view @dss/rules version 2>/dev/null || echo "unknown")
|
||||||
|
|
||||||
|
if [ "$CURRENT_VERSION" != "$LATEST_VERSION" ] && [ "$LATEST_VERSION" != "unknown" ]; then
|
||||||
|
echo "::warning::@dss/rules version drift detected: using $CURRENT_VERSION, latest is $LATEST_VERSION"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload metrics to dashboard
|
||||||
|
if: steps.skip-check.outputs.skip != 'true' && always()
|
||||||
|
run: |
|
||||||
|
if [ -f dss-report.json ]; then
|
||||||
|
# Extract metrics for upload
|
||||||
|
jq '{
|
||||||
|
project: "${{ github.repository }}",
|
||||||
|
branch: "${{ github.ref_name }}",
|
||||||
|
commit: "${{ github.sha }}",
|
||||||
|
timestamp: now | todate,
|
||||||
|
metrics: {
|
||||||
|
totalFiles: .totalFiles,
|
||||||
|
passedFiles: .passedFiles,
|
||||||
|
failedFiles: .failedFiles,
|
||||||
|
totalErrors: .totalErrors,
|
||||||
|
totalWarnings: .totalWarnings,
|
||||||
|
rulesVersion: .rulesVersion
|
||||||
|
},
|
||||||
|
fileResults: [.fileResults[] | {
|
||||||
|
file: .file,
|
||||||
|
errors: (.errors | length),
|
||||||
|
warnings: (.warnings | length),
|
||||||
|
violations: [.errors[], .warnings[] | {
|
||||||
|
rule: .rule,
|
||||||
|
line: .line,
|
||||||
|
column: .column
|
||||||
|
}]
|
||||||
|
}]
|
||||||
|
}' dss-report.json > metrics-payload.json
|
||||||
|
|
||||||
|
# Upload to dashboard (non-blocking)
|
||||||
|
curl -X POST \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "Authorization: Bearer ${{ secrets.DSS_API_TOKEN }}" \
|
||||||
|
-d @metrics-payload.json \
|
||||||
|
"$DSS_DASHBOARD_URL/upload" \
|
||||||
|
--fail-with-body || echo "::warning::Failed to upload metrics to dashboard"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload validation report artifact
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: dss-validation-report
|
||||||
|
path: dss-report.json
|
||||||
|
retention-days: 30
|
||||||
152
packages/dss-rules/templates/github-workflow.yml
Normal file
152
packages/dss-rules/templates/github-workflow.yml
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
# DSS Design System Validation - GitHub Actions
|
||||||
|
# Generated by @dss/rules init
|
||||||
|
#
|
||||||
|
# This workflow validates design system compliance and uploads metrics
|
||||||
|
# to the DSS dashboard for portfolio-wide visibility.
|
||||||
|
#
|
||||||
|
# Required Secrets:
|
||||||
|
# DSS_DASHBOARD_URL: URL to DSS metrics API (e.g., https://dss.example.com)
|
||||||
|
# DSS_API_TOKEN: Authentication token for metrics upload
|
||||||
|
|
||||||
|
name: DSS Validate
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main, master, develop]
|
||||||
|
pull_request:
|
||||||
|
branches: [main, master]
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODE_VERSION: '20'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate:
|
||||||
|
name: Design System Validation
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # Full history for baseline comparison
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ env.NODE_VERSION }}
|
||||||
|
cache: 'npm'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
# Check for break-glass [dss-skip] in commit message
|
||||||
|
- name: Check for [dss-skip]
|
||||||
|
id: skip-check
|
||||||
|
run: |
|
||||||
|
COMMIT_MSG=$(git log -1 --pretty=%B)
|
||||||
|
if echo "$COMMIT_MSG" | grep -q '\[dss-skip\]'; then
|
||||||
|
echo "skip=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "::warning::DSS validation skipped via [dss-skip] commit message"
|
||||||
|
echo "::warning::Commit: $(git log -1 --pretty='%h %s')"
|
||||||
|
else
|
||||||
|
echo "skip=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check @dss/rules version drift
|
||||||
|
- name: Check rules version
|
||||||
|
if: steps.skip-check.outputs.skip != 'true'
|
||||||
|
run: |
|
||||||
|
INSTALLED=$(npm list @dss/rules --json 2>/dev/null | jq -r '.dependencies["@dss/rules"].version // "not-installed"')
|
||||||
|
LATEST=$(npm view @dss/rules version 2>/dev/null || echo "unknown")
|
||||||
|
|
||||||
|
echo "Installed @dss/rules: $INSTALLED"
|
||||||
|
echo "Latest @dss/rules: $LATEST"
|
||||||
|
|
||||||
|
if [ "$INSTALLED" != "$LATEST" ] && [ "$LATEST" != "unknown" ]; then
|
||||||
|
echo "::warning::@dss/rules is outdated ($INSTALLED vs $LATEST). Consider updating."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run DSS validation
|
||||||
|
- name: Run DSS validation
|
||||||
|
if: steps.skip-check.outputs.skip != 'true'
|
||||||
|
id: validate
|
||||||
|
run: |
|
||||||
|
# Run validation with CI mode (strict, JSON output)
|
||||||
|
npm run dss:validate:ci || echo "validation_failed=true" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Extract summary for PR comment
|
||||||
|
if [ -f .dss/results.json ]; then
|
||||||
|
ERRORS=$(jq -r '.metrics.totalErrors // 0' .dss/results.json)
|
||||||
|
WARNINGS=$(jq -r '.metrics.totalWarnings // 0' .dss/results.json)
|
||||||
|
SCORE=$(jq -r '.metrics.adoptionScore // 0' .dss/results.json)
|
||||||
|
|
||||||
|
echo "errors=$ERRORS" >> $GITHUB_OUTPUT
|
||||||
|
echo "warnings=$WARNINGS" >> $GITHUB_OUTPUT
|
||||||
|
echo "score=$SCORE" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Upload metrics to DSS dashboard
|
||||||
|
- name: Upload metrics to dashboard
|
||||||
|
if: steps.skip-check.outputs.skip != 'true' && always()
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
if [ ! -f .dss/results.json ]; then
|
||||||
|
echo "No results file found, skipping upload"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Add git metadata to results
|
||||||
|
jq --arg branch "${{ github.ref_name }}" \
|
||||||
|
--arg commit "${{ github.sha }}" \
|
||||||
|
--arg repo "${{ github.repository }}" \
|
||||||
|
'. + {branch: $branch, commit: $commit, project: $repo}' \
|
||||||
|
.dss/results.json > .dss/upload.json
|
||||||
|
|
||||||
|
curl -X POST "${DSS_DASHBOARD_URL}/api/metrics/upload" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "Authorization: Bearer ${DSS_API_TOKEN}" \
|
||||||
|
-d @.dss/upload.json \
|
||||||
|
--fail --silent --show-error
|
||||||
|
env:
|
||||||
|
DSS_DASHBOARD_URL: ${{ secrets.DSS_DASHBOARD_URL }}
|
||||||
|
DSS_API_TOKEN: ${{ secrets.DSS_API_TOKEN }}
|
||||||
|
|
||||||
|
# Comment on PR with results
|
||||||
|
- name: Comment on PR
|
||||||
|
if: github.event_name == 'pull_request' && steps.skip-check.outputs.skip != 'true'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const errors = '${{ steps.validate.outputs.errors }}' || '0';
|
||||||
|
const warnings = '${{ steps.validate.outputs.warnings }}' || '0';
|
||||||
|
const score = '${{ steps.validate.outputs.score }}' || 'N/A';
|
||||||
|
|
||||||
|
const status = errors === '0' ? '✅' : '❌';
|
||||||
|
const body = `## ${status} DSS Validation Results
|
||||||
|
|
||||||
|
| Metric | Value |
|
||||||
|
|--------|-------|
|
||||||
|
| Adoption Score | ${score}% |
|
||||||
|
| Errors | ${errors} |
|
||||||
|
| Warnings | ${warnings} |
|
||||||
|
|
||||||
|
${errors !== '0' ? '⚠️ Please fix design system violations before merging.' : '🎉 All design system checks passed!'}
|
||||||
|
|
||||||
|
---
|
||||||
|
*Powered by @dss/rules*`;
|
||||||
|
|
||||||
|
github.rest.issues.createComment({
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body: body
|
||||||
|
});
|
||||||
|
|
||||||
|
# Fail if validation errors (authoritative enforcement)
|
||||||
|
- name: Check validation result
|
||||||
|
if: steps.skip-check.outputs.skip != 'true'
|
||||||
|
run: |
|
||||||
|
if [ "${{ steps.validate.outputs.validation_failed }}" = "true" ]; then
|
||||||
|
echo "::error::DSS validation failed with errors. Please fix violations."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
9
packages/dss-rules/templates/gitignore-additions.txt
Normal file
9
packages/dss-rules/templates/gitignore-additions.txt
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# DSS Design System (generated files - do not commit)
|
||||||
|
.dss/analysis_graph.json
|
||||||
|
.dss/cache/
|
||||||
|
.dss/metrics.json
|
||||||
|
.dss/baseline.json
|
||||||
|
|
||||||
|
# Keep config and schema
|
||||||
|
!.dss/ds.config.json
|
||||||
|
!.dss/.gitkeep
|
||||||
126
packages/dss-rules/templates/gitlab-ci.yml
Normal file
126
packages/dss-rules/templates/gitlab-ci.yml
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
# DSS Design System Validation - GitLab CI
|
||||||
|
# Generated by @dss/rules init
|
||||||
|
#
|
||||||
|
# This workflow validates design system compliance and uploads metrics
|
||||||
|
# to the DSS dashboard for portfolio-wide visibility.
|
||||||
|
#
|
||||||
|
# Required Variables:
|
||||||
|
# DSS_DASHBOARD_URL: URL to DSS metrics API (e.g., https://dss.example.com)
|
||||||
|
# DSS_API_TOKEN: Authentication token for metrics upload
|
||||||
|
|
||||||
|
stages:
|
||||||
|
- validate
|
||||||
|
|
||||||
|
variables:
|
||||||
|
NODE_VERSION: "20"
|
||||||
|
|
||||||
|
.node-cache:
|
||||||
|
cache:
|
||||||
|
key: ${CI_COMMIT_REF_SLUG}
|
||||||
|
paths:
|
||||||
|
- node_modules/
|
||||||
|
- .npm/
|
||||||
|
|
||||||
|
dss-validate:
|
||||||
|
stage: validate
|
||||||
|
image: node:${NODE_VERSION}
|
||||||
|
extends: .node-cache
|
||||||
|
script:
|
||||||
|
# Install dependencies
|
||||||
|
- npm ci --cache .npm --prefer-offline
|
||||||
|
|
||||||
|
# Check for break-glass [dss-skip] in commit message
|
||||||
|
- |
|
||||||
|
COMMIT_MSG=$(git log -1 --pretty=%B)
|
||||||
|
if echo "$COMMIT_MSG" | grep -q '\[dss-skip\]'; then
|
||||||
|
echo "⚠️ DSS validation skipped via [dss-skip] commit message"
|
||||||
|
echo "Commit: $(git log -1 --pretty='%h %s')"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check @dss/rules version drift
|
||||||
|
- |
|
||||||
|
INSTALLED=$(npm list @dss/rules --json 2>/dev/null | jq -r '.dependencies["@dss/rules"].version // "not-installed"')
|
||||||
|
LATEST=$(npm view @dss/rules version 2>/dev/null || echo "unknown")
|
||||||
|
echo "Installed @dss/rules: $INSTALLED"
|
||||||
|
echo "Latest @dss/rules: $LATEST"
|
||||||
|
if [ "$INSTALLED" != "$LATEST" ] && [ "$LATEST" != "unknown" ]; then
|
||||||
|
echo "⚠️ @dss/rules is outdated ($INSTALLED vs $LATEST). Consider updating."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run DSS validation
|
||||||
|
- npm run dss:validate:ci || VALIDATION_FAILED=true
|
||||||
|
|
||||||
|
# Upload metrics to dashboard
|
||||||
|
- |
|
||||||
|
if [ -f .dss/results.json ]; then
|
||||||
|
jq --arg branch "$CI_COMMIT_REF_NAME" \
|
||||||
|
--arg commit "$CI_COMMIT_SHA" \
|
||||||
|
--arg repo "$CI_PROJECT_PATH" \
|
||||||
|
'. + {branch: $branch, commit: $commit, project: $repo}' \
|
||||||
|
.dss/results.json > .dss/upload.json
|
||||||
|
|
||||||
|
curl -X POST "${DSS_DASHBOARD_URL}/api/metrics/upload" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "Authorization: Bearer ${DSS_API_TOKEN}" \
|
||||||
|
-d @.dss/upload.json \
|
||||||
|
--fail --silent --show-error || echo "⚠️ Failed to upload metrics (non-blocking)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Fail if validation errors
|
||||||
|
- |
|
||||||
|
if [ "$VALIDATION_FAILED" = "true" ]; then
|
||||||
|
echo "❌ DSS validation failed with errors. Please fix violations."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
artifacts:
|
||||||
|
when: always
|
||||||
|
paths:
|
||||||
|
- .dss/results.json
|
||||||
|
expire_in: 1 week
|
||||||
|
reports:
|
||||||
|
codequality: .dss/results.json
|
||||||
|
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "main" || $CI_COMMIT_BRANCH == "master" || $CI_COMMIT_BRANCH == "develop"
|
||||||
|
- if: $CI_MERGE_REQUEST_IID
|
||||||
|
|
||||||
|
# Optional: MR comment with results (requires GITLAB_TOKEN with API access)
|
||||||
|
dss-mr-comment:
|
||||||
|
stage: validate
|
||||||
|
image: curlimages/curl:latest
|
||||||
|
needs:
|
||||||
|
- job: dss-validate
|
||||||
|
artifacts: true
|
||||||
|
script:
|
||||||
|
- |
|
||||||
|
if [ ! -f .dss/results.json ]; then
|
||||||
|
echo "No results file, skipping MR comment"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
ERRORS=$(jq -r '.metrics.totalErrors // 0' .dss/results.json)
|
||||||
|
WARNINGS=$(jq -r '.metrics.totalWarnings // 0' .dss/results.json)
|
||||||
|
SCORE=$(jq -r '.metrics.adoptionScore // 0' .dss/results.json)
|
||||||
|
|
||||||
|
if [ "$ERRORS" = "0" ]; then
|
||||||
|
STATUS="✅"
|
||||||
|
MESSAGE="🎉 All design system checks passed!"
|
||||||
|
else
|
||||||
|
STATUS="❌"
|
||||||
|
MESSAGE="⚠️ Please fix design system violations before merging."
|
||||||
|
fi
|
||||||
|
|
||||||
|
BODY="## $STATUS DSS Validation Results\n\n| Metric | Value |\n|--------|-------|\n| Adoption Score | ${SCORE}% |\n| Errors | $ERRORS |\n| Warnings | $WARNINGS |\n\n$MESSAGE\n\n---\n*Powered by @dss/rules*"
|
||||||
|
|
||||||
|
curl --request POST \
|
||||||
|
--header "PRIVATE-TOKEN: ${GITLAB_TOKEN}" \
|
||||||
|
--header "Content-Type: application/json" \
|
||||||
|
--data "{\"body\": \"$BODY\"}" \
|
||||||
|
"${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/merge_requests/${CI_MERGE_REQUEST_IID}/notes" \
|
||||||
|
|| echo "⚠️ Failed to post MR comment (non-blocking)"
|
||||||
|
|
||||||
|
rules:
|
||||||
|
- if: $CI_MERGE_REQUEST_IID && $GITLAB_TOKEN
|
||||||
|
allow_failure: true
|
||||||
Reference in New Issue
Block a user