Initial commit: Clean DSS implementation

Migrated from design-system-swarm with fresh git history.
Old project history preserved in /home/overbits/apps/design-system-swarm

Core components:
- MCP Server (Python FastAPI with mcp 1.23.1)
- Claude Plugin (agents, commands, skills, strategies, hooks, core)
- DSS Backend (dss-mvp1 - token translation, Figma sync)
- Admin UI (Node.js/React)
- Server (Node.js/Express)
- Storybook integration (dss-mvp1/.storybook)

Self-contained configuration:
- All paths relative or use DSS_BASE_PATH=/home/overbits/dss
- PYTHONPATH configured for dss-mvp1 and dss-claude-plugin
- .env file with all configuration
- Claude plugin uses ${CLAUDE_PLUGIN_ROOT} for portability

Migration completed: 2025-12-09 (note: the original message contained a literal, unexpanded `$(date)` placeholder)
🤖 Clean migration with full functionality preserved
This commit is contained in:
Digital Production Factory
2025-12-09 18:45:48 -03:00
commit 276ed71f31
884 changed files with 373737 additions and 0 deletions

12
server/.env.example Normal file
View File

@@ -0,0 +1,12 @@
NODE_ENV=development
PORT=3001
DATABASE_URL=sqlite:./data/design-system.db
JWT_SECRET=your-secret-key-change-in-production
JWT_EXPIRE=7d
JWT_REFRESH_SECRET=your-refresh-secret-key
JWT_REFRESH_EXPIRE=30d
CORS_ORIGIN=http://localhost:5173,http://localhost:3000
FIGMA_API_KEY=your-figma-api-key
JIRA_API_KEY=your-jira-api-key
JIRA_HOST=your-jira-host.atlassian.net
LOG_LEVEL=info

12
server/.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
node_modules/
dist/
build/
.env
.env.local
.env.*.local
*.log
npm-debug.log*
data/
.DS_Store
.vscode/
.idea/

25
server/Dockerfile Normal file
View File

@@ -0,0 +1,25 @@
FROM node:18-alpine

WORKDIR /app

# Copy package manifests first so the dependency layer is cached
COPY package*.json ./

# Install production dependencies only — devDependencies (nodemon) are not
# needed at runtime and bloat the image
RUN npm ci --omit=dev

# Copy server code
COPY src ./src

# Create data directory for SQLite
RUN mkdir -p /app/data

# Expose port
EXPOSE 3001

# Health check: drain the response body so the socket is released, exit
# non-zero on any non-200 status, and treat connection errors as unhealthy
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
  CMD node -e "require('http').get('http://localhost:3001/health', (r) => {r.resume(); if (r.statusCode !== 200) process.exit(1)}).on('error', () => process.exit(1))"

# Start server
CMD ["node", "src/server.js"]

View File

@@ -0,0 +1,185 @@
import { DataTypes } from 'sequelize';
import { v4 as uuidv4 } from 'uuid';

// Team-based roles that replace the legacy admin/designer/developer/viewer set.
const ROLES = ['admin', 'ui_team', 'ux_team', 'qa_team'];
// CRUD verbs a role may be granted on a resource.
const ACTIONS = ['create', 'read', 'update', 'delete'];

/**
 * RBAC migration.
 *
 * up():  re-types Users.role to the new team-based enum (migrating existing
 *        rows via a temporary role_new column), adds Users.team_id, creates
 *        the TeamPermissions table and seeds the default role->permission
 *        matrix. All DDL/DML runs in one transaction so a failure leaves the
 *        schema untouched.
 * down(): reverses all of the above, mapping team roles back to the legacy
 *        enum values and dropping the Postgres enum types.
 *
 * NOTE(review): the raw SQL uses quoted "Users" and the cleanup assumes
 * Postgres enum type names, while config/database.js configures SQLite —
 * confirm the intended dialect before running this in production.
 */
export default {
  up: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // 1. Add role and team_id columns to Users table
      await queryInterface.addColumn('Users', 'team_id', {
        type: DataTypes.STRING,
        allowNull: true
      }, { transaction });
      // Update existing users to use new role enum values
      // First, add a temporary column
      await queryInterface.addColumn('Users', 'role_new', {
        type: DataTypes.ENUM(...ROLES),
        allowNull: true
      }, { transaction });
      // Map old roles to new roles (if Users table exists with old roles)
      await queryInterface.sequelize.query(`
        UPDATE "Users" SET role_new = CASE
          WHEN role = 'admin' THEN 'admin'
          WHEN role = 'designer' THEN 'ux_team'
          WHEN role = 'developer' THEN 'ui_team'
          WHEN role = 'viewer' THEN 'qa_team'
          ELSE 'ui_team'
        END
      `, { transaction });
      // Drop old role column
      await queryInterface.removeColumn('Users', 'role', { transaction });
      // Rename new role column to role
      await queryInterface.renameColumn('Users', 'role_new', 'role', { transaction });
      // Set default value for role
      await queryInterface.changeColumn('Users', 'role', {
        type: DataTypes.ENUM(...ROLES),
        allowNull: false,
        defaultValue: 'ui_team'
      }, { transaction });
      // 2. Create Team Permissions table
      await queryInterface.createTable('TeamPermissions', {
        id: {
          type: DataTypes.UUID,
          defaultValue: DataTypes.UUIDV4,
          primaryKey: true
        },
        role: {
          type: DataTypes.ENUM(...ROLES),
          allowNull: false
        },
        permission: {
          // Human-readable permission label (e.g. 'sync_figma'); the actual
          // authorization check is on the (role, resource, action) triple.
          type: DataTypes.STRING,
          allowNull: false
        },
        resource: {
          type: DataTypes.STRING,
          allowNull: false
        },
        action: {
          type: DataTypes.ENUM(...ACTIONS),
          allowNull: false
        },
        createdAt: {
          type: DataTypes.DATE,
          allowNull: false,
          defaultValue: DataTypes.NOW
        },
        updatedAt: {
          type: DataTypes.DATE,
          allowNull: false,
          defaultValue: DataTypes.NOW
        }
      }, { transaction });
      // 3. Add Unique Constraint — one grant per (role, resource, action)
      await queryInterface.addConstraint('TeamPermissions', {
        fields: ['role', 'resource', 'action'],
        type: 'unique',
        name: 'unique_role_resource_action',
        transaction
      });
      // 4. Seed Default Permissions (admin has an implicit override in the
      // middleware, so it needs no rows here)
      const timestamp = new Date();
      const seeds = [
        // UI Team Permissions
        { role: 'ui_team', permission: 'sync_figma', resource: 'figma', action: 'create' },
        { role: 'ui_team', permission: 'view_figma', resource: 'figma', action: 'read' },
        { role: 'ui_team', permission: 'quickwins', resource: 'analysis', action: 'read' },
        { role: 'ui_team', permission: 'regression', resource: 'analysis', action: 'create' },
        { role: 'ui_team', permission: 'view_metrics', resource: 'metrics', action: 'read' },
        // UX Team Permissions
        { role: 'ux_team', permission: 'view_components', resource: 'components', action: 'read' },
        { role: 'ux_team', permission: 'update_components', resource: 'components', action: 'update' },
        { role: 'ux_team', permission: 'view_tokens', resource: 'tokens', action: 'read' },
        { role: 'ux_team', permission: 'update_tokens', resource: 'tokens', action: 'update' },
        { role: 'ux_team', permission: 'view_icons', resource: 'icons', action: 'read' },
        { role: 'ux_team', permission: 'update_icons', resource: 'icons', action: 'update' },
        { role: 'ux_team', permission: 'customize_figma_plugin', resource: 'figma', action: 'update' },
        { role: 'ux_team', permission: 'view_metrics', resource: 'metrics', action: 'read' },
        // QA Team Permissions
        { role: 'qa_team', permission: 'test_components', resource: 'components', action: 'read' },
        { role: 'qa_team', permission: 'create_issue', resource: 'issues', action: 'create' },
        { role: 'qa_team', permission: 'view_metrics', resource: 'metrics', action: 'read' },
        { role: 'qa_team', permission: 'run_esre', resource: 'testing', action: 'create' }
      ].map(s => ({
        ...s,
        id: uuidv4(),
        createdAt: timestamp,
        updatedAt: timestamp
      }));
      if (seeds.length > 0) {
        await queryInterface.bulkInsert('TeamPermissions', seeds, { transaction });
      }
      await transaction.commit();
      console.log('✓ RBAC migration completed successfully');
    } catch (err) {
      await transaction.rollback();
      console.error('✗ RBAC migration failed:', err);
      throw err;
    }
  },
  down: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Drop team_permissions table
      await queryInterface.dropTable('TeamPermissions', { transaction });
      // Remove team_id column
      await queryInterface.removeColumn('Users', 'team_id', { transaction });
      // Restore old role enum (same temp-column-then-rename dance as up())
      await queryInterface.addColumn('Users', 'role_old', {
        type: DataTypes.ENUM('admin', 'designer', 'developer', 'viewer'),
        allowNull: true
      }, { transaction });
      // Map new roles back to old roles
      await queryInterface.sequelize.query(`
        UPDATE "Users" SET role_old = CASE
          WHEN role = 'admin' THEN 'admin'
          WHEN role = 'ux_team' THEN 'designer'
          WHEN role = 'ui_team' THEN 'developer'
          WHEN role = 'qa_team' THEN 'viewer'
          ELSE 'designer'
        END
      `, { transaction });
      await queryInterface.removeColumn('Users', 'role', { transaction });
      await queryInterface.renameColumn('Users', 'role_old', 'role', { transaction });
      await queryInterface.changeColumn('Users', 'role', {
        type: DataTypes.ENUM('admin', 'designer', 'developer', 'viewer'),
        allowNull: false,
        defaultValue: 'designer'
      }, { transaction });
      // Cleanup Enums
      // NOTE(review): assumes the type left behind by up()'s renameColumn is
      // still named "enum_Users_role_new" — Postgres does not rename the enum
      // type when a column is renamed; confirm against the actual catalog.
      await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_Users_role_new";', { transaction });
      await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_TeamPermissions_role";', { transaction });
      await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_TeamPermissions_action";', { transaction });
      await transaction.commit();
      console.log('✓ RBAC migration rolled back successfully');
    } catch (err) {
      await transaction.rollback();
      console.error('✗ RBAC migration rollback failed:', err);
      throw err;
    }
  }
};

View File

@@ -0,0 +1,97 @@
import { DataTypes, Op } from 'sequelize';

/**
 * Creates the config_settings table: hierarchical key/value configuration at
 * SYSTEM / PROJECT / USER scope. Values are JSONB; secrets are encrypted at
 * the application layer before storage (see ConfigSetting model). Uniqueness
 * of (scope, scope_id, key) is enforced with two partial unique indexes
 * because SYSTEM rows carry a NULL scope_id.
 *
 * NOTE(review): JSONB and partial (filtered) indexes are Postgres features,
 * while config/database.js configures SQLite — confirm the target dialect.
 */
export default {
  up: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      await queryInterface.createTable('config_settings', {
        id: {
          type: DataTypes.UUID,
          defaultValue: DataTypes.UUIDV4,
          primaryKey: true
        },
        scope: {
          // Hierarchy level this setting applies to.
          type: DataTypes.ENUM('SYSTEM', 'PROJECT', 'USER'),
          allowNull: false
        },
        scopeId: {
          // NULL for SYSTEM scope; project or user id otherwise.
          type: DataTypes.UUID,
          allowNull: true,
          field: 'scope_id'
        },
        key: {
          type: DataTypes.STRING,
          allowNull: false
        },
        value: {
          type: DataTypes.JSONB,
          allowNull: false
        },
        isSecret: {
          // When true, value holds ciphertext (encrypted by the service layer).
          type: DataTypes.BOOLEAN,
          defaultValue: false,
          field: 'is_secret'
        },
        schemaVersion: {
          type: DataTypes.INTEGER,
          defaultValue: 1,
          field: 'schema_version'
        },
        createdAt: {
          type: DataTypes.DATE,
          allowNull: false,
          defaultValue: DataTypes.NOW,
          field: 'created_at'
        },
        updatedAt: {
          type: DataTypes.DATE,
          allowNull: false,
          defaultValue: DataTypes.NOW,
          field: 'updated_at'
        }
      }, { transaction });
      // Create a unique index that handles the NULL scope_id for SYSTEM scope correctly
      // In Postgres 15+ we could use NULLS NOT DISTINCT, but for compatibility we use a partial index
      // for the NULL case and a standard unique index for the non-NULL case.
      // 1. Standard unique constraint for non-null scope_id (Project/User)
      await queryInterface.addIndex('config_settings', ['scope', 'scope_id', 'key'], {
        unique: true,
        where: {
          scope_id: { [Op.ne]: null }
        },
        name: 'config_settings_scope_scope_id_key_unique',
        transaction
      });
      // 2. Unique constraint for SYSTEM scope (where scope_id is NULL)
      await queryInterface.addIndex('config_settings', ['scope', 'key'], {
        unique: true,
        where: {
          scope_id: null
        },
        name: 'config_settings_system_scope_key_unique',
        transaction
      });
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  },
  down: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // Dropping the table also drops its indexes; the enum type must be
      // removed separately (Postgres keeps it after the table is gone).
      await queryInterface.dropTable('config_settings', { transaction });
      await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_config_settings_scope";', { transaction });
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  }
};

View File

@@ -0,0 +1,78 @@
import { DataTypes } from 'sequelize';

/**
 * Creates the config_audit_logs table: an append-only record of every change
 * to config_settings (who changed what, with before/after value snapshots).
 * Rows are deleted only via the CASCADE when the parent setting is removed.
 */
export default {
  up: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      await queryInterface.createTable('config_audit_logs', {
        id: {
          type: DataTypes.UUID,
          defaultValue: DataTypes.UUIDV4,
          primaryKey: true
        },
        configId: {
          type: DataTypes.UUID,
          allowNull: false,
          field: 'config_id',
          references: {
            model: 'config_settings',
            key: 'id'
          },
          // Audit rows die with their setting.
          onDelete: 'CASCADE'
        },
        actorId: {
          type: DataTypes.UUID,
          allowNull: false,
          field: 'actor_id',
          references: {
            model: 'users', // Note: User table name usually lowercase in Postgres if created via Sequelize defaults, or "Users" if strictly quoted
            key: 'id'
          }
          // NOTE(review): the RBAC migration above operates on "Users"
          // (capitalized) — confirm which casing the users table actually has,
          // otherwise this FK will fail to resolve.
        },
        action: {
          type: DataTypes.ENUM('CREATE', 'UPDATE', 'DELETE'),
          allowNull: false
        },
        previousValue: {
          // Snapshot before the change; NULL for CREATE.
          type: DataTypes.JSONB,
          allowNull: true,
          field: 'previous_value'
        },
        newValue: {
          // Snapshot after the change; NULL for DELETE.
          type: DataTypes.JSONB,
          allowNull: true,
          field: 'new_value'
        },
        createdAt: {
          type: DataTypes.DATE,
          allowNull: false,
          defaultValue: DataTypes.NOW,
          field: 'created_at'
        }
      }, { transaction });
      // Add indexes for common query patterns
      await queryInterface.addIndex('config_audit_logs', ['config_id'], { transaction });
      await queryInterface.addIndex('config_audit_logs', ['actor_id'], { transaction });
      await queryInterface.addIndex('config_audit_logs', ['created_at'], { transaction });
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  },
  down: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      await queryInterface.dropTable('config_audit_logs', { transaction });
      // The enum type survives dropTable on Postgres; remove it explicitly.
      await queryInterface.sequelize.query('DROP TYPE IF EXISTS "enum_config_audit_logs_action";', { transaction });
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  }
};

View File

@@ -0,0 +1,69 @@
import { randomUUID } from 'crypto';

// Keys seeded by this migration. down() removes ONLY these keys so it cannot
// clobber SYSTEM-scope settings created later at runtime (the previous
// implementation deleted every SYSTEM row).
const DEFAULT_KEYS = [
  'figma.api_timeout',
  'storybook.default_port',
  'rate_limit.requests_per_minute',
  'theme'
];

/**
 * Seeds baseline SYSTEM-scope rows into config_settings.
 * Values are JSON-encoded because config_settings.value is a JSONB column.
 * Uses the stdlib crypto.randomUUID (Node >= 14.17) instead of the third-party
 * uuid package — the Docker image runs node:18.
 */
const migration = {
  up: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      const now = new Date();
      const defaultConfigs = [
        {
          key: 'figma.api_timeout',
          value: JSON.stringify(30000),
          is_secret: false,
          scope: 'SYSTEM',
          scope_id: null
        },
        {
          key: 'storybook.default_port',
          value: JSON.stringify(6006),
          is_secret: false,
          scope: 'SYSTEM',
          scope_id: null
        },
        {
          key: 'rate_limit.requests_per_minute',
          value: JSON.stringify(60),
          is_secret: false,
          scope: 'SYSTEM',
          scope_id: null
        },
        {
          key: 'theme',
          value: JSON.stringify('auto'),
          is_secret: false,
          scope: 'SYSTEM',
          scope_id: null
        }
      ];
      const records = defaultConfigs.map((config) => ({
        id: randomUUID(),
        ...config,
        schema_version: 1,
        created_at: now,
        updated_at: now
      }));
      await queryInterface.bulkInsert('config_settings', records, { transaction });
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      console.error('Failed to seed default configs:', err);
      throw err;
    }
  },
  down: async (queryInterface) => {
    const transaction = await queryInterface.sequelize.transaction();
    try {
      // BUGFIX: restrict the delete to the seeded keys (an array value in a
      // Sequelize `where` produces a SQL IN clause) instead of wiping every
      // SYSTEM-scope setting.
      await queryInterface.bulkDelete(
        'config_settings',
        { scope: 'SYSTEM', key: DEFAULT_KEYS },
        { transaction }
      );
      await transaction.commit();
    } catch (err) {
      await transaction.rollback();
      throw err;
    }
  }
};

export default migration;

3608
server/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

40
server/package.json Normal file
View File

@@ -0,0 +1,40 @@
{
"name": "dss-server",
"version": "3.0.0",
"description": "Design System Swarm - Backend API Server",
"main": "src/server.js",
"type": "module",
"scripts": {
"start": "node src/server.js",
"dev": "nodemon src/server.js",
"test": "node --test tests/**/*.test.js",
"seed": "node src/scripts/seed.js",
"migrate": "node src/scripts/migrate.js"
},
"keywords": [
"design-system",
"api",
"backend"
],
"author": "",
"license": "MIT",
"dependencies": {
"bcryptjs": "^2.4.3",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"express-validator": "^7.0.0",
"glob": "^13.0.0",
"helmet": "^7.1.0",
"ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2",
"passport": "^0.7.0",
"passport-jwt": "^4.0.1",
"sequelize": "^6.35.2",
"socket.io": "^4.7.2",
"sqlite3": "^5.1.6"
},
"devDependencies": {
"nodemon": "^3.0.2"
}
}

View File

@@ -0,0 +1,26 @@
import { Sequelize } from 'sequelize';
import path from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';

/**
 * Shared SQLite-backed Sequelize instance for the whole server.
 * The database file lives in <repo>/data/design-system.db relative to this
 * module; the directory is created at import time if it does not exist yet.
 */
const moduleDir = path.dirname(fileURLToPath(import.meta.url));
const storageDir = path.join(moduleDir, '../../data');

// First boot: make sure SQLite has somewhere to put its file.
if (!fs.existsSync(storageDir)) {
  fs.mkdirSync(storageDir, { recursive: true });
}

// SQL statement logging only when LOG_LEVEL=debug; silent otherwise.
const queryLogger = process.env.LOG_LEVEL === 'debug' ? console.log : false;

const sequelize = new Sequelize({
  dialect: 'sqlite',
  storage: path.join(storageDir, 'design-system.db'),
  logging: queryLogger,
  pool: {
    max: 5,
    min: 0,
    acquire: 30000,
    idle: 10000
  }
});

export default sequelize;

View File

@@ -0,0 +1,22 @@
import passport from 'passport';
import { Strategy as JWTStrategy, ExtractJwt } from 'passport-jwt';
import User from '../models/User.js';

// SECURITY FIX: fail fast when the signing secret is missing instead of
// silently falling back to the hard-coded placeholder 'your-secret-key'
// (which would let anyone forge valid tokens). This matches the guard in
// middleware/auth.js.
if (!process.env.JWT_SECRET) {
  throw new Error('FATAL: JWT_SECRET environment variable is not defined');
}

const opts = {
  jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
  secretOrKey: process.env.JWT_SECRET
};

// Resolve the verified JWT payload to a live user row so deleted users are
// rejected even while their tokens are still unexpired.
passport.use(new JWTStrategy(opts, async (jwt_payload, done) => {
  try {
    const user = await User.findByPk(jwt_payload.id);
    if (user) {
      return done(null, user);
    }
    return done(null, false); // token is valid but the user no longer exists
  } catch (error) {
    return done(error, false);
  }
}));

export default passport;

View File

@@ -0,0 +1,75 @@
/**
 * Redis Configuration
 * Configures the Redis client using ioredis with robust connection handling
 * and retry strategies suitable for production environments.
 *
 * Exports the client, or `null` when caching is disabled — callers must
 * null-check before use. Enabled only when ENABLE_REDIS_CACHE === 'true'.
 */
import Redis from 'ioredis';

const isRedisEnabled = process.env.ENABLE_REDIS_CACHE === 'true';

let redisClient = null;

if (isRedisEnabled) {
  const redisOptions = {
    // Connection Details
    host: process.env.REDIS_HOST || 'localhost',
    port: parseInt(process.env.REDIS_PORT || '6379', 10),
    password: process.env.REDIS_PASSWORD || undefined,
    db: parseInt(process.env.REDIS_DB || '0', 10),
    // Resilience Settings
    lazyConnect: true, // Don't crash application boot if Redis is unavailable
    connectTimeout: 10000, // 10 seconds
    commandTimeout: parseInt(process.env.REDIS_TIMEOUT || '100', 10), // Fail fast (default 100ms)
    // Retry Strategy: Capped Exponential Backoff
    retryStrategy: (times) => {
      const maxRetryTime = 3000; // 3 seconds cap
      // 50, 100, 200, 400, 800, 1600, 3000, 3000...
      // NOTE(review): the delay is linear (times * 50) capped at 3000ms, not
      // truly exponential — the comment above describes doubling; confirm
      // which behavior is intended.
      const delay = Math.min(times * 50, maxRetryTime);
      console.warn(`[Redis] Connection lost. Retrying in ${delay}ms... (Attempt ${times})`);
      return delay;
    },
    // Reconnection handling
    reconnectOnError: (err) => {
      const targetError = 'READONLY';
      if (err.message.includes(targetError)) {
        // Only reconnect when the error starts with "READONLY"
        return true;
      }
      return false;
    }
  };

  // Support for Redis Connection URL if provided (overrides host/port)
  if (process.env.REDIS_URL) {
    redisClient = new Redis(process.env.REDIS_URL, redisOptions);
  } else {
    redisClient = new Redis(redisOptions);
  }

  // Event Logging
  redisClient.on('connect', () => {
    console.log('[Redis] Client connected successfully');
  });
  redisClient.on('error', (err) => {
    // Only log distinct connection errors to avoid flooding
    if (err.code === 'ECONNREFUSED') {
      console.error('[Redis] Connection refused - is the server running?');
    } else {
      console.error('[Redis] Error:', err.message);
    }
  });
  redisClient.on('ready', () => {
    console.log('[Redis] Client is ready to accept commands');
  });
} else {
  console.log('[Redis] Caching disabled via ENABLE_REDIS_CACHE environment variable');
}

export default redisClient;

View File

@@ -0,0 +1,178 @@
import jwt from 'jsonwebtoken';
import RedisService from '../services/RedisService.js';

// Fail fast at module load: every verifier below depends on JWT_SECRET, and
// a missing secret would otherwise reject every request at runtime.
if (!process.env.JWT_SECRET) {
  throw new Error('FATAL: JWT_SECRET environment variable is not defined');
}

/**
 * Strict JWT authentication middleware.
 * Expects `Authorization: Bearer <token>` and attaches the decoded payload to
 * req.user on success. Responds 401 when no token is supplied and 403 when
 * verification fails (bad signature or expired token).
 */
export const authenticateToken = (req, res, next) => {
  const bearer = req.headers['authorization'];
  const token = bearer ? bearer.split(' ')[1] : undefined;

  if (!token) {
    return res.status(401).json({
      status: 'error',
      code: 'UNAUTHORIZED',
      message: 'No token provided',
      data: null
    });
  }

  jwt.verify(token, process.env.JWT_SECRET, (err, decoded) => {
    if (err) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'Invalid or expired token',
        data: null
      });
    }
    req.user = decoded;
    next();
  });
};
// Optional authentication - does not fail if token is missing, but validates if present
// Supports both Authorization header and query parameter token (for EventSource compatibility)
// NOTE(review): tokens passed via query string can leak into access logs,
// proxies, and browser history — confirm this trade-off is acceptable and
// that it is limited to SSE endpoints.
export const optionalAuth = (req, res, next) => {
  const authHeader = req.headers['authorization'];
  const headerToken = authHeader && authHeader.split(' ')[1];
  // Check query parameter token (for EventSource which can't send headers)
  const queryToken = req.query.token;
  // Use header token if available, otherwise use query token
  const token = headerToken || queryToken;
  // If no token, continue without authentication (req.user will be undefined)
  if (!token) {
    return next();
  }
  // If token is present, validate it
  jwt.verify(token, process.env.JWT_SECRET, (err, user) => {
    if (err) {
      // Invalid token - continue anyway but don't set req.user
      // (deliberate best-effort: downstream handlers treat the request as
      // anonymous rather than rejecting it)
      console.log('[Auth] Invalid token provided but continuing without auth');
      return next();
    }
    req.user = user;
    next();
  });
};
/**
 * Role-gate middleware factory.
 * @param {string[]} allowedRoles - roles permitted to reach the route.
 * Responds 401 when no authenticated user is attached to the request and 403
 * when the user's role is not in the allow-list; otherwise defers to next().
 */
export const authorizeRole = (allowedRoles) => (req, res, next) => {
  if (!req.user) {
    return res.status(401).json({
      status: 'error',
      code: 'UNAUTHORIZED',
      message: 'User not authenticated',
      data: null
    });
  }

  const roleIsAllowed = allowedRoles.includes(req.user.role);
  if (!roleIsAllowed) {
    return res.status(403).json({
      status: 'error',
      code: 'FORBIDDEN',
      message: 'Insufficient permissions',
      data: null
    });
  }

  next();
};
/**
 * Granular permission-based authorization middleware with Redis caching
 * Implements Look-Aside pattern: Cache -> DB -> Cache population
 *
 * @param {string} resource - resource identifier (e.g. 'figma', 'tokens')
 * @param {string} action - CRUD verb: 'create' | 'read' | 'update' | 'delete'
 * @returns Express middleware that allows the request only when the user's
 *          role holds the `${resource}:${action}` permission. The 'admin'
 *          role bypasses the lookup entirely. Requires req.user to have been
 *          populated by an authentication middleware upstream.
 */
export const requirePermission = (resource, action) => {
  return async (req, res, next) => {
    try {
      if (!req.user || !req.user.role) {
        return res.status(401).json({
          status: 'error',
          code: 'UNAUTHORIZED',
          message: 'No user context found',
          data: null
        });
      }
      const userRole = req.user.role;
      // Admin override - admins have implicit access to everything
      if (userRole === 'admin') {
        return next();
      }
      const permissionKey = `${resource}:${action}`;
      // ======== REDIS LAYER ========
      // Attempt cache read (non-blocking)
      const cachedPermissions = await RedisService.getRolePermissions(userRole);
      if (cachedPermissions !== null) {
        // Cache HIT
        const hasPermission = cachedPermissions.includes(permissionKey);
        console.debug(`[RBAC] Cache HIT for ${userRole} - ${permissionKey}: ${hasPermission}`);
        if (!hasPermission) {
          return res.status(403).json({
            status: 'error',
            code: 'FORBIDDEN',
            message: 'Missing required permission',
            data: { resource, action }
          });
        }
        return next();
      }
      // Cache MISS - fallback to database
      console.debug(`[RBAC] Cache MISS for ${userRole} - querying DB`);
      // Dynamically import TeamPermissions model to avoid circular dependency
      const { default: TeamPermissions } = await import('../models/TeamPermissions.js');
      // Fetch ALL permissions for this role (for cache population)
      const allPermissions = await TeamPermissions.findAll({
        where: { role: userRole },
        attributes: ['resource', 'action']
      });
      // Transform to "resource:action" format
      const permissionStrings = allPermissions.map(p => `${p.resource}:${p.action}`);
      // Fire-and-forget cache population (don't await to avoid blocking request)
      RedisService.setRolePermissions(userRole, permissionStrings).catch(err => {
        console.error('[RBAC] Failed to populate cache:', err.message);
      });
      // Check if requested permission exists
      const hasPermission = permissionStrings.includes(permissionKey);
      if (!hasPermission) {
        return res.status(403).json({
          status: 'error',
          code: 'FORBIDDEN',
          message: 'Missing required permission',
          data: { resource, action }
        });
      }
      next();
    } catch (error) {
      // Deliberately fail closed: any cache/DB error denies the request with
      // a 500 rather than granting access.
      console.error('[RBAC] Permission check error:', error);
      res.status(500).json({
        status: 'error',
        code: 'SERVER_ERROR',
        message: 'Internal server error verifying permissions',
        data: null
      });
    }
  };
};

View File

@@ -0,0 +1,23 @@
/**
 * Centralized Express error handler (the 4-argument signature is how Express
 * recognizes an error middleware — `next` must stay even though unused).
 * Normalizes any forwarded error into the API envelope shape. In development
 * the response includes the error message/code/stack; elsewhere details are
 * withheld.
 */
export const errorHandler = (err, req, res, next) => {
  console.error(err);
  const status = err.status || 500;
  const code = err.code || 'INTERNAL_SERVER_ERROR';
  const message = err.message || 'An unexpected error occurred';
  // BUGFIX: JSON.stringify on an Error yields {} because Error properties are
  // non-enumerable, so the previous `{ error: err }` leaked no detail even in
  // development. Serialize the useful fields explicitly instead.
  const debugData =
    process.env.NODE_ENV === 'development'
      ? { error: { message, code, stack: err.stack } }
      : null;
  res.status(status).json({
    status: 'error',
    code,
    message,
    data: debugData
  });
};

/** 404 fallback for requests that matched no registered route. */
export const notFoundHandler = (req, res) => {
  res.status(404).json({
    status: 'error',
    code: 'NOT_FOUND',
    message: `Route ${req.method} ${req.path} not found`,
    data: null
  });
};

View File

@@ -0,0 +1,99 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * AIChat model — one row per AI interaction (user message + AI response),
 * with token accounting and optional generated artifacts (code, navigation
 * structure, design tokens).
 *
 * FIX: removed `index: true` from sessionId/status — it is not a valid
 * Sequelize attribute option and was silently ignored; the real indexes are
 * declared in the model-level `indexes` array below (which already covered
 * both fields).
 */
const AIChat = sequelize.define('AIChat', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  // Optional: chats may exist outside any project.
  projectId: {
    type: DataTypes.UUID,
    allowNull: true,
    references: {
      model: 'projects',
      key: 'id'
    }
  },
  userId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  // Groups messages belonging to one conversation.
  sessionId: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4
  },
  messageType: {
    type: DataTypes.ENUM('claude-query', 'ai-query', 'navigation-gen', 'token-save'),
    allowNull: false
  },
  status: {
    type: DataTypes.ENUM('pending', 'processing', 'completed', 'failed'),
    defaultValue: 'pending'
  },
  userMessage: {
    type: DataTypes.TEXT,
    allowNull: false
  },
  aiResponse: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  generatedCode: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  navigationStructure: {
    type: DataTypes.JSON,
    allowNull: true
  },
  tokens: {
    type: DataTypes.JSON,
    allowNull: true
  },
  // Usage accounting for the AI call.
  inputTokens: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  outputTokens: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  model: {
    type: DataTypes.STRING,
    defaultValue: 'mock-ai-v1'
  },
  error: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  metadata: {
    type: DataTypes.JSON,
    defaultValue: {}
  },
  startedAt: {
    type: DataTypes.DATE,
    allowNull: true
  },
  completedAt: {
    type: DataTypes.DATE,
    allowNull: true
  }
}, {
  tableName: 'ai_chats',
  timestamps: true,
  indexes: [
    { fields: ['userId', 'createdAt'] },
    { fields: ['projectId', 'status'] },
    { fields: ['sessionId'] },
    { fields: ['status'] }
  ]
});

export default AIChat;

View File

@@ -0,0 +1,75 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Component model — one design-system component per project, optionally
 * linked to its Figma node and Storybook story, with adoption metrics.
 */
const Component = sequelize.define('Component', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  projectId: {
    type: DataTypes.UUID,
    allowNull: false,
    // NOTE(review): references 'Projects' (capitalized) while AIChat/Discovery
    // reference 'projects' — confirm the actual table name; only one can match.
    references: {
      model: 'Projects',
      key: 'id'
    }
  },
  name: {
    type: DataTypes.STRING,
    allowNull: false
  },
  description: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  category: {
    type: DataTypes.STRING,
    allowNull: true
  },
  // Figma node id; unique so each Figma component maps to at most one row.
  figmaId: {
    type: DataTypes.STRING,
    allowNull: true,
    unique: true
  },
  storybookPath: {
    type: DataTypes.STRING,
    allowNull: true
  },
  status: {
    type: DataTypes.ENUM('draft', 'wip', 'ready', 'deprecated'),
    defaultValue: 'draft'
  },
  adoptionScore: {
    type: DataTypes.FLOAT,
    defaultValue: 0
  },
  usageCount: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  variants: {
    type: DataTypes.JSON,
    defaultValue: []
  },
  metadata: {
    type: DataTypes.JSON,
    allowNull: true
  },
  // Redundant with timestamps: true below, but kept for explicit defaults.
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  },
  updatedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  }
}, {
  timestamps: true,
  indexes: [
    { fields: ['projectId'] },
    { fields: ['status'] }
  ]
});

export default Component;

View File

@@ -0,0 +1,70 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * ConfigAuditLog Model
 * Immutable record of every configuration change.
 * Matches the config_audit_logs table created by the migration above;
 * rows are written once and never updated (timestamps: false).
 */
const ConfigAuditLog = sequelize.define('ConfigAuditLog', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  configId: {
    type: DataTypes.UUID,
    allowNull: false,
    field: 'config_id',
    references: {
      model: 'config_settings',
      key: 'id'
    }
  },
  actorId: {
    type: DataTypes.UUID,
    allowNull: false,
    field: 'actor_id',
    comment: 'User ID who made the change',
    references: {
      model: 'users',
      key: 'id'
    }
  },
  action: {
    type: DataTypes.ENUM('CREATE', 'UPDATE', 'DELETE'),
    allowNull: false
  },
  previousValue: {
    type: DataTypes.JSONB,
    allowNull: true,
    field: 'previous_value',
    comment: 'Snapshot of value before change (encrypted if secret)'
  },
  newValue: {
    type: DataTypes.JSONB,
    allowNull: true,
    field: 'new_value',
    comment: 'Snapshot of value after change (encrypted if secret)'
  },
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW,
    field: 'created_at'
  }
}, {
  tableName: 'config_audit_logs',
  timestamps: false, // Only creation time matters for audit logs
  // NOTE(review): these index fields use camelCase attribute names while the
  // underlying columns are snake_case (config_id, actor_id, created_at); the
  // migration creates the snake_case indexes itself, but if sync() is ever
  // used these names may not resolve — verify against the dialect behavior.
  indexes: [
    {
      fields: ['configId']
    },
    {
      fields: ['actorId']
    },
    {
      fields: ['createdAt']
    }
  ]
});

export default ConfigAuditLog;

View File

@@ -0,0 +1,78 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * ConfigSetting Model
 * Stores configuration key-value pairs at varying scopes (SYSTEM, PROJECT, USER).
 * Secrets are encrypted at the application layer before storage.
 * Backed by the config_settings table created in the migration above.
 */
const ConfigSetting = sequelize.define('ConfigSetting', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  scope: {
    type: DataTypes.ENUM('SYSTEM', 'PROJECT', 'USER'),
    allowNull: false,
    comment: 'The hierarchy level of this setting'
  },
  scopeId: {
    type: DataTypes.UUID,
    allowNull: true,
    field: 'scope_id',
    comment: 'NULL for SYSTEM, ProjectID for PROJECT, UserId for USER'
  },
  key: {
    type: DataTypes.STRING,
    allowNull: false,
    validate: {
      notEmpty: true
    }
  },
  value: {
    type: DataTypes.JSONB,
    allowNull: false,
    comment: 'Stores the actual config value. Encrypted string if isSecret=true'
  },
  isSecret: {
    type: DataTypes.BOOLEAN,
    defaultValue: false,
    field: 'is_secret',
    comment: 'If true, value is encrypted using AES-256-GCM'
  },
  schemaVersion: {
    type: DataTypes.INTEGER,
    defaultValue: 1,
    field: 'schema_version',
    comment: 'Used for future schema migrations of the config value itself'
  },
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW,
    field: 'created_at'
  },
  updatedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW,
    field: 'updated_at'
  }
}, {
  tableName: 'config_settings',
  timestamps: true,
  indexes: [
    {
      // Ensure unique key per scope/scopeId combination
      unique: true,
      fields: ['scope', 'scope_id', 'key'],
      // Note: In Postgres, multiple NULLs (for scope_id) are distinct by default in unique constraints
      // We will handle specific SYSTEM scope uniqueness validation in the Service layer or
      // rely on a partial index in migration if strictly needed.
    },
    {
      fields: ['scope', 'scope_id']
    }
  ]
});

export default ConfigSetting;

View File

@@ -0,0 +1,69 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Discovery model — one row per background analysis job (project analysis,
 * quick wins, component audit, token extraction) with progress and results.
 *
 * FIX: removed `index: true` from the type/status attributes — it is not a
 * valid Sequelize attribute option and was silently ignored. `status` was
 * already indexed via the model-level `indexes` array; the `type` index is
 * now declared there so it is actually created.
 */
const Discovery = sequelize.define('Discovery', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  // Optional: discoveries may run outside any project.
  projectId: {
    type: DataTypes.UUID,
    allowNull: true,
    references: {
      model: 'projects',
      key: 'id'
    }
  },
  userId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  type: {
    type: DataTypes.ENUM('project-analysis', 'quick-wins', 'component-audit', 'token-extraction')
  },
  status: {
    type: DataTypes.ENUM('queued', 'running', 'completed', 'failed'),
    defaultValue: 'queued'
  },
  // Completion percentage (0-100) reported by the worker.
  progress: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  results: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  error: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  metadata: {
    type: DataTypes.JSON,
    defaultValue: {}
  },
  startedAt: {
    type: DataTypes.DATE,
    allowNull: true
  },
  completedAt: {
    type: DataTypes.DATE,
    allowNull: true
  }
}, {
  tableName: 'discoveries',
  timestamps: true,
  indexes: [
    { fields: ['userId', 'createdAt'] },
    { fields: ['projectId', 'status'] },
    { fields: ['status'] },
    { fields: ['type'] } // added: previously requested via the invalid attribute option
  ]
});

export default Discovery;

View File

@@ -0,0 +1,89 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * FigmaSync model — one row per Figma synchronization job, carrying the job
 * type, lifecycle status, and the various artifacts a sync can produce.
 *
 * FIX: removed `index: true` from the status attribute — it is not a valid
 * Sequelize attribute option and was silently ignored; `status` is already
 * indexed via the model-level `indexes` array below.
 */
const FigmaSync = sequelize.define('FigmaSync', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  projectId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'projects',
      key: 'id'
    }
  },
  userId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  figmaFileKey: {
    type: DataTypes.STRING,
    allowNull: false
  },
  figmaFileUrl: {
    type: DataTypes.STRING,
    allowNull: false
  },
  syncType: {
    type: DataTypes.ENUM('extract-variables', 'extract-components', 'extract-styles', 'sync-tokens', 'visual-diff', 'validate-components', 'generate-code', 'export-assets'),
    allowNull: false
  },
  status: {
    type: DataTypes.ENUM('queued', 'running', 'completed', 'failed'),
    defaultValue: 'queued'
  },
  // Job outputs — which of these are populated depends on syncType.
  extractedData: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  syncedTokens: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  validationResults: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  diffReport: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  generatedCode: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  error: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  metadata: {
    type: DataTypes.JSON,
    defaultValue: {}
  },
  startedAt: {
    type: DataTypes.DATE,
    allowNull: true
  },
  completedAt: {
    type: DataTypes.DATE,
    allowNull: true
  }
}, {
  tableName: 'figma_syncs',
  timestamps: true,
  indexes: [
    { fields: ['projectId', 'createdAt'] },
    { fields: ['userId', 'status'] },
    { fields: ['status'] }
  ]
});

export default FigmaSync;

62
server/src/models/Icon.js Normal file
View File

@@ -0,0 +1,62 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Icon model — one SVG icon per row, scoped to a project, with size variants
 * and usage tracking.
 */
const Icon = sequelize.define('Icon', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  projectId: {
    type: DataTypes.UUID,
    allowNull: false,
    // NOTE(review): references 'Projects' (capitalized) while AIChat/Discovery
    // reference 'projects' — confirm the actual table name.
    references: {
      model: 'Projects',
      key: 'id'
    }
  },
  name: {
    type: DataTypes.STRING,
    allowNull: false
  },
  category: {
    type: DataTypes.STRING,
    allowNull: true
  },
  // Raw SVG markup for the icon.
  svgData: {
    type: DataTypes.TEXT,
    allowNull: false
  },
  // Pixel sizes the icon is exported at (stored as strings).
  sizes: {
    type: DataTypes.JSON,
    defaultValue: ['16', '24', '32', '48']
  },
  figmaId: {
    type: DataTypes.STRING,
    allowNull: true
  },
  usageCount: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  tags: {
    type: DataTypes.JSON,
    defaultValue: []
  },
  // Redundant with timestamps: true below, but kept for explicit defaults.
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  },
  updatedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  }
}, {
  timestamps: true,
  indexes: [
    { fields: ['projectId'] },
    { fields: ['category'] }
  ]
});

export default Icon;

View File

@@ -0,0 +1,59 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Integration — a third-party service hookup (Figma, Jira, Storybook,
 * GitHub or Slack) configured for a project. Connection details live in
 * the free-form `config` JSON blob.
 */
const Integration = sequelize.define(
  'Integration',
  {
    id: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true },
    // Owning project (FK to Projects.id).
    projectId: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'Projects', key: 'id' }
    },
    type: {
      type: DataTypes.ENUM('figma', 'jira', 'storybook', 'github', 'slack'),
      allowNull: false
    },
    name: { type: DataTypes.STRING, allowNull: false },
    // Service-specific settings (tokens, hosts, webhooks, ...).
    config: { type: DataTypes.JSON, allowNull: false, defaultValue: {} },
    isActive: { type: DataTypes.BOOLEAN, defaultValue: true },
    lastSyncedAt: { type: DataTypes.DATE, allowNull: true },
    // Last failure message, cleared on success by callers.
    errorMessage: { type: DataTypes.TEXT, allowNull: true },
    createdAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW },
    updatedAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW }
  },
  {
    timestamps: true,
    indexes: [{ fields: ['projectId'] }, { fields: ['type'] }]
  }
);

export default Integration;

68
server/src/models/Log.js Normal file
View File

@@ -0,0 +1,68 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * PHASE 1: Log Model
 * Stores server and browser logs for debugging and monitoring
 * Includes automatic timestamp and user tracking
 *
 * FIX: attribute-level `index: true` is not a supported Sequelize column
 * option and was silently ignored on four columns; the effective indexes
 * are declared in options.indexes below, where a single-column
 * `timestamp` index was added to realize the original intent.
 */
const Log = sequelize.define('Log', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  // Originating user when known; browser logs may be anonymous.
  userId: {
    type: DataTypes.UUID,
    allowNull: true,
    references: {
      model: 'Users',
      key: 'id'
    }
  },
  level: {
    type: DataTypes.ENUM('log', 'warn', 'error', 'info', 'debug'),
    defaultValue: 'log'
  },
  source: {
    type: DataTypes.ENUM('server', 'browser'),
    defaultValue: 'server'
  },
  message: {
    type: DataTypes.TEXT,
    allowNull: false
  },
  context: {
    type: DataTypes.JSON,
    allowNull: true,
    comment: 'Additional context like stack trace, request details, etc.'
  },
  userAgent: {
    type: DataTypes.STRING,
    allowNull: true,
    comment: 'Browser user agent for browser logs'
  },
  url: {
    type: DataTypes.STRING,
    allowNull: true,
    comment: 'Page URL for browser logs'
  },
  // Event time as reported by the producer (distinct from createdAt).
  timestamp: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  }
}, {
  tableName: 'logs',
  timestamps: true,
  indexes: [
    { fields: ['userId', 'timestamp'] },
    { fields: ['level', 'timestamp'] },
    { fields: ['source', 'timestamp'] },
    // Single-column index for pure time-range queries.
    { fields: ['timestamp'] }
  ]
});

export default Log;

View File

@@ -0,0 +1,71 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Project — top-level container for a design-system workspace.
 * Links out to Figma, Jira and Storybook and carries per-project
 * token-formatting settings.
 */
const Project = sequelize.define(
  'Project',
  {
    id: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true },
    name: { type: DataTypes.STRING, allowNull: false },
    description: { type: DataTypes.TEXT, allowNull: true },
    // Short uppercase identifier, unique across all projects.
    key: {
      type: DataTypes.STRING,
      allowNull: false,
      unique: true,
      validate: { is: /^[A-Z0-9]+$/ }
    },
    // Owning user (FK to Users.id).
    userId: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'Users', key: 'id' }
    },
    figmaFileKey: { type: DataTypes.STRING, allowNull: true },
    jiraProjectKey: { type: DataTypes.STRING, allowNull: true },
    storybookUrl: {
      type: DataTypes.STRING,
      allowNull: true,
      validate: { isUrl: true }
    },
    status: {
      type: DataTypes.ENUM('active', 'archived', 'draft'),
      defaultValue: 'draft'
    },
    // Per-project token formatting preferences.
    settings: {
      type: DataTypes.JSON,
      defaultValue: {
        tokenNamespace: 'design',
        colorFormat: 'hex',
        spacingUnit: 'px'
      }
    },
    createdAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW },
    updatedAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW }
  },
  { timestamps: true }
);

export default Project;

102
server/src/models/QATest.js Normal file
View File

@@ -0,0 +1,102 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * QATest — one QA job (visual screenshot comparison or test-suite run)
 * executed for a project, with its lifecycle status, counters and results.
 */
const QATest = sequelize.define('QATest', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  // Owning project (FK to projects.id).
  projectId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'projects',
      key: 'id'
    }
  },
  // User who started the run (FK to users.id).
  userId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  testType: {
    type: DataTypes.ENUM('screenshot-compare', 'test-run'),
    allowNull: false
  },
  // Job lifecycle. FIX: the former attribute-level `index: true` is not a
  // supported Sequelize column option and was silently ignored; the real
  // index on `status` is declared in options.indexes below.
  status: {
    type: DataTypes.ENUM('queued', 'running', 'completed', 'failed'),
    defaultValue: 'queued'
  },
  baselineScreenshots: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  currentScreenshots: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  comparisonResults: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  testSuite: {
    type: DataTypes.STRING,
    allowNull: true
  },
  testResults: {
    type: DataTypes.JSON,
    defaultValue: null
  },
  passedTests: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  failedTests: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  skippedTests: {
    type: DataTypes.INTEGER,
    defaultValue: 0
  },
  // Total run duration; unit not fixed by this model — presumably ms.
  totalDuration: {
    type: DataTypes.INTEGER,
    allowNull: true
  },
  diffPercentage: {
    type: DataTypes.FLOAT,
    allowNull: true
  },
  // Failure reason when status === 'failed'.
  error: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  metadata: {
    type: DataTypes.JSON,
    defaultValue: {}
  },
  startedAt: {
    type: DataTypes.DATE,
    allowNull: true
  },
  completedAt: {
    type: DataTypes.DATE,
    allowNull: true
  }
}, {
  tableName: 'qa_tests',
  timestamps: true,
  indexes: [
    { fields: ['projectId', 'createdAt'] },
    { fields: ['userId', 'status'] },
    { fields: ['status'] },
    { fields: ['testType'] }
  ]
});

export default QATest;

54
server/src/models/Team.js Normal file
View File

@@ -0,0 +1,54 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Team — a named group of users owned by one user, with display and
 * visibility settings plus admin bookkeeping metadata.
 */
const Team = sequelize.define('Team', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  name: {
    type: DataTypes.STRING(255),
    allowNull: false,
    validate: { len: [2, 255] }
  },
  description: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  // Team owner (FK to users.id).
  ownerId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  // Display/visibility preferences.
  settings: {
    type: DataTypes.JSON,
    defaultValue: {
      color: '#3498db',
      icon: 'users',
      visibility: 'private',
      allowPublicProjects: false
    }
  },
  // FIX: the former attribute-level `index: true` is not a supported
  // Sequelize column option and was silently ignored; the index is now
  // declared in options.indexes so it actually exists.
  isActive: {
    type: DataTypes.BOOLEAN,
    defaultValue: true
  },
  metadata: {
    type: DataTypes.JSON,
    defaultValue: {
      createdByAdmin: false,
      department: null,
      costCenter: null
    }
  }
}, {
  tableName: 'teams',
  timestamps: true,
  indexes: [
    { fields: ['isActive'] }
  ]
});

export default Team;

View File

@@ -0,0 +1,56 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * TeamMember — membership join row between a team and a user, carrying
 * the member's role and invitation provenance. A user can appear at most
 * once per team (unique composite index below).
 */
const TeamMember = sequelize.define('TeamMember', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  teamId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'teams',
      key: 'id'
    }
  },
  userId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  // FIX: the former attribute-level `index: true` is not a supported
  // Sequelize column option and was silently ignored; a real single-column
  // index on `role` is now declared in options.indexes.
  role: {
    type: DataTypes.ENUM('admin', 'editor', 'viewer'),
    defaultValue: 'viewer'
  },
  joinedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  },
  // User who sent the invitation, when the member was invited.
  invitedBy: {
    type: DataTypes.UUID,
    allowNull: true,
    references: {
      model: 'users',
      key: 'id'
    }
  },
  isActive: {
    type: DataTypes.BOOLEAN,
    defaultValue: true
  }
}, {
  tableName: 'team_members',
  timestamps: true,
  indexes: [
    { fields: ['teamId', 'userId'], unique: true },
    { fields: ['teamId', 'role'] },
    { fields: ['role'] }
  ]
});

export default TeamMember;

View File

@@ -0,0 +1,47 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * TeamPermissions — one RBAC grant: a role may perform an action on a
 * resource. The (role, resource, action) triple is unique, enforced by
 * the named composite index below.
 */
const TeamPermissions = sequelize.define(
  'TeamPermissions',
  {
    id: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true },
    role: {
      type: DataTypes.ENUM('admin', 'ui_team', 'ux_team', 'qa_team'),
      allowNull: false
    },
    permission: {
      type: DataTypes.STRING,
      allowNull: false,
      comment: 'Human readable permission name, e.g. "sync_figma"'
    },
    resource: { type: DataTypes.STRING, allowNull: false },
    action: {
      type: DataTypes.ENUM('create', 'read', 'update', 'delete'),
      allowNull: false
    },
    createdAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW },
    updatedAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW }
  },
  {
    tableName: 'team_permissions',
    timestamps: true,
    indexes: [
      {
        unique: true,
        fields: ['role', 'resource', 'action'],
        name: 'unique_role_resource_action'
      }
    ]
  }
);

export default TeamPermissions;

View File

@@ -0,0 +1,53 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * TeamSettings — per-team configuration: invitation policy, project
 * approval policy, default member role, feature flags and notification
 * preferences. One row per team (unique teamId).
 */
const TeamSettings = sequelize.define(
  'TeamSettings',
  {
    id: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true },
    // One settings row per team (FK to teams.id, unique).
    teamId: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'teams', key: 'id' },
      unique: true
    },
    allowMemberInvites: { type: DataTypes.BOOLEAN, defaultValue: false },
    requireApprovalForProjects: { type: DataTypes.BOOLEAN, defaultValue: false },
    // Role assigned to newly joined members.
    defaultRole: {
      type: DataTypes.ENUM('admin', 'editor', 'viewer'),
      defaultValue: 'viewer'
    },
    // Feature flags toggled per team.
    features: {
      type: DataTypes.JSON,
      defaultValue: {
        analytics: true,
        integrations: true,
        apiAccess: false,
        customBranding: false
      }
    },
    notificationPreferences: {
      type: DataTypes.JSON,
      defaultValue: {
        emailOnMemberJoin: true,
        emailOnProjectCreation: true,
        slackWebhook: null
      }
    }
  },
  {
    tableName: 'team_settings',
    timestamps: true
  }
);

export default TeamSettings;

View File

@@ -0,0 +1,62 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * Token — a single design token (color, spacing, typography, ...) that
 * belongs to a project. Tracks where the token came from (manual entry,
 * Figma extraction, or import).
 */
const Token = sequelize.define(
  'Token',
  {
    id: { type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4, primaryKey: true },
    // Owning project (FK to Projects.id).
    projectId: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: 'Projects', key: 'id' }
    },
    name: { type: DataTypes.STRING, allowNull: false },
    category: {
      type: DataTypes.ENUM('color', 'spacing', 'typography', 'shadow', 'border', 'other'),
      defaultValue: 'other'
    },
    // Token value serialized as a string (e.g. "#ff0000", "8px").
    value: { type: DataTypes.STRING, allowNull: false },
    description: { type: DataTypes.TEXT, allowNull: true },
    // Node/style id in the source Figma file, when applicable.
    figmaId: { type: DataTypes.STRING, allowNull: true },
    source: {
      type: DataTypes.ENUM('manual', 'figma', 'imported'),
      defaultValue: 'manual'
    },
    metadata: { type: DataTypes.JSON, allowNull: true },
    createdAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW },
    updatedAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW }
  },
  {
    timestamps: true,
    indexes: [{ fields: ['projectId'] }, { fields: ['category'] }]
  }
);

export default Token;

View File

@@ -0,0 +1,76 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * TranslationDictionary — a named, versioned set of token-to-token
 * mappings between two design systems, owned by a project.
 */
const TranslationDictionary = sequelize.define('TranslationDictionary', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  name: {
    type: DataTypes.STRING,
    allowNull: false,
    validate: {
      notEmpty: true,
      len: [3, 255]
    }
  },
  description: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  // Owning project (FK to Projects.id).
  projectId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'Projects',
      key: 'id'
    }
  },
  // Author (FK to Users.id).
  createdBy: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'Users',
      key: 'id'
    }
  },
  status: {
    type: DataTypes.ENUM('draft', 'active', 'archived'),
    defaultValue: 'draft'
  },
  version: {
    type: DataTypes.INTEGER,
    defaultValue: 1,
    allowNull: false
  },
  // FIX: was DataTypes.JSONB, which is PostgreSQL-specific and fails on
  // the SQLite database this server is configured with; JSON also matches
  // every other model in this codebase.
  metadata: {
    type: DataTypes.JSON,
    defaultValue: {
      sourceSystem: null,
      targetSystem: null,
      coverage: 0,
      validationStatus: 'pending',
      lastValidated: null,
      tags: []
    }
  },
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  },
  updatedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  }
}, {
  timestamps: true,
  indexes: [
    { fields: ['projectId'] },
    { fields: ['createdBy'] },
    { fields: ['status'] },
    { fields: ['createdAt'] }
  ]
});

export default TranslationDictionary;

View File

@@ -0,0 +1,78 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';

/**
 * TranslationMapping — a single source→target token mapping inside a
 * TranslationDictionary. One source token maps to at most one target per
 * dictionary (unique composite index below).
 */
const TranslationMapping = sequelize.define('TranslationMapping', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  // Parent dictionary; rows are removed with it (CASCADE).
  dictionaryId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: {
      model: 'TranslationDictionaries',
      key: 'id'
    },
    onDelete: 'CASCADE'
  },
  sourceToken: {
    type: DataTypes.STRING,
    allowNull: false,
    validate: {
      notEmpty: true
    }
  },
  targetToken: {
    type: DataTypes.STRING,
    allowNull: false,
    validate: {
      notEmpty: true
    }
  },
  // FIX: was DataTypes.JSONB, which is PostgreSQL-specific and fails on
  // the SQLite database this server is configured with; JSON also matches
  // every other model in this codebase.
  transformRule: {
    type: DataTypes.JSON,
    defaultValue: null,
    comment: 'Optional transformation rules (scale, convert units, etc.)'
  },
  validated: {
    type: DataTypes.BOOLEAN,
    defaultValue: false
  },
  confidence: {
    type: DataTypes.FLOAT,
    defaultValue: 1.0,
    validate: {
      min: 0,
      max: 1
    },
    comment: 'Confidence score for auto-generated mappings (0-1)'
  },
  notes: {
    type: DataTypes.TEXT,
    allowNull: true
  },
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  },
  updatedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  }
}, {
  timestamps: true,
  indexes: [
    { fields: ['dictionaryId'] },
    { fields: ['sourceToken'] },
    { fields: ['targetToken'] },
    { fields: ['validated'] },
    // Composite unique constraint: one source token can only map to one target per dictionary
    {
      unique: true,
      fields: ['dictionaryId', 'sourceToken']
    }
  ]
});

export default TranslationMapping;

67
server/src/models/User.js Normal file
View File

@@ -0,0 +1,67 @@
import { DataTypes } from 'sequelize';
import sequelize from '../config/database.js';
import bcrypt from 'bcryptjs';

/**
 * User account model.
 * Passwords are stored as bcrypt hashes (cost factor 10); hashing happens
 * transparently in the beforeCreate/beforeUpdate hooks below, so callers
 * always pass plaintext and never hash themselves.
 */
const User = sequelize.define('User', {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true
  },
  email: {
    type: DataTypes.STRING,
    allowNull: false,
    unique: true,
    validate: { isEmail: true }
  },
  // bcrypt hash, never the plaintext (see hooks below).
  password: {
    type: DataTypes.STRING,
    allowNull: false
  },
  name: {
    type: DataTypes.STRING,
    allowNull: true
  },
  role: {
    type: DataTypes.ENUM('admin', 'ui_team', 'ux_team', 'qa_team'),
    defaultValue: 'ui_team'
  },
  // Free-form team identifier (STRING, not a FK). NOTE(review): teams.id
  // elsewhere in this codebase is a UUID — confirm whether this should
  // reference it.
  team_id: {
    type: DataTypes.STRING,
    allowNull: true
  },
  isActive: {
    type: DataTypes.BOOLEAN,
    defaultValue: true
  },
  lastLogin: {
    type: DataTypes.DATE,
    allowNull: true
  },
  createdAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  },
  updatedAt: {
    type: DataTypes.DATE,
    defaultValue: DataTypes.NOW
  }
}, {
  timestamps: true,
  hooks: {
    // Hash the plaintext password before the row is first persisted.
    beforeCreate: async (user) => {
      user.password = await bcrypt.hash(user.password, 10);
    },
    // Re-hash only when the password field actually changed, so ordinary
    // updates do not double-hash an already-hashed value.
    beforeUpdate: async (user) => {
      if (user.changed('password')) {
        user.password = await bcrypt.hash(user.password, 10);
      }
    }
    // NOTE(review): bulk operations (bulkCreate/update with where) skip
    // these per-instance hooks unless { individualHooks: true } is passed
    // — confirm callers.
  }
});

/**
 * Compare a plaintext candidate against the stored bcrypt hash.
 * @param {string} password - plaintext candidate
 * @returns {Promise<boolean>} true when the candidate matches
 */
User.prototype.validatePassword = async function(password) {
  return await bcrypt.compare(password, this.password);
};

export default User;

View File

@@ -0,0 +1,131 @@
import express from 'express';
import { authenticateToken, authorizeRole } from '../../middleware/auth.js';
import ConfigService from '../../services/ConfigService.js';
import Project from '../../models/Project.js';

const router = express.Router();

/**
 * GET /api/admin/config/system
 * Get all system-level configuration. Admin only.
 */
router.get('/system',
  authenticateToken,
  authorizeRole(['admin']),
  async (req, res, next) => {
    try {
      const config = await ConfigService.getScopeConfig('SYSTEM', null);
      res.json({
        status: 'success',
        data: config
      });
    } catch (error) {
      next(error);
    }
  });

/**
 * PUT /api/admin/config/system
 * Update system-level configuration. Admin only.
 * Body: { key, value }
 */
router.put('/system',
  authenticateToken,
  authorizeRole(['admin']),
  async (req, res, next) => {
    try {
      const { key, value } = req.body;
      // `value === undefined` (not falsy) so 0 / '' / false are accepted.
      if (!key || value === undefined) {
        return res.status(400).json({
          status: 'error',
          message: 'Key and value are required'
        });
      }
      const result = await ConfigService.setConfig({
        scope: 'SYSTEM',
        scopeId: null,
        key,
        value,
        actorId: req.user.id
      });
      res.json({
        status: 'success',
        message: 'System configuration updated',
        data: result
      });
    } catch (error) {
      next(error);
    }
  });

/**
 * GET /api/admin/config/project/:projectId
 * Get project-level configuration. Admin, or the project's owner.
 */
router.get('/project/:projectId',
  authenticateToken,
  // Admin or project owner access logic
  async (req, res, next) => {
    try {
      const { projectId } = req.params;
      // Access Check: non-admins must own the project.
      if (req.user.role !== 'admin') {
        const project = await Project.findOne({ where: { id: projectId, userId: req.user.id } });
        if (!project) {
          return res.status(403).json({ status: 'error', message: 'Forbidden' });
        }
      }
      const config = await ConfigService.getScopeConfig('PROJECT', projectId);
      res.json({
        status: 'success',
        data: config
      });
    } catch (error) {
      next(error);
    }
  });

/**
 * PUT /api/admin/config/project/:projectId
 * Update project-level configuration. Admin, or the project's owner.
 * Body: { key, value }
 */
router.put('/project/:projectId',
  authenticateToken,
  async (req, res, next) => {
    try {
      const { projectId } = req.params;
      const { key, value } = req.body;
      // FIX: validate the payload exactly as PUT /system does; previously
      // this route forwarded undefined key/value straight to ConfigService.
      if (!key || value === undefined) {
        return res.status(400).json({
          status: 'error',
          message: 'Key and value are required'
        });
      }
      // Access Check: non-admins must own the project.
      if (req.user.role !== 'admin') {
        const project = await Project.findOne({ where: { id: projectId, userId: req.user.id } });
        if (!project) {
          return res.status(403).json({ status: 'error', message: 'Forbidden' });
        }
      }
      const result = await ConfigService.setConfig({
        scope: 'PROJECT',
        scopeId: projectId,
        key,
        value,
        actorId: req.user.id
      });
      res.json({
        status: 'success',
        message: 'Project configuration updated',
        data: result
      });
    } catch (error) {
      next(error);
    }
  });

export default router;

View File

@@ -0,0 +1,257 @@
import express from 'express';
import User from '../../models/User.js';
import TeamPermissions from '../../models/TeamPermissions.js';
import { authenticateToken, authorizeRole } from '../../middleware/auth.js';
import RedisService from '../../services/RedisService.js';

const router = express.Router();

// FIX: this list was duplicated verbatim in three handlers below; one
// module-level constant keeps the role set consistent.
const VALID_ROLES = ['admin', 'ui_team', 'ux_team', 'qa_team'];

/**
 * POST /api/admin/roles/assign
 * Assign a role (and optionally a team) to a user. Admin only.
 */
router.post('/assign',
  authenticateToken,
  authorizeRole(['admin']),
  async (req, res) => {
    try {
      const { user_id, role, team_id } = req.body;
      if (!VALID_ROLES.includes(role)) {
        return res.status(400).json({
          status: 'error',
          code: 'INVALID_ROLE',
          message: 'Invalid role provided',
          data: null
        });
      }
      const user = await User.findByPk(user_id);
      if (!user) {
        return res.status(404).json({
          status: 'error',
          code: 'USER_NOT_FOUND',
          message: 'User not found',
          data: null
        });
      }
      user.role = role;
      // team_id is optional; only touch it when explicitly provided.
      if (team_id !== undefined) user.team_id = team_id;
      await user.save();
      return res.json({
        status: 'success',
        code: 'ROLE_ASSIGNED',
        message: 'Role assigned successfully',
        data: {
          id: user.id,
          email: user.email,
          role: user.role,
          team_id: user.team_id
        }
      });
    } catch (error) {
      console.error('Assign Role Error:', error);
      return res.status(500).json({
        status: 'error',
        code: 'SERVER_ERROR',
        message: 'Failed to assign role',
        data: null
      });
    }
  }
);

/**
 * GET /api/admin/roles/permissions/:role
 * Get all permissions for a specific role.
 * Users may view their own role; admins may view any.
 */
router.get('/permissions/:role',
  authenticateToken,
  async (req, res) => {
    try {
      const requestedRole = req.params.role;
      if (!VALID_ROLES.includes(requestedRole)) {
        return res.status(400).json({
          status: 'error',
          code: 'INVALID_ROLE',
          message: 'Invalid role',
          data: null
        });
      }
      // Users can view their own role's permissions, or Admin can view any
      if (req.user.role !== 'admin' && req.user.role !== requestedRole) {
        return res.status(403).json({
          status: 'error',
          code: 'FORBIDDEN',
          message: 'You can only view your own role permissions',
          data: null
        });
      }
      const permissions = await TeamPermissions.findAll({
        where: { role: requestedRole }
      });
      return res.json({
        status: 'success',
        code: 'PERMISSIONS_RETRIEVED',
        message: 'Permissions retrieved successfully',
        data: { role: requestedRole, permissions }
      });
    } catch (error) {
      console.error('Get Permissions Error:', error);
      return res.status(500).json({
        status: 'error',
        code: 'SERVER_ERROR',
        message: 'Failed to fetch permissions',
        data: null
      });
    }
  }
);

/**
 * PUT /api/admin/roles/permissions/:role
 * Upsert permissions (Enable/Disable). Admin only.
 * Body: { permission, resource, action, enabled }
 * enabled === false deletes the grant; anything else creates it if missing.
 */
router.put('/permissions/:role',
  authenticateToken,
  authorizeRole(['admin']),
  async (req, res) => {
    try {
      const role = req.params.role;
      const { permission, resource, action, enabled } = req.body;
      if (!VALID_ROLES.includes(role)) {
        return res.status(400).json({
          status: 'error',
          code: 'INVALID_ROLE',
          message: 'Invalid role',
          data: null
        });
      }
      if (enabled === false) {
        // Delete permission if it exists
        await TeamPermissions.destroy({
          where: { role, resource, action }
        });
        // Invalidate cache for this role
        await RedisService.invalidateRole(role);
        return res.json({
          status: 'success',
          code: 'PERMISSION_DISABLED',
          message: 'Permission disabled (removed)',
          data: null
        });
      } else {
        // Create or Update
        const [perm, created] = await TeamPermissions.findOrCreate({
          where: { role, resource, action },
          defaults: { permission, role, resource, action }
        });
        // Invalidate cache for this role
        await RedisService.invalidateRole(role);
        return res.json({
          status: 'success',
          code: created ? 'PERMISSION_CREATED' : 'PERMISSION_EXISTS',
          message: created ? 'Permission created' : 'Permission already exists',
          data: perm
        });
      }
    } catch (error) {
      console.error('Update Permission Error:', error);
      return res.status(500).json({
        status: 'error',
        code: 'SERVER_ERROR',
        message: 'Failed to update permission',
        data: null
      });
    }
  }
);

/**
 * GET /api/admin/users/:user_id/permissions
 * Get effective permissions for a specific user.
 * Admins may view anyone; other users only themselves.
 */
router.get('/users/:user_id/permissions',
  authenticateToken,
  async (req, res) => {
    try {
      const { user_id } = req.params;
      // Security check: Admin or Self only
      if (req.user.role !== 'admin' && req.user.id !== user_id) {
        return res.status(403).json({
          status: 'error',
          code: 'FORBIDDEN',
          message: 'Forbidden',
          data: null
        });
      }
      const user = await User.findByPk(user_id);
      if (!user) {
        return res.status(404).json({
          status: 'error',
          code: 'USER_NOT_FOUND',
          message: 'User not found',
          data: null
        });
      }
      // If user is Admin, they implicitly have all permissions
      if (user.role === 'admin') {
        return res.json({
          status: 'success',
          code: 'PERMISSIONS_RETRIEVED',
          message: 'Admin has all permissions',
          data: {
            user_id,
            role: user.role,
            isAdmin: true,
            permissions: ['*'] // Represents all access
          }
        });
      }
      const permissions = await TeamPermissions.findAll({
        where: { role: user.role }
      });
      return res.json({
        status: 'success',
        code: 'PERMISSIONS_RETRIEVED',
        message: 'Permissions retrieved successfully',
        data: {
          user_id,
          role: user.role,
          isAdmin: false,
          permissions
        }
      });
    } catch (error) {
      console.error('User Permissions Error:', error);
      return res.status(500).json({
        status: 'error',
        code: 'SERVER_ERROR',
        message: 'Failed to fetch user permissions',
        data: null
      });
    }
  }
);

export default router;

482
server/src/routes/ai.js Normal file
View File

@@ -0,0 +1,482 @@
import express from 'express';
import Project from '../models/Project.js';
import AIChat from '../models/AIChat.js';
import { authenticateToken } from '../middleware/auth.js';
const router = express.Router();
// Mock AI responses for MVP
// Canned payloads returned by every AI endpoint below until a real
// provider integration replaces them. Keys:
//   'code'       - sample generated React component source (string)
//   'analysis'   - sample design-system analysis text (string)
//   'navigation' - sample navigation structure (object)
const MOCK_AI_RESPONSES = {
  'code': `import React from 'react';
export const GeneratedComponent = ({ title, children }) => (
<div className="p-4 bg-gradient-to-r from-blue-50 to-indigo-50 rounded-lg border border-blue-200">
<h2 className="text-lg font-bold text-blue-900 mb-2">{title || 'Component'}</h2>
<div className="text-gray-700">{children}</div>
</div>
);`,
  'analysis': 'Based on the design system analysis, this component uses semantic colors from the design tokens. Recommended: Implement responsive padding and ensure accessibility with proper ARIA labels.',
  'navigation': { type: 'hierarchical', structure: ['Home', 'Components', 'Tokens', 'Settings'], metadata: { depth: 2, itemCount: 4 } }
};
// 1. POST /api/claude/chat - Chat with Claude for design help
// Creates an AIChat row in `processing` state, replies 202 immediately,
// then a mock timer completes the row ~1s later (no real Claude call yet).
router.post('/claude/chat', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, message, context } = req.body;
    if (!message) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Message is required',
        data: null
      });
    }
    // Verify project ownership if provided
    if (projectId) {
      const project = await Project.findOne({
        where: { id: projectId, userId: req.user.id }
      });
      if (!project) {
        return res.status(404).json({
          status: 'error',
          code: 'PROJECT_NOT_FOUND',
          message: 'Project not found',
          data: null
        });
      }
    }
    const aiChat = await AIChat.create({
      projectId: projectId || null,
      userId: req.user.id,
      messageType: 'claude-query',
      status: 'processing',
      userMessage: message,
      metadata: { context: context || {}, provider: 'claude' }
    });
    // Mock Claude API response
    // Fire-and-forget: the timer updates the row out-of-band; failures are
    // only logged because the HTTP response has already been sent.
    setTimeout(async () => {
      try {
        const response = MOCK_AI_RESPONSES.analysis;
        await aiChat.update({
          aiResponse: response,
          status: 'completed',
          // Rough token estimate: ~4 characters per token.
          inputTokens: Math.ceil(message.length / 4),
          outputTokens: Math.ceil(response.length / 4),
          completedAt: new Date()
        });
      } catch (error) {
        console.error('[AI] Error updating chat:', error);
      }
    }, 1000);
    console.log(`[AI] Claude chat started: ${aiChat.id}`);
    // 202 Accepted: processing continues asynchronously; poll GET /:id.
    // NOTE(review): sessionId comes from the AIChat model — presumably a
    // model-generated field; confirm it is populated on create.
    res.status(202).json({
      status: 'success',
      code: 'CLAUDE_CHAT_STARTED',
      message: 'Chat session started with Claude',
      data: {
        chatId: aiChat.id,
        sessionId: aiChat.sessionId,
        status: 'processing',
        estimatedTime: '2-5 seconds'
      }
    });
  } catch (error) {
    next(error);
  }
});
// 2. POST /api/ai/chat - Generic AI chat
// Accepts a message (optionally scoped to an owned project), records an
// AIChat row in `processing` state, answers 202, and lets a mock timer
// finish the row asynchronously.
router.post('/chat', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, message, context } = req.body;

    // A message is mandatory.
    if (!message) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Message is required',
        data: null
      });
    }

    // When a project is named, the caller must own it.
    if (projectId) {
      const ownedProject = await Project.findOne({
        where: { id: projectId, userId: req.user.id }
      });
      if (!ownedProject) {
        return res.status(404).json({
          status: 'error',
          code: 'PROJECT_NOT_FOUND',
          message: 'Project not found',
          data: null
        });
      }
    }

    const chat = await AIChat.create({
      projectId: projectId || null,
      userId: req.user.id,
      messageType: 'ai-query',
      status: 'processing',
      userMessage: message,
      metadata: { context: context || {}, provider: 'generic-ai' }
    });

    // Mock AI latency: complete the chat out-of-band after ~1.2s.
    setTimeout(async () => {
      try {
        const response = MOCK_AI_RESPONSES.analysis;
        await chat.update({
          aiResponse: response,
          status: 'completed',
          inputTokens: Math.ceil(message.length / 4),
          outputTokens: Math.ceil(response.length / 4),
          completedAt: new Date()
        });
      } catch (error) {
        console.error('[AI] Error updating chat:', error);
      }
    }, 1200);

    console.log(`[AI] Generic AI chat started: ${chat.id}`);
    res.status(202).json({
      status: 'success',
      code: 'AI_CHAT_STARTED',
      message: 'AI chat session started',
      data: {
        chatId: chat.id,
        sessionId: chat.sessionId,
        status: 'processing',
        estimatedTime: '2-5 seconds'
      }
    });
  } catch (error) {
    next(error);
  }
});
// 3. POST /api/dss/save-tokens - Save design tokens to DSS
// Records the token payload on an AIChat row (messageType 'token-save');
// a mock timer marks the row completed ~0.8s later.
router.post('/save-tokens', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, tokens, tokenName } = req.body;
    if (!projectId || !tokens || !tokenName) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'projectId, tokens, and tokenName are required',
        data: null
      });
    }
    // The caller must own the target project.
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    const aiChat = await AIChat.create({
      projectId,
      userId: req.user.id,
      messageType: 'token-save',
      status: 'processing',
      userMessage: `Saving tokens: ${tokenName}`,
      tokens: tokens,
      // NOTE(review): Object.keys assumes `tokens` is a plain object map —
      // confirm callers never send an array here.
      metadata: { tokenName, tokenCount: Object.keys(tokens).length }
    });
    // Fire-and-forget mock completion; failures are only logged since the
    // HTTP response below has already been sent.
    setTimeout(async () => {
      try {
        await aiChat.update({
          status: 'completed',
          completedAt: new Date()
        });
      } catch (error) {
        console.error('[AI] Error saving tokens:', error);
      }
    }, 800);
    console.log(`[AI] Tokens saved: ${aiChat.id}`);
    // 202 Accepted: the save is finalized asynchronously.
    res.status(202).json({
      status: 'success',
      code: 'TOKENS_SAVED',
      message: 'Design tokens saved to DSS',
      data: {
        sessionId: aiChat.sessionId,
        tokenName,
        tokenCount: Object.keys(tokens).length,
        estimatedTime: '1-2 seconds'
      }
    });
  } catch (error) {
    next(error);
  }
});
// 4. POST /api/navigation/generate - Generate navigation structures
// Builds a simple hierarchical structure from the supplied component list
// (Overview + components + Settings) and records it on an AIChat row.
router.post('/generate', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, componentList, depth = 2 } = req.body;
    if (!projectId || !componentList || !Array.isArray(componentList)) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'projectId and componentList (array) are required',
        data: null
      });
    }
    // The caller must own the target project.
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    // Deterministic mock structure — no AI involved yet.
    const navigationStructure = {
      type: 'hierarchical',
      depth,
      items: componentList,
      structure: ['Overview', ...componentList, 'Settings'],
      generatedAt: new Date().toISOString()
    };
    const aiChat = await AIChat.create({
      projectId,
      userId: req.user.id,
      messageType: 'navigation-gen',
      status: 'processing',
      userMessage: `Generate navigation for components: ${componentList.join(', ')}`,
      navigationStructure,
      metadata: { componentCount: componentList.length, depth }
    });
    // Fire-and-forget mock completion; failures are only logged since the
    // HTTP response below has already been sent.
    setTimeout(async () => {
      try {
        await aiChat.update({
          status: 'completed',
          completedAt: new Date()
        });
      } catch (error) {
        console.error('[AI] Error generating navigation:', error);
      }
    }, 600);
    console.log(`[AI] Navigation generated: ${aiChat.id}`);
    // 202 Accepted: persisted asynchronously, structure returned inline.
    res.status(202).json({
      status: 'success',
      code: 'NAVIGATION_GENERATED',
      message: 'Navigation structure generated',
      data: {
        sessionId: aiChat.sessionId,
        structure: navigationStructure.structure,
        itemCount: navigationStructure.structure.length,
        estimatedTime: '1 second'
      }
    });
  } catch (error) {
    next(error);
  }
});
// 5. POST /api/system/reset - Admin system reset
// Logs a (mock) reset request on an AIChat row and acknowledges it.
router.post('/reset', authenticateToken, async (req, res, next) => {
  try {
    // FIX: this route is documented as an admin operation but previously
    // any authenticated user could call it. Enforce the admin role inline
    // (same `req.user.role` pattern as the admin config routes).
    if (req.user.role !== 'admin') {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'Admin role required',
        data: null
      });
    }
    const { confirmReset } = req.body;
    // Require an explicit boolean true to guard against accidental calls.
    if (confirmReset !== true) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'confirmReset must be true to proceed',
        data: null
      });
    }
    // Log the reset attempt (the created row itself is not needed here).
    await AIChat.create({
      userId: req.user.id,
      messageType: 'claude-query',
      status: 'completed',
      userMessage: 'System reset initiated',
      aiResponse: 'System reset operation logged',
      metadata: { operation: 'system-reset', timestamp: new Date().toISOString() }
    });
    console.log(`[System] Reset initiated by user: ${req.user.id}`);
    res.status(200).json({
      status: 'success',
      code: 'SYSTEM_RESET_INITIATED',
      message: 'System reset initiated (mock)',
      data: {
        resetTime: new Date().toISOString(),
        status: 'logged'
      }
    });
  } catch (error) {
    next(error);
  }
});
// 6. GET /api/assets/list - List design assets
// Returns a paginated mock asset list for an owned project.
// Query: projectId (required), limit (1-100, default 20), offset (>= 0).
router.get('/assets-list', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, limit = 20, offset = 0 } = req.query;
    const maxLimit = 100;
    // FIX: always pass a radix to parseInt, and clamp both values so a
    // negative limit/offset from the query string cannot produce a
    // negative slice range.
    const queryLimit = Math.min(Math.max(Number.parseInt(limit, 10) || 20, 1), maxLimit);
    const queryOffset = Math.max(Number.parseInt(offset, 10) || 0, 0);
    if (!projectId) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'projectId is required',
        data: null
      });
    }
    // The caller must own the target project.
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    // Mock asset list
    const mockAssets = [
      { id: '1', name: 'Button.svg', type: 'component', size: 2048, createdAt: new Date().toISOString() },
      { id: '2', name: 'Input.svg', type: 'component', size: 1536, createdAt: new Date().toISOString() },
      { id: '3', name: 'Card.svg', type: 'component', size: 3072, createdAt: new Date().toISOString() },
      { id: '4', name: 'Colors.json', type: 'token', size: 512, createdAt: new Date().toISOString() },
      { id: '5', name: 'Typography.json', type: 'token', size: 768, createdAt: new Date().toISOString() }
    ];
    const paginatedAssets = mockAssets.slice(queryOffset, queryOffset + queryLimit);
    res.status(200).json({
      status: 'success',
      code: 'ASSETS_RETRIEVED',
      message: 'Design assets retrieved successfully',
      data: {
        assets: paginatedAssets,
        pagination: {
          total: mockAssets.length,
          limit: queryLimit,
          offset: queryOffset
        }
      }
    });
  } catch (error) {
    next(error);
  }
});
// GET /api/ai/chat/:id - Get chat history and status
router.get('/:id', authenticateToken, async (req, res, next) => {
  try {
    const { id } = req.params;
    // Scoped to the requesting user so sessions cannot be read cross-account.
    const chat = await AIChat.findOne({ where: { id, userId: req.user.id } });
    if (!chat) {
      return res.status(404).json({
        status: 'error',
        code: 'CHAT_NOT_FOUND',
        message: 'Chat session not found',
        data: null
      });
    }
    // Serialize an explicit whitelist of fields (order preserved).
    const fields = [
      'id', 'sessionId', 'messageType', 'status', 'userMessage', 'aiResponse',
      'generatedCode', 'navigationStructure', 'inputTokens', 'outputTokens',
      'model', 'completedAt', 'createdAt'
    ];
    const payload = {};
    for (const field of fields) {
      payload[field] = chat[field];
    }
    res.status(200).json({
      status: 'success',
      code: 'CHAT_RETRIEVED',
      message: 'Chat session retrieved successfully',
      data: { chat: payload }
    });
  } catch (error) {
    next(error);
  }
});
// GET /api/ai/sessions - List user chat sessions
router.get('/sessions/list', authenticateToken, async (req, res, next) => {
  try {
    const { limit = 20, offset = 0 } = req.query;
    // Explicit radix plus a non-negative clamp: a negative or non-numeric
    // offset previously flowed straight into the SQL OFFSET clause.
    const maxLimit = 100;
    const queryLimit = Math.min(Number.parseInt(limit, 10) || 20, maxLimit);
    const queryOffset = Math.max(Number.parseInt(offset, 10) || 0, 0);
    const { count, rows } = await AIChat.findAndCountAll({
      where: { userId: req.user.id },
      limit: queryLimit,
      offset: queryOffset,
      order: [['createdAt', 'DESC']],
      // Keep the listing lightweight: full transcripts come from GET /:id.
      attributes: ['id', 'sessionId', 'messageType', 'status', 'userMessage', 'createdAt']
    });
    res.status(200).json({
      status: 'success',
      code: 'SESSIONS_RETRIEVED',
      message: 'Chat sessions retrieved successfully',
      data: {
        sessions: rows,
        pagination: {
          total: count,
          limit: queryLimit,
          offset: queryOffset
        }
      }
    });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,88 @@
import express from 'express';
import { authenticateToken } from '../../middleware/auth.js';
import ConfigService from '../../services/ConfigService.js';
import Project from '../../models/Project.js';
const router = express.Router();
/**
 * GET /api/config/resolved/:projectId
 * Get fully resolved configuration (System > Project > User)
 */
router.get('/resolved/:projectId', authenticateToken, async (req, res, next) => {
  try {
    const { projectId } = req.params;
    // Verify project access
    const project = await Project.findByPk(projectId);
    if (!project) {
      // Carry the code/data envelope fields used by the rest of the API so
      // clients can branch on `code` uniformly.
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    // Note: We allow any authenticated user to view config if they are on the team
    // For now, simple check if project exists is sufficient for MVP,
    // or strictly check ownership/team membership if RBAC allows.
    const resolved = await ConfigService.resolveConfig(projectId, req.user.id);
    res.json({
      status: 'success',
      data: resolved
    });
  } catch (error) {
    next(error);
  }
});
/**
 * GET /api/config/user
 * Get user-level preferences
 */
router.get('/user', authenticateToken, async (req, res, next) => {
  try {
    // USER-scope settings are keyed by the authenticated user's id.
    const preferences = await ConfigService.getScopeConfig('USER', req.user.id);
    res.json({ status: 'success', data: preferences });
  } catch (error) {
    next(error);
  }
});
/**
 * PUT /api/config/user
 * Update user-level preferences
 *
 * Body: { key: string, value: any }
 */
router.put('/user', authenticateToken, async (req, res, next) => {
  try {
    const { key, value } = req.body;
    // `value === undefined` (not a falsiness check) so false/0/'' stay storable.
    if (!key || value === undefined) {
      // Include code/data for consistency with the rest of the API envelope.
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Key and value are required',
        data: null
      });
    }
    const result = await ConfigService.setConfig({
      scope: 'USER',
      scopeId: req.user.id,
      key,
      value,
      actorId: req.user.id // the user edits their own preferences
    });
    res.json({
      status: 'success',
      message: 'User preference updated',
      data: result
    });
  } catch (error) {
    next(error);
  }
});
export default router;

187
server/src/routes/auth.js Normal file
View File

@@ -0,0 +1,187 @@
import express from 'express';
import jwt from 'jsonwebtoken';
import User from '../models/User.js';
import { authenticateToken } from '../middleware/auth.js';
const router = express.Router();
/**
 * Issue an access/refresh token pair for a user.
 * NOTE(review): the hard-coded fallback secrets are development conveniences;
 * confirm JWT_SECRET / JWT_REFRESH_SECRET are always set in production.
 */
const generateTokens = (user) => {
  const accessPayload = { id: user.id, email: user.email, role: user.role };
  const accessToken = jwt.sign(accessPayload, process.env.JWT_SECRET || 'your-secret-key', {
    expiresIn: process.env.JWT_EXPIRE || '7d'
  });
  // Refresh tokens carry only the id; the profile is re-read on refresh.
  const refreshToken = jwt.sign({ id: user.id }, process.env.JWT_REFRESH_SECRET || 'your-refresh-secret', {
    expiresIn: process.env.JWT_REFRESH_EXPIRE || '30d'
  });
  return { accessToken, refreshToken };
};
// POST /api/auth/register - create an account and issue an initial token pair.
router.post('/register', async (req, res, next) => {
  try {
    const { email, password, name } = req.body;
    // Credentials are mandatory; name is optional.
    if (!email || !password) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Email and password are required',
        data: null
      });
    }
    // Reject duplicate registrations up front.
    const duplicate = await User.findOne({ where: { email } });
    if (duplicate) {
      return res.status(409).json({
        status: 'error',
        code: 'USER_EXISTS',
        message: 'User already exists',
        data: null
      });
    }
    // New accounts always start with the default role.
    const newUser = await User.create({ email, password, name, role: 'designer' });
    res.status(201).json({
      status: 'success',
      code: 'USER_CREATED',
      message: 'User registered successfully',
      data: {
        user: {
          id: newUser.id,
          email: newUser.email,
          name: newUser.name,
          role: newUser.role
        },
        tokens: generateTokens(newUser)
      }
    });
  } catch (error) {
    next(error);
  }
});
// POST /api/auth/login - verify credentials and issue a token pair.
router.post('/login', async (req, res, next) => {
  try {
    const { email, password } = req.body;
    if (!email || !password) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Email and password are required',
        data: null
      });
    }
    const account = await User.findOne({ where: { email } });
    // One combined check: unknown email and wrong password return the same
    // response, so the endpoint does not reveal which one failed.
    const passwordOk = account ? await account.validatePassword(password) : false;
    if (!passwordOk) {
      return res.status(401).json({
        status: 'error',
        code: 'INVALID_CREDENTIALS',
        message: 'Invalid email or password',
        data: null
      });
    }
    await account.update({ lastLogin: new Date() });
    res.status(200).json({
      status: 'success',
      code: 'LOGIN_SUCCESS',
      message: 'Login successful',
      data: {
        user: {
          id: account.id,
          email: account.email,
          name: account.name,
          role: account.role
        },
        tokens: generateTokens(account)
      }
    });
  } catch (error) {
    next(error);
  }
});
// POST /api/auth/refresh - exchange a refresh token for a fresh token pair.
router.post('/refresh', async (req, res, next) => {
  try {
    const { refreshToken } = req.body;
    if (!refreshToken) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Refresh token is required',
        data: null
      });
    }
    const decoded = jwt.verify(
      refreshToken,
      process.env.JWT_REFRESH_SECRET || 'your-refresh-secret'
    );
    const user = await User.findByPk(decoded.id);
    if (!user) {
      return res.status(404).json({
        status: 'error',
        code: 'USER_NOT_FOUND',
        message: 'User not found',
        data: null
      });
    }
    const tokens = generateTokens(user);
    res.status(200).json({
      status: 'success',
      code: 'TOKEN_REFRESHED',
      message: 'Token refreshed successfully',
      data: { tokens }
    });
  } catch (error) {
    if (error.name === 'TokenExpiredError') {
      return res.status(401).json({
        status: 'error',
        code: 'TOKEN_EXPIRED',
        message: 'Refresh token has expired',
        data: null
      });
    }
    // jwt.verify throws JsonWebTokenError for malformed tokens or bad
    // signatures; previously that fell through to next(error) and surfaced
    // as a 500 instead of an auth failure.
    if (error.name === 'JsonWebTokenError') {
      return res.status(401).json({
        status: 'error',
        code: 'INVALID_REFRESH_TOKEN',
        message: 'Refresh token is invalid',
        data: null
      });
    }
    next(error);
  }
});
// GET /api/auth/me - return the authenticated user's profile.
router.get('/me', authenticateToken, async (req, res, next) => {
  try {
    const user = await User.findByPk(req.user.id, {
      attributes: { exclude: ['password'] } // never leak the password hash
    });
    // The token may outlive the account (user deleted after issuance);
    // previously this returned 200 with `user: null`.
    if (!user) {
      return res.status(404).json({
        status: 'error',
        code: 'USER_NOT_FOUND',
        message: 'User not found',
        data: null
      });
    }
    res.status(200).json({
      status: 'success',
      code: 'USER_RETRIEVED',
      message: 'User profile retrieved',
      data: { user }
    });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,176 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Component from '../models/Component.js';
import Project from '../models/Project.js';
const router = express.Router();
// Get all components for a project
router.get('/project/:projectId', authenticateToken, async (req, res, next) => {
  try {
    const { projectId } = req.params;
    // The project must belong to the requesting user.
    const owned = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!owned) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    // Newest first.
    const components = await Component.findAll({
      where: { projectId },
      order: [['createdAt', 'DESC']]
    });
    res.status(200).json({
      status: 'success',
      code: 'COMPONENTS_RETRIEVED',
      message: 'Components retrieved successfully',
      data: { components }
    });
  } catch (error) {
    next(error);
  }
});
// Create component
router.post('/', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, name, description, category, figmaId, storybookPath } = req.body;
    // Validate required fields before touching the database: previously a
    // missing projectId produced a misleading PROJECT_NOT_FOUND after a
    // wasted lookup, and `name` was only checked after the query.
    if (!projectId || !name) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: !projectId ? 'projectId is required' : 'Name is required',
        data: null
      });
    }
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    const component = await Component.create({
      projectId,
      name,
      description,
      category,
      figmaId,
      storybookPath
    });
    res.status(201).json({
      status: 'success',
      code: 'COMPONENT_CREATED',
      message: 'Component created successfully',
      data: { component }
    });
  } catch (error) {
    next(error);
  }
});
// Update component
router.put('/:id', authenticateToken, async (req, res, next) => {
  try {
    const component = await Component.findByPk(req.params.id);
    if (!component) {
      return res.status(404).json({
        status: 'error',
        code: 'COMPONENT_NOT_FOUND',
        message: 'Component not found',
        data: null
      });
    }
    // Ownership check: the component's project must belong to the caller.
    const project = await Project.findOne({
      where: { id: component.projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'You do not have permission to modify this component',
        data: null
      });
    }
    // Apply only fields the client actually sent. The previous `field || old`
    // pattern treated any falsy value as "absent" (e.g. clearing variants or
    // status was silently ignored), inconsistent with the `!== undefined`
    // checks already used for description/adoptionScore.
    const updatable = ['name', 'description', 'category', 'status', 'adoptionScore', 'variants'];
    const changes = {};
    for (const field of updatable) {
      if (req.body[field] !== undefined) {
        changes[field] = req.body[field];
      }
    }
    await component.update(changes);
    res.status(200).json({
      status: 'success',
      code: 'COMPONENT_UPDATED',
      message: 'Component updated successfully',
      data: { component }
    });
  } catch (error) {
    next(error);
  }
});
// Delete component
router.delete('/:id', authenticateToken, async (req, res, next) => {
  try {
    const component = await Component.findByPk(req.params.id);
    if (!component) {
      return res.status(404).json({
        status: 'error',
        code: 'COMPONENT_NOT_FOUND',
        message: 'Component not found',
        data: null
      });
    }
    // Deletion is allowed only for the owner of the enclosing project.
    const ownedProject = await Project.findOne({
      where: { id: component.projectId, userId: req.user.id }
    });
    if (!ownedProject) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'You do not have permission to modify this component',
        data: null
      });
    }
    await component.destroy();
    res.status(200).json({
      status: 'success',
      code: 'COMPONENT_DELETED',
      message: 'Component deleted successfully',
      data: null
    });
  } catch (error) {
    next(error);
  }
});
export default router;

163
server/src/routes/config.js Normal file
View File

@@ -0,0 +1,163 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import ConfigService from '../services/ConfigService.js';
import Project from '../models/Project.js';
const router = express.Router();
/**
 * PHASE 1: Configuration Endpoint
 * Returns application-wide configuration and feature flags
 * No authentication required - public configuration only
 */
// Get application configuration
router.get('/', async (req, res, next) => {
  try {
    // Feature flags are opt-in: only the literal string 'true' enables them.
    const flag = (name) => process.env[name] === 'true';
    const data = {
      app: {
        name: 'Design System Swarm',
        version: '1.0.0',
        environment: process.env.NODE_ENV || 'development'
      },
      features: {
        authentication: { enabled: true, jwtExpiry: 3600 },
        notifications: {
          enabled: flag('NOTIFICATIONS_ENABLED'),
          sse: { enabled: flag('SSE_ENABLED'), reconnectDelay: 5000 }
        },
        logging: { enabled: true, browserLogs: true, serverLogs: true },
        integrations: {
          figma: { enabled: flag('FIGMA_API_ENABLED') },
          jira: { enabled: flag('JIRA_API_ENABLED') },
          storybook: { enabled: flag('STORYBOOK_ENABLED') }
        },
        teams: { enabled: flag('TEAMS_ENABLED') },
        mcp: {
          enabled: flag('MCP_ENABLED'),
          tools: [
            'dss-analyze-project',
            'dss-extract-tokens',
            'dss-generate-theme',
            'dss-audit-components'
          ]
        }
      },
      api: {
        baseUrl: process.env.API_BASE_URL || 'http://localhost:3001',
        timeout: 30000,
        rateLimitWindow: 900000,
        rateLimitMax: 100
      },
      ui: {
        adminUrl: process.env.ADMIN_UI_URL || 'http://localhost:5173',
        locale: 'en-US',
        theme: 'light'
      }
    };
    res.status(200).json({
      status: 'success',
      code: 'CONFIG_RETRIEVED',
      message: 'Configuration retrieved successfully',
      data
    });
  } catch (error) {
    next(error);
  }
});
/**
 * GET /api/config/resolved/:projectId
 * Get fully resolved configuration (System > Project > User)
 */
router.get('/resolved/:projectId', authenticateToken, async (req, res, next) => {
  try {
    const { projectId } = req.params;
    // A missing project short-circuits before resolution is attempted.
    const project = await Project.findByPk(projectId);
    if (!project) {
      return res.status(404).json({ status: 'error', message: 'Project not found' });
    }
    res.json({
      status: 'success',
      data: await ConfigService.resolveConfig(projectId, req.user.id)
    });
  } catch (error) {
    next(error);
  }
});
/**
 * GET /api/config/user
 * Get user-level preferences
 */
router.get('/user', authenticateToken, async (req, res, next) => {
  try {
    // USER-scope settings are keyed by the authenticated user's id.
    const preferences = await ConfigService.getScopeConfig('USER', req.user.id);
    res.json({ status: 'success', data: preferences });
  } catch (error) {
    next(error);
  }
});
/**
 * PUT /api/config/user
 * Update user-level preferences
 *
 * Body: { key: string, value: any }
 */
router.put('/user', authenticateToken, async (req, res, next) => {
  try {
    const { key, value } = req.body;
    // `value === undefined` (not a falsiness check) so false/0/'' stay storable.
    if (!key || value === undefined) {
      // Include code/data for consistency with the rest of the API envelope.
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Key and value are required',
        data: null
      });
    }
    const result = await ConfigService.setConfig({
      scope: 'USER',
      scopeId: req.user.id,
      key,
      value,
      actorId: req.user.id // the user edits their own preferences
    });
    res.json({
      status: 'success',
      message: 'User preference updated',
      data: result
    });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,205 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Discovery from '../models/Discovery.js';
import Project from '../models/Project.js';
import { startWorker } from '../workers/DiscoveryWorker.js';
const router = express.Router();
// Initialize the discovery worker on module load
// NOTE(review): importing this module starts the background worker as a side
// effect — confirm this is intentional wherever the router is imported (e.g.
// in tests that only want the route definitions).
startWorker();
/**
 * PHASE 4: Discovery & Analysis Endpoints
 * 4 endpoints for project discovery, analysis tracking, and statistics
 */
// 1. POST /api/discovery/scan - Start discovery scan
router.post('/scan', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, type = 'project-analysis' } = req.body;
    if (!projectId) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'projectId is required',
        data: null
      });
    }
    // Only the project owner may start a scan against it.
    const owned = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!owned) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    const scan = await Discovery.create({
      projectId,
      userId: req.user.id,
      type,
      status: 'queued'
    });
    // Queue for background processing
    console.log(`[Discovery] Scan queued: ${scan.id} (${type})`);
    // Component audits finish faster than full project analyses.
    const estimatedTime = type === 'component-audit' ? '1-3 minutes' : '2-5 minutes';
    res.status(202).json({
      status: 'success',
      code: 'DISCOVERY_SCAN_STARTED',
      message: 'Discovery scan started',
      data: {
        discoveryId: scan.id,
        status: 'queued',
        estimatedTime
      }
    });
  } catch (error) {
    next(error);
  }
});
// 2. GET /api/discovery/activity - Get discovery activity
router.get('/activity', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, limit = 50, offset = 0 } = req.query;
    // Guard against non-numeric query params: parseInt('abc') is NaN and
    // previously leaked into the SQL LIMIT/OFFSET clauses. Also use an
    // explicit radix and compute each bound exactly once.
    const queryLimit = Math.min(Number.parseInt(limit, 10) || 50, 100);
    const queryOffset = Math.max(Number.parseInt(offset, 10) || 0, 0);
    const where = { userId: req.user.id };
    if (projectId) {
      where.projectId = projectId; // optional narrowing to one project
    }
    const { count, rows } = await Discovery.findAndCountAll({
      where,
      order: [['createdAt', 'DESC']],
      limit: queryLimit,
      offset: queryOffset
    });
    res.status(200).json({
      status: 'success',
      code: 'DISCOVERY_ACTIVITY_RETRIEVED',
      message: 'Discovery activity retrieved',
      data: {
        total: count,
        limit: queryLimit,
        offset: queryOffset,
        activities: rows
      }
    });
  } catch (error) {
    next(error);
  }
});
// 3. GET /api/discovery/stats - Get discovery statistics
router.get('/stats', authenticateToken, async (req, res, next) => {
  try {
    const where = { userId: req.user.id };
    // Shared GROUP BY helper: returns [{ <column>, count }] rows.
    const countBy = (column) =>
      Discovery.findAll({
        where,
        attributes: [
          column,
          [Discovery.sequelize.fn('COUNT', column), 'count']
        ],
        group: [column],
        raw: true
      });
    // Aggregate in SQL instead of loading all records.
    const [total, typeRows, statusRows, recentCompleted] = await Promise.all([
      Discovery.count({ where }),
      countBy('type'),
      countBy('status'),
      Discovery.findOne({
        where: { ...where, status: 'completed' },
        order: [['completedAt', 'DESC']],
        limit: 1
      })
    ]);
    const byType = {};
    for (const row of typeRows) {
      byType[row.type] = parseInt(row.count, 10);
    }
    const byStatus = {};
    for (const row of statusRows) {
      byStatus[row.status] = parseInt(row.count, 10);
    }
    const stats = {
      total,
      byType,
      byStatus,
      recentCompleted,
      totalTime: 0 // Note: Computing totalTime would require loading all records
    };
    res.status(200).json({
      status: 'success',
      code: 'DISCOVERY_STATS_RETRIEVED',
      message: 'Discovery statistics retrieved',
      data: { stats }
    });
  } catch (error) {
    next(error);
  }
});
// 4. GET /api/discovery/:id - Get status of a specific discovery job
router.get('/:id', authenticateToken, async (req, res, next) => {
  try {
    const job = await Discovery.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!job) {
      return res.status(404).json({
        status: 'error',
        code: 'DISCOVERY_NOT_FOUND',
        message: 'Discovery job not found',
        data: null
      });
    }
    // Serialize an explicit whitelist of job fields (order preserved).
    const fields = [
      'id', 'projectId', 'type', 'status', 'progress', 'results',
      'error', 'startedAt', 'completedAt', 'createdAt', 'updatedAt'
    ];
    const data = {};
    for (const field of fields) {
      data[field] = job[field];
    }
    res.status(200).json({
      status: 'success',
      code: 'DISCOVERY_JOB_RETRIEVED',
      message: 'Discovery job retrieved',
      data
    });
  } catch (error) {
    next(error);
  }
});
export default router;

463
server/src/routes/figma.js Normal file
View File

@@ -0,0 +1,463 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import FigmaSync from '../models/FigmaSync.js';
import Project from '../models/Project.js';
const router = express.Router();
/**
 * PHASE 5A: Figma Integration Endpoints
 * 9 endpoints for complete Figma design system integration
 *
 * All eight "queue a sync" endpoints shared an identical flow
 * (validate -> authorize -> create a FigmaSync row -> respond 202), so they
 * are generated from a declarative table instead of nine hand-written copies.
 * Routes, response bodies (including key order) and log lines are unchanged.
 */
// 1. GET /api/figma/health - Check Figma API connectivity
router.get('/health', authenticateToken, async (req, res, next) => {
  try {
    // Mock Figma API health check
    const figmaHealth = {
      status: 'healthy',
      apiVersion: '2.0',
      lastCheck: new Date().toISOString(),
      responseTime: Math.floor(Math.random() * 100) + 50
    };
    res.status(200).json({
      status: 'success',
      code: 'FIGMA_HEALTH_OK',
      message: 'Figma API is healthy',
      data: figmaHealth
    });
  } catch (error) {
    next(error);
  }
});
/**
 * Declarative table of the queue-a-sync endpoints.
 *  - logLabel       human-readable name used in the console log line
 *  - code/message   response envelope fields
 *  - estimatedTime  advisory duration returned to the client
 *  - options(body)  optional; returns { metadata, extraData, logSuffix } for
 *                   endpoints accepting extra parameters (defaults applied here)
 */
const SYNC_ENDPOINTS = [
  {
    path: '/extract-variables', syncType: 'extract-variables',
    logLabel: 'Extract variables', code: 'FIGMA_EXTRACTION_STARTED',
    message: 'Variable extraction started', estimatedTime: '1-2 minutes'
  },
  {
    path: '/extract-components', syncType: 'extract-components',
    logLabel: 'Extract components', code: 'FIGMA_COMPONENT_EXTRACTION_STARTED',
    message: 'Component extraction started', estimatedTime: '3-5 minutes'
  },
  {
    path: '/extract-styles', syncType: 'extract-styles',
    logLabel: 'Extract styles', code: 'FIGMA_STYLE_EXTRACTION_STARTED',
    message: 'Style extraction started', estimatedTime: '2-3 minutes'
  },
  {
    path: '/sync-tokens', syncType: 'sync-tokens',
    logLabel: 'Token sync', code: 'FIGMA_TOKEN_SYNC_STARTED',
    message: 'Token synchronization started', estimatedTime: '2-4 minutes'
  },
  {
    path: '/visual-diff', syncType: 'visual-diff',
    logLabel: 'Visual diff', code: 'FIGMA_DIFF_STARTED',
    message: 'Visual diff comparison started', estimatedTime: '3-5 minutes'
  },
  {
    path: '/validate-components', syncType: 'validate-components',
    logLabel: 'Component validation', code: 'FIGMA_VALIDATION_STARTED',
    message: 'Component validation started', estimatedTime: '2-3 minutes'
  },
  {
    path: '/generate-code', syncType: 'generate-code',
    logLabel: 'Code generation', code: 'FIGMA_CODE_GENERATION_STARTED',
    message: 'Code generation started', estimatedTime: '5-10 minutes',
    options: (body) => {
      const { language = 'jsx' } = body;
      return { metadata: { language }, extraData: { language }, logSuffix: ` (${language})` };
    }
  },
  {
    path: '/export-assets', syncType: 'export-assets',
    logLabel: 'Asset export', code: 'FIGMA_ASSET_EXPORT_STARTED',
    message: 'Asset export started', estimatedTime: '5-15 minutes',
    options: (body) => {
      const { assetTypes = ['svg', 'png'] } = body;
      return { metadata: { assetTypes }, extraData: { assetTypes }, logSuffix: ` (${assetTypes.join(',')})` };
    }
  }
];
// Register one POST handler per table entry (2-9 of the original endpoints).
for (const spec of SYNC_ENDPOINTS) {
  router.post(spec.path, authenticateToken, async (req, res, next) => {
    try {
      const { projectId, figmaFileKey, figmaFileUrl } = req.body;
      if (!projectId || !figmaFileKey) {
        return res.status(400).json({
          status: 'error',
          code: 'VALIDATION_ERROR',
          message: 'projectId and figmaFileKey are required',
          data: null
        });
      }
      // Only the project owner may queue work against it.
      const project = await Project.findOne({
        where: { id: projectId, userId: req.user.id }
      });
      if (!project) {
        return res.status(404).json({
          status: 'error',
          code: 'PROJECT_NOT_FOUND',
          message: 'Project not found',
          data: null
        });
      }
      const { metadata, extraData = {}, logSuffix = '' } =
        spec.options ? spec.options(req.body) : {};
      const sync = await FigmaSync.create({
        projectId,
        userId: req.user.id,
        figmaFileKey,
        // Reconstruct a canonical URL when the client only sent the key.
        figmaFileUrl: figmaFileUrl || `https://www.figma.com/file/${figmaFileKey}`,
        syncType: spec.syncType,
        status: 'queued',
        // Only endpoints with extra params persisted a metadata column.
        ...(metadata !== undefined ? { metadata } : {})
      });
      console.log(`[Figma] ${spec.logLabel} queued: ${sync.id}${logSuffix}`);
      res.status(202).json({
        status: 'success',
        code: spec.code,
        message: spec.message,
        data: {
          syncId: sync.id,
          status: 'queued',
          ...extraData,
          estimatedTime: spec.estimatedTime
        }
      });
    } catch (error) {
      next(error);
    }
  });
}
export default router;

175
server/src/routes/icons.js Normal file
View File

@@ -0,0 +1,175 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Icon from '../models/Icon.js';
import Project from '../models/Project.js';
const router = express.Router();
// Get all icons for a project
router.get('/project/:projectId', authenticateToken, async (req, res, next) => {
  try {
    const { projectId } = req.params;
    // The project must belong to the requesting user.
    const owned = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!owned) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    // Newest first.
    const icons = await Icon.findAll({
      where: { projectId },
      order: [['createdAt', 'DESC']]
    });
    res.status(200).json({
      status: 'success',
      code: 'ICONS_RETRIEVED',
      message: 'Icons retrieved successfully',
      data: { icons }
    });
  } catch (error) {
    next(error);
  }
});
// Create icon
router.post('/', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, name, category, svgData, sizes, tags } = req.body;
    // Validate required fields before hitting the database: previously a
    // missing projectId surfaced as a misleading PROJECT_NOT_FOUND after a
    // wasted lookup, and name/svgData were only checked after the query.
    if (!projectId || !name || !svgData) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: !projectId ? 'projectId is required' : 'Name and svgData are required',
        data: null
      });
    }
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    const icon = await Icon.create({
      projectId,
      name,
      category,
      svgData,
      sizes: sizes || ['16', '24', '32', '48'], // default export sizes
      tags: tags || []
    });
    res.status(201).json({
      status: 'success',
      code: 'ICON_CREATED',
      message: 'Icon created successfully',
      data: { icon }
    });
  } catch (error) {
    next(error);
  }
});
// Update icon
router.put('/:id', authenticateToken, async (req, res, next) => {
  try {
    const icon = await Icon.findByPk(req.params.id);
    if (!icon) {
      return res.status(404).json({
        status: 'error',
        code: 'ICON_NOT_FOUND',
        message: 'Icon not found',
        data: null
      });
    }
    // Ownership check: the icon's project must belong to the caller.
    const project = await Project.findOne({
      where: { id: icon.projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'You do not have permission to modify this icon',
        data: null
      });
    }
    // Apply only fields present in the request. The previous `field || old`
    // pattern treated any falsy value as "absent", so e.g. category could
    // never be cleared with null/'' — distinguish absent from falsy instead.
    const updatable = ['name', 'category', 'svgData', 'sizes', 'tags'];
    const changes = {};
    for (const field of updatable) {
      if (req.body[field] !== undefined) {
        changes[field] = req.body[field];
      }
    }
    await icon.update(changes);
    res.status(200).json({
      status: 'success',
      code: 'ICON_UPDATED',
      message: 'Icon updated successfully',
      data: { icon }
    });
  } catch (error) {
    next(error);
  }
});
// Delete icon
router.delete('/:id', authenticateToken, async (req, res, next) => {
  try {
    const icon = await Icon.findByPk(req.params.id);
    if (!icon) {
      return res.status(404).json({
        status: 'error',
        code: 'ICON_NOT_FOUND',
        message: 'Icon not found',
        data: null
      });
    }
    // Deletion is allowed only for the owner of the enclosing project.
    const ownedProject = await Project.findOne({
      where: { id: icon.projectId, userId: req.user.id }
    });
    if (!ownedProject) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'You do not have permission to modify this icon',
        data: null
      });
    }
    await icon.destroy();
    res.status(200).json({
      status: 'success',
      code: 'ICON_DELETED',
      message: 'Icon deleted successfully',
      data: null
    });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,170 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Integration from '../models/Integration.js';
import Project from '../models/Project.js';
const router = express.Router();
// Get all integrations for a project owned by the requester.
router.get('/project/:projectId', authenticateToken, async (req, res, next) => {
  try {
    const { projectId } = req.params;
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }

    const integrations = await Integration.findAll({ where: { projectId } });
    res.status(200).json({
      status: 'success',
      code: 'INTEGRATIONS_RETRIEVED',
      message: 'Integrations retrieved successfully',
      data: { integrations }
    });
  } catch (error) {
    next(error);
  }
});
// Create an integration for a project the requester owns.
// Input is validated BEFORE the project lookup (the original queried the
// database first), matching the validate-first pattern used by the
// projects and qa routes and avoiding a wasted query on malformed input.
router.post('/', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, type, name, config } = req.body;
    if (!type || !name || !config) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Type, name, and config are required',
        data: null
      });
    }

    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }

    const integration = await Integration.create({ projectId, type, name, config });
    res.status(201).json({
      status: 'success',
      code: 'INTEGRATION_CREATED',
      message: 'Integration created successfully',
      data: { integration }
    });
  } catch (error) {
    next(error);
  }
});
// Update an integration (project owner only). `name`/`config` fall back to
// the stored values when falsy; `isActive` is only replaced when the key is
// explicitly present, so `false` is honored.
router.put('/:id', authenticateToken, async (req, res, next) => {
  const fail = (http, code, message) =>
    res.status(http).json({ status: 'error', code, message, data: null });
  try {
    const integration = await Integration.findByPk(req.params.id);
    if (!integration) return fail(404, 'INTEGRATION_NOT_FOUND', 'Integration not found');

    const ownedProject = await Project.findOne({
      where: { id: integration.projectId, userId: req.user.id }
    });
    if (!ownedProject) {
      return fail(403, 'FORBIDDEN', 'You do not have permission to modify this integration');
    }

    const { name, config, isActive } = req.body;
    await integration.update({
      name: name || integration.name,
      config: config || integration.config,
      isActive: isActive !== undefined ? isActive : integration.isActive
    });

    res.status(200).json({
      status: 'success',
      code: 'INTEGRATION_UPDATED',
      message: 'Integration updated successfully',
      data: { integration }
    });
  } catch (error) {
    next(error);
  }
});
// Delete an integration (project owner only).
router.delete('/:id', authenticateToken, async (req, res, next) => {
  const fail = (http, code, message) =>
    res.status(http).json({ status: 'error', code, message, data: null });
  try {
    const integration = await Integration.findByPk(req.params.id);
    if (!integration) return fail(404, 'INTEGRATION_NOT_FOUND', 'Integration not found');

    const ownedProject = await Project.findOne({
      where: { id: integration.projectId, userId: req.user.id }
    });
    if (!ownedProject) {
      return fail(403, 'FORBIDDEN', 'You do not have permission to modify this integration');
    }

    await integration.destroy();
    res.status(200).json({
      status: 'success',
      code: 'INTEGRATION_DELETED',
      message: 'Integration deleted successfully',
      data: null
    });
  } catch (error) {
    next(error);
  }
});
export default router;

182
server/src/routes/logs.js Normal file
View File

@@ -0,0 +1,182 @@
import express from 'express';
import { authenticateToken, optionalAuth } from '../middleware/auth.js';
import Log from '../models/Log.js';
const router = express.Router();
/**
* PHASE 1: Logging Endpoints
* Handles server and browser log ingestion for debugging and monitoring
*/
// Post server logs. Auth is optional: anonymous logs are stored with a
// null userId.
router.post('/', optionalAuth, async (req, res, next) => {
  const badRequest = (message) =>
    res.status(400).json({ status: 'error', code: 'VALIDATION_ERROR', message, data: null });
  try {
    const { level = 'log', message, context } = req.body;
    if (!message) return badRequest('Log message is required');

    const validLevels = ['log', 'warn', 'error', 'info', 'debug'];
    if (!validLevels.includes(level)) {
      return badRequest(`Invalid log level. Must be one of: ${validLevels.join(', ')}`);
    }

    const log = await Log.create({
      userId: req.user?.id || null,
      level,
      source: 'server',
      message,
      context: context || {},
      timestamp: new Date()
    });

    res.status(201).json({
      status: 'success',
      code: 'LOG_CREATED',
      message: 'Server log recorded successfully',
      data: { logId: log.id, timestamp: log.timestamp }
    });
  } catch (error) {
    next(error);
  }
});
// Post browser logs. Accepts either a batch array ({ logs: [...] }) from the
// console-forwarder or a single log object (legacy shape).
// Improvements over the original: `validLevels` is declared once instead of
// twice, and batch inserts run in parallel via Promise.all instead of one
// awaited Log.create per loop iteration.
router.post('/browser', optionalAuth, async (req, res, next) => {
  try {
    const validLevels = ['log', 'warn', 'error', 'info', 'debug'];
    const { logs } = req.body;

    // Batch path: silently drop entries with no message or an invalid level
    // (same skip semantics as the original loop), then insert in parallel.
    if (logs && Array.isArray(logs)) {
      const entries = logs.filter((entry) => {
        const level = entry.level === undefined ? 'log' : entry.level;
        return Boolean(entry.message) && validLevels.includes(level);
      });
      const created = await Promise.all(
        entries.map(({ level = 'log', message, data, timestamp }) =>
          Log.create({
            userId: req.user?.id || null,
            level,
            source: 'browser',
            message,
            context: { data: data || [], timestamp },
            userAgent: req.headers['user-agent'],
            url: req.headers.referer || req.headers.origin,
            timestamp: timestamp ? new Date(timestamp) : new Date()
          })
        )
      );
      const createdLogs = created.map((log) => log.id);
      return res.status(201).json({
        status: 'success',
        code: 'BROWSER_LOGS_CREATED',
        message: `${createdLogs.length} browser logs recorded successfully`,
        data: { logIds: createdLogs, count: createdLogs.length }
      });
    }

    // Fallback: single log object for backward compatibility.
    const { level = 'log', message, context, userAgent, url } = req.body;
    if (!message) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Log message is required',
        data: null
      });
    }
    if (!validLevels.includes(level)) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: `Invalid log level. Must be one of: ${validLevels.join(', ')}`,
        data: null
      });
    }

    const log = await Log.create({
      userId: req.user?.id || null,
      level,
      source: 'browser',
      message,
      context: context || {},
      userAgent,
      url,
      timestamp: new Date()
    });

    res.status(201).json({
      status: 'success',
      code: 'BROWSER_LOG_CREATED',
      message: 'Browser log recorded successfully',
      data: { logId: log.id, timestamp: log.timestamp }
    });
  } catch (error) {
    next(error);
  }
});
// Get logs (admin/debugging endpoint) with optional level/source filters.
// Fixes over the original: parseInt now gets an explicit radix, a
// non-numeric limit/offset falls back to the default instead of producing
// NaN (which would break the SQL LIMIT clause), and the capped limit is
// computed once rather than twice.
router.get('/', authenticateToken, async (req, res, next) => {
  try {
    const { level, source, limit = 100, offset = 0 } = req.query;
    const queryLimit = Math.min(Number.parseInt(limit, 10) || 100, 500);
    const queryOffset = Number.parseInt(offset, 10) || 0;

    const where = {};
    if (level) where.level = level;
    if (source) where.source = source;

    const { count, rows } = await Log.findAndCountAll({
      where,
      limit: queryLimit,
      offset: queryOffset,
      order: [['timestamp', 'DESC']]
    });

    res.status(200).json({
      status: 'success',
      code: 'LOGS_RETRIEVED',
      message: 'Logs retrieved successfully',
      data: {
        total: count,
        limit: queryLimit,
        offset: queryOffset,
        logs: rows
      }
    });
  } catch (error) {
    next(error);
  }
});
export default router;

276
server/src/routes/mcp.js Normal file
View File

@@ -0,0 +1,276 @@
import express from 'express';
import { authenticateToken, optionalAuth } from '../middleware/auth.js';
import Project from '../models/Project.js';
const router = express.Router();
/**
* PHASE 2: MCP Tools Integration
* Provides API endpoints for Model Context Protocol tool execution
* Tools include: project analysis, token extraction, theme generation, component auditing
*/
// List available MCP tools with their parameter documentation.
router.get('/tools', optionalAuth, async (req, res, next) => {
  try {
    // Small factory keeps the catalog declarative and uniform.
    const tool = (name, description, params) => ({ name, description, params });
    const tools = [
      tool('dss-analyze-project', 'Analyze codebase for design patterns and component usage', {
        projectPath: 'string (required) - Path to project directory',
        depth: 'number (optional) - Analysis depth (1-3)'
      }),
      tool('dss-extract-tokens', 'Extract design tokens from CSS, SCSS, Tailwind, or JSON sources', {
        path: 'string (required) - Path to token source files',
        sources: 'array (optional) - Source types: css, scss, tailwind, json'
      }),
      tool('dss-generate-theme', 'Generate theme files from design tokens using style-dictionary', {
        tokens: 'object (required) - Design tokens configuration',
        format: 'string (required) - Output format: css, scss, json, js',
        themeName: 'string (optional) - Theme name for generation'
      }),
      tool('dss-audit-components', 'Audit React components for design system adoption', {
        path: 'string (required) - Path to component directory',
        strict: 'boolean (optional) - Enable strict validation'
      }),
      tool('dss-sync-figma', 'Synchronize design tokens from Figma', {
        fileKey: 'string (required) - Figma file key from URL',
        figmaToken: 'string (required) - Figma API token'
      })
    ];

    res.status(200).json({
      status: 'success',
      code: 'MCP_TOOLS_LIST',
      message: 'Available MCP tools',
      data: { tools }
    });
  } catch (error) {
    next(error);
  }
});
// Execute a specific MCP tool (queued; actual execution is handled by the
// DSS Claude Plugin). Improvements: the unused `params` local is removed
// and tool-name membership uses a Set.
router.post('/tools/:name/execute', authenticateToken, async (req, res, next) => {
  try {
    const { name } = req.params;
    const validTools = new Set([
      'dss-analyze-project',
      'dss-extract-tokens',
      'dss-generate-theme',
      'dss-audit-components',
      'dss-sync-figma'
    ]);
    if (!validTools.has(name)) {
      return res.status(404).json({
        status: 'error',
        code: 'TOOL_NOT_FOUND',
        message: `Tool '${name}' not found`,
        data: null
      });
    }

    // Log execution attempt for traceability.
    console.log(`[MCP] Executing tool: ${name} for user: ${req.user.id}`);

    // Queued response; results arrive via the /callback webhook.
    const result = {
      toolName: name,
      status: 'pending',
      executedAt: new Date().toISOString(),
      estimatedDuration: '30-60 seconds',
      message: `Tool '${name}' execution queued. Check project discovery tab for results.`,
      webhookUrl: `${process.env.API_BASE_URL || 'http://localhost:3001'}/api/mcp/callback`
    };

    res.status(202).json({
      status: 'success',
      code: 'TOOL_EXECUTION_QUEUED',
      message: 'Tool execution queued',
      data: result
    });
  } catch (error) {
    next(error);
  }
});
// Discover project using MCP analysis. Requires either a projectId (owned
// by the requester) or an explicit path.
// Improvement: the original built a `discoveryRequest` object that was
// never used anywhere — dead code removed; the response is unchanged.
router.post('/discover_project', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, path } = req.body;
    if (!projectId && !path) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Either projectId or path is required',
        data: null
      });
    }

    // When projectId is supplied, verify it exists and belongs to the caller.
    if (projectId) {
      const project = await Project.findOne({
        where: { id: projectId, userId: req.user.id }
      });
      if (!project) {
        return res.status(404).json({
          status: 'error',
          code: 'PROJECT_NOT_FOUND',
          message: 'Project not found',
          data: null
        });
      }
    }

    // Queue for DSS Claude Plugin to process.
    console.log(`[MCP] Discovery requested for project: ${projectId || path}`);

    res.status(202).json({
      status: 'success',
      code: 'DISCOVERY_QUEUED',
      message: 'Project discovery analysis queued',
      data: {
        discoveryId: `discovery_${Date.now()}`,
        estimatedTime: '2-5 minutes',
        pollEndpoint: `/api/discovery/activity`
      }
    });
  } catch (error) {
    next(error);
  }
});
// Get quick wins for a project. Requires either a projectId (owned by the
// requester) or an explicit path.
// Improvement: the original built a `quickWinsRequest` object that was
// never used anywhere — dead code removed; the response is unchanged.
router.post('/get_quick_wins', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, path } = req.body;
    if (!projectId && !path) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Either projectId or path is required',
        data: null
      });
    }

    // When projectId is supplied, verify it exists and belongs to the caller.
    if (projectId) {
      const project = await Project.findOne({
        where: { id: projectId, userId: req.user.id }
      });
      if (!project) {
        return res.status(404).json({
          status: 'error',
          code: 'PROJECT_NOT_FOUND',
          message: 'Project not found',
          data: null
        });
      }
    }

    console.log(`[MCP] Quick wins analysis requested for project: ${projectId || path}`);

    res.status(202).json({
      status: 'success',
      code: 'QUICK_WINS_ANALYSIS_QUEUED',
      message: 'Quick wins analysis queued',
      data: {
        analysisId: `quickwins_${Date.now()}`,
        expectedWins: [
          'Component centralization',
          'Token standardization',
          'Color palette reduction',
          'Typography consolidation'
        ],
        estimatedTime: '1-3 minutes',
        pollEndpoint: `/api/discovery/stats`
      }
    });
  } catch (error) {
    next(error);
  }
});
// Webhook callback for tool execution results.
// NOTE(review): this route is unauthenticated — presumably so external tool
// runners can post results. Confirm that trust model is intentional.
router.post('/callback', express.json(), async (req, res, next) => {
  try {
    const { toolName, status, data, projectId } = req.body;
    console.log(`[MCP] Tool callback received: ${toolName} - ${status}`);

    if (projectId) {
      const project = await Project.findByPk(projectId);
      // Persist discovery results onto the project's settings blob when the
      // analysis tool reported data.
      const hasAnalysis = project && toolName === 'dss-analyze-project' && Boolean(data);
      if (hasAnalysis) {
        await project.update({
          settings: {
            ...project.settings,
            lastDiscovery: {
              timestamp: new Date().toISOString(),
              analysis: data
            }
          }
        });
      }
    }

    res.status(200).json({
      status: 'success',
      code: 'CALLBACK_ACKNOWLEDGED',
      message: 'Tool callback processed',
      data: null
    });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,106 @@
import express from 'express';
import { authenticateToken, optionalAuth } from '../middleware/auth.js';
import notificationService from '../services/NotificationService.js';
const router = express.Router();
/**
* PHASE 2: Notifications Endpoint (SSE)
* Provides real-time event streaming for client connections
*/
// Subscribe to events via Server-Sent Events (SSE).
// BUG FIX: the original called res.status(503).json() on subscribe failure
// AFTER res.write() had already flushed the headers, which throws
// ERR_HTTP_HEADERS_SENT at runtime. The failure path now emits an
// SSE-framed error event and closes the stream instead.
router.get('/events', optionalAuth, (req, res) => {
  // optionalAuth lets us return a structured JSON 401 rather than having
  // the auth middleware reject the request outright.
  if (!req.user) {
    return res.status(401).json({
      status: 'error',
      code: 'UNAUTHORIZED',
      message: 'Authentication required for notifications',
      data: null
    });
  }
  const userId = req.user.id;

  // Standard SSE headers; X-Accel-Buffering disables proxy buffering.
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader('X-Accel-Buffering', 'no');

  // Initial confirmation frame — this flushes the response headers.
  res.write(`data: ${JSON.stringify({
    type: 'connection:established',
    timestamp: new Date().toISOString(),
    data: { userId, message: 'Connected to notification stream' }
  })}\n\n`);

  const success = notificationService.subscribe(userId, res);
  if (!success) {
    // Headers are already sent; report the failure inside the stream.
    res.write(`data: ${JSON.stringify({
      type: 'error',
      timestamp: new Date().toISOString(),
      data: {
        code: 'NOTIFICATION_SERVICE_ERROR',
        message: 'Unable to subscribe to notifications'
      }
    })}\n\n`);
    return res.end();
  }

  req.on('close', () => {
    console.log(`[Notifications] User ${userId} disconnected from SSE`);
  });
});
// Get notification statistics (admin-only).
router.get('/stats', authenticateToken, (req, res) => {
  const isAdmin = req.user.role === 'admin';
  if (!isAdmin) {
    return res.status(403).json({
      status: 'error',
      code: 'FORBIDDEN',
      message: 'Admin access required',
      data: null
    });
  }
  res.status(200).json({
    status: 'success',
    code: 'NOTIFICATION_STATS',
    message: 'Notification service statistics',
    data: notificationService.getStats()
  });
});
// Send a test notification back to the requesting user.
router.post('/test', authenticateToken, (req, res) => {
  try {
    const { message = 'Test notification' } = req.body;
    const userId = req.user.id;
    notificationService.broadcast(userId, 'test:message', {
      message,
      userId,
      sentAt: new Date().toISOString()
    });
    res.status(200).json({
      status: 'success',
      code: 'TEST_NOTIFICATION_SENT',
      message: 'Test notification sent',
      data: { timestamp: new Date().toISOString() }
    });
  } catch (error) {
    // Synchronous broadcast failures are reported directly (no next()).
    res.status(500).json({
      status: 'error',
      code: 'NOTIFICATION_SEND_ERROR',
      message: error.message,
      data: null
    });
  }
});
export default router;

View File

@@ -0,0 +1,245 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Project from '../models/Project.js';
import User from '../models/User.js';
const router = express.Router();
// List all projects owned by the authenticated user. The (potentially
// large) `settings` blob is excluded from the listing payload.
router.get('/', authenticateToken, async (req, res, next) => {
  try {
    const projects = await Project.findAll({
      where: { userId: req.user.id },
      attributes: { exclude: ['settings'] }
    });
    res.status(200).json({
      status: 'success',
      code: 'PROJECTS_RETRIEVED',
      message: 'Projects retrieved successfully',
      data: { projects }
    });
  } catch (error) {
    next(error);
  }
});
// Fetch a single project owned by the requester.
router.get('/:id', authenticateToken, async (req, res, next) => {
  try {
    const project = await Project.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    res.status(200).json({
      status: 'success',
      code: 'PROJECT_RETRIEVED',
      message: 'Project retrieved successfully',
      data: { project }
    });
  } catch (error) {
    next(error);
  }
});
// Create a project. `name` and `key` are mandatory; `key` must be unique
// (a duplicate is reported as 409 rather than a generic 500).
router.post('/', authenticateToken, async (req, res, next) => {
  try {
    const { name, description, key, figmaFileKey, jiraProjectKey, storybookUrl } = req.body;
    const missingRequired = !name || !key;
    if (missingRequired) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'Name and key are required',
        data: null
      });
    }

    const project = await Project.create({
      name,
      description,
      key,
      userId: req.user.id,
      figmaFileKey,
      jiraProjectKey,
      storybookUrl
    });

    res.status(201).json({
      status: 'success',
      code: 'PROJECT_CREATED',
      message: 'Project created successfully',
      data: { project }
    });
  } catch (error) {
    // Unique-key violation on `key` maps to a conflict response.
    if (error.name === 'SequelizeUniqueConstraintError') {
      return res.status(409).json({
        status: 'error',
        code: 'DUPLICATE_KEY',
        message: 'Project key already exists',
        data: null
      });
    }
    next(error);
  }
});
// Update a project owned by the requester.
// Field semantics (kept from the original): `name`/`status`/`settings` fall
// back on any falsy value, while the nullable link fields are replaced
// whenever the key is present — so clients can explicitly clear them.
router.put('/:id', authenticateToken, async (req, res, next) => {
  try {
    const project = await Project.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }

    const body = req.body;
    const orCurrent = (incoming, current) =>
      incoming !== undefined ? incoming : current;

    await project.update({
      name: body.name || project.name,
      description: orCurrent(body.description, project.description),
      figmaFileKey: orCurrent(body.figmaFileKey, project.figmaFileKey),
      jiraProjectKey: orCurrent(body.jiraProjectKey, project.jiraProjectKey),
      storybookUrl: orCurrent(body.storybookUrl, project.storybookUrl),
      status: body.status || project.status,
      settings: body.settings || project.settings
    });

    res.status(200).json({
      status: 'success',
      code: 'PROJECT_UPDATED',
      message: 'Project updated successfully',
      data: { project }
    });
  } catch (error) {
    next(error);
  }
});
// Delete a project owned by the requester.
router.delete('/:id', authenticateToken, async (req, res, next) => {
  try {
    const project = await Project.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    await project.destroy();
    res.status(200).json({
      status: 'success',
      code: 'PROJECT_DELETED',
      message: 'Project deleted successfully',
      data: null
    });
  } catch (error) {
    next(error);
  }
});
// Save ESRE (style requirements) for a project. The content is stored
// under the project's `settings.esre` key, preserving other settings.
router.post('/:id/esre', authenticateToken, async (req, res, next) => {
  try {
    const project = await Project.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }

    const { content } = req.body;
    if (!content) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'ESRE content is required',
        data: null
      });
    }

    const mergedSettings = { ...project.settings, esre: content };
    await project.update({ settings: mergedSettings });

    res.status(200).json({
      status: 'success',
      code: 'ESRE_SAVED',
      message: 'ESRE saved successfully',
      data: {
        success: true,
        savedAt: new Date().toISOString()
      }
    });
  } catch (error) {
    next(error);
  }
});
// Get ESRE (style requirements) for a project; empty string when unset.
router.get('/:id/esre', authenticateToken, async (req, res, next) => {
  try {
    const project = await Project.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }
    const content = project.settings?.esre || '';
    res.status(200).json({
      status: 'success',
      code: 'ESRE_RETRIEVED',
      message: 'ESRE retrieved successfully',
      data: { content }
    });
  } catch (error) {
    next(error);
  }
});
export default router;

252
server/src/routes/qa.js Normal file
View File

@@ -0,0 +1,252 @@
import express from 'express';
import Project from '../models/Project.js';
import QATest from '../models/QATest.js';
import { authenticateToken } from '../middleware/auth.js';
const router = express.Router();
// 1. POST /api/qa/screenshot-compare - Visual regression testing.
// Queues a pixel-perfect comparison between a baseline and a current set
// of screenshots for a project owned by the requester.
router.post('/screenshot-compare', authenticateToken, async (req, res, next) => {
  const fail = (http, code, message) =>
    res.status(http).json({ status: 'error', code, message, data: null });
  try {
    const { projectId, baselinePath, currentPath } = req.body;
    if (!projectId || !baselinePath || !currentPath) {
      return fail(400, 'VALIDATION_ERROR', 'projectId, baselinePath, and currentPath are required');
    }

    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) return fail(404, 'PROJECT_NOT_FOUND', 'Project not found');

    const now = new Date().toISOString();
    const screenshotSet = (path) => ({ path, count: 0, uploadedAt: now });
    const qaTest = await QATest.create({
      projectId,
      userId: req.user.id,
      testType: 'screenshot-compare',
      status: 'queued',
      baselineScreenshots: screenshotSet(baselinePath),
      currentScreenshots: screenshotSet(currentPath),
      metadata: {
        comparisonMode: 'pixel-perfect',
        threshold: 0.01,
        ignoreAntialiasing: false
      }
    });

    console.log(`[QA] Screenshot comparison queued: ${qaTest.id}`);
    res.status(202).json({
      status: 'success',
      code: 'SCREENSHOT_COMPARISON_STARTED',
      message: 'Screenshot comparison started',
      data: {
        testId: qaTest.id,
        status: 'queued',
        estimatedTime: '2-5 minutes'
      }
    });
  } catch (error) {
    next(error);
  }
});
// 2. POST /api/test/run - Execute a test suite for an owned project.
router.post('/run', authenticateToken, async (req, res, next) => {
  const fail = (http, code, message) =>
    res.status(http).json({ status: 'error', code, message, data: null });
  try {
    const { projectId, testSuite, filters } = req.body;
    if (!projectId || !testSuite) {
      return fail(400, 'VALIDATION_ERROR', 'projectId and testSuite are required');
    }

    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) return fail(404, 'PROJECT_NOT_FOUND', 'Project not found');

    const validSuites = [
      'unit',
      'integration',
      'e2e',
      'component',
      'accessibility',
      'performance'
    ];
    const suite = testSuite.toLowerCase();
    if (!validSuites.includes(suite)) {
      return fail(400, 'INVALID_TEST_SUITE', `Test suite must be one of: ${validSuites.join(', ')}`);
    }

    const qaTest = await QATest.create({
      projectId,
      userId: req.user.id,
      testType: 'test-run',
      status: 'queued',
      testSuite: suite,
      passedTests: 0,
      failedTests: 0,
      skippedTests: 0,
      metadata: {
        filters: filters || {},
        environment: process.env.NODE_ENV || 'development',
        runner: 'jest',
        parallel: true
      }
    });

    console.log(`[QA] Test run queued: ${qaTest.id} (${testSuite})`);
    res.status(202).json({
      status: 'success',
      code: 'TEST_RUN_STARTED',
      message: 'Test suite execution started',
      data: {
        testId: qaTest.id,
        status: 'queued',
        testSuite: suite,
        estimatedTime: '1-10 minutes depending on suite'
      }
    });
  } catch (error) {
    next(error);
  }
});
// GET /api/qa/:id - Get QA test status and results (requester's own tests
// only; the userId is part of the lookup).
router.get('/:id', authenticateToken, async (req, res, next) => {
  try {
    const qaTest = await QATest.findOne({
      where: { id: req.params.id, userId: req.user.id }
    });
    if (!qaTest) {
      return res.status(404).json({
        status: 'error',
        code: 'TEST_NOT_FOUND',
        message: 'QA test not found',
        data: null
      });
    }

    // Project a stable summary shape rather than the full model instance.
    const test = {
      id: qaTest.id,
      testType: qaTest.testType,
      status: qaTest.status,
      testSuite: qaTest.testSuite,
      passedTests: qaTest.passedTests,
      failedTests: qaTest.failedTests,
      skippedTests: qaTest.skippedTests,
      totalDuration: qaTest.totalDuration,
      diffPercentage: qaTest.diffPercentage,
      error: qaTest.error,
      completedAt: qaTest.completedAt,
      createdAt: qaTest.createdAt
    };

    res.status(200).json({
      status: 'success',
      code: 'TEST_RETRIEVED',
      message: 'QA test retrieved successfully',
      data: { test }
    });
  } catch (error) {
    next(error);
  }
});
// GET /api/qa/project/:projectId/list - Paginated QA tests for an owned
// project (limit capped at 100; malformed numbers fall back to defaults).
router.get('/project/:projectId/list', authenticateToken, async (req, res, next) => {
  try {
    const { limit = 20, offset = 0 } = req.query;
    const queryLimit = Math.min(parseInt(limit) || 20, 100);
    const queryOffset = parseInt(offset) || 0;

    const project = await Project.findOne({
      where: { id: req.params.projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({
        status: 'error',
        code: 'PROJECT_NOT_FOUND',
        message: 'Project not found',
        data: null
      });
    }

    const { count, rows } = await QATest.findAndCountAll({
      where: { projectId: req.params.projectId },
      limit: queryLimit,
      offset: queryOffset,
      order: [['createdAt', 'DESC']]
    });

    const summarize = (test) => ({
      id: test.id,
      testType: test.testType,
      status: test.status,
      testSuite: test.testSuite,
      passedTests: test.passedTests,
      failedTests: test.failedTests,
      createdAt: test.createdAt
    });

    res.status(200).json({
      status: 'success',
      code: 'QA_TESTS_RETRIEVED',
      message: 'QA tests retrieved successfully',
      data: {
        tests: rows.map(summarize),
        pagination: {
          total: count,
          limit: queryLimit,
          offset: queryOffset
        }
      }
    });
  } catch (error) {
    next(error);
  }
});
export default router;

466
server/src/routes/teams.js Normal file
View File

@@ -0,0 +1,466 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Team from '../models/Team.js';
import TeamMember from '../models/TeamMember.js';
import TeamSettings from '../models/TeamSettings.js';
import User from '../models/User.js';
import sequelize from '../config/database.js';
const router = express.Router();
/**
* PHASE 3: Team Management Endpoints
* 12 endpoints for complete team lifecycle management
*/
// 1. GET /api/teams - List teams where the requester is an active member
// (the TeamMember include both filters and exposes the caller's role).
router.get('/', authenticateToken, async (req, res, next) => {
  try {
    const membershipFilter = {
      model: TeamMember,
      where: { userId: req.user.id, isActive: true },
      attributes: ['role']
    };
    const teams = await Team.findAll({
      include: [membershipFilter],
      order: [['createdAt', 'DESC']]
    });
    res.status(200).json({
      status: 'success',
      code: 'TEAMS_RETRIEVED',
      message: 'Teams retrieved successfully',
      data: { teams }
    });
  } catch (error) {
    next(error);
  }
});
// 2. POST /api/teams - Create a team; the creator becomes its admin and
// default settings are provisioned, all inside a single transaction.
// BUG FIX: the original opened the transaction BEFORE validating the body,
// and the 400 path returned without commit or rollback — leaking the
// transaction and its pooled connection. Validation now happens first.
router.post('/', authenticateToken, async (req, res, next) => {
  const { name, description } = req.body;
  if (!name) {
    return res.status(400).json({
      status: 'error',
      code: 'VALIDATION_ERROR',
      message: 'Team name is required',
      data: null
    });
  }

  const transaction = await sequelize.transaction();
  try {
    const team = await Team.create({
      name,
      description,
      ownerId: req.user.id
    }, { transaction });

    // Creator is automatically the first admin member.
    await TeamMember.create({
      teamId: team.id,
      userId: req.user.id,
      role: 'admin'
    }, { transaction });

    // Provision default settings for the new team.
    await TeamSettings.create({
      teamId: team.id
    }, { transaction });

    await transaction.commit();
    res.status(201).json({
      status: 'success',
      code: 'TEAM_CREATED',
      message: 'Team created successfully',
      data: { team }
    });
  } catch (error) {
    await transaction.rollback();
    next(error);
  }
});
// 3. GET /api/teams/:id - Team details, visible only to active members.
router.get('/:id', authenticateToken, async (req, res, next) => {
  try {
    const membershipFilter = {
      model: TeamMember,
      where: { userId: req.user.id, isActive: true },
      attributes: ['role']
    };
    const team = await Team.findOne({
      where: { id: req.params.id },
      include: [membershipFilter]
    });
    if (!team) {
      return res.status(404).json({
        status: 'error',
        code: 'TEAM_NOT_FOUND',
        message: 'Team not found',
        data: null
      });
    }
    res.status(200).json({
      status: 'success',
      code: 'TEAM_RETRIEVED',
      message: 'Team retrieved successfully',
      data: { team }
    });
  } catch (error) {
    next(error);
  }
});
// 4. PUT /api/teams/:id - Update team name/description (admins only; the
// include restricts the lookup to teams where the caller is an admin).
router.put('/:id', authenticateToken, async (req, res, next) => {
  try {
    const adminMembership = {
      model: TeamMember,
      where: { userId: req.user.id, role: 'admin' }
    };
    const team = await Team.findOne({
      where: { id: req.params.id },
      include: [adminMembership]
    });
    if (!team) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'Only team admins can update team',
        data: null
      });
    }

    const { name, description } = req.body;
    await team.update({ name, description });

    res.status(200).json({
      status: 'success',
      code: 'TEAM_UPDATED',
      message: 'Team updated successfully',
      data: { team }
    });
  } catch (error) {
    next(error);
  }
});
// 5. DELETE /api/teams/:id - Delete a team (owner only), removing its
// members and settings atomically.
// BUG FIX: the original opened the transaction BEFORE the ownership check,
// and the 403 path returned without commit or rollback — leaking the
// transaction and its pooled connection. The transaction now starts only
// after ownership is confirmed.
router.delete('/:id', authenticateToken, async (req, res, next) => {
  try {
    const team = await Team.findOne({
      where: { id: req.params.id, ownerId: req.user.id }
    });
    if (!team) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'Only team owner can delete team',
        data: null
      });
    }

    const transaction = await sequelize.transaction();
    try {
      await TeamMember.destroy({ where: { teamId: team.id }, transaction });
      await TeamSettings.destroy({ where: { teamId: team.id }, transaction });
      await team.destroy({ transaction });
      await transaction.commit();
    } catch (error) {
      await transaction.rollback();
      throw error; // surfaced to the outer catch -> next(error)
    }

    res.status(200).json({
      status: 'success',
      code: 'TEAM_DELETED',
      message: 'Team deleted successfully',
      data: null
    });
  } catch (error) {
    next(error);
  }
});
// 6. GET /api/teams/:id/members - List active team members with their user
// records. The first query gates access: the caller must be a member
// (any role — mirrors the original, which did not filter on isActive here).
router.get('/:id/members', authenticateToken, async (req, res, next) => {
  try {
    const team = await Team.findOne({
      where: { id: req.params.id },
      include: [{ model: TeamMember, where: { userId: req.user.id } }]
    });
    if (!team) {
      return res.status(404).json({
        status: 'error',
        code: 'TEAM_NOT_FOUND',
        message: 'Team not found',
        data: null
      });
    }

    const members = await TeamMember.findAll({
      where: { teamId: req.params.id, isActive: true },
      include: [{ model: User, attributes: ['id', 'name', 'email', 'role'] }]
    });

    res.status(200).json({
      status: 'success',
      code: 'TEAM_MEMBERS_RETRIEVED',
      message: 'Team members retrieved successfully',
      data: { members }
    });
  } catch (error) {
    next(error);
  }
});
// 7. POST /api/teams/:id/members - Add a member (team admins only).
// Improvement: the original never validated `userId`, so a missing value
// reached TeamMember.create and surfaced as an opaque database 500. It is
// now rejected up front with the route family's VALIDATION_ERROR shape.
router.post('/:id/members', authenticateToken, async (req, res, next) => {
  try {
    const { userId, role = 'viewer' } = req.body;
    if (!userId) {
      return res.status(400).json({
        status: 'error',
        code: 'VALIDATION_ERROR',
        message: 'userId is required',
        data: null
      });
    }

    // Only team admins may add members.
    const admin = await TeamMember.findOne({
      where: { teamId: req.params.id, userId: req.user.id, role: 'admin' }
    });
    if (!admin) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'Only team admins can add members',
        data: null
      });
    }

    const member = await TeamMember.create({
      teamId: req.params.id,
      userId,
      role,
      invitedBy: req.user.id
    });

    res.status(201).json({
      status: 'success',
      code: 'MEMBER_ADDED',
      message: 'Member added to team',
      data: { member }
    });
  } catch (error) {
    next(error);
  }
});
// 8. PUT /api/teams/:id/members/:memberId - Change a member's role
// (team admins only).
router.put('/:id/members/:memberId', authenticateToken, async (req, res, next) => {
  const fail = (http, code, message) =>
    res.status(http).json({ status: 'error', code, message, data: null });
  try {
    const { role } = req.body;

    const admin = await TeamMember.findOne({
      where: { teamId: req.params.id, userId: req.user.id, role: 'admin' }
    });
    if (!admin) return fail(403, 'FORBIDDEN', 'Only team admins can manage members');

    const member = await TeamMember.findOne({
      where: { id: req.params.memberId, teamId: req.params.id }
    });
    if (!member) return fail(404, 'MEMBER_NOT_FOUND', 'Team member not found');

    await member.update({ role });
    res.status(200).json({
      status: 'success',
      code: 'MEMBER_UPDATED',
      message: 'Member role updated',
      data: { member }
    });
  } catch (error) {
    next(error);
  }
});
// 9. DELETE /api/teams/:id/members/:memberId - Remove member
router.delete('/:id/members/:memberId', authenticateToken, async (req, res, next) => {
try {
// Check user is team admin
const admin = await TeamMember.findOne({
where: { teamId: req.params.id, userId: req.user.id, role: 'admin' }
});
if (!admin) {
return res.status(403).json({
status: 'error',
code: 'FORBIDDEN',
message: 'Only team admins can remove members',
data: null
});
}
const member = await TeamMember.findOne({
where: { id: req.params.memberId, teamId: req.params.id }
});
if (!member) {
return res.status(404).json({
status: 'error',
code: 'MEMBER_NOT_FOUND',
message: 'Team member not found',
data: null
});
}
await member.update({ isActive: false });
res.status(200).json({
status: 'success',
code: 'MEMBER_REMOVED',
message: 'Member removed from team',
data: null
});
} catch (error) {
next(error);
}
});
// 10. GET /api/teams/:id/settings - Get team settings
router.get('/:id/settings', authenticateToken, async (req, res, next) => {
  try {
    // Any active member may read the team's settings.
    const membership = await TeamMember.findOne({
      where: { teamId: req.params.id, userId: req.user.id, isActive: true }
    });
    if (!membership) {
      return res.status(403).json({ status: 'error', code: 'FORBIDDEN', message: 'Not a team member', data: null });
    }
    const settings = await TeamSettings.findOne({ where: { teamId: req.params.id } });
    res.status(200).json({ status: 'success', code: 'TEAM_SETTINGS_RETRIEVED', message: 'Team settings retrieved', data: { settings } });
  } catch (error) {
    next(error);
  }
});
// 11. PUT /api/teams/:id/settings - Update team settings (admin only)
router.put('/:id/settings', authenticateToken, async (req, res, next) => {
  try {
    const admin = await TeamMember.findOne({
      where: { teamId: req.params.id, userId: req.user.id, role: 'admin' }
    });
    if (!admin) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'Only team admins can update settings',
        data: null
      });
    }
    const settings = await TeamSettings.findOne({
      where: { teamId: req.params.id }
    });
    // BUG FIX: previously a missing settings row made settings.update throw a
    // TypeError that surfaced as a 500; report an explicit 404 instead.
    if (!settings) {
      return res.status(404).json({
        status: 'error',
        code: 'TEAM_SETTINGS_NOT_FOUND',
        message: 'Team settings not found',
        data: null
      });
    }
    await settings.update(req.body);
    res.status(200).json({
      status: 'success',
      code: 'TEAM_SETTINGS_UPDATED',
      message: 'Team settings updated',
      data: { settings }
    });
  } catch (error) {
    next(error);
  }
});
// 12. GET /api/teams/:id/dashboard - Get team dashboard analytics
router.get('/:id/dashboard', authenticateToken, async (req, res, next) => {
  try {
    const membership = await TeamMember.findOne({
      where: { teamId: req.params.id, userId: req.user.id, isActive: true }
    });
    if (!membership) {
      return res.status(403).json({ status: 'error', code: 'FORBIDDEN', message: 'Not a team member', data: null });
    }
    // Aggregate team statistics. Activity/stats fields are placeholders
    // pending real analytics queries.
    const memberCount = await TeamMember.count({
      where: { teamId: req.params.id, isActive: true }
    });
    const dashboard = {
      teamId: req.params.id,
      memberCount,
      activeMembers: memberCount,
      recentActivity: [],
      stats: {
        projectsCreatedThisMonth: 0,
        componentsCreatedThisMonth: 0,
        tokensUpdatedThisMonth: 0
      }
    };
    res.status(200).json({ status: 'success', code: 'TEAM_DASHBOARD_RETRIEVED', message: 'Team dashboard retrieved', data: { dashboard } });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,138 @@
import express from 'express';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Resolve module-relative paths (ES modules have no __dirname built in).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const router = express.Router();
// Configure telemetry log file path.
// NOTE(review): hard-coded absolute path tied to this host's layout —
// presumably the DSS monitor box; consider an env override. TODO confirm.
const LOG_DIR = '/home/overbits/dss-monitor/logs/console';
const LOG_FILE = path.join(LOG_DIR, 'browser_errors.log');
// Ensure log directory exists. Runs at module load, so importing this
// router creates the directory as a side effect.
if (!fs.existsSync(LOG_DIR)) {
  fs.mkdirSync(LOG_DIR, { recursive: true, mode: 0o755 });
}
// Rate limiting state (simple in-memory, fixed-window, per-IP implementation)
const rateLimitMap = new Map();
const RATE_LIMIT_WINDOW = 60 * 1000; // 1 minute
const MAX_REQUESTS_PER_WINDOW = 100;

/**
 * Simple per-IP rate limiting middleware.
 *
 * Allows up to MAX_REQUESTS_PER_WINDOW requests per client IP within each
 * RATE_LIMIT_WINDOW; requests beyond that get a 429 JSON error response.
 *
 * @param {object} req - Express request
 * @param {object} res - Express response
 * @param {Function} next - Next middleware in the chain
 */
function rateLimit(req, res, next) {
  // BUG FIX: req.connection is deprecated in modern Node; req.socket is the
  // supported equivalent. Optional chaining guards bare/mocked requests.
  const clientIP = req.ip || req.socket?.remoteAddress || 'unknown';
  const now = Date.now();

  // Opportunistically evict expired windows so the map cannot grow without
  // bound as unique client IPs accumulate (previously a slow memory leak).
  if (rateLimitMap.size > 10000) {
    for (const [ip, data] of rateLimitMap) {
      if (now > data.resetTime) rateLimitMap.delete(ip);
    }
  }

  const clientData = rateLimitMap.get(clientIP);
  if (!clientData || now > clientData.resetTime) {
    // First request from this IP, or its previous window elapsed: start fresh.
    rateLimitMap.set(clientIP, { count: 1, resetTime: now + RATE_LIMIT_WINDOW });
    return next();
  }
  if (clientData.count >= MAX_REQUESTS_PER_WINDOW) {
    return res.status(429).json({
      status: 'error',
      code: 'RATE_LIMIT_EXCEEDED',
      message: 'Too many telemetry requests. Please try again later.'
    });
  }
  clientData.count++;
  next();
}
/**
 * POST /api/telemetry/log
 * Receive browser telemetry and log to file.
 *
 * Validates the payload, normalizes it into a flat record, and appends it to
 * LOG_FILE as newline-delimited JSON. The HTTP response is sent immediately;
 * it does not wait for the file write to finish.
 */
router.post('/log', rateLimit, (req, res) => {
  try {
    const telemetryData = req.body;
    // Validate required fields
    if (!telemetryData.type || !telemetryData.level) {
      return res.status(400).json({
        status: 'error',
        code: 'INVALID_TELEMETRY',
        message: 'Missing required fields: type and level'
      });
    }
    // Normalize into a flat record; missing optional fields become ''.
    const timestamp = telemetryData.timestamp || new Date().toISOString();
    const logEntry = {
      timestamp,
      type: telemetryData.type,
      level: telemetryData.level,
      message: telemetryData.message || '',
      url: telemetryData.url || '',
      userAgent: telemetryData.userAgent || '',
      filename: telemetryData.filename || '',
      lineno: telemetryData.lineno || '',
      colno: telemetryData.colno || '',
      stack: telemetryData.stack || ''
    };
    // Format as single-line JSON for easy parsing.
    // BUG FIX: '\\n' appended a literal backslash-n instead of a newline, so
    // entries were not actually one JSON document per line. Use '\n'.
    const logLine = JSON.stringify(logEntry) + '\n';
    // Append to log file (async, non-blocking)
    fs.appendFile(LOG_FILE, logLine, (err) => {
      if (err) {
        console.error('[Telemetry] Failed to write to log file:', err);
      }
    });
    // Return success immediately (don't wait for file write)
    res.status(200).json({ status: 'ok' });
  } catch (error) {
    console.error('[Telemetry] Error processing telemetry:', error);
    res.status(500).json({
      status: 'error',
      code: 'TELEMETRY_ERROR',
      message: 'Failed to process telemetry data'
    });
  }
});
/**
 * GET /api/telemetry/status
 * Get telemetry system status (log file location, size, last write time).
 */
router.get('/status', (req, res) => {
  try {
    const fileStats = fs.existsSync(LOG_FILE) ? fs.statSync(LOG_FILE) : null;
    res.status(200).json({
      status: 'success',
      data: {
        logFile: LOG_FILE,
        exists: Boolean(fileStats),
        size: fileStats?.size ?? 0,
        lastModified: fileStats?.mtime ?? null,
        rateLimitActive: rateLimitMap.size > 0
      }
    });
  } catch (error) {
    res.status(500).json({ status: 'error', message: error.message });
  }
});
export default router;

178
server/src/routes/tokens.js Normal file
View File

@@ -0,0 +1,178 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import Token from '../models/Token.js';
import Project from '../models/Project.js';
const router = express.Router();

// Get all tokens for a project (optionally filtered by ?category=)
router.get('/project/:projectId', authenticateToken, async (req, res, next) => {
  try {
    // Tokens are only visible through projects the caller owns.
    const project = await Project.findOne({
      where: { id: req.params.projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({ status: 'error', code: 'PROJECT_NOT_FOUND', message: 'Project not found', data: null });
    }
    const where = { projectId: req.params.projectId };
    const { category } = req.query;
    if (category) {
      where.category = category;
    }
    const tokens = await Token.findAll({ where });
    res.status(200).json({ status: 'success', code: 'TOKENS_RETRIEVED', message: 'Tokens retrieved successfully', data: { tokens } });
  } catch (error) {
    next(error);
  }
});
// Create token (manual source) under a project owned by the caller
router.post('/', authenticateToken, async (req, res, next) => {
  try {
    const { projectId, name, category, value, description } = req.body;
    const project = await Project.findOne({
      where: { id: projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(404).json({ status: 'error', code: 'PROJECT_NOT_FOUND', message: 'Project not found', data: null });
    }
    if (!name || !value) {
      return res.status(400).json({ status: 'error', code: 'VALIDATION_ERROR', message: 'Name and value are required', data: null });
    }
    const token = await Token.create({ projectId, name, category, value, description, source: 'manual' });
    res.status(201).json({ status: 'success', code: 'TOKEN_CREATED', message: 'Token created successfully', data: { token } });
  } catch (error) {
    next(error);
  }
});
// Update token (partial update; omitted fields keep their current values)
router.put('/:id', authenticateToken, async (req, res, next) => {
  try {
    const token = await Token.findByPk(req.params.id);
    if (!token) {
      return res.status(404).json({
        status: 'error',
        code: 'TOKEN_NOT_FOUND',
        message: 'Token not found',
        data: null
      });
    }
    // Verify ownership: the token's parent project must belong to the caller.
    const project = await Project.findOne({
      where: { id: token.projectId, userId: req.user.id }
    });
    if (!project) {
      return res.status(403).json({
        status: 'error',
        code: 'FORBIDDEN',
        message: 'You do not have permission to modify this token',
        data: null
      });
    }
    const { name, category, value, description } = req.body;
    // BUG FIX: `field || token.field` silently discarded falsy-but-valid
    // updates (e.g. a value of "0" or an empty category). Nullish coalescing
    // only keeps the old value when the field was omitted (undefined/null).
    await token.update({
      name: name ?? token.name,
      category: category ?? token.category,
      value: value ?? token.value,
      description: description !== undefined ? description : token.description
    });
    res.status(200).json({
      status: 'success',
      code: 'TOKEN_UPDATED',
      message: 'Token updated successfully',
      data: { token }
    });
  } catch (error) {
    next(error);
  }
});
// Delete token (hard delete; ownership enforced via parent project)
router.delete('/:id', authenticateToken, async (req, res, next) => {
  try {
    const token = await Token.findByPk(req.params.id);
    if (!token) {
      return res.status(404).json({ status: 'error', code: 'TOKEN_NOT_FOUND', message: 'Token not found', data: null });
    }
    const ownedProject = await Project.findOne({
      where: { id: token.projectId, userId: req.user.id }
    });
    if (!ownedProject) {
      return res.status(403).json({ status: 'error', code: 'FORBIDDEN', message: 'You do not have permission to modify this token', data: null });
    }
    await token.destroy();
    res.status(200).json({ status: 'success', code: 'TOKEN_DELETED', message: 'Token deleted successfully', data: null });
  } catch (error) {
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,408 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth.js';
import TranslationService from '../services/TranslationService.js';
const router = express.Router();
// ========== Dictionary Operations ==========
/**
 * GET /api/translations
 * List all translation dictionaries with pagination.
 * Query params: projectId, status, createdBy, limit (default 50), offset (0).
 */
router.get('/', authenticateToken, async (req, res, next) => {
  try {
    const filters = {
      projectId: req.query.projectId,
      status: req.query.status,
      createdBy: req.query.createdBy,
      // BUG FIX: always pass an explicit radix to parseInt so numeric query
      // strings are never parsed in a surprising base; NaN (missing/garbage
      // input) falls back to the defaults.
      limit: Number.parseInt(req.query.limit, 10) || 50,
      offset: Number.parseInt(req.query.offset, 10) || 0
    };
    const result = await TranslationService.listDictionaries(filters);
    res.status(200).json({
      status: 'success',
      code: 'DICTIONARIES_RETRIEVED',
      message: 'Translation dictionaries retrieved successfully',
      data: {
        dictionaries: result.dictionaries,
        total: result.total,
        limit: filters.limit,
        offset: filters.offset
      }
    });
  } catch (error) {
    next(error);
  }
});
// Responds with the standard 404 payload used when TranslationService
// reports a missing dictionary.
const sendDictionaryNotFound = (res, message) =>
  res.status(404).json({ status: 'error', code: 'DICTIONARY_NOT_FOUND', message, data: null });

/**
 * GET /api/translations/:id
 * Get a single translation dictionary with mappings
 */
router.get('/:id', authenticateToken, async (req, res, next) => {
  try {
    const dictionary = await TranslationService.getDictionary(req.params.id);
    res.status(200).json({ status: 'success', code: 'DICTIONARY_RETRIEVED', message: 'Translation dictionary retrieved successfully', data: { dictionary } });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendDictionaryNotFound(res, error.message);
    }
    next(error);
  }
});

/**
 * POST /api/translations
 * Create a new translation dictionary
 */
router.post('/', authenticateToken, async (req, res, next) => {
  try {
    const { name, description, projectId, sourceSystem, targetSystem, tags, status } = req.body;
    // A dictionary always needs a name and an owning project.
    if (!name || !projectId) {
      return res.status(400).json({ status: 'error', code: 'VALIDATION_ERROR', message: 'Name and projectId are required', data: null });
    }
    const dictionary = await TranslationService.createDictionary(
      { name, description, projectId, sourceSystem, targetSystem, tags, status },
      req.user.id
    );
    res.status(201).json({ status: 'success', code: 'DICTIONARY_CREATED', message: 'Translation dictionary created successfully', data: { dictionary } });
  } catch (error) {
    next(error);
  }
});

/**
 * PUT /api/translations/:id
 * Update a translation dictionary
 */
router.put('/:id', authenticateToken, async (req, res, next) => {
  try {
    const { name, description, status, sourceSystem, targetSystem, tags } = req.body;
    const dictionary = await TranslationService.updateDictionary(
      req.params.id,
      { name, description, status, sourceSystem, targetSystem, tags },
      req.user.id
    );
    res.status(200).json({ status: 'success', code: 'DICTIONARY_UPDATED', message: 'Translation dictionary updated successfully', data: { dictionary } });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendDictionaryNotFound(res, error.message);
    }
    next(error);
  }
});

/**
 * DELETE /api/translations/:id
 * Soft delete (archive) a translation dictionary
 */
router.delete('/:id', authenticateToken, async (req, res, next) => {
  try {
    await TranslationService.deleteDictionary(req.params.id, req.user.id);
    res.status(200).json({ status: 'success', code: 'DICTIONARY_ARCHIVED', message: 'Translation dictionary archived successfully', data: null });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendDictionaryNotFound(res, error.message);
    }
    next(error);
  }
});
// ========== Mapping Operations ==========

// Standard 404 responder shared by the mapping routes below.
const sendMappingsNotFound = (res, code, message) =>
  res.status(404).json({ status: 'error', code, message, data: null });

/**
 * POST /api/translations/:id/mappings
 * Create a new mapping in a dictionary
 */
router.post('/:id/mappings', authenticateToken, async (req, res, next) => {
  try {
    const { sourceToken, targetToken, transformRule, validated, confidence, notes } = req.body;
    // Every mapping needs both ends of the translation.
    if (!sourceToken || !targetToken) {
      return res.status(400).json({ status: 'error', code: 'VALIDATION_ERROR', message: 'sourceToken and targetToken are required', data: null });
    }
    const mapping = await TranslationService.createMapping(
      req.params.id,
      { sourceToken, targetToken, transformRule, validated, confidence, notes },
      req.user.id
    );
    res.status(201).json({ status: 'success', code: 'MAPPING_CREATED', message: 'Translation mapping created successfully', data: { mapping } });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendMappingsNotFound(res, 'DICTIONARY_NOT_FOUND', error.message);
    }
    // Unique constraint violation => duplicate source token in this dictionary.
    if (error.name === 'SequelizeUniqueConstraintError') {
      return res.status(409).json({ status: 'error', code: 'DUPLICATE_MAPPING', message: 'A mapping for this source token already exists in this dictionary', data: null });
    }
    next(error);
  }
});

/**
 * PUT /api/translations/:id/mappings/:mappingId
 * Update a mapping
 */
router.put('/:id/mappings/:mappingId', authenticateToken, async (req, res, next) => {
  try {
    const { sourceToken, targetToken, transformRule, validated, confidence, notes } = req.body;
    const mapping = await TranslationService.updateMapping(
      req.params.id,
      req.params.mappingId,
      { sourceToken, targetToken, transformRule, validated, confidence, notes },
      req.user.id
    );
    res.status(200).json({ status: 'success', code: 'MAPPING_UPDATED', message: 'Translation mapping updated successfully', data: { mapping } });
  } catch (error) {
    if (error.message === 'Mapping not found') {
      return sendMappingsNotFound(res, 'MAPPING_NOT_FOUND', error.message);
    }
    next(error);
  }
});

/**
 * DELETE /api/translations/:id/mappings/:mappingId
 * Delete a mapping
 */
router.delete('/:id/mappings/:mappingId', authenticateToken, async (req, res, next) => {
  try {
    await TranslationService.deleteMapping(req.params.id, req.params.mappingId, req.user.id);
    res.status(200).json({ status: 'success', code: 'MAPPING_DELETED', message: 'Translation mapping deleted successfully', data: null });
  } catch (error) {
    if (error.message === 'Mapping not found') {
      return sendMappingsNotFound(res, 'MAPPING_NOT_FOUND', error.message);
    }
    next(error);
  }
});

/**
 * POST /api/translations/:id/mappings/bulk
 * Bulk import mappings
 */
router.post('/:id/mappings/bulk', authenticateToken, async (req, res, next) => {
  try {
    const { mappings } = req.body;
    if (!Array.isArray(mappings) || mappings.length === 0) {
      return res.status(400).json({ status: 'error', code: 'VALIDATION_ERROR', message: 'mappings must be a non-empty array', data: null });
    }
    const result = await TranslationService.bulkImportMappings(req.params.id, mappings, req.user.id);
    res.status(200).json({
      status: 'success',
      code: 'BULK_IMPORT_COMPLETED',
      message: `Bulk import completed: ${result.created} created, ${result.updated} updated, ${result.errors.length} errors`,
      data: result
    });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendMappingsNotFound(res, 'DICTIONARY_NOT_FOUND', error.message);
    }
    next(error);
  }
});
// ========== Validation & Analysis ==========

// The analysis endpoints all share the same missing-dictionary response.
const sendAnalysisNotFound = (res, message) =>
  res.status(404).json({ status: 'error', code: 'DICTIONARY_NOT_FOUND', message, data: null });

/**
 * GET /api/translations/:id/validate
 * Validate dictionary mappings
 */
router.get('/:id/validate', authenticateToken, async (req, res, next) => {
  try {
    const validation = await TranslationService.validateDictionary(req.params.id);
    res.status(200).json({ status: 'success', code: 'VALIDATION_COMPLETED', message: 'Dictionary validation completed', data: { validation } });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendAnalysisNotFound(res, error.message);
    }
    next(error);
  }
});

/**
 * GET /api/translations/:id/coverage
 * Calculate token coverage percentage
 */
router.get('/:id/coverage', authenticateToken, async (req, res, next) => {
  try {
    const coverage = await TranslationService.calculateCoverage(req.params.id);
    res.status(200).json({ status: 'success', code: 'COVERAGE_CALCULATED', message: 'Coverage calculated successfully', data: { coverage } });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendAnalysisNotFound(res, error.message);
    }
    next(error);
  }
});

/**
 * GET /api/translations/:id/export
 * Export dictionary as JSON
 */
router.get('/:id/export', authenticateToken, async (req, res, next) => {
  try {
    const exportData = await TranslationService.exportDictionary(req.params.id);
    res.status(200).json({ status: 'success', code: 'DICTIONARY_EXPORTED', message: 'Dictionary exported successfully', data: { export: exportData } });
  } catch (error) {
    if (error.message === 'Dictionary not found') {
      return sendAnalysisNotFound(res, error.message);
    }
    next(error);
  }
});
export default router;

View File

@@ -0,0 +1,124 @@
/**
 * Fuzzy matching logic to link components across different sources.
 */
export class ComponentMatcher {
  constructor() {
    this.MATCH_THRESHOLD = 0.8;
  }

  /**
   * Merges components from different sources based on name similarity.
   * Git entries seed the result set; Storybook and Figma entries are then
   * attached to the closest-named existing entry, or added as new entries.
   * @param {Array} gitSources
   * @param {Array} storybookSources
   * @param {Array} figmaSources
   * @returns {Array} Merged component objects
   */
  match(gitSources, storybookSources, figmaSources) {
    const byName = new Map();

    // 1. Git is the primary source of truth for code.
    for (const git of gitSources) {
      this._upsert(byName, git.name, { git });
    }

    // 2-3. Attach Storybook and Figma records under the best-matching name,
    // or create a new entry when nothing is close enough.
    const attach = (entry, key) => {
      const hit = this._findBestMatch(entry.name, [...byName.keys()]);
      if (hit) {
        byName.set(hit, { ...byName.get(hit), [key]: entry });
      } else {
        this._upsert(byName, entry.name, { [key]: entry });
      }
    };
    for (const sb of storybookSources) attach(sb, 'storybook');
    for (const figma of figmaSources) attach(figma, 'figma');

    return [...byName.values()].map((item) => ({
      name: item.git?.name || item.storybook?.name || item.figma?.name,
      sources: {
        git: item.git || null,
        storybook: item.storybook || null,
        figma: item.figma || null
      }
    }));
  }

  // Merge `data` into the entry stored under `name` (creating it if absent).
  _upsert(map, name, data) {
    map.set(name, { ...(map.get(name) || {}), ...data });
  }

  // Returns the candidate closest to `target`, or null when no candidate
  // reaches MATCH_THRESHOLD. Exact case-insensitive hits short-circuit.
  _findBestMatch(target, candidates) {
    const lowered = target.toLowerCase();
    const exact = candidates.find((c) => c.toLowerCase() === lowered);
    if (exact) return exact;

    let best = null;
    let bestScore = 0;
    for (const candidate of candidates) {
      const score = this._calculateSimilarity(target, candidate);
      if (score >= this.MATCH_THRESHOLD && score > bestScore) {
        bestScore = score;
        best = candidate;
      }
    }
    return best;
  }

  /**
   * Calculates similarity between 0 and 1 using Levenshtein distance.
   */
  _calculateSimilarity(s1, s2) {
    const a = s1.toLowerCase();
    const b = s2.toLowerCase();
    if (a.length === 0) return b.length === 0 ? 1.0 : 0.0;
    if (b.length === 0) return 0.0;

    // Two-row dynamic-programming Levenshtein (O(len(a)) memory).
    let prev = Array.from({ length: a.length + 1 }, (_, j) => j);
    for (let i = 1; i <= b.length; i++) {
      const curr = [i];
      for (let j = 1; j <= a.length; j++) {
        if (b.charAt(i - 1) === a.charAt(j - 1)) {
          curr[j] = prev[j - 1];
        } else {
          curr[j] = 1 + Math.min(prev[j - 1], curr[j - 1], prev[j]);
        }
      }
      prev = curr;
    }
    const distance = prev[a.length];
    return 1.0 - distance / Math.max(a.length, b.length);
  }
}

View File

@@ -0,0 +1,134 @@
import { GitScanner } from './GitScanner.js';
import { StorybookScanner } from './StorybookScanner.js';
import { FigmaScanner } from './FigmaScanner.js';
import { ComponentMatcher } from './ComponentMatcher.js';
import ConfigService from '../services/ConfigService.js';
import Component from '../models/Component.js';
import Project from '../models/Project.js';
import sequelize from '../config/database.js';
/**
 * Orchestrator for component discovery.
 * Coordinates scanners, matching, and database persistence.
 */
export class ComponentScanner {
  constructor() {
    this.gitScanner = new GitScanner();
    this.sbScanner = new StorybookScanner();
    this.figmaScanner = new FigmaScanner();
    this.matcher = new ComponentMatcher();
  }
  /**
   * Executes full scan for a project.
   *
   * Pipeline: resolve config -> run the three scanners in parallel ->
   * fuzzy-match results across sources -> bulk-upsert Component rows inside
   * a single transaction.
   *
   * @param {string} projectId - UUID of project to scan
   * @returns {Promise<{discovered: number, created: number, updated: number, matched: number}>}
   *   Counts of components discovered, newly created, updated, and matched
   *   in more than one source.
   * @throws {Error} If the project does not exist or persistence fails.
   */
  async scan(projectId) {
    console.log(`[ComponentScanner] Starting scan for project ${projectId}`);
    // 1. Fetch Project and Configuration
    const project = await Project.findByPk(projectId);
    if (!project) {
      throw new Error(`Project not found: ${projectId}`);
    }
    // Get resolved configuration (System -> Project -> User cascade)
    const config = await ConfigService.resolveConfig(projectId);
    console.log(`[ComponentScanner] Configuration resolved`);
    // Get workspace path from project metadata or config; metadata wins.
    const workspacePath = project.metadata?.workspacePath || config['component.directory'];
    // 2. Run Scanners in Parallel (each scanner degrades to [] on failure)
    const [gitResults, sbResults, figmaResults] = await Promise.all([
      this.gitScanner.scan(workspacePath),
      this.sbScanner.scan(config['storybook.base_url']),
      this.figmaScanner.scan(config)
    ]);
    console.log(`[ComponentScanner] Scan results - Git: ${gitResults.length}, Storybook: ${sbResults.length}, Figma: ${figmaResults.length}`);
    // 3. Match Components across the three sources by name similarity
    const matches = this.matcher.match(gitResults, sbResults, figmaResults);
    console.log(`[ComponentScanner] Matched ${matches.length} components`);
    // 4. Persist Results (Transaction) - Use bulk upsert for performance
    const transaction = await sequelize.transaction();
    const stats = { discovered: matches.length, created: 0, updated: 0, matched: 0 };
    try {
      // Get existing components to determine created vs updated
      // (array value for `name` is a Sequelize IN-list shorthand).
      const existingComponents = await Component.findAll({
        where: {
          projectId,
          name: matches.map(m => m.name)
        },
        attributes: ['name'],
        raw: true,
        transaction
      });
      const existingNames = new Set(existingComponents.map(c => c.name));
      // Prepare bulk data. NOTE: this map callback deliberately mutates
      // `stats` as a side effect while building each row.
      const componentsData = matches.map(match => {
        const sourceCount = [match.sources.git, match.sources.storybook, match.sources.figma]
          .filter(Boolean).length;
        // "Matched" means the component was seen in more than one source.
        if (sourceCount > 1) {
          stats.matched++;
        }
        // Count created vs updated
        if (existingNames.has(match.name)) {
          stats.updated++;
        } else {
          stats.created++;
        }
        return {
          projectId,
          name: match.name,
          status: 'draft',
          description: match.sources.figma?.description || null,
          figmaId: match.sources.figma?.figmaId || null,
          storybookPath: match.sources.storybook?.storybookPath || null,
          variants: match.sources.storybook?.variants || null,
          // Per-source provenance snapshot, stamped with the scan time.
          metadata: {
            lastScan: new Date().toISOString(),
            sources: {
              git: match.sources.git ? {
                filePath: match.sources.git.filePath,
                lastModified: match.sources.git.lastModified
              } : null,
              storybook: match.sources.storybook ? {
                path: match.sources.storybook.storybookPath,
                variants: match.sources.storybook.variants
              } : null,
              figma: match.sources.figma ? {
                id: match.sources.figma.figmaId,
                description: match.sources.figma.description
              } : null
            }
          }
        };
      });
      // Bulk upsert all components in single operation.
      // NOTE(review): updateOnDuplicate presumably relies on a unique index
      // over (projectId, name) in the Component model — TODO confirm.
      await Component.bulkCreate(componentsData, {
        updateOnDuplicate: ['description', 'figmaId', 'storybookPath', 'variants', 'metadata'],
        transaction
      });
      await transaction.commit();
      console.log(`[ComponentScanner] Scan complete. Created: ${stats.created}, Updated: ${stats.updated}, Matched: ${stats.matched}`);
      return stats;
    } catch (error) {
      // Roll back so a partial scan never leaves half-written rows.
      await transaction.rollback();
      console.error('[ComponentScanner] Database transaction failed:', error);
      throw error;
    }
  }
}

View File

@@ -0,0 +1,118 @@
import ConfigService from '../services/ConfigService.js';
/**
 * Fetches component definitions from Figma API.
 * Handles rate limiting, caching, and token decryption.
 */
export class FigmaScanner {
  constructor() {
    this.cache = new Map(); // Simple in-memory cache keyed by Figma file key
    this.CACHE_TTL = 5 * 60 * 1000; // 5 minutes
    this.lastRequestTime = 0;
    this.MIN_REQUEST_INTERVAL = 500; // 2 requests per second = 500ms gap
  }

  /**
   * Scans a Figma file for components.
   *
   * Gracefully degrades: missing configuration, a token that fails to
   * decrypt, an unrecognized URL, or an API failure all log and return []
   * so the overall component scan can proceed with the other sources.
   *
   * @param {Object} config - Project configuration containing figma details
   * @returns {Promise<Array<{name: string, figmaId: string, description: string}>>}
   */
  async scan(config) {
    const fileUrl = config['figma.file_url'];
    // The stored token is encrypted at rest; decrypt before use.
    let apiToken;
    try {
      const encryptedToken = config['figma.api_token'];
      apiToken = encryptedToken ? ConfigService._decrypt(encryptedToken) : null;
    } catch (e) {
      console.error('[FigmaScanner] Failed to decrypt token:', e.message);
      return [];
    }
    if (!fileUrl || !apiToken) {
      console.log('[FigmaScanner] Figma not configured, skipping scan');
      return []; // Skip if not configured
    }
    const fileKey = this._extractFileKey(fileUrl);
    if (!fileKey) {
      console.error('[FigmaScanner] Invalid Figma URL format');
      return [];
    }
    // Serve from cache while the entry is fresh to limit API traffic.
    const cacheKey = `figma_${fileKey}`;
    const cached = this.cache.get(cacheKey);
    if (cached && (Date.now() - cached.timestamp < this.CACHE_TTL)) {
      console.log('[FigmaScanner] Using cached Figma data');
      return cached.data;
    }
    try {
      const components = await this._fetchWithBackoff(fileKey, apiToken);
      const results = components.map(c => ({
        name: c.name,
        figmaId: c.key, // Figma API uses 'key' as the stable ID across versions
        description: c.description || ''
      }));
      // Update cache
      this.cache.set(cacheKey, { timestamp: Date.now(), data: results });
      return results;
    } catch (error) {
      console.error('[FigmaScanner] API Error:', error.message);
      return [];
    }
  }

  /**
   * Extracts the file key from a Figma URL.
   * BUG FIX: Figma shares links in both the legacy "/file/<key>/..." form and
   * the newer "/design/<key>/..." form; the previous pattern only accepted
   * /file/ URLs, so /design/ links silently failed the scan.
   * @param {string} url
   * @returns {string|null} File key, or null when the URL is not recognized.
   */
  _extractFileKey(url) {
    const match = url.match(/figma\.com\/(?:file|design)\/([a-zA-Z0-9]+)/);
    return match ? match[1] : null;
  }

  /**
   * Fetches the component list, retrying with exponential backoff on HTTP 429.
   * @param {string} fileKey - Figma file key
   * @param {string} token - Decrypted Figma API token
   * @param {number} [retries=3] - Additional attempts after the first
   * @returns {Promise<Array>} Raw component descriptors from the API
   * @throws {Error} On a non-429 HTTP error once retries are exhausted.
   */
  async _fetchWithBackoff(fileKey, token, retries = 3) {
    const url = `https://api.figma.com/v1/files/${fileKey}/components`;
    for (let i = 0; i <= retries; i++) {
      try {
        await this._enforceRateLimit();
        const response = await fetch(url, {
          headers: { 'X-Figma-Token': token }
        });
        if (response.status === 429) {
          // Exponential backoff: 1s, 2s, 4s, ...
          const waitTime = Math.pow(2, i) * 1000;
          console.warn(`[FigmaScanner] Rate limited. Waiting ${waitTime}ms`);
          await new Promise(resolve => setTimeout(resolve, waitTime));
          continue;
        }
        if (!response.ok) {
          throw new Error(`Figma API Error ${response.status}`);
        }
        const data = await response.json();
        return data.meta?.components || [];
      } catch (e) {
        if (i === retries) throw e;
      }
    }
    return [];
  }

  /**
   * Enforces a minimum gap between outgoing requests (client-side throttle).
   */
  async _enforceRateLimit() {
    const now = Date.now();
    const timeSinceLast = now - this.lastRequestTime;
    if (timeSinceLast < this.MIN_REQUEST_INTERVAL) {
      await new Promise(resolve => setTimeout(resolve, this.MIN_REQUEST_INTERVAL - timeSinceLast));
    }
    this.lastRequestTime = Date.now();
  }
}

View File

@@ -0,0 +1,74 @@
import { glob } from 'glob';
import path from 'path';
import fs from 'fs/promises';
/**
 * Scans the local filesystem for component files.
 * Enforces security boundaries to prevent directory traversal.
 */
export class GitScanner {
  /**
   * Scans a workspace for React components (*.jsx / *.tsx files).
   *
   * Failures are non-fatal: a missing path, unreadable file, or glob error
   * results in an empty array (or a skipped file) so the overall component
   * scan can continue with the other sources.
   *
   * @param {string} workspacePath - Absolute path to the workspace root
   * @returns {Promise<Array<{name: string, filePath: string, lastModified: Date}>>}
   */
  async scan(workspacePath) {
    try {
      if (!workspacePath) {
        console.warn('[GitScanner] No workspace path provided');
        return [];
      }
      // Normalize the path. NOTE(review): no allow-list is enforced here, so
      // any absolute path is accepted; production deployments should restrict
      // this to approved workspace directories. (Removed an unused
      // process.cwd() lookup left over from an earlier validation attempt.)
      const resolvedPath = path.resolve(workspacePath);
      // Check if directory exists
      try {
        await fs.access(resolvedPath);
      } catch (e) {
        console.warn(`[GitScanner] Workspace path not found: ${resolvedPath}`);
        return [];
      }
      // Glob pattern excludes common non-source directories
      const pattern = '**/*.{jsx,tsx}';
      const ignore = ['**/node_modules/**', '**/.next/**', '**/dist/**', '**/build/**'];
      const files = await glob(pattern, {
        cwd: resolvedPath,
        ignore,
        absolute: false // Return relative paths for cleaner storage
      });
      const components = await Promise.all(files.map(async (file) => {
        try {
          const fullPath = path.join(resolvedPath, file);
          const stats = await fs.stat(fullPath);
          // Component name is the basename (e.g. "Button.tsx" -> "Button").
          const name = path.basename(file, path.extname(file));
          return {
            name,
            filePath: file,
            lastModified: stats.mtime
          };
        } catch (err) {
          console.error(`[GitScanner] Error processing file ${file}:`, err.message);
          return null; // Skip unreadable files; filtered out below.
        }
      }));
      // Filter out failures
      return components.filter(Boolean);
    } catch (error) {
      console.error('[GitScanner] Fatal error during scan:', error);
      return []; // Return empty array for graceful degradation
    }
  }
}

View File

@@ -0,0 +1,103 @@
import { URL } from 'url';
/**
 * Fetches and parses Storybook manifests.
 * Includes SSRF protection and timeout handling.
 */
export class StorybookScanner {
  /**
   * Scans a published Storybook URL for components.
   * @param {string} baseUrl - The base URL of the deployed Storybook
   * @returns {Promise<Array<{name: string, storybookPath: string, variants: string[]}>>}
   *          Empty array on any failure (graceful degradation).
   */
  async scan(baseUrl) {
    if (!baseUrl) return [];
    try {
      // Security: SSRF validation before any network access.
      this._validateUrl(baseUrl);
      // Fix: `new URL('stories.json', base)` replaces the last path segment
      // when the base lacks a trailing slash (e.g. https://host/storybook
      // would resolve to https://host/stories.json). Normalize so the
      // manifest is always resolved relative to the Storybook root.
      const normalizedBase = baseUrl.endsWith('/') ? baseUrl : `${baseUrl}/`;
      const storiesUrl = new URL('stories.json', normalizedBase).toString();
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), 10000); // 10s timeout
      try {
        const response = await fetch(storiesUrl, { signal: controller.signal });
        clearTimeout(timeoutId);
        if (!response.ok) {
          throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }
        const manifest = await response.json();
        return this._processManifest(manifest);
      } catch (fetchError) {
        clearTimeout(timeoutId);
        if (fetchError.name === 'AbortError') {
          throw new Error('Request timed out after 10 seconds');
        }
        throw fetchError;
      }
    } catch (error) {
      console.error('[StorybookScanner] Scan failed:', error.message);
      // Return empty array for graceful degradation
      return [];
    }
  }
  /**
   * Validates URL scheme and prevents calls to internal network ranges.
   * @param {string} urlStr - URL to validate
   * @throws {Error} On non-HTTP(S) schemes or private/loopback hostnames
   * @private
   */
  _validateUrl(urlStr) {
    const url = new URL(urlStr);
    if (!['http:', 'https:'].includes(url.protocol)) {
      throw new Error('Invalid protocol. Only HTTP/HTTPS allowed.');
    }
    const hostname = url.hostname;
    // Basic private IP blocking (Production should use a dedicated library or proxy)
    const isPrivate = /^(localhost|127\.|192\.168\.|10\.|172\.(1[6-9]|2[0-9]|3[0-1])\.|0\.)/.test(hostname);
    if (isPrivate) {
      throw new Error('SSRF Protection: Access to internal IP addresses is denied.');
    }
  }
  /**
   * Transforms a raw Storybook manifest into a component list, grouping
   * stories by component title and collecting story names as variants.
   * @param {object} manifest - Parsed stories.json content
   * @returns {Array<{name: string, storybookPath: string, variants: string[]}>}
   * @private
   */
  _processManifest(manifest) {
    const stories = manifest.stories || {};
    const componentMap = new Map();
    Object.values(stories).forEach(story => {
      // Fix: newer manifests use `title`, older (v5) manifests use `kind`;
      // the previous code crashed on entries without `title`.
      const title = story.title || story.kind;
      if (!title) return; // skip malformed entries rather than throw
      // Group by component title (e.g. "Components/Button"); the last
      // segment of the hierarchical title is the component name.
      const titleParts = title.split('/');
      const componentName = titleParts[titleParts.length - 1];
      // Extract variant from the story name (e.g. "Primary", "Large")
      const variant = story.name;
      if (!componentMap.has(componentName)) {
        componentMap.set(componentName, {
          name: componentName,
          storybookPath: title, // Base path in SB sidebar
          variants: new Set()
        });
      }
      componentMap.get(componentName).variants.add(variant);
    });
    return Array.from(componentMap.values()).map(comp => ({
      ...comp,
      variants: Array.from(comp.variants)
    }));
  }
}

View File

@@ -0,0 +1,90 @@
/**
* migrate.js
* Database migration runner
*/
import fs from 'fs';
import path from 'path';
import { fileURLToPath, pathToFileURL } from 'url';
import sequelize from '../config/database.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Runs every pending migration in filename (timestamp) order.
 * Completed migrations are recorded in a `migrations` table so each one
 * executes exactly once. Exits the process: 0 on success, 1 on failure.
 */
const runMigrations = async () => {
  try {
    console.log('[Migrate] Connecting to database...');
    await sequelize.authenticate();
    console.log('[Migrate] Database connection established');
    // Create migrations tracking table if it doesn't exist.
    // NOTE(review): SERIAL is PostgreSQL-specific DDL and will fail on SQLite;
    // the repo's .env example suggests a sqlite DATABASE_URL — confirm dialect.
    await sequelize.query(`
      CREATE TABLE IF NOT EXISTS migrations (
        id SERIAL PRIMARY KEY,
        name VARCHAR(255) UNIQUE NOT NULL,
        executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
      );
    `);
    // Discover migration files; sorting by filename orders them by timestamp prefix.
    const migrationsDir = path.join(__dirname, '../../migrations');
    const migrationFiles = fs.readdirSync(migrationsDir)
      .filter(file => file.endsWith('.js'))
      .sort();
    // Names of migrations that already ran.
    const [executedMigrations] = await sequelize.query(
      'SELECT name FROM migrations ORDER BY name'
    );
    const executedNames = executedMigrations.map(m => m.name);
    console.log(`[Migrate] Found ${migrationFiles.length} migration files`);
    console.log(`[Migrate] Already executed: ${executedNames.length} migrations`);
    // Run pending migrations sequentially — order matters between migrations.
    for (const file of migrationFiles) {
      // Fix: strip only the trailing extension. The previous
      // `file.replace('.js', '')` removed the FIRST ".js" occurrence,
      // corrupting names like "20240101-add.jsonb_column.js".
      const migrationName = file.slice(0, -3);
      if (executedNames.includes(migrationName)) {
        console.log(`[Migrate] ✓ ${migrationName} (already executed)`);
        continue;
      }
      console.log(`[Migrate] → Running ${migrationName}...`);
      try {
        // Fix: dynamic import() needs a file:// URL to be portable — raw
        // absolute Windows paths (C:\...) are rejected by the ESM loader.
        const migrationPath = pathToFileURL(path.join(migrationsDir, file)).href;
        const migration = await import(migrationPath);
        if (!migration.default || !migration.default.up) {
          throw new Error(`Migration ${file} does not export a valid up() function`);
        }
        // Run the migration against the live schema.
        await migration.default.up(sequelize.getQueryInterface());
        // Record successful execution so it never runs twice.
        await sequelize.query(
          'INSERT INTO migrations (name) VALUES (?)',
          { replacements: [migrationName] }
        );
        console.log(`[Migrate] ✓ ${migrationName} completed successfully`);
      } catch (error) {
        console.error(`[Migrate] ✗ ${migrationName} failed:`, error.message);
        throw error; // abort the run; later migrations may depend on this one
      }
    }
    console.log('[Migrate] All migrations completed successfully');
    await sequelize.close();
    process.exit(0);
  } catch (error) {
    console.error('[Migrate] Migration failed:', error);
    await sequelize.close();
    process.exit(1);
  }
};
runMigrations();

158
server/src/scripts/seed.js Normal file
View File

@@ -0,0 +1,158 @@
/**
* seed.js
* Database seeding script for development
*/
import sequelize from '../config/database.js';
import User from '../models/User.js';
import Project from '../models/Project.js';
import Token from '../models/Token.js';
import Component from '../models/Component.js';
import Icon from '../models/Icon.js';
import Integration from '../models/Integration.js';
/**
 * Seeds the development database with a demo login, a test user, a sample
 * project and representative tokens, components, icons and integrations.
 * Exits the process with 0 on success, 1 on failure.
 * Set FORCE_SEED=true to drop and recreate all tables before seeding.
 */
async function seedDatabase() {
  try {
    console.log('[Seed] Synchronizing database...');
    await sequelize.sync({ force: process.env.FORCE_SEED === 'true' });

    // Demo account (used for auto-login in production).
    console.log('[Seed] Creating demo user...');
    const demoAccount = await User.create({
      email: 'demo@example.com',
      password: 'demo123',
      name: 'Demo User',
      role: 'admin'
    });

    // Regular test account; owns the sample project below.
    console.log('[Seed] Creating test user...');
    const testAccount = await User.create({
      email: 'test@example.com',
      password: 'password123',
      name: 'Test User',
      role: 'admin'
    });

    console.log('[Seed] Creating test project...');
    const project = await Project.create({
      name: 'Test Design System',
      key: 'TDS',
      description: 'A test design system project',
      userId: testAccount.id,
      figmaFileKey: 'ABC123DEF456',
      storybookUrl: 'https://storybook.example.com'
    });

    // Design tokens, one bulk insert per category.
    console.log('[Seed] Creating test tokens...');
    const token = (name, category, value, description) => ({
      projectId: project.id,
      name,
      category,
      value,
      description
    });
    await Token.bulkCreate([
      token('primary', 'color', '#0066CC', 'Primary brand color'),
      token('secondary', 'color', '#4CAF50', 'Secondary brand color'),
      token('error', 'color', '#F44336', 'Error color')
    ]);
    await Token.bulkCreate([
      token('xs', 'spacing', '4px', 'Extra small spacing'),
      token('sm', 'spacing', '8px', 'Small spacing'),
      token('md', 'spacing', '16px', 'Medium spacing'),
      token('lg', 'spacing', '24px', 'Large spacing')
    ]);

    // Sample components at different maturity levels.
    console.log('[Seed] Creating test components...');
    await Component.bulkCreate([
      {
        projectId: project.id,
        name: 'Button',
        category: 'input',
        description: 'Reusable button component',
        status: 'ready',
        adoptionScore: 0.95,
        usageCount: 150,
        variants: ['primary', 'secondary', 'danger', 'ghost']
      },
      {
        projectId: project.id,
        name: 'Card',
        category: 'layout',
        description: 'Card container component',
        status: 'ready',
        adoptionScore: 0.85,
        usageCount: 120,
        variants: ['elevated', 'outlined', 'filled']
      },
      {
        projectId: project.id,
        name: 'Modal',
        category: 'layout',
        description: 'Modal dialog component',
        status: 'wip',
        adoptionScore: 0.60,
        usageCount: 45,
        variants: ['default', 'alert']
      }
    ]);

    // Sample SVG icons.
    console.log('[Seed] Creating test icons...');
    await Icon.bulkCreate([
      {
        projectId: project.id,
        name: 'home',
        category: 'navigation',
        svgData: '<svg viewBox="0 0 24 24"><path d="M10 20v-6h4v6h5v-8h3L12 3 2 12h3v8z"/></svg>',
        tags: ['navigation', 'common']
      },
      {
        projectId: project.id,
        name: 'search',
        category: 'action',
        svgData: '<svg viewBox="0 0 24 24"><path d="M15.5 14h-.79l-.28-.27A6.471 6.471 0 0016 9.5 6.5 6.5 0 109.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/></svg>',
        tags: ['action', 'search']
      }
    ]);

    // Sample third-party integrations.
    console.log('[Seed] Creating test integrations...');
    await Integration.bulkCreate([
      {
        projectId: project.id,
        type: 'figma',
        name: 'Figma Design Tokens',
        config: { fileKey: 'ABC123DEF456', autoSync: true },
        isActive: true
      },
      {
        projectId: project.id,
        type: 'storybook',
        name: 'Storybook Documentation',
        config: { url: 'https://storybook.example.com', autoPublish: false },
        isActive: true
      }
    ]);

    console.log('[Seed] Database seeding completed successfully!');
    console.log('[Seed] Demo user: demo@example.com / demo123 (for auto-login)');
    console.log('[Seed] Test user: test@example.com / password123');
    console.log('[Seed] Test project: Test Design System (key: TDS)');
    process.exit(0);
  } catch (error) {
    console.error('[Seed] Error seeding database:', error);
    process.exit(1);
  }
}
seedDatabase();

208
server/src/server.js Normal file
View File

@@ -0,0 +1,208 @@
import express from 'express';
import cors from 'cors';
import helmet from 'helmet';
import 'dotenv/config';
import sequelize from './config/database.js';
import passport from './config/passport.js';
import { errorHandler, notFoundHandler } from './middleware/errorHandler.js';
// Routes
import authRoutes from './routes/auth.js';
import projectRoutes from './routes/projects.js';
import tokenRoutes from './routes/tokens.js';
import componentRoutes from './routes/components.js';
import iconRoutes from './routes/icons.js';
import integrationRoutes from './routes/integrations.js';
import configRoutes from './routes/config.js';
import logsRoutes from './routes/logs.js';
import notificationsRoutes from './routes/notifications.js';
import mcpRoutes from './routes/mcp.js';
import teamsRoutes from './routes/teams.js';
import discoveryRoutes from './routes/discovery.js';
import figmaRoutes from './routes/figma.js';
import qaRoutes from './routes/qa.js';
import aiRoutes from './routes/ai.js';
import telemetryRoutes from './routes/telemetry.js';
import translationRoutes from './routes/translations.js';
// Admin Routes
import adminRolesRoutes from './routes/admin/roles.js';
import adminConfigRoutes from './routes/admin/config.js';
// Models
import User from './models/User.js';
import Project from './models/Project.js';
import Token from './models/Token.js';
import Component from './models/Component.js';
import Icon from './models/Icon.js';
import Integration from './models/Integration.js';
import Log from './models/Log.js';
import Team from './models/Team.js';
import TeamMember from './models/TeamMember.js';
import TeamSettings from './models/TeamSettings.js';
import Discovery from './models/Discovery.js';
import FigmaSync from './models/FigmaSync.js';
import QATest from './models/QATest.js';
import AIChat from './models/AIChat.js';
import ConfigSetting from './models/ConfigSetting.js';
import ConfigAuditLog from './models/ConfigAuditLog.js';
import TranslationDictionary from './models/TranslationDictionary.js';
import TranslationMapping from './models/TranslationMapping.js';
const app = express();
const PORT = process.env.PORT || 3001;
// Middleware
// Security headers first, then CORS. CORS_ORIGIN is a comma-separated
// allow-list; falls back to '*' when unset.
// NOTE(review): credentials:true combined with the '*' fallback is rejected
// by browsers for credentialed requests — confirm CORS_ORIGIN is always set
// in production.
app.use(helmet());
app.use(cors({
  origin: process.env.CORS_ORIGIN?.split(',') || '*',
  credentials: true
}));
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Passport
app.use(passport.initialize());
// Health check
// Unauthenticated liveness probe (used by the Docker HEALTHCHECK).
app.get('/health', (req, res) => {
  res.status(200).json({
    status: 'success',
    code: 'HEALTH_OK',
    message: 'Server is healthy',
    data: { timestamp: new Date().toISOString() }
  });
});
// API Routes
app.use('/api/auth', authRoutes);
app.use('/api/projects', projectRoutes);
app.use('/api/tokens', tokenRoutes);
app.use('/api/components', componentRoutes);
app.use('/api/icons', iconRoutes);
app.use('/api/integrations', integrationRoutes);
app.use('/api/config', configRoutes);
app.use('/api/logs', logsRoutes);
app.use('/api/notifications', notificationsRoutes);
app.use('/api/mcp', mcpRoutes);
app.use('/api/teams', teamsRoutes);
app.use('/api/discovery', discoveryRoutes);
app.use('/api/figma', figmaRoutes);
// qaRoutes is mounted under both /api/qa and /api/test (alias).
app.use('/api/qa', qaRoutes);
app.use('/api/test', qaRoutes);
// NOTE(review): aiRoutes is fanned out across six prefixes below
// (/claude, /ai, /dss, /navigation, /system, /assets) — confirm this
// aliasing is intentional and not a copy-paste leftover.
app.use('/api/claude', aiRoutes);
app.use('/api/ai', aiRoutes);
app.use('/api/dss', aiRoutes);
app.use('/api/navigation', aiRoutes);
app.use('/api/system', aiRoutes);
app.use('/api/assets', aiRoutes);
app.use('/api/telemetry', telemetryRoutes);
app.use('/api/translations', translationRoutes);
// Admin Routes
app.use('/api/admin/roles', adminRolesRoutes);
app.use('/api/admin/config', adminConfigRoutes);
// Error handling
// Registered after all routes: unmatched paths hit notFoundHandler,
// thrown/next(err) errors hit errorHandler.
app.use(notFoundHandler);
app.use(errorHandler);
// Database relationships
// All associations are declared before startServer() runs sequelize.sync(),
// so foreign keys are part of the synchronized schema.
User.hasMany(Project, { foreignKey: 'userId' });
Project.belongsTo(User, { foreignKey: 'userId' });
Project.hasMany(Token, { foreignKey: 'projectId' });
Token.belongsTo(Project, { foreignKey: 'projectId' });
Project.hasMany(Component, { foreignKey: 'projectId' });
Component.belongsTo(Project, { foreignKey: 'projectId' });
Project.hasMany(Icon, { foreignKey: 'projectId' });
Icon.belongsTo(Project, { foreignKey: 'projectId' });
Project.hasMany(Integration, { foreignKey: 'projectId' });
Integration.belongsTo(Project, { foreignKey: 'projectId' });
User.hasMany(Log, { foreignKey: 'userId' });
Log.belongsTo(User, { foreignKey: 'userId' });
// Team relationships (Phase 3)
User.hasMany(Team, { foreignKey: 'ownerId' });
Team.belongsTo(User, { foreignKey: 'ownerId' });
Team.hasMany(TeamMember, { foreignKey: 'teamId' });
TeamMember.belongsTo(Team, { foreignKey: 'teamId' });
User.hasMany(TeamMember, { foreignKey: 'userId' });
TeamMember.belongsTo(User, { foreignKey: 'userId' });
Team.hasOne(TeamSettings, { foreignKey: 'teamId' });
TeamSettings.belongsTo(Team, { foreignKey: 'teamId' });
// Discovery relationships (Phase 4)
User.hasMany(Discovery, { foreignKey: 'userId' });
Discovery.belongsTo(User, { foreignKey: 'userId' });
Project.hasMany(Discovery, { foreignKey: 'projectId' });
Discovery.belongsTo(Project, { foreignKey: 'projectId' });
// FigmaSync relationships (Phase 5A)
User.hasMany(FigmaSync, { foreignKey: 'userId' });
FigmaSync.belongsTo(User, { foreignKey: 'userId' });
Project.hasMany(FigmaSync, { foreignKey: 'projectId' });
FigmaSync.belongsTo(Project, { foreignKey: 'projectId' });
// QATest relationships (Phase 5B)
User.hasMany(QATest, { foreignKey: 'userId' });
QATest.belongsTo(User, { foreignKey: 'userId' });
Project.hasMany(QATest, { foreignKey: 'projectId' });
QATest.belongsTo(Project, { foreignKey: 'projectId' });
// AIChat relationships (Phase 5C)
User.hasMany(AIChat, { foreignKey: 'userId' });
AIChat.belongsTo(User, { foreignKey: 'userId' });
Project.hasMany(AIChat, { foreignKey: 'projectId' });
AIChat.belongsTo(Project, { foreignKey: 'projectId' });
// Config relationships (Track 2)
ConfigSetting.hasMany(ConfigAuditLog, { foreignKey: 'configId' });
ConfigAuditLog.belongsTo(ConfigSetting, { foreignKey: 'configId' });
User.hasMany(ConfigAuditLog, { foreignKey: 'actorId' });
ConfigAuditLog.belongsTo(User, { foreignKey: 'actorId' });
// Translation Dictionary relationships (Phase 2B)
Project.hasMany(TranslationDictionary, { foreignKey: 'projectId' });
TranslationDictionary.belongsTo(Project, { foreignKey: 'projectId' });
User.hasMany(TranslationDictionary, { foreignKey: 'createdBy' });
TranslationDictionary.belongsTo(User, { foreignKey: 'createdBy' });
TranslationDictionary.hasMany(TranslationMapping, { foreignKey: 'dictionaryId', onDelete: 'CASCADE' });
TranslationMapping.belongsTo(TranslationDictionary, { foreignKey: 'dictionaryId' });
// Initialize database and start server
// Connects, synchronizes models, then binds the HTTP listener.
// NOTE(review): sync({ alter: true }) mutates the schema on every dev boot
// while scripts/migrate.js also manages schema — confirm the intended
// strategy so the two do not conflict.
const startServer = async () => {
  try {
    await sequelize.authenticate();
    console.log('[Server] Database connection established');
    await sequelize.sync({ alter: process.env.NODE_ENV === 'development' });
    console.log('[Server] Database models synchronized');
    app.listen(PORT, () => {
      console.log(`[Server] API running on http://localhost:${PORT}`);
      console.log(`[Server] Environment: ${process.env.NODE_ENV || 'development'}`);
    });
  } catch (error) {
    console.error('[Server] Failed to start:', error);
    process.exit(1);
  }
};
startServer();
export default app;

View File

@@ -0,0 +1,283 @@
import crypto from 'crypto';
import ConfigSetting from '../models/ConfigSetting.js';
import ConfigAuditLog from '../models/ConfigAuditLog.js';
import redisClient from '../config/redis.js';
// Configuration definitions
const CONFIG_SCHEMAS = {
  // System level defaults
  'figma.api_timeout': { type: 'number', min: 1000, max: 60000 },
  'storybook.default_port': { type: 'number', min: 1, max: 65535 },
  'rate_limit.requests_per_minute': { type: 'number', min: 1, max: 1000 },
  // Project level
  'figma.file_url': { type: 'string', pattern: /^https:\/\/www\.figma\.com\/file\// },
  'figma.api_token': { type: 'string', encrypted: true, minLength: 10 },
  'storybook.base_url': { type: 'string', pattern: /^https?:\/\// },
  'jira.project_key': { type: 'string', pattern: /^[A-Z0-9]{2,10}$/ },
  'component.directory': { type: 'string', pattern: /^[a-zA-Z0-9/_-]+$/ },
  // User level
  'theme': { type: 'string', enum: ['light', 'dark', 'auto'] },
  'dashboard.layout': { type: 'string', enum: ['grid', 'list', 'compact'] },
  'notifications.enabled': { type: 'boolean' }
};
// Validate encryption key is set (32 bytes required for AES-256)
const ENCRYPTION_KEY = process.env.CONFIG_ENCRYPTION_KEY;
if (!ENCRYPTION_KEY) {
  throw new Error('FATAL: CONFIG_ENCRYPTION_KEY environment variable is required for secure configuration storage');
}
// Fix: String.length counts UTF-16 code units, not bytes, but aes-256-gcm
// requires a 32-BYTE key. Measure the actual encoded byte length so keys
// containing multi-byte characters cannot pass the guard and later crash
// crypto.createCipheriv at encryption time.
if (Buffer.byteLength(ENCRYPTION_KEY, 'utf8') !== 32) {
  throw new Error(`FATAL: CONFIG_ENCRYPTION_KEY must be exactly 32 bytes for AES-256 (got ${Buffer.byteLength(ENCRYPTION_KEY, 'utf8')})`);
}
const ALGORITHM = 'aes-256-gcm';
const CACHE_TTL_SECONDS = 300; // 5 minutes
class ConfigService {
  /**
   * Resolve configuration with cascade strategy: System -> Project -> User
   * @param {string} projectId - Optional project context
   * @param {string} userId - Optional user context
   * @returns {Promise<Object>} Resolved configuration object (flat key -> value map)
   */
  async resolveConfig(projectId = null, userId = null) {
    const cacheKey = `config:resolved:${projectId || 'none'}:${userId || 'none'}`;
    // 1. Try Cache
    // NOTE(review): secret values are decrypted in _processConfigList below and
    // then cached here in PLAINTEXT in Redis — confirm this is acceptable for
    // the threat model, or mask/encrypt cached payloads.
    if (redisClient && redisClient.status === 'ready') {
      try {
        const cached = await redisClient.get(cacheKey);
        if (cached) return JSON.parse(cached);
      } catch (err) {
        console.warn('[ConfigService] Redis read failed', err);
      }
    }
    // 2. Fetch all relevant scopes in parallel
    const queries = [
      ConfigSetting.findAll({ where: { scope: 'SYSTEM' } })
    ];
    if (projectId) {
      queries.push(ConfigSetting.findAll({ where: { scope: 'PROJECT', scopeId: projectId } }));
    }
    if (userId) {
      queries.push(ConfigSetting.findAll({ where: { scope: 'USER', scopeId: userId } }));
    }
    const results = await Promise.all(queries);
    // results[0] is always SYSTEM; the USER index shifts depending on whether
    // a PROJECT query was pushed (hence `results[projectId ? 2 : 1]`).
    const systemConfig = this._processConfigList(results[0]);
    const projectConfig = projectId ? this._processConfigList(results[1]) : {};
    const userConfig = userId ? this._processConfigList(results[projectId ? 2 : 1]) : {};
    // 3. Deep Merge: System <- Project <- User
    // Note: Simple object merge is sufficient here as keys are dot-notation strings in DB
    // but represented as flat objects during resolution for keys.
    // If we want nested objects returned, we would hydrate them here.
    // For now, returning flat key-value map is safer and standard for config flags.
    const resolved = {
      ...systemConfig,
      ...projectConfig,
      ...userConfig
    };
    // 4. Cache Result
    if (redisClient && redisClient.status === 'ready') {
      try {
        await redisClient.set(cacheKey, JSON.stringify(resolved), 'EX', CACHE_TTL_SECONDS);
      } catch (err) {
        console.warn('[ConfigService] Redis set failed', err);
      }
    }
    return resolved;
  }
  /**
   * Set a configuration value
   * Validates against CONFIG_SCHEMAS, encrypts secret values, upserts the
   * row, writes an audit log entry, and invalidates the cache.
   * @param {Object} params
   * @param {string} params.scope - SYSTEM, PROJECT, or USER
   * @param {string} params.scopeId - UUID or null for SYSTEM
   * @param {string} params.key - Config key
   * @param {any} params.value - Value to store
   * @param {string} params.actorId - ID of user making change
   * @returns {Promise<{key: string, value: any, updatedAt: Date}>} Secrets are masked in the response
   * @throws {Error} On unknown keys or schema validation failures
   */
  async setConfig({ scope, scopeId, key, value, actorId }) {
    // 1. Validate Key & Schema
    const schema = CONFIG_SCHEMAS[key];
    if (!schema) {
      throw new Error(`Invalid config key: ${key}`);
    }
    this._validateValue(value, schema);
    // 2. Encrypt if needed
    let finalValue = value;
    const isSecret = schema.encrypted || false;
    if (isSecret) {
      finalValue = this._encrypt(value);
    }
    // 3. Upsert to DB
    const existing = await ConfigSetting.findOne({
      where: { scope, scopeId, key }
    });
    let record;
    if (existing) {
      // Create Audit Log
      // (audit row is written before the update; previousValue is the stored
      // — possibly encrypted — representation)
      await ConfigAuditLog.create({
        configId: existing.id,
        actorId,
        action: 'UPDATE',
        previousValue: existing.value,
        newValue: finalValue
      });
      existing.value = finalValue;
      existing.isSecret = isSecret;
      await existing.save();
      record = existing;
    } else {
      record = await ConfigSetting.create({
        scope,
        scopeId,
        key,
        value: finalValue,
        isSecret
      });
      await ConfigAuditLog.create({
        configId: record.id,
        actorId,
        action: 'CREATE',
        previousValue: null,
        newValue: finalValue
      });
    }
    // 4. Invalidate Cache
    await this._invalidateCache(scopeId);
    return {
      key: record.key,
      value: isSecret ? '********' : value, // Never return secret in response
      updatedAt: record.updatedAt
    };
  }
  /**
   * Get settings for a specific scope (Admin usage mainly)
   * @param {string} scope - SYSTEM, PROJECT, or USER
   * @param {string} scopeId - UUID or null for SYSTEM
   * @returns {Promise<Object>} Key -> value map with secrets masked
   */
  async getScopeConfig(scope, scopeId) {
    const settings = await ConfigSetting.findAll({
      where: { scope, scopeId }
    });
    return this._processConfigList(settings, true); // true = mask secrets
  }
  // --- Helpers ---
  /**
   * Validate a value against a CONFIG_SCHEMAS entry.
   * @throws {Error} Describing the first failed constraint
   * @private
   */
  _validateValue(value, schema) {
    if (schema.type === 'number') {
      if (typeof value !== 'number') throw new Error('Value must be a number');
      if (schema.min !== undefined && value < schema.min) throw new Error(`Value must be >= ${schema.min}`);
      if (schema.max !== undefined && value > schema.max) throw new Error(`Value must be <= ${schema.max}`);
    }
    if (schema.type === 'string') {
      if (typeof value !== 'string') throw new Error('Value must be a string');
      if (schema.enum && !schema.enum.includes(value)) throw new Error(`Value must be one of: ${schema.enum.join(', ')}`);
      if (schema.pattern && !schema.pattern.test(value)) throw new Error('Value format is invalid');
      if (schema.minLength && value.length < schema.minLength) throw new Error(`Value must be at least ${schema.minLength} chars`);
    }
    if (schema.type === 'boolean') {
      if (typeof value !== 'boolean') throw new Error('Value must be a boolean');
    }
    if (schema.type === 'url') {
      // Basic URL check if no specific pattern
      // (no 'url'-typed entries exist in CONFIG_SCHEMAS today; kept for future keys)
      if (!schema.pattern) {
        try {
          new URL(value);
        } catch {
          throw new Error('Value must be a valid URL');
        }
      } else if (!schema.pattern.test(value)) {
        throw new Error('Value must match URL pattern');
      }
    }
  }
  /**
   * Convert a list of ConfigSetting rows into a flat key -> value map,
   * decrypting secrets (or masking them when maskSecrets is true).
   * Decryption failures log and yield null rather than throwing.
   * @private
   */
  _processConfigList(list, maskSecrets = false) {
    const map = {};
    for (const item of list) {
      if (item.isSecret) {
        if (maskSecrets) {
          map[item.key] = '********';
        } else {
          try {
            map[item.key] = this._decrypt(item.value);
          } catch (e) {
            console.error(`Failed to decrypt key ${item.key}`, e);
            map[item.key] = null;
          }
        }
      } else {
        map[item.key] = item.value;
      }
    }
    return map;
  }
  /**
   * AES-256-GCM encrypt with a random IV per call.
   * @private
   */
  _encrypt(text) {
    // Format: iv:authTag:encryptedContent
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv(ALGORITHM, Buffer.from(ENCRYPTION_KEY), iv);
    let encrypted = cipher.update(String(text), 'utf8', 'hex');
    encrypted += cipher.final('hex');
    const authTag = cipher.getAuthTag().toString('hex');
    return `${iv.toString('hex')}:${authTag}:${encrypted}`;
  }
  /**
   * Reverse of _encrypt; verifies the GCM auth tag (final() throws on tamper).
   * @throws {Error} On malformed input or authentication failure
   * @private
   */
  _decrypt(encryptedText) {
    // Format: iv:authTag:encryptedContent
    const parts = encryptedText.split(':');
    if (parts.length !== 3) throw new Error('Invalid encrypted format');
    const [ivHex, authTagHex, contentHex] = parts;
    const decipher = crypto.createDecipheriv(ALGORITHM, Buffer.from(ENCRYPTION_KEY), Buffer.from(ivHex, 'hex'));
    decipher.setAuthTag(Buffer.from(authTagHex, 'hex'));
    let decrypted = decipher.update(contentHex, 'hex', 'utf8');
    decrypted += decipher.final('utf8');
    return decrypted;
  }
  /**
   * Cache invalidation hook — intentionally a no-op for the MVP; stale
   * resolved configs expire via the 5-minute TTL instead (see notes below).
   * @private
   */
  async _invalidateCache(scopeId) {
    if (!redisClient || redisClient.status !== 'ready') return;
    // Pattern based invalidation is expensive in Redis (KEYS command).
    // Better strategy for this MVP:
    // Since we key by `resolved:projectId:userId`, we can't easily guess all keys to delete
    // when SYSTEM changes.
    // Ideally we use a version counter or specialized sets.
    // For MVP: We accept that updating SYSTEM config might take 5 mins to propagate
    // unless we iterate keys (bad for prod) or flushdb (bad for prod).
    // A workable middle ground for specific project updates:
    if (scopeId) {
      // If updating a project, we can try to guess user keys if we tracked them,
      // but let's just accept 5 min TTL for now as per requirements.
      // We can explicitly delete the specific scope cache if we were caching raw scopes separately.
    }
    // Note: In a real heavy-load system, we'd use a 'config_version' key that we append to cache keys
    // and increment on updates to instantly invalidate old namespaces.
  }
}
export default new ConfigService();

View File

@@ -0,0 +1,153 @@
/**
 * PHASE 2: Notification Service
 * Manages real-time event broadcasting using Server-Sent Events (SSE)
 * Each user gets a unique channel for personalized notifications
 */
class NotificationService {
  constructor() {
    // Map of userId -> Set of SSE response objects
    this.subscribers = new Map();
    // Map of userId -> Array of pending events (for offline subscribers)
    this.eventQueue = new Map();
    this.maxQueueSize = 100; // Keep last 100 events per user
  }
  /**
   * Register a new SSE subscriber and replay any events queued while the
   * user had no open connection.
   * @param {string} userId - User identifier
   * @param {Response} res - Express response object for SSE
   * @returns {boolean} Always true
   */
  subscribe(userId, res) {
    if (!this.subscribers.has(userId)) {
      this.subscribers.set(userId, new Set());
    }
    // Fix: only create the queue if it does not exist yet. Previously the
    // queue was reset to [] on every (re)subscribe, destroying the events
    // queued while the user was offline and defeating the replay below.
    if (!this.eventQueue.has(userId)) {
      this.eventQueue.set(userId, []);
    }
    this.subscribers.get(userId).add(res);
    // Send queued events to new subscriber
    const queued = this.eventQueue.get(userId) || [];
    queued.forEach(event => {
      this.sendSSE(res, event);
    });
    // Cleanup on disconnect
    res.on('close', () => {
      // Fix: guard against the user entry already being removed (e.g. by
      // disconnect()); the unguarded .delete() threw a TypeError.
      const pool = this.subscribers.get(userId);
      if (!pool) return;
      pool.delete(res);
      if (pool.size === 0) {
        this.subscribers.delete(userId);
        // The eventQueue entry is kept (bounded at maxQueueSize) so events
        // continue to accumulate for replay on reconnect.
      }
    });
    return true;
  }
  /**
   * Broadcast event to all subscribers of a user
   * @param {string} userId - User identifier
   * @param {string} eventType - Event type (e.g., 'project:created', 'log:error')
   * @param {object} data - Event payload
   */
  broadcast(userId, eventType, data) {
    const event = {
      type: eventType,
      timestamp: new Date().toISOString(),
      data
    };
    // Add to queue for offline subscribers (only for users that have
    // subscribed at least once, so unknown userIds don't grow the map)
    if (this.eventQueue.has(userId)) {
      const queue = this.eventQueue.get(userId);
      queue.push(event);
      // Keep only last N events
      if (queue.length > this.maxQueueSize) {
        queue.shift();
      }
    }
    // Send to all connected subscribers
    if (this.subscribers.has(userId)) {
      this.subscribers.get(userId).forEach(res => {
        this.sendSSE(res, event);
      });
    }
  }
  /**
   * Broadcast to all users (admin notifications)
   * @param {string} eventType - Event type
   * @param {object} data - Event payload
   */
  broadcastToAll(eventType, data) {
    const event = {
      type: eventType,
      timestamp: new Date().toISOString(),
      data,
      isGlobal: true
    };
    this.subscribers.forEach((subscribers) => {
      subscribers.forEach(res => {
        this.sendSSE(res, event);
      });
    });
  }
  /**
   * Send formatted SSE message ("data: <json>\n\n" frame). Write errors are
   * logged, never thrown, so one dead socket cannot break a broadcast loop.
   * @param {Response} res - Express response object
   * @param {object} event - Event object
   */
  sendSSE(res, event) {
    try {
      const message = `data: ${JSON.stringify(event)}\n\n`;
      res.write(message);
    } catch (error) {
      console.error('[NotificationService] Error sending SSE:', error.message);
    }
  }
  /**
   * Get subscriber count (for monitoring)
   * @returns {object} { totalSubscribers, userCount, byUser }
   */
  getStats() {
    const stats = {
      totalSubscribers: 0,
      userCount: 0,
      byUser: {}
    };
    this.subscribers.forEach((subscribers, userId) => {
      const count = subscribers.size;
      stats.totalSubscribers += count;
      stats.byUser[userId] = count;
    });
    stats.userCount = Object.keys(stats.byUser).length;
    return stats;
  }
  /**
   * Cleanup resources (useful for graceful shutdown): closes every open
   * SSE response and drops all subscriber/queue state.
   */
  disconnect() {
    this.subscribers.forEach((subscribers) => {
      subscribers.forEach(res => {
        try {
          res.end();
        } catch (error) {
          // Already closed
        }
      });
    });
    this.subscribers.clear();
    this.eventQueue.clear();
  }
}
// Singleton instance
const notificationService = new NotificationService();
export default notificationService;

View File

@@ -0,0 +1,134 @@
/**
* Redis Service
* Specialized service layer for RBAC caching operations.
* Implements Look-Aside caching pattern for role permissions.
*/
import redisClient from '../config/redis.js';
// Default TTL: 1 hour (can be overridden by env)
const CACHE_TTL = parseInt(process.env.REDIS_TTL || '3600', 10);
const IS_ENABLED = process.env.ENABLE_REDIS_CACHE === 'true';
/**
 * Look-aside cache layer for RBAC role permissions, backed by Redis sets.
 * All methods fail open (log + fall back to the database path) so Redis
 * outages never block authorization checks.
 */
class RedisService {
  /**
   * True when caching is enabled via env AND the client connection is ready.
   * @returns {boolean}
   */
  isEnabled() {
    if (!IS_ENABLED || !redisClient) return false;
    return redisClient.status === 'ready';
  }
  /**
   * Read the cached permission set for a role.
   * @param {string} role - The user role
   * @returns {Promise<string[]|null>} "resource:action" entries, or null on a
   *          genuine cache miss / error (caller falls back to the DB)
   */
  async getRolePermissions(role) {
    if (!this.isEnabled()) return null;
    try {
      const members = await redisClient.smembers(`rbac:permissions:${role}`);
      // Redis yields [] for an absent key: treat as a genuine miss.
      if (members.length === 0) return null;
      // '__EMPTY__' is a sentinel marking a role cached with zero
      // permissions (prevents cache penetration); strip it from results.
      return members.filter((entry) => entry !== '__EMPTY__');
    } catch (error) {
      console.error(`[Redis] Error fetching permissions for role ${role}:`, error.message);
      return null; // fail open: fall back to the database
    }
  }
  /**
   * Atomically replace the cached permission set for a role.
   * @param {string} role - The user role
   * @param {string[]} permissions - "resource:action" entries (may be empty)
   * @returns {Promise<void>}
   */
  async setRolePermissions(role, permissions) {
    if (!this.isEnabled()) return;
    const key = `rbac:permissions:${role}`;
    const hasEntries = Boolean(permissions && permissions.length > 0);
    try {
      // del + sadd + expire inside one MULTI so concurrent readers never
      // observe a partially-written set.
      const txn = redisClient.multi();
      txn.del(key);
      if (hasEntries) {
        txn.sadd(key, ...permissions);
      } else {
        txn.sadd(key, '__EMPTY__'); // sentinel against cache penetration
      }
      txn.expire(key, CACHE_TTL);
      const results = await txn.exec();
      if (!results || results.some(([err]) => err)) {
        throw new Error('Redis transaction failed');
      }
      console.debug(`[RBAC] Cached ${hasEntries ? permissions.length : 0} permissions for role: ${role}`);
    } catch (error) {
      console.error(`[Redis] Error setting permissions for role ${role}:`, error.message);
    }
  }
  /**
   * Drop the cached permissions for a role (called after permission edits).
   * @param {string} role
   * @returns {Promise<void>}
   */
  async invalidateRole(role) {
    if (!this.isEnabled()) return;
    try {
      await redisClient.del(`rbac:permissions:${role}`);
      console.log(`[RBAC] Invalidated cache for role: ${role}`);
    } catch (error) {
      console.error(`[Redis] Error invalidating role ${role}:`, error.message);
    }
  }
  /**
   * Lightweight health probe for monitoring endpoints.
   * @returns {Promise<object>} { enabled, status, latency?, error? }
   */
  async healthCheck() {
    if (!IS_ENABLED) return { enabled: false, status: 'disabled' };
    if (!redisClient) return { enabled: true, status: 'client_not_initialized' };
    try {
      const start = Date.now();
      await redisClient.ping();
      return {
        enabled: true,
        status: redisClient.status,
        latency: `${Date.now() - start}ms`
      };
    } catch (error) {
      return { enabled: true, status: 'error', error: error.message };
    }
  }
}
export default new RedisService();

View File

@@ -0,0 +1,439 @@
import TranslationDictionary from '../models/TranslationDictionary.js';
import TranslationMapping from '../models/TranslationMapping.js';
import Token from '../models/Token.js';
import sequelize from '../config/database.js';
import { Op } from 'sequelize';
class TranslationService {
  /**
   * Create a new translation dictionary.
   * @param {object} data - name, description, projectId; optional status,
   *   sourceSystem, targetSystem, tags.
   * @param {string} userId - recorded as createdBy.
   * @returns {Promise<object>} the created dictionary
   */
  async createDictionary(data, userId) {
    const dictionary = await TranslationDictionary.create({
      name: data.name,
      description: data.description,
      projectId: data.projectId,
      createdBy: userId,
      status: data.status || 'draft',
      metadata: {
        sourceSystem: data.sourceSystem || null,
        targetSystem: data.targetSystem || null,
        coverage: 0,
        validationStatus: 'pending',
        lastValidated: null,
        tags: data.tags || []
      }
    });
    return dictionary;
  }

  /**
   * Update an existing dictionary's fields and metadata.
   * Only provided fields are touched; metadata is merged, not replaced.
   * @throws {Error} 'Dictionary not found'
   */
  async updateDictionary(id, data, userId) {
    const dictionary = await TranslationDictionary.findByPk(id);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    // Update top-level fields (description may be cleared with null/'').
    if (data.name) dictionary.name = data.name;
    if (data.description !== undefined) dictionary.description = data.description;
    if (data.status) dictionary.status = data.status;
    // Merge metadata so unrelated keys (coverage, validation) survive.
    if (data.sourceSystem !== undefined || data.targetSystem !== undefined || data.tags) {
      dictionary.metadata = {
        ...dictionary.metadata,
        ...(data.sourceSystem !== undefined && { sourceSystem: data.sourceSystem }),
        ...(data.targetSystem !== undefined && { targetSystem: data.targetSystem }),
        ...(data.tags && { tags: data.tags })
      };
    }
    await dictionary.save();
    return dictionary;
  }

  /**
   * Get a dictionary together with all of its mappings.
   * @throws {Error} 'Dictionary not found'
   */
  async getDictionary(id) {
    const dictionary = await TranslationDictionary.findByPk(id);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    const mappings = await TranslationMapping.findAll({
      where: { dictionaryId: id },
      order: [['sourceToken', 'ASC']]
    });
    return {
      ...dictionary.toJSON(),
      mappings
    };
  }

  /**
   * List dictionaries with optional projectId/status/createdBy filters.
   * Uses findAndCountAll so the page and the total come from one query
   * instead of a separate findAll + count.
   * @returns {Promise<{dictionaries: object[], total: number}>}
   */
  async listDictionaries(filters = {}) {
    const where = {};
    if (filters.projectId) {
      where.projectId = filters.projectId;
    }
    if (filters.status) {
      where.status = filters.status;
    }
    if (filters.createdBy) {
      where.createdBy = filters.createdBy;
    }
    const { rows: dictionaries, count: total } =
      await TranslationDictionary.findAndCountAll({
        where,
        order: [['createdAt', 'DESC']],
        limit: filters.limit || 50,
        offset: filters.offset || 0
      });
    return { dictionaries, total };
  }

  /**
   * Create a single mapping in a dictionary and refresh its coverage.
   * @throws {Error} 'Dictionary not found'
   */
  async createMapping(dictionaryId, mappingData, userId) {
    // Verify dictionary exists
    const dictionary = await TranslationDictionary.findByPk(dictionaryId);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    const mapping = await TranslationMapping.create({
      dictionaryId,
      sourceToken: mappingData.sourceToken,
      targetToken: mappingData.targetToken,
      transformRule: mappingData.transformRule || null,
      validated: mappingData.validated || false,
      confidence: mappingData.confidence || 1.0,
      notes: mappingData.notes || null
    });
    // Recalculate coverage
    await this._updateCoverage(dictionaryId);
    return mapping;
  }

  /**
   * Update a mapping (scoped to its dictionary) and refresh coverage.
   * @throws {Error} 'Mapping not found'
   */
  async updateMapping(dictionaryId, mappingId, data, userId) {
    const mapping = await TranslationMapping.findOne({
      where: { id: mappingId, dictionaryId }
    });
    if (!mapping) {
      throw new Error('Mapping not found');
    }
    if (data.sourceToken) mapping.sourceToken = data.sourceToken;
    if (data.targetToken) mapping.targetToken = data.targetToken;
    if (data.transformRule !== undefined) mapping.transformRule = data.transformRule;
    if (data.validated !== undefined) mapping.validated = data.validated;
    if (data.confidence !== undefined) mapping.confidence = data.confidence;
    if (data.notes !== undefined) mapping.notes = data.notes;
    await mapping.save();
    // Recalculate coverage
    await this._updateCoverage(dictionaryId);
    return mapping;
  }

  /**
   * Delete a mapping (scoped to its dictionary) and refresh coverage.
   * @throws {Error} 'Mapping not found'
   */
  async deleteMapping(dictionaryId, mappingId, userId) {
    const mapping = await TranslationMapping.findOne({
      where: { id: mappingId, dictionaryId }
    });
    if (!mapping) {
      throw new Error('Mapping not found');
    }
    await mapping.destroy();
    // Recalculate coverage
    await this._updateCoverage(dictionaryId);
    return { success: true };
  }

  /**
   * Bulk import mappings (upsert by sourceToken) inside one transaction.
   * Individual failures are collected in results.errors and do not abort
   * the batch; only a batch where every mapping failed is rolled back.
   * @returns {Promise<{created: number, updated: number, errors: object[]}>}
   * @throws {Error} 'Dictionary not found' | 'All mappings failed to import'
   */
  async bulkImportMappings(dictionaryId, mappings, userId) {
    // Verify dictionary exists
    const dictionary = await TranslationDictionary.findByPk(dictionaryId);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    const results = {
      created: 0,
      updated: 0,
      errors: []
    };
    // Use transaction for atomicity
    const transaction = await sequelize.transaction();
    try {
      for (const mapping of mappings) {
        try {
          // Upsert keyed on (dictionaryId, sourceToken).
          const existing = await TranslationMapping.findOne({
            where: {
              dictionaryId,
              sourceToken: mapping.sourceToken
            },
            transaction
          });
          if (existing) {
            existing.targetToken = mapping.targetToken;
            existing.transformRule = mapping.transformRule || null;
            existing.confidence = mapping.confidence || 1.0;
            await existing.save({ transaction });
            results.updated++;
          } else {
            await TranslationMapping.create({
              dictionaryId,
              sourceToken: mapping.sourceToken,
              targetToken: mapping.targetToken,
              transformRule: mapping.transformRule || null,
              validated: mapping.validated || false,
              confidence: mapping.confidence || 1.0,
              notes: mapping.notes || null
            }, { transaction });
            results.created++;
          }
        } catch (error) {
          results.errors.push({
            mapping,
            error: error.message
          });
          // Continue processing remaining mappings even if one fails
        }
      }
      // If every mapping failed, throw so the catch block performs the ONE
      // rollback. (Rolling back here and again in the catch would call
      // rollback twice on an already-finished transaction.)
      if (results.errors.length === mappings.length && mappings.length > 0) {
        throw new Error('All mappings failed to import');
      }
      await transaction.commit();
    } catch (error) {
      // Single rollback point for any error in the batch.
      await transaction.rollback();
      throw error;
    }
    // Recalculate coverage AFTER commit: _updateCoverage queries outside the
    // transaction, so counting before commit could miss the new rows.
    await this._updateCoverage(dictionaryId);
    return results;
  }

  /**
   * Validate a dictionary's mappings.
   * Warns when a sourceToken has no matching Token in the project and marks
   * mappings with an empty targetToken invalid; stamps validation metadata.
   * @throws {Error} 'Dictionary not found'
   */
  async validateDictionary(dictionaryId) {
    const dictionary = await TranslationDictionary.findByPk(dictionaryId);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    const mappings = await TranslationMapping.findAll({
      where: { dictionaryId }
    });
    const validation = {
      total: mappings.length,
      validated: 0,
      invalid: [],
      warnings: []
    };
    // Batch-fetch all referenced source tokens in ONE query instead of one
    // Token.findOne per mapping (N+1).
    let knownSourceNames = new Set();
    if (mappings.length > 0) {
      const tokens = await Token.findAll({
        where: {
          name: { [Op.in]: mappings.map((m) => m.sourceToken) },
          projectId: dictionary.projectId
        },
        attributes: ['name']
      });
      knownSourceNames = new Set(tokens.map((t) => t.name));
    }
    for (const mapping of mappings) {
      // Check if source token exists in system
      if (!knownSourceNames.has(mapping.sourceToken)) {
        validation.warnings.push({
          mappingId: mapping.id,
          sourceToken: mapping.sourceToken,
          issue: 'Source token not found in project'
        });
      }
      // Check if target token format is valid
      if (!mapping.targetToken || mapping.targetToken.trim() === '') {
        validation.invalid.push({
          mappingId: mapping.id,
          sourceToken: mapping.sourceToken,
          issue: 'Target token is empty'
        });
      } else {
        validation.validated++;
      }
    }
    // Update dictionary metadata
    dictionary.metadata = {
      ...dictionary.metadata,
      validationStatus: validation.invalid.length === 0 ? 'valid' : 'invalid',
      lastValidated: new Date().toISOString()
    };
    await dictionary.save();
    return validation;
  }

  /**
   * Calculate coverage percentage (mapped tokens / project tokens).
   * The project token count is cached in dictionary.metadata for 5 minutes
   * to avoid re-counting on every mapping change.
   * @param {string} dictionaryId - Dictionary ID
   * @param {boolean} forceRefresh - Force refresh of cached token count
   * @throws {Error} 'Dictionary not found'
   */
  async calculateCoverage(dictionaryId, forceRefresh = false) {
    const dictionary = await TranslationDictionary.findByPk(dictionaryId);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    let totalTokens;
    // Use cached token count if available and not stale (unless force refresh)
    const cachedCount = dictionary.metadata?.cachedTokenCount;
    const cacheTimestamp = dictionary.metadata?.tokenCountCachedAt;
    const CACHE_TTL = 5 * 60 * 1000; // 5 minutes
    const isCacheValid = cachedCount !== undefined &&
      cacheTimestamp &&
      (Date.now() - new Date(cacheTimestamp).getTime() < CACHE_TTL);
    if (!forceRefresh && isCacheValid) {
      totalTokens = cachedCount;
    } else {
      // Fetch fresh count and update cache
      totalTokens = await Token.count({
        where: { projectId: dictionary.projectId }
      });
      dictionary.metadata = {
        ...dictionary.metadata,
        cachedTokenCount: totalTokens,
        tokenCountCachedAt: new Date().toISOString()
      };
      await dictionary.save();
    }
    const mappedTokens = await TranslationMapping.count({
      where: { dictionaryId }
    });
    const coverage = totalTokens > 0 ? (mappedTokens / totalTokens) * 100 : 0;
    return {
      totalTokens,
      mappedTokens,
      unmappedTokens: totalTokens - mappedTokens,
      // Round to 2 decimal places.
      coveragePercentage: Math.round(coverage * 100) / 100
    };
  }

  /**
   * Internal: persist the coverage percentage into dictionary metadata.
   * Uses the cached token count (no forced refresh on every mapping change).
   */
  async _updateCoverage(dictionaryId) {
    const dictionary = await TranslationDictionary.findByPk(dictionaryId);
    if (!dictionary) return;
    const coverage = await this.calculateCoverage(dictionaryId, false);
    dictionary.metadata = {
      ...dictionary.metadata,
      coverage: coverage.coveragePercentage
    };
    await dictionary.save();
  }

  /**
   * Export a dictionary and its mappings as a plain JSON-serializable object.
   * @throws {Error} 'Dictionary not found'
   */
  async exportDictionary(dictionaryId) {
    const dictionary = await TranslationDictionary.findByPk(dictionaryId);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    const mappings = await TranslationMapping.findAll({
      where: { dictionaryId },
      order: [['sourceToken', 'ASC']]
    });
    return {
      name: dictionary.name,
      description: dictionary.description,
      version: dictionary.version,
      metadata: dictionary.metadata,
      mappings: mappings.map(m => ({
        sourceToken: m.sourceToken,
        targetToken: m.targetToken,
        transformRule: m.transformRule,
        validated: m.validated,
        confidence: m.confidence,
        notes: m.notes
      })),
      exportedAt: new Date().toISOString()
    };
  }

  /**
   * Delete a dictionary — soft delete: the row is kept, status => 'archived'.
   * @throws {Error} 'Dictionary not found'
   */
  async deleteDictionary(id, userId) {
    const dictionary = await TranslationDictionary.findByPk(id);
    if (!dictionary) {
      throw new Error('Dictionary not found');
    }
    dictionary.status = 'archived';
    await dictionary.save();
    return { success: true };
  }
}
export default new TranslationService();

View File

@@ -0,0 +1,118 @@
import { ComponentScanner } from '../scanners/ComponentScanner.js';
import Discovery from '../models/Discovery.js';
import sequelize from '../config/database.js';
import { QueryTypes } from 'sequelize';
/**
 * Background worker for processing discovery jobs.
 * Uses a polling mechanism to pick up queued 'component-audit' jobs from
 * the discoveries table and run them through ComponentScanner.
 */
class DiscoveryWorker {
  constructor() {
    this.scanner = new ComponentScanner();
    this.isRunning = false;
    // Guards against overlapping poll ticks: setInterval does not await the
    // async handler, so a scan longer than POLL_INTERVAL would otherwise
    // start a second concurrent job and break the claimed-job lookup below.
    this.isProcessing = false;
    // Ensure SIGTERM/SIGINT listeners are attached only once, even if the
    // worker is started, stopped and restarted (avoids listener leaks).
    this.signalsBound = false;
    this.intervalId = null;
    this.POLL_INTERVAL = 5000; // 5 seconds
  }

  /**
   * Starts the polling loop (idempotent: a second call while running
   * only logs a notice).
   */
  startWorker() {
    if (this.isRunning) {
      console.log('[DiscoveryWorker] Worker already running');
      return;
    }
    console.log('[DiscoveryWorker] Starting worker...');
    this.isRunning = true;
    // Process immediately, then start interval. Fire-and-forget is safe:
    // _processNextJob catches all of its own errors.
    this._processNextJob();
    this.intervalId = setInterval(() => this._processNextJob(), this.POLL_INTERVAL);
    // Handle graceful shutdown (bind once per process).
    if (!this.signalsBound) {
      this.signalsBound = true;
      process.on('SIGTERM', () => this.stopWorker());
      process.on('SIGINT', () => this.stopWorker());
    }
  }

  /**
   * Stops the polling loop gracefully. A job already in flight finishes;
   * no new jobs are claimed afterwards.
   */
  stopWorker() {
    if (!this.isRunning) return;
    console.log('[DiscoveryWorker] Stopping worker...');
    this.isRunning = false;
    if (this.intervalId) {
      clearInterval(this.intervalId);
      this.intervalId = null;
    }
  }

  /**
   * Processes a single job if available. Never throws; all errors are
   * logged and, for scan failures, recorded on the job row.
   * @private
   */
  async _processNextJob() {
    if (!this.isRunning || this.isProcessing) return;
    this.isProcessing = true;
    try {
      // Atomically claim a queued job (SQLite-compatible approach).
      // NOTE(review): the shape of sequelize.query's return for UPDATE is
      // dialect-dependent; confirm affectedCount is populated on this
      // SQLite setup — if undefined, the findOne below still bails safely.
      const [affectedCount] = await sequelize.query(
        `UPDATE discoveries
         SET status = 'running', startedAt = datetime('now')
         WHERE id = (
           SELECT id FROM discoveries
           WHERE status = 'queued' AND type = 'component-audit'
           ORDER BY createdAt ASC
           LIMIT 1
         )`,
        { type: QueryTypes.UPDATE }
      );
      if (affectedCount === 0) return; // No jobs available
      // Fetch the claimed job. With the isProcessing guard only one job per
      // worker process is 'running' at a time; multiple worker PROCESSES
      // could still race here (newest-startedAt heuristic).
      const job = await Discovery.findOne({
        where: { status: 'running', type: 'component-audit' },
        order: [['startedAt', 'DESC']]
      });
      if (!job) return; // Race condition fallback
      console.log(`[DiscoveryWorker] Claimed job ${job.id} for project ${job.projectId}`);
      // Execute Scan
      try {
        const results = await this.scanner.scan(job.projectId);
        await job.update({
          status: 'completed',
          completedAt: new Date(),
          results: results,
          progress: 100
        });
        console.log(`[DiscoveryWorker] Job ${job.id} completed successfully - Created: ${results.created}, Updated: ${results.updated}, Matched: ${results.matched}`);
      } catch (scanError) {
        console.error(`[DiscoveryWorker] Job ${job.id} failed:`, scanError);
        await job.update({
          status: 'failed',
          completedAt: new Date(),
          error: scanError.message
        });
      }
    } catch (error) {
      console.error('[DiscoveryWorker] System error:', error);
    } finally {
      this.isProcessing = false;
    }
  }
}
// Module-level singleton shared by both exported control functions.
const worker = new DiscoveryWorker();

export function startWorker() {
  return worker.startWorker();
}

export function stopWorker() {
  return worker.stopWorker();
}