feat: add Agent Evolution Dashboard

- Create agent-evolution/ directory with standalone dashboard
- Add interactive HTML dashboard with agent/model matrix
- Add heatmap view for agent-model compatibility scores
- Add recommendations tab with optimization suggestions
- Add Gitea integration preparation (history timeline)
- Add Docker configuration for deployment
- Add build scripts for standalone HTML generation
- Add sync scripts for agent data synchronization
- Add milestone and issues documentation
- Add skills and rules for evolution sync
- Update AGENTS.md with dashboard documentation
- Update package.json with evolution scripts

Features:
- 28 agents with model assignments and fit scores
- 8 models with benchmarks (SWE-bench, RULER, Terminal)
- 11 recommendations for model optimization
- History timeline with agent changes
- Interactive modal windows for model details
- Filter and search functionality
- Russian language interface
- Works offline (file://) with embedded data

Docker:
- Dockerfile for standalone deployment
- docker-compose.evolution.yml
- docker-run.sh/docker-run.bat scripts

NPM scripts:
- sync:evolution - sync and build dashboard
- evolution:open - open in browser
- evolution:dashboard - start dev server

Status: PAUSED - foundation complete, Gitea integration pending
This commit is contained in:
NW
2026-04-05 19:58:59 +01:00
parent b899119d21
commit 15a7b4b7a4
17 changed files with 4934 additions and 2 deletions

View File

@@ -0,0 +1,117 @@
#!/usr/bin/env node
/**
 * Build standalone HTML with embedded data.
 *
 * Reads data/agent-versions.json, splices it into index.html as the
 * EMBEDDED_DATA constant, swaps init() for a version that renders the
 * embedded data directly (so the page works over file://), and writes
 * the result to index.standalone.html.
 *
 * Run: node agent-evolution/scripts/build-standalone.cjs
 */
const fs = require('fs');
const path = require('path');

const DATA_FILE = path.join(__dirname, '../data/agent-versions.json');
const HTML_FILE = path.join(__dirname, '../index.html');
const OUTPUT_FILE = path.join(__dirname, '../index.standalone.html');

try {
  // Read data
  console.log('📖 Reading data from:', DATA_FILE);
  const data = JSON.parse(fs.readFileSync(DATA_FILE, 'utf-8'));
  console.log(' Found', Object.keys(data.agents).length, 'agents');

  // Read HTML template
  console.log('📖 Reading HTML from:', HTML_FILE);
  let html = fs.readFileSync(HTML_FILE, 'utf-8');

  // Step 1: replace the placeholder EMBEDDED_DATA section.
  const startMarker = '// Default embedded data (minimal - updated by sync script)';
  const endPattern = /"sync_sources":\s*\[[^\]]*\]\s*\}\s*\};/;

  const startIdx = html.indexOf(startMarker);
  if (startIdx === -1) {
    throw new Error('Start marker not found in HTML');
  }
  // BUGFIX: search for the end pattern only AFTER the start marker.
  // Matching against the whole document (as before) could latch onto an
  // earlier occurrence and splice out unrelated parts of the template.
  const endMatch = html.slice(startIdx).match(endPattern);
  if (!endMatch) {
    throw new Error('End pattern not found in HTML');
  }
  let endIdx = startIdx + endMatch.index + endMatch[0].length;
  // Only consume a trailing newline when one is actually present;
  // the previous unconditional "+ 1" swallowed an arbitrary character.
  if (html[endIdx] === '\n') endIdx += 1;

  // Freshly generated replacement for the placeholder data.
  const embeddedData = `// Embedded data (generated ${new Date().toISOString()})
const EMBEDDED_DATA = ${JSON.stringify(data, null, 2)};`;

  html = html.substring(0, startIdx) + embeddedData + html.substring(endIdx);

  // Step 2: replace the entire init() function so the standalone build
  // renders EMBEDDED_DATA directly instead of fetching JSON.
  const initStartPattern = /\/\/ Initialize\s*\n\s*async function init\(\) \{/;
  const initStartMatch = html.match(initStartPattern);
  if (initStartMatch) {
    const initStartIdx = initStartMatch.index;
    // Locate the function's closing brace by tracking brace depth.
    // NOTE(review): a literal brace inside a string within init() would
    // throw the count off — acceptable for this known template.
    let braceDepth = 0;
    let sawOpeningBrace = false;
    let initEndIdx = initStartIdx;
    for (let i = initStartIdx; i < html.length; i++) {
      if (html[i] === '{') {
        braceDepth++;
        sawOpeningBrace = true;
      } else if (html[i] === '}') {
        braceDepth--;
        if (sawOpeningBrace && braceDepth === 0) {
          initEndIdx = i + 1;
          break;
        }
      }
    }
    // Replacement init(): renders the embedded data, no fetch needed.
    const newInit = `// Initialize
async function init() {
// Use embedded data directly (works with file://)
agentData = EMBEDDED_DATA;
try {
document.getElementById('lastSync').textContent = formatDate(agentData.lastUpdated);
document.getElementById('agentCount').textContent = agentData.evolution_metrics.total_agents + ' agents';
document.getElementById('historyCount').textContent = agentData.evolution_metrics.agents_with_history + ' with history';
if (agentData.evolution_metrics.total_agents === 0) {
document.getElementById('lastSync').textContent = 'No data - run sync:evolution';
return;
}
renderOverview();
renderAllAgents();
renderTimeline();
renderRecommendations();
renderMatrix();
} catch (error) {
console.error('Failed to render dashboard:', error);
document.getElementById('lastSync').textContent = 'Error rendering data';
}
}`;
    html = html.substring(0, initStartIdx) + newInit + html.substring(initEndIdx);
  }

  // Write output and report stats.
  fs.writeFileSync(OUTPUT_FILE, html);
  console.log('\n✅ Built standalone dashboard');
  console.log(' Output:', OUTPUT_FILE);
  console.log(' Agents:', Object.keys(data.agents).length);
  console.log(' Size:', (fs.statSync(OUTPUT_FILE).size / 1024).toFixed(1), 'KB');
  console.log('\n📊 Open in browser:');
  console.log(' Windows: start agent-evolution\\index.standalone.html');
  console.log(' macOS: open agent-evolution/index.standalone.html');
  console.log(' Linux: xdg-open agent-evolution/index.standalone.html');
} catch (error) {
  console.error('❌ Error:', error.message);
  process.exit(1);
}

View File

@@ -0,0 +1,501 @@
#!/usr/bin/env bun
/**
* Agent Evolution Synchronization Script
* Parses git history and syncs agent definitions
*
* Usage: bun run agent-evolution/scripts/sync-agent-history.ts
*
* Generates:
* - data/agent-versions.json - JSON data
* - index.standalone.html - Dashboard with embedded data
*/
import * as fs from "fs";
import * as path from "path";
import { spawnSync } from "child_process";
// Try to load yaml parser (optional)
// NOTE(review): `yaml` is never referenced again in this file — either
// wire it into loadCapabilityIndex() (which currently regex-parses the
// YAML by hand) or remove this block. Kept as-is for now.
let yaml: any;
try {
yaml = require("yaml");
} catch {
yaml = null;
}
// Types
/**
 * One recorded change to an agent's definition, recovered from git,
 * Gitea, or a manual edit.
 */
interface AgentVersion {
date: string; // ISO-8601 timestamp of the change
commit: string; // abbreviated commit hash, or "sync" for sync-detected changes
type: "model_change" | "prompt_change" | "agent_created" | "agent_removed" | "capability_change";
from: string | null; // previous value (null when the agent was created)
to: string; // new value (empty when not yet resolved — see getGitHistory)
reason: string; // free-form reason, usually the commit subject line
source: "git" | "gitea" | "manual";
}
/**
 * Current configuration of a single agent, merged from its .md
 * frontmatter, the capability index, and kilo.jsonc.
 */
interface AgentConfig {
model: string;
provider: string; // derived from the model id — see detectProvider()
category: string; // display category — see getCategoryFromCapabilities()
mode: string;
color: string; // hex color used by the dashboard UI
description: string;
benchmark?: {
swe_bench?: number;
ruler_1m?: number;
terminal_bench?: number;
pinch_bench?: number;
fit_score?: number;
};
capabilities: string[];
recommendations?: Array<{
target: string;
reason: string;
priority: string;
}>;
status?: string;
}
/** An agent's full record: current config, change history, and runs. */
interface AgentData {
current: AgentConfig;
history: AgentVersion[];
performance_log: Array<{
date: string;
issue: number;
score: number;
duration_ms: number;
success: boolean;
}>;
}
/** Top-level shape of data/agent-versions.json. */
interface EvolutionData {
version: string;
lastUpdated: string;
agents: Record<string, AgentData>;
providers: Record<string, { models: unknown[] }>;
evolution_metrics: {
total_agents: number;
agents_with_history: number;
pending_recommendations: number;
last_sync: string;
sync_sources: string[];
};
}
// Constants
// Paths are relative to the repository root (the script's working dir).
const AGENTS_DIR = ".kilo/agents"; // one .md file per agent
const CAPABILITY_INDEX = ".kilo/capability-index.yaml";
const KILO_CONFIG = ".kilo/kilo.jsonc";
const OUTPUT_FILE = "agent-evolution/data/agent-versions.json"; // JSON snapshot written by sync()
const GIT_DIR = ".git"; // NOTE(review): unused in this file — confirm before removing
// Provider detection
/**
 * Map a model identifier to its hosting provider name.
 * Checks are ordered: Ollama prefixes first, then any identifier
 * mentioning "openrouter", then the Groq prefix; everything else
 * resolves to "Unknown".
 */
function detectProvider(model: string): string {
  if (model.startsWith("ollama-cloud/") || model.startsWith("ollama/")) {
    return "Ollama";
  }
  if (model.includes("openrouter") || model.startsWith("openrouter/")) {
    return "OpenRouter";
  }
  return model.startsWith("groq/") ? "Groq" : "Unknown";
}
// Parse agent file frontmatter
/**
 * Parse the YAML-ish frontmatter block (--- ... ---) at the top of an
 * agent markdown file into a loose AgentConfig.
 *
 * Only flat `key: value` lines are understood. Values "allow"/"deny"
 * are grouped under a `permission` sub-object keyed by the line's key;
 * a `model` value additionally derives `provider` via detectProvider().
 * Returns null when no frontmatter block is present or parsing throws.
 */
function parseAgentFrontmatter(content: string): AgentConfig | null {
  const fm = content.match(/^---\n([\s\S]*?)\n---/);
  if (!fm) return null;
  try {
    const config: Record<string, unknown> = {};
    for (const rawLine of fm[1].split("\n")) {
      const kv = rawLine.match(/^(\w+):\s*(.+)$/);
      if (!kv) continue;
      const key = kv[1];
      const value = kv[2];
      if (value === "allow" || value === "deny") {
        // Permission flags are collected under a single object.
        const perms = (config.permission ?? {}) as Record<string, unknown>;
        perms[key] = value;
        config.permission = perms;
      } else if (key === "model") {
        config.model = value;
        config.provider = detectProvider(value);
      } else {
        config[key] = value;
      }
    }
    return config as unknown as AgentConfig;
  } catch {
    return null;
  }
}
// Get git history for agent changes
/**
 * Scan `git log` for commits that touched .kilo/agents/ and bucket them
 * into per-agent change records, keyed by an agent name guessed from
 * the commit subject line.
 *
 * Returns an empty map (with a warning) when git is unavailable or the
 * log command fails — history is best-effort, never fatal.
 */
function getGitHistory(): Map<string, AgentVersion[]> {
  const history = new Map<string, AgentVersion[]>();
  try {
    // Get commits that modified agent files.
    // BUGFIX: use %aI (strict ISO-8601 author date) instead of %ai —
    // the old `date.replace(" ", "T") + "Z"` surgery over %ai output
    // produced invalid timestamps like "2026-04-05T19:58:59 +0100Z".
    // BUGFIX: dropped --follow — git accepts it only with exactly one
    // file pathspec, so combined with a directory the whole log call
    // failed and no history was ever collected.
    const result = spawnSync('git', ['log', '--all', '--oneline', '--format=%H|%aI|%s', '--', '.kilo/agents/'], {
      cwd: process.cwd(),
      encoding: 'utf-8',
      maxBuffer: 10 * 1024 * 1024
    });
    if (result.status !== 0 || !result.stdout) {
      console.warn('Git log failed, skipping history');
      return history;
    }
    const logOutput = result.stdout.trim();
    const commits = logOutput.split('\n').filter(Boolean);
    for (const line of commits) {
      const [hash, date, ...msgParts] = line.split('|');
      if (!hash || !date) continue;
      // Subjects may themselves contain '|'; re-join the remainder.
      const message = msgParts.join('|').trim();
      // Detect the affected agent and change type from the commit message.
      const agentMatch = message.match(/(?:add|update|fix|feat|change|set)\s+(\w+-?\w*)/i);
      if (agentMatch) {
        const agentName = agentMatch[1].toLowerCase();
        const type = message.toLowerCase().includes("add") || message.toLowerCase().includes("feat")
          ? "agent_created"
          : message.toLowerCase().includes("model")
            ? "model_change"
            : "prompt_change";
        if (!history.has(agentName)) {
          history.set(agentName, []);
        }
        history.get(agentName)!.push({
          date: new Date(date).toISOString(), // normalize to UTC
          commit: hash.substring(0, 8),
          type: type as AgentVersion["type"],
          from: null, // Will be filled later
          to: "", // Will be filled later
          reason: message,
          source: "git"
        });
      }
    }
  } catch (error) {
    console.warn("Git history extraction failed:", error);
  }
  return history;
}
// Load capability index (simple parsing without yaml dependency)
/**
 * Extract per-agent model/capability/mode entries from
 * .kilo/capability-index.yaml using lightweight regex parsing (no YAML
 * library required). Known non-agent top-level sections are skipped,
 * as are entries without a `model` line. Returns {} (with a warning)
 * on any read failure.
 */
function loadCapabilityIndex(): Record<string, AgentConfig> {
  const configs: Record<string, AgentConfig> = {};
  // Top-level sections that are routing metadata, not agent entries.
  const nonAgentSections = new Set([
    'capability_routing',
    'parallel_groups',
    'iteration_loops',
    'quality_gates',
    'workflow_states',
  ]);
  try {
    const content = fs.readFileSync(CAPABILITY_INDEX, "utf-8");
    // Simple YAML-ish parsing for our specific format: grab each
    // indented "name:" block and inspect it with further regexes.
    const agentRegex = /^ (\w[\w-]+):\n((?: .+\n?)+)/gm;
    for (let match = agentRegex.exec(content); match !== null; match = agentRegex.exec(content)) {
      const name = match[1];
      const block = match[2];
      if (nonAgentSections.has(name)) continue;
      const modelMatch = block.match(/model:\s*(.+)/);
      if (!modelMatch) continue;
      const model = modelMatch[1].trim();
      const capsMatch = block.match(/capabilities:\n((?: - .+\n?)+)/);
      const capabilities = capsMatch
        ? capsMatch[1].split('\n').filter((l) => l.trim()).map((l) => l.replace(/^\s*-?\s*/, '').trim())
        : [];
      const modeMatch = block.match(/mode:\s*(\w+)/);
      configs[name] = {
        model,
        provider: detectProvider(model),
        // First capability doubles as the display category.
        category: capabilities[0]?.replace(/_/g, ' ') || 'General',
        mode: modeMatch ? modeMatch[1] : 'subagent',
        color: '#6B7280',
        description: '',
        capabilities,
      };
    }
  } catch (error) {
    console.warn("Capability index loading failed:", error);
  }
  return configs;
}
// Load kilo.jsonc configuration
/**
 * Strip line and block comments from JSONC source without touching
 * comment-like sequences inside string literals (e.g. "https://...").
 */
function stripJsoncComments(src: string): string {
  let out = "";
  let i = 0;
  let inString = false;
  while (i < src.length) {
    const ch = src[i];
    if (inString) {
      out += ch;
      if (ch === "\\") {
        // Copy the escaped character verbatim (handles \" correctly).
        out += src[i + 1] ?? "";
        i += 2;
        continue;
      }
      if (ch === '"') inString = false;
      i += 1;
      continue;
    }
    if (ch === '"') {
      inString = true;
      out += ch;
      i += 1;
      continue;
    }
    if (ch === "/" && src[i + 1] === "/") {
      // Line comment: skip to end of line (the newline is kept).
      while (i < src.length && src[i] !== "\n") i += 1;
      continue;
    }
    if (ch === "/" && src[i + 1] === "*") {
      // Block comment: skip past the closing delimiter.
      i += 2;
      while (i < src.length && !(src[i] === "*" && src[i + 1] === "/")) i += 1;
      i += 2;
      continue;
    }
    out += ch;
    i += 1;
  }
  return out;
}

/**
 * Read .kilo/kilo.jsonc and collect per-agent model assignments from
 * its `agent` section. Returns {} (with a warning) on any read or
 * parse failure.
 */
function loadKiloConfig(): Record<string, AgentConfig> {
  const configs: Record<string, AgentConfig> = {};
  try {
    const content = fs.readFileSync(KILO_CONFIG, "utf-8");
    // BUGFIX: the previous blanket regex also deleted "//" sequences
    // inside JSON strings (e.g. URL-style values), corrupting the text
    // before JSON.parse. Strip comments string-aware instead.
    const parsed = JSON.parse(stripJsoncComments(content));
    if (parsed.agent) {
      for (const [name, config] of Object.entries(parsed.agent)) {
        const agentConfig = config as Record<string, unknown>;
        if (agentConfig.model) {
          configs[name] = {
            model: agentConfig.model as string,
            provider: detectProvider(agentConfig.model as string),
            category: "Built-in",
            mode: (agentConfig.mode as string) || "primary",
            color: "#3B82F6",
            description: (agentConfig.description as string) || "",
            capabilities: [],
          };
        }
      }
    }
  } catch (error) {
    console.warn("Kilo config loading failed:", error);
  }
  return configs;
}
// Load all agent files
/**
 * Read every .kilo/agents/*.md file and parse its frontmatter into an
 * AgentConfig keyed by the file's basename (without .md). Files with
 * no parseable frontmatter or no model are skipped; a directory read
 * failure yields {} with a warning.
 */
function loadAgentFiles(): Record<string, AgentConfig> {
  const configs: Record<string, AgentConfig> = {};
  try {
    const markdownFiles = fs.readdirSync(AGENTS_DIR).filter((f) => f.endsWith(".md"));
    for (const file of markdownFiles) {
      const content = fs.readFileSync(path.join(AGENTS_DIR, file), "utf-8");
      const frontmatter = parseAgentFrontmatter(content);
      if (!frontmatter || !frontmatter.model) continue;
      configs[file.replace(".md", "")] = {
        ...frontmatter,
        // Derive the display category from the parsed capability tags.
        category: getCategoryFromCapabilities(frontmatter.capabilities),
      };
    }
  } catch (error) {
    console.warn("Agent files loading failed:", error);
  }
  return configs;
}
// Get category from capabilities
/**
 * Derive a display category from an agent's capability tags.
 * The first capability (in order) containing a known keyword wins;
 * keyword precedence follows the rules list order (so e.g. "code" is
 * checked before "go_"). Falls back to "General".
 */
function getCategoryFromCapabilities(capabilities?: string[]): string {
  if (!capabilities) return "General";
  const rules: Array<[string, string]> = [
    ["code", "Core Dev"],
    ["ui", "Frontend"],
    ["test", "QA"],
    ["security", "Security"],
    ["performance", "Performance"],
    ["devops", "DevOps"],
    ["go_", "Go Development"],
    ["flutter", "Mobile"],
    ["memory", "Cognitive"],
    ["plan", "Cognitive"],
    ["workflow", "Process"],
    ["markdown", "Validation"],
  ];
  for (const cap of capabilities) {
    const lowered = cap.toLowerCase();
    const hit = rules.find(([keyword]) => lowered.includes(keyword));
    if (hit) return hit[1];
  }
  return "General";
}
// Merge all sources
/**
 * Merge agent configs from the three sources into one map.
 * Precedence: agent .md files form the base; capability-index entries
 * override only `capabilities`; kilo.jsonc entries override only
 * `model`/`provider`. Any source may also introduce brand-new agents,
 * which are taken wholesale.
 */
function mergeConfigs(
  agentFiles: Record<string, AgentConfig>,
  capabilityIndex: Record<string, AgentConfig>,
  kiloConfig: Record<string, AgentConfig>
): Record<string, AgentConfig> {
  // Base layer: shallow copies of the .md-file configs.
  const merged: Record<string, AgentConfig> = Object.fromEntries(
    Object.entries(agentFiles).map(([name, config]) => [name, { ...config }])
  );
  // Capability-index layer: only the capability list wins.
  for (const [name, config] of Object.entries(capabilityIndex)) {
    merged[name] = merged[name]
      ? { ...merged[name], capabilities: config.capabilities }
      : config;
  }
  // kilo.jsonc layer: only model and provider win.
  for (const [name, config] of Object.entries(kiloConfig)) {
    merged[name] = merged[name]
      ? { ...merged[name], model: config.model, provider: config.provider }
      : config;
  }
  return merged;
}
// Main sync function
/**
 * Orchestrate a full sync: load agent definitions from the three
 * sources, merge them (see mergeConfigs for precedence), fold in git
 * history, persist the JSON snapshot to OUTPUT_FILE, and regenerate
 * the standalone dashboard HTML.
 */
async function sync() {
console.log("🔄 Syncing agent evolution data...\n");
// Load all sources
console.log("📂 Loading agent files...");
const agentFiles = loadAgentFiles();
console.log(` Found ${Object.keys(agentFiles).length} agent files`);
console.log("📄 Loading capability index...");
const capabilityIndex = loadCapabilityIndex();
console.log(` Found ${Object.keys(capabilityIndex).length} agents`);
console.log("⚙️ Loading kilo config...");
const kiloConfig = loadKiloConfig();
console.log(` Found ${Object.keys(kiloConfig).length} agents`);
// Get git history
console.log("\n📜 Parsing git history...");
// NOTE(review): getGitHistory() is synchronous; this `await` is a no-op.
const gitHistory = await getGitHistory();
console.log(` Found history for ${gitHistory.size} agents`);
// Merge configs
const merged = mergeConfigs(agentFiles, capabilityIndex, kiloConfig);
// Load existing evolution data
// Defaults used when no previous snapshot exists or it fails to parse.
let existingData: EvolutionData = {
version: "1.0.0",
lastUpdated: new Date().toISOString(),
agents: {},
providers: {
Ollama: { models: [] },
OpenRouter: { models: [] },
Groq: { models: [] },
},
evolution_metrics: {
total_agents: 0,
agents_with_history: 0,
pending_recommendations: 0,
last_sync: new Date().toISOString(),
sync_sources: ["git", "capability-index.yaml", "kilo.jsonc"],
},
};
try {
// Carry over previously synced agents so their history and
// performance logs survive across runs.
if (fs.existsSync(OUTPUT_FILE)) {
const existing = JSON.parse(fs.readFileSync(OUTPUT_FILE, "utf-8"));
existingData.agents = existing.agents || {};
}
} catch {
// Use defaults
}
// Update agents
for (const [name, config] of Object.entries(merged)) {
const existingAgent = existingData.agents[name];
// Check if model changed
if (existingAgent?.current?.model && existingAgent.current.model !== config.model) {
// Add to history
// (existingAgent is a live reference into existingData.agents, so
// mutating it here updates the snapshot in place.)
existingAgent.history.push({
date: new Date().toISOString(),
commit: "sync",
type: "model_change",
from: existingAgent.current.model,
to: config.model,
reason: "Model update from sync",
source: "git",
});
existingAgent.current = { ...config };
} else {
// New agent, or an existing agent whose model is unchanged.
// NOTE(review): this replaces `current` wholesale with the freshly
// merged config — fields present only in the previous snapshot
// (e.g. benchmark, recommendations) are dropped. Confirm intended.
existingData.agents[name] = {
current: config,
history: existingAgent?.history || gitHistory.get(name) || [],
performance_log: existingAgent?.performance_log || [],
};
}
}
// Update metrics (recomputed from scratch on every sync)
existingData.evolution_metrics.total_agents = Object.keys(existingData.agents).length;
existingData.evolution_metrics.agents_with_history = Object.values(existingData.agents).filter(
(a) => a.history.length > 0
).length;
existingData.evolution_metrics.pending_recommendations = Object.values(existingData.agents).filter(
(a) => a.current.recommendations && a.current.recommendations.length > 0
).length;
existingData.evolution_metrics.last_sync = new Date().toISOString();
// Save JSON
fs.writeFileSync(OUTPUT_FILE, JSON.stringify(existingData, null, 2));
console.log(`\n✅ Synced ${existingData.evolution_metrics.total_agents} agents to ${OUTPUT_FILE}`);
// Generate standalone HTML
generateStandalone(existingData);
// Print summary
console.log("\n📊 Summary:");
console.log(` Total agents: ${existingData.evolution_metrics.total_agents}`);
console.log(` Agents with history: ${existingData.evolution_metrics.agents_with_history}`);
console.log(` Pending recommendations: ${existingData.evolution_metrics.pending_recommendations}`);
}
/**
 * Generate index.standalone.html by splicing `data` into the dashboard
 * template as its EMBEDDED_DATA constant.
 *
 * @param data Fully merged evolution dataset to embed.
 */
function generateStandalone(data: EvolutionData): void {
  const templatePath = path.join(__dirname, '../index.html');
  const outputPath = path.join(__dirname, '../index.standalone.html');
  let html = fs.readFileSync(templatePath, 'utf-8');

  // Replace EMBEDDED_DATA with actual data
  const embeddedDataStr = `const EMBEDDED_DATA = ${JSON.stringify(data, null, 2)};`;
  const marker = /const EMBEDDED_DATA = \{[\s\S]*?\};?\s*\/\/ Initialize/;
  if (marker.test(html)) {
    // BUGFIX: use a replacer function. With a plain replacement string,
    // "$"-sequences inside the JSON payload ($&, $', $`, $1…) would be
    // expanded by String.prototype.replace and corrupt the output.
    html = html.replace(marker, () => embeddedDataStr + '\n\n// Initialize');
  } else {
    // Previously a missing marker was silently ignored, shipping a
    // standalone file with stale placeholder data. Warn loudly instead.
    console.warn('⚠️ EMBEDDED_DATA marker not found in template; data not embedded');
  }

  fs.writeFileSync(outputPath, html);
  console.log(`📄 Generated standalone: ${outputPath}`);
  console.log(` File size: ${(fs.statSync(outputPath).size / 1024).toFixed(1)} KB`);
}
// Run
// Entry point: any unhandled rejection is logged, but the process still
// exits 0 — NOTE(review): consider setting process.exitCode = 1 in the
// catch so CI can detect a failed sync.
sync().catch(console.error);