- Create agent-evolution/ directory with standalone dashboard - Add interactive HTML dashboard with agent/model matrix - Add heatmap view for agent-model compatibility scores - Add recommendations tab with optimization suggestions - Add Gitea integration preparation (history timeline) - Add Docker configuration for deployment - Add build scripts for standalone HTML generation - Add sync scripts for agent data synchronization - Add milestone and issues documentation - Add skills and rules for evolution sync - Update AGENTS.md with dashboard documentation - Update package.json with evolution scripts Features: - 28 agents with model assignments and fit scores - 8 models with benchmarks (SWE-bench, RULER, Terminal) - 11 recommendations for model optimization - History timeline with agent changes - Interactive modal windows for model details - Filter and search functionality - Russian language interface - Works offline (file://) with embedded data Docker: - Dockerfile for standalone deployment - docker-compose.evolution.yml - docker-run.sh/docker-run.bat scripts NPM scripts: - sync:evolution - sync and build dashboard - evolution:open - open in browser - evolution:dashboard - start dev server Status: PAUSED - foundation complete, Gitea integration pending
501 lines
14 KiB
TypeScript
501 lines
14 KiB
TypeScript
#!/usr/bin/env bun
|
|
/**
|
|
* Agent Evolution Synchronization Script
|
|
* Parses git history and syncs agent definitions
|
|
*
|
|
* Usage: bun run agent-evolution/scripts/sync-agent-history.ts
|
|
*
|
|
* Generates:
|
|
* - data/agent-versions.json - JSON data
|
|
* - index.standalone.html - Dashboard with embedded data
|
|
*/
|
|
|
|
import * as fs from "fs";
|
|
import * as path from "path";
|
|
import { spawnSync } from "child_process";
|
|
|
|
// Try to load yaml parser (optional) — falls back to null when the package
// is not installed, so the script stays dependency-free.
// NOTE(review): `yaml` is never referenced anywhere else in this file; the
// optional require looks like dead code or a placeholder for future use —
// confirm before removing.
let yaml: any;
try {
  yaml = require("yaml");
} catch {
  yaml = null;
}
|
// Types

/** One entry in an agent's change history (a single tracked event). */
interface AgentVersion {
  date: string;    // ISO-8601 timestamp of the change
  commit: string;  // abbreviated commit hash, or "sync" for changes detected at sync time
  type: "model_change" | "prompt_change" | "agent_created" | "agent_removed" | "capability_change";
  from: string | null; // previous value (e.g. old model id); null when unknown or not applicable
  to: string;          // new value; may be empty when not derivable from the commit message
  reason: string;      // human-readable explanation (typically the commit subject line)
  source: "git" | "gitea" | "manual"; // where this history entry was discovered
}
|
/** Current configuration of a single agent, merged from all three sources
 * (agent markdown files, capability-index.yaml, kilo.jsonc). */
interface AgentConfig {
  model: string;       // model identifier, e.g. "openrouter/..." or "groq/..."
  provider: string;    // derived from the model id (see detectProvider)
  category: string;    // display category for the dashboard, e.g. "Core Dev"
  mode: string;        // agent mode, e.g. "primary" or "subagent"
  color: string;       // hex color used by the dashboard UI
  description: string;
  // Optional benchmark scores — presumably 0-100 percentages, TODO confirm
  // against the dashboard that renders them.
  benchmark?: {
    swe_bench?: number;
    ruler_1m?: number;
    terminal_bench?: number;
    pinch_bench?: number;
    fit_score?: number;
  };
  capabilities: string[]; // capability tags; first one doubles as a category hint
  // Pending model-optimization suggestions for this agent.
  recommendations?: Array<{
    target: string;   // suggested replacement model/agent
    reason: string;
    priority: string;
  }>;
  status?: string;
}
|
/** Everything tracked for one agent: current config, change history, and
 * runtime performance samples. */
interface AgentData {
  current: AgentConfig;      // latest merged configuration
  history: AgentVersion[];   // chronological change log
  // Per-run performance samples. This script only preserves existing entries;
  // presumably they are appended by another tool — verify against callers.
  performance_log: Array<{
    date: string;        // ISO-8601 timestamp of the run
    issue: number;       // issue/task identifier
    score: number;
    duration_ms: number;
    success: boolean;
  }>;
}
|
/** Top-level shape of agent-versions.json (the sync output document). */
interface EvolutionData {
  version: string;      // schema version of this document
  lastUpdated: string;  // ISO-8601 timestamp
  agents: Record<string, AgentData>;
  providers: Record<string, { models: unknown[] }>; // per-provider model lists (left empty by this script)
  evolution_metrics: {
    total_agents: number;
    agents_with_history: number;       // agents with at least one history entry
    pending_recommendations: number;   // agents that carry >= 1 recommendation
    last_sync: string;                 // ISO-8601 timestamp of the last sync run
    sync_sources: string[];            // data sources consulted during sync
  };
}
|
// Constants — all paths are resolved relative to the repo root (process.cwd()).
const AGENTS_DIR = ".kilo/agents";                              // agent markdown definitions
const CAPABILITY_INDEX = ".kilo/capability-index.yaml";         // hand-parsed, no yaml dependency
const KILO_CONFIG = ".kilo/kilo.jsonc";                         // JSON-with-comments config
const OUTPUT_FILE = "agent-evolution/data/agent-versions.json"; // sync output document
const GIT_DIR = ".git"; // NOTE(review): appears unused in this file — confirm before removing
|
// Provider detection
|
|
function detectProvider(model: string): string {
|
|
if (model.startsWith("ollama-cloud/") || model.startsWith("ollama/")) return "Ollama";
|
|
if (model.startsWith("openrouter/") || model.includes("openrouter")) return "OpenRouter";
|
|
if (model.startsWith("groq/")) return "Groq";
|
|
return "Unknown";
|
|
}
|
|
|
|
// Parse agent file frontmatter
|
|
function parseAgentFrontmatter(content: string): AgentConfig | null {
|
|
const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
|
|
if (!frontmatterMatch) return null;
|
|
|
|
try {
|
|
const frontmatter = frontmatterMatch[1];
|
|
const lines = frontmatter.split("\n");
|
|
const config: Record<string, unknown> = {};
|
|
|
|
for (const line of lines) {
|
|
const match = line.match(/^(\w+):\s*(.+)$/);
|
|
if (match) {
|
|
const [, key, value] = match;
|
|
if (value === "allow" || value === "deny") {
|
|
if (!config.permission) config.permission = {};
|
|
(config.permission as Record<string, unknown>)[key] = value;
|
|
} else if (key === "model") {
|
|
config[key] = value;
|
|
config.provider = detectProvider(value);
|
|
} else {
|
|
config[key] = value;
|
|
}
|
|
}
|
|
}
|
|
|
|
return config as unknown as AgentConfig;
|
|
} catch {
|
|
return null;
|
|
}
|
|
}
|
|
|
|
// Get git history for agent changes
|
|
function getGitHistory(): Map<string, AgentVersion[]> {
|
|
const history = new Map<string, AgentVersion[]>();
|
|
|
|
try {
|
|
// Get commits that modified agent files
|
|
const result = spawnSync('git', ['log', '--all', '--oneline', '--follow', '--format=%H|%ai|%s', '--', '.kilo/agents/'], {
|
|
cwd: process.cwd(),
|
|
encoding: 'utf-8',
|
|
maxBuffer: 10 * 1024 * 1024
|
|
});
|
|
|
|
if (result.status !== 0 || !result.stdout) {
|
|
console.warn('Git log failed, skipping history');
|
|
return history;
|
|
}
|
|
|
|
const logOutput = result.stdout.trim();
|
|
const commits = logOutput.split('\n').filter(Boolean);
|
|
|
|
for (const line of commits) {
|
|
const [hash, date, ...msgParts] = line.split('|');
|
|
if (!hash || !date) continue;
|
|
|
|
const message = msgParts.join('|').trim();
|
|
|
|
// Detect change type from commit message
|
|
const agentMatch = message.match(/(?:add|update|fix|feat|change|set)\s+(\w+-?\w*)/i);
|
|
|
|
if (agentMatch) {
|
|
const agentName = agentMatch[1].toLowerCase();
|
|
const type = message.toLowerCase().includes("add") || message.toLowerCase().includes("feat")
|
|
? "agent_created"
|
|
: message.toLowerCase().includes("model")
|
|
? "model_change"
|
|
: "prompt_change";
|
|
|
|
if (!history.has(agentName)) {
|
|
history.set(agentName, []);
|
|
}
|
|
|
|
history.get(agentName)!.push({
|
|
date: date.replace(" ", "T") + "Z",
|
|
commit: hash.substring(0, 8),
|
|
type: type as AgentVersion["type"],
|
|
from: null, // Will be filled later
|
|
to: "", // Will be filled later
|
|
reason: message,
|
|
source: "git"
|
|
});
|
|
}
|
|
}
|
|
} catch (error) {
|
|
console.warn("Git history extraction failed:", error);
|
|
}
|
|
|
|
return history;
|
|
}
|
|
|
|
// Load capability index (simple parsing without yaml dependency)

/**
 * Parse .kilo/capability-index.yaml into per-agent configs without pulling
 * in a YAML library, using regexes tailored to this repo's specific file
 * layout (indented agent names, each followed by a deeper-indented block
 * containing `model:`, an optional `capabilities:` list and `mode:`).
 *
 * Known non-agent top-level sections are skipped by name. Agents without a
 * `model:` line are ignored. Returns {} on any read/parse failure.
 *
 * NOTE(review): this parser is whitespace-sensitive and format-fragile —
 * verify against the actual YAML layout before editing either side.
 */
function loadCapabilityIndex(): Record<string, AgentConfig> {
  const configs: Record<string, AgentConfig> = {};

  try {
    const content = fs.readFileSync(CAPABILITY_INDEX, "utf-8");

    // Simple YAML-ish parsing for our specific format
    // Extract agent blocks
    const agentRegex = /^ (\w[\w-]+):\n((?: .+\n?)+)/gm;
    let match;

    while ((match = agentRegex.exec(content)) !== null) {
      const name = match[1];
      // Skip known non-agent sections of the YAML file.
      if (name === 'capability_routing' || name === 'parallel_groups' ||
        name === 'iteration_loops' || name === 'quality_gates' ||
        name === 'workflow_states') continue;

      const block = match[2];

      // Extract model (required — agents without one are skipped)
      const modelMatch = block.match(/model:\s*(.+)/);
      if (!modelMatch) continue;

      const model = modelMatch[1].trim();

      // Extract capabilities (dash-list under a `capabilities:` key)
      const capsMatch = block.match(/capabilities:\n((?: - .+\n?)+)/);
      const capabilities = capsMatch
        ? capsMatch[1].split('\n').filter(l => l.trim()).map(l => l.replace(/^\s*-?\s*/, '').trim())
        : [];

      // Extract mode (defaults to 'subagent' when absent)
      const modeMatch = block.match(/mode:\s*(\w+)/);
      const mode = modeMatch ? modeMatch[1] : 'subagent';

      configs[name] = {
        model,
        provider: detectProvider(model),
        // First capability doubles as a human-readable category.
        category: capabilities[0]?.replace(/_/g, ' ') || 'General',
        mode,
        color: '#6B7280', // default gray; real colors come from other sources
        description: '',
        capabilities,
      };
    }
  } catch (error) {
    console.warn("Capability index loading failed:", error);
  }

  return configs;
}
|
// Load kilo.jsonc configuration
|
|
function loadKiloConfig(): Record<string, AgentConfig> {
|
|
const configs: Record<string, AgentConfig> = {};
|
|
|
|
try {
|
|
const content = fs.readFileSync(KILO_CONFIG, "utf-8");
|
|
// Remove comments for JSON parsing
|
|
const cleaned = content.replace(/\/\*[\s\S]*?\*\/|\/\/.*/g, "");
|
|
const parsed = JSON.parse(cleaned);
|
|
|
|
if (parsed.agent) {
|
|
for (const [name, config] of Object.entries(parsed.agent)) {
|
|
const agentConfig = config as Record<string, unknown>;
|
|
if (agentConfig.model) {
|
|
configs[name] = {
|
|
model: agentConfig.model as string,
|
|
provider: detectProvider(agentConfig.model as string),
|
|
category: "Built-in",
|
|
mode: (agentConfig.mode as string) || "primary",
|
|
color: "#3B82F6",
|
|
description: (agentConfig.description as string) || "",
|
|
capabilities: [],
|
|
};
|
|
}
|
|
}
|
|
}
|
|
} catch (error) {
|
|
console.warn("Kilo config loading failed:", error);
|
|
}
|
|
|
|
return configs;
|
|
}
|
|
|
|
// Load all agent files
|
|
function loadAgentFiles(): Record<string, AgentConfig> {
|
|
const configs: Record<string, AgentConfig> = {};
|
|
|
|
try {
|
|
const files = fs.readdirSync(AGENTS_DIR);
|
|
|
|
for (const file of files) {
|
|
if (!file.endsWith(".md")) continue;
|
|
|
|
const filepath = path.join(AGENTS_DIR, file);
|
|
const content = fs.readFileSync(filepath, "utf-8");
|
|
const frontmatter = parseAgentFrontmatter(content);
|
|
|
|
if (frontmatter && frontmatter.model) {
|
|
const name = file.replace(".md", "");
|
|
configs[name] = {
|
|
...frontmatter,
|
|
category: getCategoryFromCapabilities(frontmatter.capabilities),
|
|
};
|
|
}
|
|
}
|
|
} catch (error) {
|
|
console.warn("Agent files loading failed:", error);
|
|
}
|
|
|
|
return configs;
|
|
}
|
|
|
|
// Get category from capabilities
|
|
function getCategoryFromCapabilities(capabilities?: string[]): string {
|
|
if (!capabilities) return "General";
|
|
|
|
const categoryMap: Record<string, string> = {
|
|
code: "Core Dev",
|
|
ui: "Frontend",
|
|
test: "QA",
|
|
security: "Security",
|
|
performance: "Performance",
|
|
devops: "DevOps",
|
|
go_: "Go Development",
|
|
flutter: "Mobile",
|
|
memory: "Cognitive",
|
|
plan: "Cognitive",
|
|
workflow: "Process",
|
|
markdown: "Validation",
|
|
};
|
|
|
|
for (const cap of capabilities) {
|
|
const key = Object.keys(categoryMap).find((k) => cap.toLowerCase().includes(k.toLowerCase()));
|
|
if (key) return categoryMap[key];
|
|
}
|
|
|
|
return "General";
|
|
}
|
|
|
|
// Merge all sources
|
|
function mergeConfigs(
|
|
agentFiles: Record<string, AgentConfig>,
|
|
capabilityIndex: Record<string, AgentConfig>,
|
|
kiloConfig: Record<string, AgentConfig>
|
|
): Record<string, AgentConfig> {
|
|
const merged: Record<string, AgentConfig> = {};
|
|
|
|
// Start with agent files (highest priority)
|
|
for (const [name, config] of Object.entries(agentFiles)) {
|
|
merged[name] = { ...config };
|
|
}
|
|
|
|
// Overlay capability index data
|
|
for (const [name, config] of Object.entries(capabilityIndex)) {
|
|
if (merged[name]) {
|
|
merged[name] = {
|
|
...merged[name],
|
|
capabilities: config.capabilities,
|
|
};
|
|
} else {
|
|
merged[name] = config;
|
|
}
|
|
}
|
|
|
|
// Overlay kilo.jsonc data
|
|
for (const [name, config] of Object.entries(kiloConfig)) {
|
|
if (merged[name]) {
|
|
merged[name] = {
|
|
...merged[name],
|
|
model: config.model,
|
|
provider: config.provider,
|
|
};
|
|
} else {
|
|
merged[name] = config;
|
|
}
|
|
}
|
|
|
|
return merged;
|
|
}
|
|
|
|
// Main sync function
|
|
async function sync() {
|
|
console.log("🔄 Syncing agent evolution data...\n");
|
|
|
|
// Load all sources
|
|
console.log("📂 Loading agent files...");
|
|
const agentFiles = loadAgentFiles();
|
|
console.log(` Found ${Object.keys(agentFiles).length} agent files`);
|
|
|
|
console.log("📄 Loading capability index...");
|
|
const capabilityIndex = loadCapabilityIndex();
|
|
console.log(` Found ${Object.keys(capabilityIndex).length} agents`);
|
|
|
|
console.log("⚙️ Loading kilo config...");
|
|
const kiloConfig = loadKiloConfig();
|
|
console.log(` Found ${Object.keys(kiloConfig).length} agents`);
|
|
|
|
// Get git history
|
|
console.log("\n📜 Parsing git history...");
|
|
const gitHistory = await getGitHistory();
|
|
console.log(` Found history for ${gitHistory.size} agents`);
|
|
|
|
// Merge configs
|
|
const merged = mergeConfigs(agentFiles, capabilityIndex, kiloConfig);
|
|
|
|
// Load existing evolution data
|
|
let existingData: EvolutionData = {
|
|
version: "1.0.0",
|
|
lastUpdated: new Date().toISOString(),
|
|
agents: {},
|
|
providers: {
|
|
Ollama: { models: [] },
|
|
OpenRouter: { models: [] },
|
|
Groq: { models: [] },
|
|
},
|
|
evolution_metrics: {
|
|
total_agents: 0,
|
|
agents_with_history: 0,
|
|
pending_recommendations: 0,
|
|
last_sync: new Date().toISOString(),
|
|
sync_sources: ["git", "capability-index.yaml", "kilo.jsonc"],
|
|
},
|
|
};
|
|
|
|
try {
|
|
if (fs.existsSync(OUTPUT_FILE)) {
|
|
const existing = JSON.parse(fs.readFileSync(OUTPUT_FILE, "utf-8"));
|
|
existingData.agents = existing.agents || {};
|
|
}
|
|
} catch {
|
|
// Use defaults
|
|
}
|
|
|
|
// Update agents
|
|
for (const [name, config] of Object.entries(merged)) {
|
|
const existingAgent = existingData.agents[name];
|
|
|
|
// Check if model changed
|
|
if (existingAgent?.current?.model && existingAgent.current.model !== config.model) {
|
|
// Add to history
|
|
existingAgent.history.push({
|
|
date: new Date().toISOString(),
|
|
commit: "sync",
|
|
type: "model_change",
|
|
from: existingAgent.current.model,
|
|
to: config.model,
|
|
reason: "Model update from sync",
|
|
source: "git",
|
|
});
|
|
existingAgent.current = { ...config };
|
|
} else {
|
|
existingData.agents[name] = {
|
|
current: config,
|
|
history: existingAgent?.history || gitHistory.get(name) || [],
|
|
performance_log: existingAgent?.performance_log || [],
|
|
};
|
|
}
|
|
}
|
|
|
|
// Update metrics
|
|
existingData.evolution_metrics.total_agents = Object.keys(existingData.agents).length;
|
|
existingData.evolution_metrics.agents_with_history = Object.values(existingData.agents).filter(
|
|
(a) => a.history.length > 0
|
|
).length;
|
|
existingData.evolution_metrics.pending_recommendations = Object.values(existingData.agents).filter(
|
|
(a) => a.current.recommendations && a.current.recommendations.length > 0
|
|
).length;
|
|
existingData.evolution_metrics.last_sync = new Date().toISOString();
|
|
|
|
// Save JSON
|
|
fs.writeFileSync(OUTPUT_FILE, JSON.stringify(existingData, null, 2));
|
|
console.log(`\n✅ Synced ${existingData.evolution_metrics.total_agents} agents to ${OUTPUT_FILE}`);
|
|
|
|
// Generate standalone HTML
|
|
generateStandalone(existingData);
|
|
|
|
// Print summary
|
|
console.log("\n📊 Summary:");
|
|
console.log(` Total agents: ${existingData.evolution_metrics.total_agents}`);
|
|
console.log(` Agents with history: ${existingData.evolution_metrics.agents_with_history}`);
|
|
console.log(` Pending recommendations: ${existingData.evolution_metrics.pending_recommendations}`);
|
|
}
|
|
|
|
/**
|
|
* Generate standalone HTML with embedded data
|
|
*/
|
|
function generateStandalone(data: EvolutionData): void {
|
|
const templatePath = path.join(__dirname, '../index.html');
|
|
const outputPath = path.join(__dirname, '../index.standalone.html');
|
|
|
|
let html = fs.readFileSync(templatePath, 'utf-8');
|
|
|
|
// Replace EMBEDDED_DATA with actual data
|
|
const embeddedDataStr = `const EMBEDDED_DATA = ${JSON.stringify(data, null, 2)};`;
|
|
|
|
// Find and replace the EMBEDDED_DATA declaration
|
|
html = html.replace(
|
|
/const EMBEDDED_DATA = \{[\s\S]*?\};?\s*\/\/ Initialize/,
|
|
embeddedDataStr + '\n\n// Initialize'
|
|
);
|
|
|
|
fs.writeFileSync(outputPath, html);
|
|
console.log(`📄 Generated standalone: ${outputPath}`);
|
|
console.log(` File size: ${(fs.statSync(outputPath).size / 1024).toFixed(1)} KB`);
|
|
}
|
|
|
|
// Run
|
|
sync().catch(console.error); |