{
"event": "PreToolUse",
"tool_name": "mcp__mcp-db__db_execute",
"tool_input": {
"statement": "CREATE TABLE IF NOT EXISTS ai_models (\n id INT AUTO_INCREMENT PRIMARY KEY,\n provider ENUM('anthropic', 'ollama', 'openai') NOT NULL,\n model_id VARCHAR(100) NOT NULL COMMENT 'z.B. gemma3:27b-it-qat oder claude-opus-4-5-20251101',\n display_name VARCHAR(200) NOT NULL COMMENT 'Anzeigename im UI',\n full_key VARCHAR(150) GENERATED ALWAYS AS (\n CASE WHEN provider = 'ollama' THEN CONCAT('ollama:', model_id) ELSE model_id END\n ) STORED COMMENT 'Vollständiger Schlüssel für Konfiguration',\n is_available BOOLEAN DEFAULT TRUE COMMENT 'Aktuell verfügbar?',\n is_chat BOOLEAN DEFAULT TRUE COMMENT 'Für Chat/Generation geeignet',\n is_embedding BOOLEAN DEFAULT FALSE COMMENT 'Für Embeddings geeignet',\n is_vision BOOLEAN DEFAULT FALSE COMMENT 'Kann Bilder verarbeiten',\n context_length INT DEFAULT NULL COMMENT 'Kontext-Fenster in Tokens',\n parameters VARCHAR(50) DEFAULT NULL COMMENT 'Parameter-Größe z.B. 27b',\n priority INT DEFAULT 100 COMMENT 'Sortierung (niedriger = höher)',\n last_seen_at DATETIME DEFAULT NULL COMMENT 'Letzte Ollama-Erkennung',\n created_at DATETIME DEFAULT CURRENT_TIMESTAMP,\n updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n UNIQUE KEY uk_provider_model (provider, model_id),\n INDEX idx_available_chat (is_available, is_chat),\n INDEX idx_full_key (full_key)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='Zentrale Model-Registry für alle KI-Modelle'",
"database": "ki_dev"
}
}
{
"tool_response": [
{
"type": "text",
"text": "{\n \"status\": \"success\",\n \"affected_rows\": 0,\n \"error\": null,\n \"execution_ms\": 1\n}"
}
]
}