Protokoll #29499

ID: 29499
Zeitstempel: 2025-12-29 09:51:38.521981
Client: root
IP: 145.224.72.140
Modell: claude-sonnet-4-20250514
Status: completed
Tokens: 2,679 (Input: 22, Output: 2,657)
Dauer: 73 ms
Request-Zeit: 2025-12-29 09:51:38.521981
Response-Zeit: 2025-12-29 09:51:38.595046

Request

{
    "event": "PreToolUse",
    "tool_name": "mcp__mcp-docs__docs_get",
    "tool_input": {
        "id": 54
    }
}

Response

{
    "tool_response": [
        {
            "type": "text",
            "text": "{\n  \"success\": true,\n  \"doc\": {\n    \"id\": 54,\n    \"parent_id\": 51,\n    \"slug\": \"services\",\n    \"path\": \"\/api\/services\",\n    \"title\": \"AI-Services\",\n    \"description\": \"PHP-Service-Klassen für KI-Integration. Native HTTP-Clients für Ollama, Qdrant und Claude API.\",\n    \"content\": \"<nav class=\\\"breadcrumb\\\">\\n    <a href=\\\"\/docs\\\">Dokumentation<\/a> &raquo; <a href=\\\"\/docs\/api\\\">API<\/a> &raquo; Services\\n<\/nav>\\n\\n<h1>AI Services<\/h1>\\n<p class=\\\"doc-meta\\\"><strong>Erstellt:<\/strong> 2025-12-20 | <strong>Aktualisiert:<\/strong> 2025-12-27<\/p>\\n\\n<p>PHP-Service-Klassen für KI-Integration. Native HTTP-Clients für Ollama, Qdrant und Claude API.<\/p>\\n\\n<table>\\n    <tr><th>Pfad<\/th><td>\/src\/Infrastructure\/AI\/<\/td><\/tr>\\n    <tr><th>Namespace<\/th><td>Infrastructure\\\\AI<\/td><\/tr>\\n    <tr><th>Contract<\/th><td>layered-architecture-pruefung_v1.0.yaml<\/td><\/tr>\\n    <tr><th>Status<\/th><td>Produktiv (Migration von Python abgeschlossen)<\/td><\/tr>\\n<\/table>\\n\\n<h2>AIConfig<\/h2>\\n<p>Zentralisierte Konfiguration und Service-Factory für alle AI-Services.<\/p>\\n\\n<table>\\n    <tr><th>Datei<\/th><td>AIConfig.php<\/td><\/tr>\\n    <tr><th>Typ<\/th><td>readonly class<\/td><\/tr>\\n    <tr><th>Funktion<\/th><td>Config-Management, Service-Factory, Credentials-Loading<\/td><\/tr>\\n<\/table>\\n\\n<h3>Properties<\/h3>\\n<pre><code>final readonly class AIConfig\\n{\\n    public string $ollamaHost;           \/\/ http:\/\/localhost:11434\\n    public string $qdrantHost;           \/\/ http:\/\/localhost:6333\\n    public string $anthropicApiKey;      \/\/ sk-ant-... 
(aus credentials.md)\\n    public string $embeddingModel;       \/\/ mxbai-embed-large\\n    public string $claudeModel;          \/\/ claude-opus-4-5-20251101\\n    public string $defaultCollection;    \/\/ documents\\n}<\/code><\/pre>\\n\\n<h3>Factory-Methoden<\/h3>\\n<pre><code>\/\/ Config erstellen (lädt API-Key aus credentials.md)\\n$config = AIConfig::fromCredentialsFile();\\n\\n\/\/ Einzelne Services erstellen\\n$ollama = $config->createOllamaService();\\n$qdrant = $config->createQdrantService();\\n$claude = $config->createClaudeService();\\n\\n\/\/ Vollständiger ChatService (alle Dependencies)\\n$chatService = $config->createChatService();<\/code><\/pre>\\n\\n<h3>Verwendung<\/h3>\\n<pre><code>use Infrastructure\\\\AI\\\\AIConfig;\\n\\n\/\/ Einfachster Weg: ChatService direkt nutzen\\n$config = AIConfig::fromCredentialsFile();\\n$chat = $config->createChatService();\\n$result = $chat->chat('Was ist systemisches Coaching?');\\n\\n\/\/ Alternative: Einzelne Services für spezielle Aufgaben\\n$config = AIConfig::fromCredentialsFile();\\n$ollama = $config->createOllamaService();\\n$embedding = $ollama->getEmbedding('Text zum Einbetten');<\/code><\/pre>\\n\\n<h2>OllamaService<\/h2>\\n<p>Embedding-Generierung und LLM-Generierung via Ollama REST API.<\/p>\\n\\n<table>\\n    <tr><th>Datei<\/th><td>OllamaService.php<\/td><\/tr>\\n    <tr><th>Host<\/th><td>http:\/\/localhost:11434<\/td><\/tr>\\n    <tr><th>Embedding-Model<\/th><td>mxbai-embed-large<\/td><\/tr>\\n    <tr><th>LLM-Model<\/th><td>gemma3:4b-it-qat<\/td><\/tr>\\n    <tr><th>Dimension<\/th><td>1024<\/td><\/tr>\\n<\/table>\\n\\n<h3>Methoden<\/h3>\\n<pre><code>final readonly class OllamaService\\n{\\n    public function __construct(string $host = 'http:\/\/localhost:11434');\\n\\n    \/**\\n     * Generiert Embedding-Vektor für Text.\\n     * @return array&lt;int, float&gt; 1024-dimensionaler Vektor\\n     * @throws RuntimeException\\n     *\/\\n    public function getEmbedding(\\n        string $text,\\n       
 string $model = 'mxbai-embed-large'\\n    ): array;\\n\\n    \/**\\n     * Generiert Text-Antwort mit Ollama-LLM.\\n     * @throws RuntimeException\\n     *\/\\n    public function generate(\\n        string $prompt,\\n        string $model = 'gemma3:4b-it-qat'\\n    ): string;\\n\\n    \/**\\n     * Prüft ob Ollama API erreichbar ist.\\n     *\/\\n    public function isAvailable(): bool;\\n}<\/code><\/pre>\\n\\n<h3>Code-Beispiel<\/h3>\\n<pre><code>use Infrastructure\\\\AI\\\\OllamaService;\\n\\n$ollama = new OllamaService('http:\/\/localhost:11434');\\n\\n\/\/ Embedding erstellen\\n$embedding = $ollama->getEmbedding('Hello World');\\n\/\/ Returns: [0.123, -0.456, 0.789, ...] (1024 floats)\\n\\n\/\/ Text generieren\\n$response = $ollama->generate('Explain quantum computing.');\\n\/\/ Returns: \\\"Quantum computing is a type of computing that...\\\"\\n\\n\/\/ Health Check\\nif ($ollama->isAvailable()) {\\n    echo \\\"Ollama läuft\\\";\\n}<\/code><\/pre>\\n\\n<h3>API-Aufruf<\/h3>\\n<pre><code>POST http:\/\/localhost:11434\/api\/embeddings\\nContent-Type: application\/json\\n\\n{\\n    \\\"model\\\": \\\"mxbai-embed-large\\\",\\n    \\\"prompt\\\": \\\"Text zum Einbetten\\\"\\n}\\n\\nResponse:\\n{\\n    \\\"embedding\\\": [0.123, -0.456, ...]\\n}<\/code><\/pre>\\n\\n<h2>QdrantService<\/h2>\\n<p>Vektorsuche via Qdrant REST API.<\/p>\\n\\n<table>\\n    <tr><th>Datei<\/th><td>QdrantService.php<\/td><\/tr>\\n    <tr><th>Host<\/th><td>http:\/\/localhost:6333<\/td><\/tr>\\n    <tr><th>Collection<\/th><td>documents<\/td><\/tr>\\n    <tr><th>Distance<\/th><td>Cosine<\/td><\/tr>\\n    <tr><th>Timeout<\/th><td>30s<\/td><\/tr>\\n<\/table>\\n\\n<h3>Methoden<\/h3>\\n<pre><code>final readonly class QdrantService\\n{\\n    public function __construct(string $host = 'http:\/\/localhost:6333');\\n\\n    \/**\\n     * Sucht ähnliche Dokumente per Vektor-Similarity.\\n     * @param array&lt;int, float&gt; $vector Embedding-Vektor\\n     * @return array&lt;int, array{id: int|string, 
score: float, payload: array}&gt;\\n     * @throws RuntimeException\\n     *\/\\n    public function search(\\n        array $vector,\\n        string $collection = 'documents',\\n        int $limit = 5\\n    ): array;\\n\\n    \/**\\n     * Prüft ob Collection existiert.\\n     *\/\\n    public function collectionExists(string $collection): bool;\\n\\n    \/**\\n     * Prüft ob Qdrant API erreichbar ist.\\n     *\/\\n    public function isAvailable(): bool;\\n\\n    \/**\\n     * Holt Collection-Informationen.\\n     * @return array&lt;string, mixed&gt;|null\\n     * @throws RuntimeException\\n     *\/\\n    public function getCollectionInfo(string $collection): ?array;\\n}<\/code><\/pre>\\n\\n<h3>Code-Beispiel<\/h3>\\n<pre><code>use Infrastructure\\\\AI\\\\QdrantService;\\n\\n$qdrant = new QdrantService('http:\/\/localhost:6333');\\n\\n\/\/ Vektorsuche\\n$vector = [0.123, -0.456, 0.789, ...]; \/\/ 1024-dimensional\\n$results = $qdrant->search($vector, 'documents', 5);\\n\/\/ Returns: [\\n\/\/   ['id' => 1, 'score' => 0.89, 'payload' => ['content' => '...', 'title' => '...']],\\n\/\/   ['id' => 2, 'score' => 0.76, 'payload' => ['content' => '...', 'title' => '...']]\\n\/\/ ]\\n\\n\/\/ Collection prüfen\\nif ($qdrant->collectionExists('documents')) {\\n    $info = $qdrant->getCollectionInfo('documents');\\n    echo \\\"Vektoren: \\\" . 
$info['vectors_count'];\\n}<\/code><\/pre>\\n\\n<h3>API-Aufruf<\/h3>\\n<pre><code>POST http:\/\/localhost:6333\/collections\/documents\/points\/search\\nContent-Type: application\/json\\n\\n{\\n    \\\"vector\\\": [0.123, -0.456, ...],\\n    \\\"limit\\\": 5,\\n    \\\"with_payload\\\": true\\n}\\n\\nResponse:\\n{\\n    \\\"result\\\": [\\n        {\\n            \\\"id\\\": 1,\\n            \\\"score\\\": 0.89,\\n            \\\"payload\\\": {\\n                \\\"content\\\": \\\"...\\\",\\n                \\\"document_title\\\": \\\"...\\\"\\n            }\\n        }\\n    ]\\n}<\/code><\/pre>\\n\\n<h2>ClaudeService<\/h2>\\n<p>LLM-Anfragen via Anthropic API.<\/p>\\n\\n<table>\\n    <tr><th>Datei<\/th><td>ClaudeService.php<\/td><\/tr>\\n    <tr><th>API<\/th><td>https:\/\/api.anthropic.com\/v1\/messages<\/td><\/tr>\\n    <tr><th>API Version<\/th><td>2023-06-01<\/td><\/tr>\\n    <tr><th>Model<\/th><td>claude-opus-4-5-20251101<\/td><\/tr>\\n    <tr><th>Max Tokens<\/th><td>4000<\/td><\/tr>\\n    <tr><th>Timeout<\/th><td>120s<\/td><\/tr>\\n<\/table>\\n\\n<h3>Methoden<\/h3>\\n<pre><code>final readonly class ClaudeService\\n{\\n    public function __construct(string $apiKey);\\n\\n    \/**\\n     * Sendet Prompt an Claude und gibt Antwort zurück.\\n     * @return array{text: string, usage: array{input_tokens: int, output_tokens: int}}\\n     * @throws RuntimeException\\n     *\/\\n    public function ask(\\n        string $prompt,\\n        ?string $systemPrompt = null,\\n        string $model = 'claude-opus-4-5-20251101',\\n        int $maxTokens = 4000\\n    ): array;\\n\\n    \/**\\n     * Erstellt RAG-Prompt mit Kontext.\\n     *\/\\n    public function buildRagPrompt(string $question, string $context): string;\\n\\n    \/**\\n     * Liefert Standard-System-Prompt für RAG.\\n     *\/\\n    public function getDefaultSystemPrompt(): string;\\n\\n    \/**\\n     * Prüft ob Claude API erreichbar ist.\\n     *\/\\n    public function isAvailable(): 
bool;\\n}<\/code><\/pre>\\n\\n<h3>Code-Beispiel<\/h3>\\n<pre><code>use Infrastructure\\\\AI\\\\ClaudeService;\\n\\n$claude = new ClaudeService('sk-ant-...');\\n\\n\/\/ Einfache Anfrage\\n$result = $claude->ask('Explain quantum computing');\\necho $result['text'];\\necho \\\"Tokens: \\\" . $result['usage']['output_tokens'];\\n\\n\/\/ Mit System-Prompt\\n$system = 'You are a physics teacher.';\\n$result = $claude->ask('Explain relativity', $system);\\n\\n\/\/ RAG-Prompt erstellen\\n$question = 'Was ist systemisches Coaching?';\\n$context = '[Quelle 1: Coaching Grundlagen]\\\\nSystemisches Coaching betrachtet...';\\n$prompt = $claude->buildRagPrompt($question, $context);\\n$systemPrompt = $claude->getDefaultSystemPrompt();\\n$result = $claude->ask($prompt, $systemPrompt);<\/code><\/pre>\\n\\n<h3>API-Aufruf<\/h3>\\n<pre><code>POST https:\/\/api.anthropic.com\/v1\/messages\\nContent-Type: application\/json\\nx-api-key: sk-ant-...\\nanthropic-version: 2023-06-01\\n\\n{\\n    \\\"model\\\": \\\"claude-opus-4-5-20251101\\\",\\n    \\\"max_tokens\\\": 4000,\\n    \\\"system\\\": \\\"Du bist ein hilfreicher Assistent...\\\",\\n    \\\"messages\\\": [\\n        {\\n            \\\"role\\\": \\\"user\\\",\\n            \\\"content\\\": \\\"...\\\"\\n        }\\n    ]\\n}\\n\\nResponse:\\n{\\n    \\\"content\\\": [\\n        {\\n            \\\"type\\\": \\\"text\\\",\\n            \\\"text\\\": \\\"...\\\"\\n        }\\n    ],\\n    \\\"usage\\\": {\\n        \\\"input_tokens\\\": 100,\\n        \\\"output_tokens\\\": 200\\n    }\\n}<\/code><\/pre>\\n\\n<h2>ChatService<\/h2>\\n<p>Orchestriert RAG-Pipeline mit allen Services. Vollständiger Ablauf: Embedding → Vektorsuche → Kontext → LLM-Antwort.<\/p>\\n\\n<table>\\n    <tr><th>Datei<\/th><td>ChatService.php<\/td><\/tr>\\n    <tr><... [TRUNCATED-9fec3896c82874ba]"
        }
    ]
}
← Vorheriger | Zur Liste | Nächster →