Protokoll #5931

ID: 5931
Zeitstempel: 2025-12-23 04:26:08.818074
Client: root
IP: 145.224.96.190
Modell: claude-sonnet-4-20250514
Status: completed
Tokens: 861 (Input: 46, Output: 815)
Dauer: 92 ms
Request-Zeit: 2025-12-23 04:26:08.818074
Response-Zeit: 2025-12-23 04:26:08.909828

Request

{
    "event": "PreToolUse",
    "tool_name": "Read",
    "tool_input": {
        "file_path": "\/var\/www\/dev.campus.systemische-tools.de\/src\/Controller\/Api\/DocsController.php",
        "offset": 310,
        "limit": 100
    }
}

Response

{
    "tool_response": {
        "type": "text",
        "file": {
            "filePath": "\/var\/www\/dev.campus.systemische-tools.de\/src\/Controller\/Api\/DocsController.php",
            "content": "     * POST \/api\/v1\/docs\/chat\n     * Chat mit Dokumentation (RAG).\n     *\/\n    public function chat(): void\n    {\n        try {\n            $input = $this->getJsonInput();\n            $question = trim($input['question'] ?? '');\n            $model = $input['model'] ?? 'mistral';\n            $limit = (int) ($input['limit'] ?? 5);\n\n            if ($question === '') {\n                $this->json(['success' => false, 'error' => 'Keine Frage angegeben'], 400);\n\n                return;\n            }\n\n            \/\/ Get relevant chunks via semantic search\n            $chunks = $this->syncService->search($question, $limit);\n\n            if (empty($chunks)) {\n                $this->json([\n                    'success' => true,\n                    'data' => [\n                        'answer' => 'Leider konnte ich keine relevanten Informationen in der Dokumentation finden.',\n                        'sources' => [],\n                    ],\n                ]);\n\n                return;\n            }\n\n            \/\/ Build context from chunks\n            $context = $this->buildContext($chunks);\n\n            \/\/ Generate answer using Ollama\n            $ollama = new \\Infrastructure\\AI\\OllamaService();\n            $prompt = $this->buildChatPrompt($question, $context);\n            $answer = $ollama->generate($prompt, $model, 0.3);\n\n            $sources = array_map(static fn ($chunk) => [\n                'id' => $chunk['doc_id'],\n                'path' => $chunk['path'],\n                'title' => $chunk['title'],\n                'score' => round($chunk['score'], 3),\n            ], $chunks);\n\n            $this->json([\n                'success' => true,\n                'data' => [\n                    'answer' => $answer,\n                    'sources' => $sources,\n                ],\n            ]);\n        } catch (\\Exception $e) {\n            $this->jsonError($e->getMessage());\n        }\n    
}\n\n    \/**\n     * Build context from chunks.\n     *\/\n    private function buildContext(array $chunks): string\n    {\n        $parts = [];\n\n        foreach ($chunks as $chunk) {\n            $part = \"## {$chunk['title']}\\n\";\n            $part .= \"Pfad: {$chunk['path']}\\n\";\n            $part .= $chunk['content'];\n            $parts[] = $part;\n        }\n\n        return implode(\"\\n\\n---\\n\\n\", $parts);\n    }\n\n    \/**\n     * Build chat prompt.\n     *\/\n    private function buildChatPrompt(string $question, string $context): string\n    {\n        return <<<PROMPT\n            Du bist ein Dokumentations-Assistent. Beantworte die Frage basierend auf dem bereitgestellten Kontext.\n\n            KONTEXT:\n            {$context}\n\n            FRAGE:\n            {$question}\n\n            ANLEITUNG:\n            - Antworte auf Deutsch\n            - Sei präzise und hilfreich\n            - Wenn der Kontext die Frage nicht beantwortet, sage das ehrlich\n            - Verweise auf die relevanten Abschnitte der Dokumentation\n            PROMPT;\n    }\n}\n",
            "numLines": 99,
            "startLine": 310,
            "totalLines": 408
        }
    }
}
← Vorheriger | Zur Liste | Nächster →