Protokoll #27178

ID: 27178
Zeitstempel: 2025-12-28 02:35:36.330619
Client: root
IP: 145.224.72.140
Modell: claude-sonnet-4-20250514
Status: completed
Tokens: 1,537 (Input: 48, Output: 1,489)
Dauer: 80 ms
Request-Zeit: 2025-12-28 02:35:36.330619
Response-Zeit: 2025-12-28 02:35:36.410149

Request

{
    "event": "PreToolUse",
    "tool_name": "Read",
    "tool_input": {
        "file_path": "\/var\/www\/dev.campus.systemische-tools.de\/src\/UseCases\/Chat\/StreamingChatMessageUseCase.php",
        "offset": 66,
        "limit": 140
    }
}

Response

{
    "tool_response": {
        "type": "text",
        "file": {
            "filePath": "\/var\/www\/dev.campus.systemische-tools.de\/src\/UseCases\/Chat\/StreamingChatMessageUseCase.php",
            "content": "    \/**\n     * Execute chat with streaming progress\n     *\n     * @param array<string> $collections\n     *\/\n    public function execute(\n        string $sessionUuid,\n        string $message,\n        string $model,\n        array $collections = ['documents'],\n        int $contextLimit = 5,\n        int $authorProfileId = 0,\n        int $systemPromptId = 1,\n        float $temperature = 0.7,\n        int $maxTokens = 4096,\n        int $structureId = 0,\n        bool $qualityCheck = false\n    ): ChatResponse {\n        $totalStart = microtime(true);\n\n        \/\/ Step 1: Validate session\n        $this->emit('session', 'Session validieren...');\n        $this->startStep();\n        $session = $this->sessionRepo->findByUuid($sessionUuid);\n        if ($session === null) {\n            $this->emit('error', 'Session nicht gefunden');\n\n            return ChatResponse::error('Session nicht gefunden.');\n        }\n        $sessionId = $session->getId() ?? 0;\n        $this->endStep('session_done', 'Session validiert');\n\n        \/\/ Step 2: Validate message\n        $message = trim($message);\n        if ($message === '') {\n            $this->emit('error', 'Keine Nachricht');\n\n            return ChatResponse::error('Bitte gib eine Frage ein.');\n        }\n\n        \/\/ Step 3: Save user message\n        $this->emit('save_user', 'User-Nachricht speichern...');\n        $this->startStep();\n        $this->messageRepo->save(\n            sessionId: $sessionId,\n            role: 'user',\n            content: $message,\n            model: $model\n        );\n        $this->endStep('save_user_done', 'User-Nachricht gespeichert');\n\n        \/\/ Step 4: Auto-set title from first message\n        $currentTitle = $session->getTitle();\n        if ($currentTitle === null || $currentTitle === 'Neuer Chat') {\n            $title = mb_substr($message, 0, 50) . (mb_strlen($message) > 50 ? '...' 
: '');\n            $this->sessionRepo->updateTitle($sessionId, $title);\n        }\n\n        \/\/ Step 5: Get prompts\n        $this->emit('prompts', 'Prompts laden...');\n        $this->startStep();\n        $stylePrompt = $this->getStylePromptFromProfile($authorProfileId);\n        $systemPrompt = $this->getSystemPromptById($systemPromptId);\n        $structurePrompt = $this->getStructurePrompt($structureId);\n        if ($structurePrompt !== null) {\n            $systemPrompt = ($systemPrompt ?? '') . \"\\n\\n\" . $structurePrompt;\n        }\n        $this->endStep('prompts_done', 'Prompts geladen');\n\n        \/\/ RAG Pipeline\n        $searchResults = [];\n        $context = '';\n\n        if ($collections !== []) {\n            \/\/ Step 6: Generate embedding\n            $this->emit('embedding', 'Embedding generieren...');\n            $this->startStep();\n\n            try {\n                $queryEmbedding = $this->ollama->getEmbedding($message);\n            } catch (\\RuntimeException $e) {\n                $this->emit('error', 'Embedding fehlgeschlagen: ' . $e->getMessage());\n\n                return ChatResponse::error('Embedding generation failed: ' . $e->getMessage());\n            }\n            $this->endStep('embedding_done', 'Embedding generiert (' . count($queryEmbedding) . 
' Dimensionen)');\n\n            \/\/ Step 7: Search collections\n            $collectionCount = count($collections);\n            $this->emit('search', \"Suche in {$collectionCount} Collection(s)...\");\n            $this->startStep();\n            $searchResults = $this->searchMultipleCollections($queryEmbedding, $collections, $contextLimit);\n            $resultCount = count($searchResults);\n            $this->endStep('search_done', \"{$resultCount} relevante Chunks gefunden\");\n\n            \/\/ Step 8: Build context\n            if ($searchResults !== []) {\n                $this->emit('context', 'Kontext aufbauen...');\n                $this->startStep();\n                $context = $this->buildContext($searchResults);\n                $contextLen = strlen($context);\n                $this->endStep('context_done', \"Kontext erstellt ({$contextLen} Zeichen)\");\n            }\n        }\n\n        \/\/ Step 9: LLM Request\n        $isOllama = str_starts_with($model, 'ollama:');\n        $isClaude = str_starts_with($model, 'claude-');\n        $hasContext = $context !== '';\n        $modelLabel = $isOllama ? substr($model, 7) : $model;\n\n        $this->emit('llm', \"Anfrage an {$modelLabel}...\");\n        $this->startStep();\n        $llmStart = microtime(true);\n\n        try {\n            if ($isClaude) {\n                $userPrompt = $hasContext\n                    ? $this->claude->buildRagPrompt($message, $context)\n                    : $message;\n\n                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext\n                    ? $this->claude->getDefaultSystemPrompt()\n                    : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $effectiveSystemPrompt .= \"\\n\\n\" . 
$stylePrompt;\n                }\n\n                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n                $answer = $llmResponse['text'];\n                $usage = $llmResponse['usage'];\n            } elseif ($isOllama) {\n                $ollamaModel = substr($model, 7);\n                $instructions = [];\n                if ($systemPrompt !== null && $systemPrompt !== '') {\n                    $instructions[] = $systemPrompt;\n                }\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $instructions[] = $stylePrompt;",
            "numLines": 140,
            "startLine": 66,
            "totalLines": 410
        }
    }
}
← Vorheriger Zur Liste Nächster →