Log #27192

ID: 27192
Timestamp: 2025-12-28 02:37:13.248798
Client: root
IP: 145.224.72.140
Model: claude-sonnet-4-20250514
Status: completed
Tokens: 8,075 (input: 1,988, output: 6,087)
Duration: 103 ms
Request time: 2025-12-28 02:37:13.248798
Response time: 2025-12-28 02:37:13.351575

Request

{
    "event": "PreToolUse",
    "tool_name": "Edit",
    "tool_input": {
        "file_path": "\/var\/www\/dev.campus.systemische-tools.de\/src\/UseCases\/Chat\/StreamingChatMessageUseCase.php",
        "old_string": "        \/\/ Step 9: LLM Request\n        $isOllama = str_starts_with($model, 'ollama:');\n        $isClaude = str_starts_with($model, 'claude-');\n        $hasContext = $context !== '';\n        $modelLabel = $isOllama ? substr($model, 7) : $model;\n\n        $this->emit('llm', \"Anfrage an {$modelLabel}...\");\n        $this->startStep();\n        $llmStart = microtime(true);\n\n        try {\n            if ($isClaude) {\n                $userPrompt = $hasContext\n                    ? $this->claude->buildRagPrompt($message, $context)\n                    : $message;\n\n                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext\n                    ? $this->claude->getDefaultSystemPrompt()\n                    : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt;\n                }\n\n                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n                $answer = $llmResponse['text'];\n                $usage = $llmResponse['usage'];\n            } elseif ($isOllama) {\n                $ollamaModel = substr($model, 7);\n                $instructions = [];\n                if ($systemPrompt !== null && $systemPrompt !== '') {\n                    $instructions[] = $systemPrompt;\n                }\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $instructions[] = $stylePrompt;\n                }\n                $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';\n\n                $userPrompt = $hasContext\n                    ? sprintf(\"%sKontext aus den Dokumenten:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message)\n                    : $instructionBlock . $message;\n\n                $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);\n                $usage = null;\n            } else {\n                $this->emit('error', \"Unbekanntes Modell: {$model}\");\n\n                return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");\n            }\n        } catch (\\RuntimeException $e) {\n            $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());\n\n            return ChatResponse::error('LLM request failed: ' . $e->getMessage());\n        }\n\n        $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);\n        $tokenInfo = '';\n        if ($usage !== null) {\n            $tokenInfo = \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\";\n        }\n        $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);\n\n        \/\/ Step 10: Extract sources\n        $this->emit('sources', 'Quellen extrahieren...');\n        $this->startStep();\n        $sources = $this->extractSources($searchResults);\n        $this->endStep('sources_done', count($sources) . 
' Quellen extrahiert');\n\n        \/\/ Step 11: Save assistant message\n        $this->emit('save_assistant', 'Antwort speichern...');\n        $this->startStep();\n\n        \/** @var array<int, non-empty-string> $sourcesForStorage *\/\n        $sourcesForStorage = array_map(\n            static fn (array $source): string => json_encode($source, JSON_THROW_ON_ERROR),\n            $sources\n        );\n\n        $collectionsJson = json_encode($collections);\n        $this->messageRepo->save(\n            sessionId: $sessionId,\n            role: 'assistant',\n            content: $answer,\n            model: $model,\n            tokensInput: $usage['input_tokens'] ?? null,\n            tokensOutput: $usage['output_tokens'] ?? null,\n            sources: $sourcesForStorage,\n            startMicrotime: $llmStart,\n            endMicrotime: microtime(true),\n            authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,\n            systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,\n            collectionsJson: $collectionsJson,\n            contextLimit: $contextLimit\n        );\n        $this->endStep('save_assistant_done', 'Antwort gespeichert');",
        "new_string": "        \/\/ Step 9: LLM Request\n        $isOllama = str_starts_with($model, 'ollama:');\n        $isClaude = str_starts_with($model, 'claude-');\n        $hasContext = $context !== '';\n        $this->emit('llm', 'Anfrage an ' . ($isOllama ? substr($model, 7) : $model) . '...');\n        $this->startStep();\n        $llmStart = microtime(true);\n        try {\n            if ($isClaude) {\n                $userPrompt = $hasContext ? $this->claude->buildRagPrompt($message, $context) : $message;\n                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext ? $this->claude->getDefaultSystemPrompt() : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n                if ($stylePrompt !== null && $stylePrompt !== '') { $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt; }\n                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n                $answer = $llmResponse['text'];\n                $usage = $llmResponse['usage'];\n            } elseif ($isOllama) {\n                $ollamaModel = substr($model, 7);\n                $instructions = array_filter([$systemPrompt, $stylePrompt]);\n                $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';\n                $userPrompt = $hasContext ? sprintf(\"%sKontext:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message) : $instructionBlock . $message;\n                $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);\n                $usage = null;\n            } else {\n                $this->emit('error', \"Unbekanntes Modell: {$model}\");\n                return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");\n            }\n        } catch (\\RuntimeException $e) {\n            $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());\n            return ChatResponse::error('LLM request failed: ' . $e->getMessage());\n        }\n        $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);\n        $tokenInfo = $usage !== null ? \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\" : '';\n        $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);\n        \/\/ Step 10: Extract sources\n        $this->emit('sources', 'Quellen extrahieren...');\n        $this->startStep();\n        $sources = $this->extractSources($searchResults);\n        $this->endStep('sources_done', count($sources) . ' Quellen extrahiert');\n        \/\/ Step 11: Save assistant message\n        $this->emit('save_assistant', 'Antwort speichern...');\n        $this->startStep();\n        $sourcesForStorage = array_map(static fn (array $s): string => json_encode($s, JSON_THROW_ON_ERROR), $sources);\n        $this->messageRepo->save(\n            sessionId: $sessionId, role: 'assistant', content: $answer, model: $model,\n            tokensInput: $usage['input_tokens'] ?? null, tokensOutput: $usage['output_tokens'] ?? null,\n            sources: $sourcesForStorage, startMicrotime: $llmStart, endMicrotime: microtime(true),\n            authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,\n            systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,\n            collectionsJson: json_encode($collections), contextLimit: $contextLimit\n        );\n        $this->endStep('save_assistant_done', 'Antwort gespeichert');"
    }
}
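
The edited block routes requests by model-name prefix: models starting with "ollama:" are handed to the Ollama service with the prefix stripped, models starting with "claude-" go to the Claude service, and anything else yields an error response. A minimal standalone sketch of that dispatch logic, with a hypothetical resolveModel() helper that does not appear in the logged code:

<?php
declare(strict_types=1);

// Illustrative helper mirroring the prefix dispatch in the edit above.
// Returns the backend name plus the label shown in progress messages.
function resolveModel(string $model): array
{
    if (str_starts_with($model, 'ollama:')) {
        // 'ollama:' is 7 characters, so substr($model, 7) strips the prefix.
        return ['backend' => 'ollama', 'label' => substr($model, 7)];
    }
    if (str_starts_with($model, 'claude-')) {
        return ['backend' => 'claude', 'label' => $model];
    }
    throw new RuntimeException("Unknown model \"{$model}\".");
}

var_dump(resolveModel('ollama:llama3'));            // backend 'ollama', label 'llama3'
var_dump(resolveModel('claude-sonnet-4-20250514')); // backend 'claude', label unchanged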

Response

{
    "tool_response": {
        "filePath": "\/var\/www\/dev.campus.systemische-tools.de\/src\/UseCases\/Chat\/StreamingChatMessageUseCase.php",
        "oldString": "        \/\/ Step 9: LLM Request\n        $isOllama = str_starts_with($model, 'ollama:');\n        $isClaude = str_starts_with($model, 'claude-');\n        $hasContext = $context !== '';\n        $modelLabel = $isOllama ? substr($model, 7) : $model;\n\n        $this->emit('llm', \"Anfrage an {$modelLabel}...\");\n        $this->startStep();\n        $llmStart = microtime(true);\n\n        try {\n            if ($isClaude) {\n                $userPrompt = $hasContext\n                    ? $this->claude->buildRagPrompt($message, $context)\n                    : $message;\n\n                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext\n                    ? $this->claude->getDefaultSystemPrompt()\n                    : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt;\n                }\n\n                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n                $answer = $llmResponse['text'];\n                $usage = $llmResponse['usage'];\n            } elseif ($isOllama) {\n                $ollamaModel = substr($model, 7);\n                $instructions = [];\n                if ($systemPrompt !== null && $systemPrompt !== '') {\n                    $instructions[] = $systemPrompt;\n                }\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $instructions[] = $stylePrompt;\n                }\n                $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';\n\n                $userPrompt = $hasContext\n                    ? sprintf(\"%sKontext aus den Dokumenten:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message)\n                    : $instructionBlock . $message;\n\n                $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);\n                $usage = null;\n            } else {\n                $this->emit('error', \"Unbekanntes Modell: {$model}\");\n\n                return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");\n            }\n        } catch (\\RuntimeException $e) {\n            $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());\n\n            return ChatResponse::error('LLM request failed: ' . $e->getMessage());\n        }\n\n        $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);\n        $tokenInfo = '';\n        if ($usage !== null) {\n            $tokenInfo = \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\";\n        }\n        $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);\n\n        \/\/ Step 10: Extract sources\n        $this->emit('sources', 'Quellen extrahieren...');\n        $this->startStep();\n        $sources = $this->extractSources($searchResults);\n        $this->endStep('sources_done', count($sources) . 
' Quellen extrahiert');\n\n        \/\/ Step 11: Save assistant message\n        $this->emit('save_assistant', 'Antwort speichern...');\n        $this->startStep();\n\n        \/** @var array<int, non-empty-string> $sourcesForStorage *\/\n        $sourcesForStorage = array_map(\n            static fn (array $source): string => json_encode($source, JSON_THROW_ON_ERROR),\n            $sources\n        );\n\n        $collectionsJson = json_encode($collections);\n        $this->messageRepo->save(\n            sessionId: $sessionId,\n            role: 'assistant',\n            content: $answer,\n            model: $model,\n            tokensInput: $usage['input_tokens'] ?? null,\n            tokensOutput: $usage['output_tokens'] ?? null,\n            sources: $sourcesForStorage,\n            startMicrotime: $llmStart,\n            endMicrotime: microtime(true),\n            authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,\n            systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,\n            collectionsJson: $collectionsJson,\n            contextLimit: $contextLimit\n        );\n        $this->endStep('save_assistant_done', 'Antwort gespeichert');",
        "newString": "        \/\/ Step 9: LLM Request\n        $isOllama = str_starts_with($model, 'ollama:');\n        $isClaude = str_starts_with($model, 'claude-');\n        $hasContext = $context !== '';\n        $this->emit('llm', 'Anfrage an ' . ($isOllama ? substr($model, 7) : $model) . '...');\n        $this->startStep();\n        $llmStart = microtime(true);\n        try {\n            if ($isClaude) {\n                $userPrompt = $hasContext ? $this->claude->buildRagPrompt($message, $context) : $message;\n                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext ? $this->claude->getDefaultSystemPrompt() : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n                if ($stylePrompt !== null && $stylePrompt !== '') { $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt; }\n                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n                $answer = $llmResponse['text'];\n                $usage = $llmResponse['usage'];\n            } elseif ($isOllama) {\n                $ollamaModel = substr($model, 7);\n                $instructions = array_filter([$systemPrompt, $stylePrompt]);\n                $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';\n                $userPrompt = $hasContext ? sprintf(\"%sKontext:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message) : $instructionBlock . $message;\n                $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);\n                $usage = null;\n            } else {\n                $this->emit('error', \"Unbekanntes Modell: {$model}\");\n                return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");\n            }\n        } catch (\\RuntimeException $e) {\n            $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());\n            return ChatResponse::error('LLM request failed: ' . $e->getMessage());\n        }\n        $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);\n        $tokenInfo = $usage !== null ? \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\" : '';\n        $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);\n        \/\/ Step 10: Extract sources\n        $this->emit('sources', 'Quellen extrahieren...');\n        $this->startStep();\n        $sources = $this->extractSources($searchResults);\n        $this->endStep('sources_done', count($sources) . ' Quellen extrahiert');\n        \/\/ Step 11: Save assistant message\n        $this->emit('save_assistant', 'Antwort speichern...');\n        $this->startStep();\n        $sourcesForStorage = array_map(static fn (array $s): string => json_encode($s, JSON_THROW_ON_ERROR), $sources);\n        $this->messageRepo->save(\n            sessionId: $sessionId, role: 'assistant', content: $answer, model: $model,\n            tokensInput: $usage['input_tokens'] ?? null, tokensOutput: $usage['output_tokens'] ?? null,\n            sources: $sourcesForStorage, startMicrotime: $llmStart, endMicrotime: microtime(true),\n            authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,\n            systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,\n            collectionsJson: json_encode($collections), contextLimit: $contextLimit\n        );\n        $this->endStep('save_assistant_done', 'Antwort gespeichert');",
        "originalFile": "<?php\n\ndeclare(strict_types=1);\n\nnamespace UseCases\\Chat;\n\n\/\/ @responsibility: Orchestriert Chat-Nachrichten mit SSE-Progress-Events\n\nuse Domain\\Constants;\nuse Domain\\Repository\\ChatMessageRepositoryInterface;\nuse Domain\\Repository\\ChatSessionRepositoryInterface;\nuse Infrastructure\\AI\\ClaudeService;\nuse Infrastructure\\AI\\ContentQualityValidator;\nuse Infrastructure\\AI\\OllamaService;\nuse Infrastructure\\AI\\QdrantService;\nuse Infrastructure\\AI\\ScoringService;\nuse Infrastructure\\Persistence\\ContentConfigRepository;\n\nclass StreamingChatMessageUseCase\n{\n    \/** @var callable|null *\/\n    private $progressCallback;\n\n    private float $stepStart;\n\n    public function __construct(\n        private OllamaService $ollama,\n        private QdrantService $qdrant,\n        private ClaudeService $claude,\n        private ScoringService $scoring,\n        private ChatSessionRepositoryInterface $sessionRepo,\n        private ChatMessageRepositoryInterface $messageRepo,\n        private ContentConfigRepository $configRepo,\n        private ContentQualityValidator $qualityValidator\n    ) {\n    }\n\n    \/**\n     * Set progress callback for SSE events\n     *\n     * @param callable $callback fn(string $step, string $message, ?int $durationMs): void\n     *\/\n    public function setProgressCallback(callable $callback): void\n    {\n        $this->progressCallback = $callback;\n    }\n\n    private function emit(string $step, string $message, ?int $durationMs = null): void\n    {\n        if ($this->progressCallback !== null) {\n            ($this->progressCallback)($step, $message, $durationMs);\n        }\n    }\n\n    private function startStep(): void\n    {\n        $this->stepStart = microtime(true);\n    }\n\n    private function endStep(string $step, string $message): void\n    {\n        $durationMs = (int) round((microtime(true) - $this->stepStart) * Constants::MS_PER_SECOND);\n        $this->emit($step, $message, $durationMs);\n    }\n\n    \/** Execute chat with streaming progress. @param array<string> $collections *\/\n    public function execute(\n        string $sessionUuid, string $message, string $model, array $collections = ['documents'],\n        int $contextLimit = 5, int $authorProfileId = 0, int $systemPromptId = 1,\n        float $temperature = 0.7, int $maxTokens = 4096, int $structureId = 0, bool $qualityCheck = false\n    ): ChatResponse {\n        $totalStart = microtime(true);\n        \/\/ Step 1: Validate session\n        $this->emit('session', 'Session validieren...');\n        $this->startStep();\n        $session = $this->sessionRepo->findByUuid($sessionUuid);\n        if ($session === null) {\n            $this->emit('error', 'Session nicht gefunden');\n            return ChatResponse::error('Session nicht gefunden.');\n        }\n        $sessionId = $session->getId() ?? 
0;\n        $this->endStep('session_done', 'Session validiert');\n        \/\/ Step 2: Validate message\n        $message = trim($message);\n        if ($message === '') {\n            $this->emit('error', 'Keine Nachricht');\n            return ChatResponse::error('Bitte gib eine Frage ein.');\n        }\n        \/\/ Step 3: Save user message\n        $this->emit('save_user', 'User-Nachricht speichern...');\n        $this->startStep();\n        $this->messageRepo->save(sessionId: $sessionId, role: 'user', content: $message, model: $model);\n        $this->endStep('save_user_done', 'User-Nachricht gespeichert');\n        \/\/ Step 4: Auto-set title\n        $currentTitle = $session->getTitle();\n        if ($currentTitle === null || $currentTitle === 'Neuer Chat') {\n            $this->sessionRepo->updateTitle($sessionId, mb_substr($message, 0, 50) . (mb_strlen($message) > 50 ? '...' : ''));\n        }\n        \/\/ Step 5: Get prompts\n        $this->emit('prompts', 'Prompts laden...');\n        $this->startStep();\n        $stylePrompt = $this->getStylePromptFromProfile($authorProfileId);\n        $systemPrompt = $this->getSystemPromptById($systemPromptId);\n        $structurePrompt = $this->getStructurePrompt($structureId);\n        if ($structurePrompt !== null) { $systemPrompt = ($systemPrompt ?? '') . \"\\n\\n\" . $structurePrompt; }\n        $this->endStep('prompts_done', 'Prompts geladen');\n        \/\/ RAG Pipeline\n        $searchResults = [];\n        $context = '';\n        if ($collections !== []) {\n            \/\/ Step 6: Generate embedding\n            $this->emit('embedding', 'Embedding generieren...');\n            $this->startStep();\n            try {\n                $queryEmbedding = $this->ollama->getEmbedding($message);\n            } catch (\\RuntimeException $e) {\n                $this->emit('error', 'Embedding fehlgeschlagen: ' . $e->getMessage());\n                return ChatResponse::error('Embedding generation failed: ' . $e->getMessage());\n            }\n            $this->endStep('embedding_done', 'Embedding generiert (' . count($queryEmbedding) . ' Dimensionen)');\n            \/\/ Step 7: Search collections\n            $this->emit('search', 'Suche in ' . count($collections) . ' Collection(s)...');\n            $this->startStep();\n            $searchResults = $this->searchMultipleCollections($queryEmbedding, $collections, $contextLimit);\n            $this->endStep('search_done', count($searchResults) . ' relevante Chunks gefunden');\n            \/\/ Step 8: Build context\n            if ($searchResults !== []) {\n                $this->emit('context', 'Kontext aufbauen...');\n                $this->startStep();\n                $context = $this->buildContext($searchResults);\n                $this->endStep('context_done', 'Kontext erstellt (' . strlen($context) . ' Zeichen)');\n            }\n        }\n\n        \/\/ Step 9: LLM Request\n        $isOllama = str_starts_with($model, 'ollama:');\n        $isClaude = str_starts_with($model, 'claude-');\n        $hasContext = $context !== '';\n        $modelLabel = $isOllama ? substr($model, 7) : $model;\n\n        $this->emit('llm', \"Anfrage an {$modelLabel}...\");\n        $this->startStep();\n        $llmStart = microtime(true);\n\n        try {\n            if ($isClaude) {\n                $userPrompt = $hasContext\n                    ? $this->claude->buildRagPrompt($message, $context)\n                    : $message;\n\n                $effectiveSystemPrompt = $systemPrompt ?? 
($hasContext\n                    ? $this->claude->getDefaultSystemPrompt()\n                    : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt;\n                }\n\n                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n                $answer = $llmResponse['text'];\n                $usage = $llmResponse['usage'];\n            } elseif ($isOllama) {\n                $ollamaModel = substr($model, 7);\n                $instructions = [];\n                if ($systemPrompt !== null && $systemPrompt !== '') {\n                    $instructions[] = $systemPrompt;\n                }\n                if ($stylePrompt !== null && $stylePrompt !== '') {\n                    $instructions[] = $stylePrompt;\n                }\n                $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';\n\n                $userPrompt = $hasContext\n                    ? sprintf(\"%sKontext aus den Dokumenten:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message)\n                    : $instructionBlock . $message;\n\n                $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);\n                $usage = null;\n            } else {\n                $this->emit('error', \"Unbekanntes Modell: {$model}\");\n\n                return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");\n            }\n        } catch (\\RuntimeException $e) {\n            $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());\n\n            return ChatResponse::error('LLM request failed: ' . $e->getMessage());\n        }\n\n        $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);\n        $tokenInfo = '';\n        if ($usage !== null) {\n            $tokenInfo = \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\";\n        }\n        $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);\n\n        \/\/ Step 10: Extract sources\n        $this->emit('sources', 'Quellen extrahieren...');\n        $this->startStep();\n        $sources = $this->extractSources($searchResults);\n        $this->endStep('sources_done', count($sources) . ' Quellen extrahiert');\n\n        \/\/ Step 11: Save assistant message\n        $this->emit('save_assistant', 'Antwort speichern...');\n        $this->startStep();\n\n        \/** @var array<int, non-empty-string> $sourcesForStorage *\/\n        $sourcesForStorage = array_map(\n            static fn (array $source): string => json_encode($source, JSON_THROW_ON_ERROR),\n            $sources\n        );\n\n        $collectionsJson = json_encode($collections);\n        $this->messageRepo->save(\n            sessionId: $sessionId,\n            role: 'assistant',\n            content: $answer,\n            model: $model,\n            tokensInput: $usage['input_tokens'] ?? null,\n            tokensOutput: $usage['output_tokens'] ?? null,\n            sources: $sourcesForStorage,\n            startMicrotime: $llmStart,\n            endMicrotime: microtime(true),\n            authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,\n            systemPromptId: $systemPromptId > 0 ? 
$systemPromptId : null,\n            collectionsJson: $collectionsJson,\n            contextLimit: $contextLimit\n        );\n        $this->endStep('save_assistant_done', 'Antwort gespeichert');\n\n        \/\/ Step 12: Quality check\n        $qualityValidation = null;\n        if ($qualityCheck) {\n            $this->emit('quality', 'Qualitätsprüfung...');\n            $this->startStep();\n            $structureName = $structureId > 0 ? $this->getStructure... [TRUNCATED-0b175ab588b9aa52]",
        "structuredPatch": [
            {
                "oldStart": 136,
                "oldLines": 95,
                "newStart": 136,
                "newLines": 51,
                "lines": [
                    "         $isOllama = str_starts_with($model, 'ollama:');",
                    "         $isClaude = str_starts_with($model, 'claude-');",
                    "         $hasContext = $context !== '';",
                    "-        $modelLabel = $isOllama ? substr($model, 7) : $model;",
                    "-",
                    "-        $this->emit('llm', \"Anfrage an {$modelLabel}...\");",
                    "+        $this->emit('llm', 'Anfrage an ' . ($isOllama ? substr($model, 7) : $model) . '...');",
                    "         $this->startStep();",
                    "         $llmStart = microtime(true);",
                    "-",
                    "         try {",
                    "             if ($isClaude) {",
                    "-                $userPrompt = $hasContext",
                    "-                    ? $this->claude->buildRagPrompt($message, $context)",
                    "-                    : $message;",
                    "-",
                    "-                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext",
                    "-                    ? $this->claude->getDefaultSystemPrompt()",
                    "-                    : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');",
                    "-",
                    "-                if ($stylePrompt !== null && $stylePrompt !== '') {",
                    "-                    $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt;",
                    "-                }",
                    "-",
                    "+                $userPrompt = $hasContext ? $this->claude->buildRagPrompt($message, $context) : $message;",
                    "+                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext ? $this->claude->getDefaultSystemPrompt() : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');",
                    "+                if ($stylePrompt !== null && $stylePrompt !== '') { $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt; }",
                    "                 $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);",
                    "                 $answer = $llmResponse['text'];",
                    "                 $usage = $llmResponse['usage'];",
                    "             } elseif ($isOllama) {",
                    "                 $ollamaModel = substr($model, 7);",
                    "-                $instructions = [];",
                    "-                if ($systemPrompt !== null && $systemPrompt !== '') {",
                    "-                    $instructions[] = $systemPrompt;",
                    "-                }",
                    "-                if ($stylePrompt !== null && $stylePrompt !== '') {",
                    "-                    $instructions[] = $stylePrompt;",
                    "-                }",
                    "+                $instructions = array_filter([$systemPrompt, $stylePrompt]);",
                    "                 $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';",
                    "-",
                    "-                $userPrompt = $hasContext",
                    "-                    ? sprintf(\"%sKontext aus den Dokumenten:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message)",
                    "-                    : $instructionBlock . $message;",
                    "-",
                    "+                $userPrompt = $hasContext ? sprintf(\"%sKontext:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message) : $instructionBlock . $message;",
                    "                 $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);",
                    "                 $usage = null;",
                    "             } else {",
                    "                 $this->emit('error', \"Unbekanntes Modell: {$model}\");",
                    "-",
                    "                 return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");",
                    "             }",
                    "         } catch (\\RuntimeException $e) {",
                    "             $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());",
                    "-",
                    "             return ChatResponse::error('LLM request failed: ' . $e->getMessage());",
                    "         }",
                    "-",
                    "         $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);",
                    "-        $tokenInfo = '';",
                    "-        if ($usage !== null) {",
                    "-            $tokenInfo = \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\";",
                    "-        }",
                    "+        $tokenInfo = $usage !== null ? \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\" : '';",
                    "         $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);",
                    "-",
                    "         \/\/ Step 10: Extract sources",
                    "         $this->emit('sources', 'Quellen extrahieren...');",
                    "         $this->startStep();",
                    "         $sources = $this->extractSources($searchResults);",
                    "         $this->endStep('sources_done', count($sources) . ' Quellen extrahiert');",
                    "-",
                    "         \/\/ Step 11: Save assistant message",
                    "         $this->emit('save_assistant', 'Antwort speichern...');",
                    "         $this->startStep();",
                    "-",
                    "-        \/** @var array<int, non-empty-string> $sourcesForStorage *\/",
                    "-        $sourcesForStorage = array_map(",
                    "-            static fn (array $source): string => json_encode($source, JSON_THROW_ON_ERROR),",
                    "-            $sources",
                    "-        );",
                    "-",
                    "-        $collectionsJson = json_encode($collections);",
                    "+        $sourcesForStorage = array_map(static fn (array $s): string => json_encode($s, JSON_THROW_ON_ERROR), $sources);",
                    "         $this->messageRepo->save(",
                    "-            sessionId: $sessionId,",
                    "-            role: 'assistant',",
                    "-            content: $answer,",
                    "-            model: $model,",
                    "-            tokensInput: $usage['input_tokens'] ?? null,",
                    "-            tokensOutput: $usage['output_tokens'] ?? null,",
                    "-            sources: $sourcesForStorage,",
                    "-            startMicrotime: $llmStart,",
                    "-            endMicrotime: microtime(true),",
                    "+            sessionId: $sessionId, role: 'assistant', content: $answer, model: $model,",
                    "+            tokensInput: $usage['input_tokens'] ?? null, tokensOutput: $usage['output_tokens'] ?? null,",
                    "+            sources: $sourcesForStorage, startMicrotime: $llmStart, endMicrotime: microtime(true),",
                    "             authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,",
                    "             systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,",
                    "-            collectionsJson: $collectionsJson,",
                    "-            contextLimit: $contextLimit",
                    "+            collectionsJson: json_encode($collections), contextLimit: $contextLimit",
                    "         );",
                    "         $this->endStep('save_assistant_done', 'Antwort gespeichert');",
                    " "
                ]
            }
        ],
        "userModified": false,
        "replaceAll": false
    }
}
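
One behavioral nuance in this diff: the removed code appended $systemPrompt and $stylePrompt to $instructions only when each was neither null nor '', while the replacement array_filter([$systemPrompt, $stylePrompt]) uses PHP's default truthiness filter, which additionally drops the string '0'. A standalone sketch comparing the two (function names and prompt values are illustrative, not from the logged code):

<?php
declare(strict_types=1);

// Old style: explicit null/empty checks, as in the removed lines.
function buildBlockExplicit(?string $system, ?string $style): string
{
    $instructions = [];
    if ($system !== null && $system !== '') {
        $instructions[] = $system;
    }
    if ($style !== null && $style !== '') {
        $instructions[] = $style;
    }
    return $instructions !== [] ? implode("\n\n", $instructions) . "\n\n" : '';
}

// New style: array_filter() without a callback removes every falsy entry
// (null and '', but also '0' and false).
function buildBlockFiltered(?string $system, ?string $style): string
{
    $instructions = array_filter([$system, $style]);
    return $instructions !== [] ? implode("\n\n", $instructions) . "\n\n" : '';
}

var_dump(buildBlockExplicit('Sei knapp.', null) === buildBlockFiltered('Sei knapp.', null)); // bool(true)
var_dump(buildBlockExplicit('0', null)); // "0" plus two newlines: kept by the explicit checks
var_dump(buildBlockFiltered('0', null)); // "": dropped by array_filter()

For real system or style prompts the two forms agree; the edge case only matters if a prompt could be the literal string '0'.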