{
"tool_response": {
"filePath": "\/var\/www\/dev.campus.systemische-tools.de\/src\/UseCases\/Chat\/StreamingChatMessageUseCase.php",
"oldString": "* 1000)",
"newString": "* Constants::MS_PER_SECOND)",
"originalFile": "<?php\n\ndeclare(strict_types=1);\n\nnamespace UseCases\\Chat;\n\n\/\/ @responsibility: Orchestriert Chat-Nachrichten mit SSE-Progress-Events\n\nuse Domain\\Constants;\nuse Domain\\Repository\\ChatMessageRepositoryInterface;\nuse Domain\\Repository\\ChatSessionRepositoryInterface;\nuse Infrastructure\\AI\\ClaudeService;\nuse Infrastructure\\AI\\ContentQualityValidator;\nuse Infrastructure\\AI\\OllamaService;\nuse Infrastructure\\AI\\QdrantService;\nuse Infrastructure\\AI\\ScoringService;\nuse Infrastructure\\Persistence\\ContentConfigRepository;\n\nclass StreamingChatMessageUseCase\n{\n \/** @var callable|null *\/\n private $progressCallback;\n\n private float $stepStart;\n\n public function __construct(\n private OllamaService $ollama,\n private QdrantService $qdrant,\n private ClaudeService $claude,\n private ScoringService $scoring,\n private ChatSessionRepositoryInterface $sessionRepo,\n private ChatMessageRepositoryInterface $messageRepo,\n private ContentConfigRepository $configRepo,\n private ContentQualityValidator $qualityValidator\n ) {\n }\n\n \/**\n * Set progress callback for SSE events\n *\n * @param callable $callback fn(string $step, string $message, ?int $durationMs): void\n *\/\n public function setProgressCallback(callable $callback): void\n {\n $this->progressCallback = $callback;\n }\n\n private function emit(string $step, string $message, ?int $durationMs = null): void\n {\n if ($this->progressCallback !== null) {\n ($this->progressCallback)($step, $message, $durationMs);\n }\n }\n\n private function startStep(): void\n {\n $this->stepStart = microtime(true);\n }\n\n private function endStep(string $step, string $message): void\n {\n $durationMs = (int) round((microtime(true) - $this->stepStart) * 1000);\n $this->emit($step, $message, $durationMs);\n }\n\n \/**\n * Execute chat with streaming progress\n *\n * @param array<string> $collections\n *\/\n public function execute(\n string $sessionUuid,\n string $message,\n string $model,\n array $collections = ['documents'],\n int $contextLimit = 5,\n int $authorProfileId = 0,\n int $systemPromptId = 1,\n float $temperature = 0.7,\n int $maxTokens = 4096,\n int $structureId = 0,\n bool $qualityCheck = false\n ): ChatResponse {\n $totalStart = microtime(true);\n\n \/\/ Step 1: Validate session\n $this->emit('session', 'Session validieren...');\n $this->startStep();\n $session = $this->sessionRepo->findByUuid($sessionUuid);\n if ($session === null) {\n $this->emit('error', 'Session nicht gefunden');\n\n return ChatResponse::error('Session nicht gefunden.');\n }\n $sessionId = $session->getId() ?? 0;\n $this->endStep('session_done', 'Session validiert');\n\n \/\/ Step 2: Validate message\n $message = trim($message);\n if ($message === '') {\n $this->emit('error', 'Keine Nachricht');\n\n return ChatResponse::error('Bitte gib eine Frage ein.');\n }\n\n \/\/ Step 3: Save user message\n $this->emit('save_user', 'User-Nachricht speichern...');\n $this->startStep();\n $this->messageRepo->save(\n sessionId: $sessionId,\n role: 'user',\n content: $message,\n model: $model\n );\n $this->endStep('save_user_done', 'User-Nachricht gespeichert');\n\n \/\/ Step 4: Auto-set title from first message\n $currentTitle = $session->getTitle();\n if ($currentTitle === null || $currentTitle === 'Neuer Chat') {\n $title = mb_substr($message, 0, 50) . (mb_strlen($message) > 50 ? '...' 
: '');\n $this->sessionRepo->updateTitle($sessionId, $title);\n }\n\n \/\/ Step 5: Get prompts\n $this->emit('prompts', 'Prompts laden...');\n $this->startStep();\n $stylePrompt = $this->getStylePromptFromProfile($authorProfileId);\n $systemPrompt = $this->getSystemPromptById($systemPromptId);\n $structurePrompt = $this->getStructurePrompt($structureId);\n if ($structurePrompt !== null) {\n $systemPrompt = ($systemPrompt ?? '') . \"\\n\\n\" . $structurePrompt;\n }\n $this->endStep('prompts_done', 'Prompts geladen');\n\n \/\/ RAG Pipeline\n $searchResults = [];\n $context = '';\n\n if ($collections !== []) {\n \/\/ Step 6: Generate embedding\n $this->emit('embedding', 'Embedding generieren...');\n $this->startStep();\n\n try {\n $queryEmbedding = $this->ollama->getEmbedding($message);\n } catch (\\RuntimeException $e) {\n $this->emit('error', 'Embedding fehlgeschlagen: ' . $e->getMessage());\n\n return ChatResponse::error('Embedding generation failed: ' . $e->getMessage());\n }\n $this->endStep('embedding_done', 'Embedding generiert (' . count($queryEmbedding) . ' Dimensionen)');\n\n \/\/ Step 7: Search collections\n $collectionCount = count($collections);\n $this->emit('search', \"Suche in {$collectionCount} Collection(s)...\");\n $this->startStep();\n $searchResults = $this->searchMultipleCollections($queryEmbedding, $collections, $contextLimit);\n $resultCount = count($searchResults);\n $this->endStep('search_done', \"{$resultCount} relevante Chunks gefunden\");\n\n \/\/ Step 8: Build context\n if ($searchResults !== []) {\n $this->emit('context', 'Kontext aufbauen...');\n $this->startStep();\n $context = $this->buildContext($searchResults);\n $contextLen = strlen($context);\n $this->endStep('context_done', \"Kontext erstellt ({$contextLen} Zeichen)\");\n }\n }\n\n \/\/ Step 9: LLM Request\n $isOllama = str_starts_with($model, 'ollama:');\n $isClaude = str_starts_with($model, 'claude-');\n $hasContext = $context !== '';\n $modelLabel = $isOllama ? substr($model, 7) : $model;\n\n $this->emit('llm', \"Anfrage an {$modelLabel}...\");\n $this->startStep();\n $llmStart = microtime(true);\n\n try {\n if ($isClaude) {\n $userPrompt = $hasContext\n ? $this->claude->buildRagPrompt($message, $context)\n : $message;\n\n $effectiveSystemPrompt = $systemPrompt ?? ($hasContext\n ? $this->claude->getDefaultSystemPrompt()\n : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');\n\n if ($stylePrompt !== null && $stylePrompt !== '') {\n $effectiveSystemPrompt .= \"\\n\\n\" . $stylePrompt;\n }\n\n $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);\n $answer = $llmResponse['text'];\n $usage = $llmResponse['usage'];\n } elseif ($isOllama) {\n $ollamaModel = substr($model, 7);\n $instructions = [];\n if ($systemPrompt !== null && $systemPrompt !== '') {\n $instructions[] = $systemPrompt;\n }\n if ($stylePrompt !== null && $stylePrompt !== '') {\n $instructions[] = $stylePrompt;\n }\n $instructionBlock = $instructions !== [] ? implode(\"\\n\\n\", $instructions) . \"\\n\\n\" : '';\n\n $userPrompt = $hasContext\n ? sprintf(\"%sKontext aus den Dokumenten:\\n\\n%s\\n\\n---\\n\\nFrage: %s\", $instructionBlock, $context, $message)\n : $instructionBlock . 
$message;\n\n $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);\n $usage = null;\n } else {\n $this->emit('error', \"Unbekanntes Modell: {$model}\");\n\n return ChatResponse::error(\"Unknown model \\\"{$model}\\\".\");\n }\n } catch (\\RuntimeException $e) {\n $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());\n\n return ChatResponse::error('LLM request failed: ' . $e->getMessage());\n }\n\n $llmDuration = (int) round((microtime(true) - $llmStart) * 1000);\n $tokenInfo = '';\n if ($usage !== null) {\n $tokenInfo = \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\";\n }\n $this->emit('llm_done', \"Antwort erhalten{$tokenInfo}\", $llmDuration);\n\n \/\/ Step 10: Extract sources\n $this->emit('sources', 'Quellen extrahieren...');\n $this->startStep();\n $sources = $this->extractSources($searchResults);\n $this->endStep('sources_done', count($sources) . ' Quellen extrahiert');\n\n \/\/ Step 11: Save assistant message\n $this->emit('save_assistant', 'Antwort speichern...');\n $this->startStep();\n\n \/** @var array<int, non-empty-string> $sourcesForStorage *\/\n $sourcesForStorage = array_map(\n static fn (array $source): string => json_encode($source, JSON_THROW_ON_ERROR),\n $sources\n );\n\n $collectionsJson = json_encode($collections);\n $this->messageRepo->save(\n sessionId: $sessionId,\n role: 'assistant',\n content: $answer,\n model: $model,\n tokensInput: $usage['input_tokens'] ?? null,\n tokensOutput: $usage['output_tokens'] ?? null,\n sources: $sourcesForStorage,\n startMicrotime: $llmStart,\n endMicrotime: microtime(true),\n authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,\n systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,\n collectionsJson: $collectionsJson,\n contextLimit: $contextLimit\n );\n $this->endS... [TRUNCATED-4338acf55fe3fcc2]",
"structuredPatch": [
{
"oldStart": 59,
"oldLines": 7,
"newStart": 59,
"newLines": 7,
"lines": [
" ",
" private function endStep(string $step, string $message): void",
" {",
"- $durationMs = (int) round((microtime(true) - $this->stepStart) * 1000);",
"+ $durationMs = (int) round((microtime(true) - $this->stepStart) * Constants::MS_PER_SECOND);",
" $this->emit($step, $message, $durationMs);",
" }",
" "
]
},
{
"oldStart": 223,
"oldLines": 7,
"newStart": 223,
"newLines": 7,
"lines": [
" return ChatResponse::error('LLM request failed: ' . $e->getMessage());",
" }",
" ",
"- $llmDuration = (int) round((microtime(true) - $llmStart) * 1000);",
"+ $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);",
" $tokenInfo = '';",
" if ($usage !== null) {",
" $tokenInfo = \" ({$usage['input_tokens']} in \/ {$usage['output_tokens']} out)\";"
]
},
{
"oldStart": 281,
"oldLines": 7,
"newStart": 281,
"newLines": 7,
"lines": [
" }",
" ",
" \/\/ Final",
"- $totalDuration = (int) round((microtime(true) - $totalStart) * 1000);",
"+ $totalDuration = (int) round((microtime(true) - $totalStart) * Constants::MS_PER_SECOND);",
" $this->emit('complete', \"Fertig in {$totalDuration}ms\", $totalDuration);",
" ",
" \/\/ Build response"
]
}
],
"userModified": false,
"replaceAll": true
}
}
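
The patch above swaps the magic number 1000 for Domain\Constants::MS_PER_SECOND in three duration calculations (endStep, the LLM timing, and the total). The class below is a minimal sketch of what that constant's definition presumably looks like; the project's real Constants file is not shown in the log, and the only detail the diff actually pins down is the value, since `(float seconds) * 1000` must keep producing milliseconds.

<?php

declare(strict_types=1);

namespace Domain;

// Hypothetical sketch -- the real Domain\Constants class is not part of
// this log. MS_PER_SECOND = 1000 is the one value the diff guarantees,
// because the edit replaces that literal in three duration calculations.
final class Constants
{
    // Conversion factor from microtime(true) deltas (float seconds)
    // to integer millisecond durations.
    public const MS_PER_SECOND = 1000;
}

// Usage mirroring endStep() after the patch:
// $durationMs = (int) round((microtime(true) - $this->stepStart) * Constants::MS_PER_SECOND);

Naming the factor makes the unit conversion self-documenting at all three call sites and gives the replaceAll edit a single unambiguous target string.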
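
For context on how these progress events leave the process: setProgressCallback documents the signature fn(string $step, string $message, ?int $durationMs): void, and the class comment says the steps are streamed as SSE progress events. Below is a sketch of how a controller might wire that callback to a text/event-stream response; the headers and SSE framing are standard, but the event name and JSON field names are illustrative assumptions, not taken from the project.

<?php

// Hypothetical controller-side wiring; $useCase is the
// StreamingChatMessageUseCase instance from the log above.
header('Content-Type: text/event-stream');
header('Cache-Control: no-cache');

$useCase->setProgressCallback(function (string $step, string $message, ?int $durationMs): void {
    // One SSE event per pipeline step ('session', 'embedding', 'llm_done', ...).
    echo "event: progress\n";
    echo 'data: ' . json_encode([
        'step'        => $step,
        'message'     => $message,
        'duration_ms' => $durationMs,
    ], JSON_THROW_ON_ERROR) . "\n\n";
    while (ob_get_level() > 0) {
        ob_end_flush(); // drain PHP's output buffers so the event is sent now
    }
    flush();
});

The durations emitted here are exactly the values the patched code computes via Constants::MS_PER_SECOND, so a client can render per-step timings as they arrive rather than waiting for the final 'complete' event.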