{
"event": "PreToolUse",
"tool_name": "Read",
"tool_input": {
"file_path": "\/var\/www\/dev.campus.systemische-tools.de\/src\/Controller\/ChatController.php",
"offset": 94,
"limit": 120
}
}
{
"tool_response": {
"type": "text",
"file": {
"filePath": "\/var\/www\/dev.campus.systemische-tools.de\/src\/Controller\/ChatController.php",
"content": " }\n\n \/**\n * POST \/chat\/{uuid}\/message (HTMX)\n * Process message and return HTML response\n *\/\n public function message(string $uuid): void\n {\n $session = $this->getSession($uuid);\n\n if ($session === null) {\n $this->view('chat.partials.error', ['error' => 'Session nicht gefunden.']);\n\n return;\n }\n\n $question = trim($_POST['message'] ?? '');\n $model = $this->validateModel($_POST['model'] ?? $session['model']);\n $sessionCollections = json_decode($session['collections'] ?? '[\"documents\"]', true) ?: ['documents'];\n $collections = $this->validateCollections($_POST['collections'] ?? $sessionCollections);\n $contextLimit = $this->validateContextLimit((int) ($_POST['context_limit'] ?? $session['context_limit'] ?? 5));\n $authorProfileId = $this->validateAuthorProfileId((int) ($_POST['author_profile_id'] ?? $session['author_profile_id'] ?? 0));\n $systemPromptId = (int) ($_POST['system_prompt_id'] ?? $session['system_prompt_id'] ?? 1);\n $temperature = $this->validateTemperature((float) ($_POST['temperature'] ?? $session['temperature'] ?? 0.7));\n $maxTokens = $this->validateMaxTokens((int) ($_POST['max_tokens'] ?? $session['max_tokens'] ?? 4096));\n\n \/\/ Update session if settings changed\n $currentLimit = (int) ($session['context_limit'] ?? 5);\n $currentProfileId = (int) ($session['author_profile_id'] ?? 0);\n $currentTemperature = (float) ($session['temperature'] ?? 0.7);\n $currentMaxTokens = (int) ($session['max_tokens'] ?? 4096);\n $collectionsJson = json_encode($collections);\n if ($model !== $session['model'] || $collectionsJson !== ($session['collections'] ?? '[\"documents\"]') || $contextLimit !== $currentLimit || $authorProfileId !== $currentProfileId || $temperature !== $currentTemperature || $maxTokens !== $currentMaxTokens) {\n $this->updateSessionSettings($session['id'], $model, $collections, $contextLimit, $authorProfileId, $temperature, $maxTokens);\n }\n\n if ($question === '') {\n $this->view('chat.partials.error', ['error' => 'Bitte gib eine Frage ein.']);\n\n return;\n }\n\n \/\/ Validate collection compatibility (vector dimensions)\n if (!empty($collections)) {\n $compatibility = $this->validateCollectionCompatibility($collections);\n if (!$compatibility['valid']) {\n $this->view('chat.partials.error', [\n 'error' => 'Collection-Fehler: ' . $compatibility['error'],\n 'details' => 'Bitte wähle nur Collections mit gleichem Embedding-Modell.',\n ]);\n\n return;\n }\n }\n\n try {\n \/\/ Save user message\n $this->saveMessage($session['id'], 'user', $question, $model);\n\n \/\/ Auto-set title from first message\n if ($session['title'] === null) {\n $title = mb_substr($question, 0, 50) . (mb_strlen($question) > 50 ? '...' 
: '');\n $this->updateSessionTitle($session['id'], $title);\n }\n\n \/\/ Get style prompt from author profile\n $stylePrompt = $this->getStylePromptFromProfile($authorProfileId);\n\n \/\/ Get system prompt from content_config\n $systemPromptData = $this->getSystemPromptById($systemPromptId);\n $customSystemPrompt = $this->extractPromptText($systemPromptData);\n\n \/\/ Track timing\n $startMicrotime = microtime(true);\n\n \/\/ Get response from AI\n $result = $this->askChat(\n $question,\n $model,\n $collections,\n $contextLimit,\n $stylePrompt,\n $customSystemPrompt,\n $temperature,\n $maxTokens\n );\n\n $endMicrotime = microtime(true);\n\n if (isset($result['error'])) {\n $this->view('chat.partials.error', ['error' => $result['error']]);\n\n return;\n }\n\n \/\/ Save assistant message with full tracking\n $tokensInput = $result['usage']['input_tokens'] ?? null;\n $tokensOutput = $result['usage']['output_tokens'] ?? null;\n $sources = $result['sources'] ?? [];\n\n $this->saveMessage(\n $session['id'],\n 'assistant',\n $result['answer'],\n $model,\n $tokensInput,\n $tokensOutput,\n $sources,\n $startMicrotime,\n $endMicrotime,\n $authorProfileId > 0 ? $authorProfileId : null,\n $systemPromptId > 0 ? $systemPromptId : null,\n $collectionsJson,\n $contextLimit\n );\n\n $this->renderResponse($question, $result, $model);\n } catch (\\Exception $e) {\n $this->view('chat.partials.error', ['error' => 'Fehler: ' . $e->getMessage()]);\n }",
"numLines": 120,
"startLine": 94,
"totalLines": 900
}
}
}
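
A minimal sketch of how a consumer of this log might pair the PreToolUse event above with its tool_response and report which slice of ChatController.php the Read call returned. The helper name describeReadEvent, and the assumption that each JSON object has already been isolated into its own string, are illustrative only and not part of the logged tooling.

<?php

declare(strict_types=1);

// Hypothetical consumer of the two log objects above. Assumes each JSON
// object has been split out into its own string before being passed in.
function describeReadEvent(string $eventJson, string $responseJson): string
{
    $event    = json_decode($eventJson, true, 512, JSON_THROW_ON_ERROR);
    $response = json_decode($responseJson, true, 512, JSON_THROW_ON_ERROR);

    if (($event['tool_name'] ?? null) !== 'Read') {
        return 'not a Read event';
    }

    $file  = $response['tool_response']['file'] ?? [];
    $path  = $file['filePath'] ?? ($event['tool_input']['file_path'] ?? 'unknown');
    $start = (int) ($file['startLine'] ?? 1);
    $count = (int) ($file['numLines'] ?? 0);
    $total = (int) ($file['totalLines'] ?? 0);

    // For the entries above: offset 94 with limit 120 comes back as
    // startLine 94 and numLines 120, i.e. lines 94-213 of the 900-line file.
    return sprintf('%s: lines %d-%d of %d', $path, $start, $start + $count - 1, $total);
}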