Backup #2028

ID: 2028
File path: /var/www/dev.campus.systemische-tools.de/src/UseCases/Chat/StreamingChatMessageUseCase.php
Version: 12
Type: modified
Size: 15.6 KB
Hash: bc23eca20ad723466ff340954ce96c2864ec60ce578770afdedbfda95230cbd8
Date: 2025-12-28 23:22:41
Modified by: claude-code-hook
Reason: Claude Code pre-hook backup before edit operation
File exists: Yes

File contents

<?php

declare(strict_types=1);

namespace UseCases\Chat;

// @responsibility: Orchestrates chat messages with SSE progress events

use Domain\Constants;
use Domain\Repository\ChatMessageRepositoryInterface;
use Domain\Repository\ChatSessionRepositoryInterface;
use Domain\Service\SearchServiceInterface;
use Infrastructure\AI\ClaudeService;
use Infrastructure\AI\ContentQualityValidator;
use Infrastructure\AI\OllamaService;
use Infrastructure\AI\QdrantService;
use Infrastructure\AI\ScoringService;
use Infrastructure\Persistence\ContentConfigRepository;

class StreamingChatMessageUseCase
{
    /** @var callable|null */
    private $progressCallback;

    private float $stepStart;

    public function __construct(
        private OllamaService $ollama,
        private QdrantService $qdrant,
        private ClaudeService $claude,
        private ScoringService $scoring,
        private ChatSessionRepositoryInterface $sessionRepo,
        private ChatMessageRepositoryInterface $messageRepo,
        private ContentConfigRepository $configRepo,
        private ContentQualityValidator $qualityValidator,
        private SearchServiceInterface $searchService
    ) {
    }

    /**
     * Set progress callback for SSE events
     *
     * @param callable $callback fn(string $step, string $message, ?int $durationMs): void
     */
    public function setProgressCallback(callable $callback): void
    {
        $this->progressCallback = $callback;
    }

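    /** Forward a progress event to the registered callback, if one is set. */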
    private function emit(string $step, string $message, ?int $durationMs = null): void
    {
        if ($this->progressCallback !== null) {
            ($this->progressCallback)($step, $message, $durationMs);
        }
    }

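    /** Record the start time of the current pipeline step. */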
    private function startStep(): void
    {
        $this->stepStart = microtime(true);
    }

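    /** Emit a "done" progress event carrying the elapsed time since startStep(). */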
    private function endStep(string $step, string $message): void
    {
        $durationMs = (int) round((microtime(true) - $this->stepStart) * Constants::MS_PER_SECOND);
        $this->emit($step, $message, $durationMs);
    }

    /**
     * Execute the chat pipeline with streaming progress events.
     *
     * @param array<string> $collections
     */
    public function execute(
        string $sessionUuid, string $message, string $model, array $collections = ['documents'],
        int $contextLimit = 5, int $authorProfileId = 0, int $systemPromptId = 1,
        float $temperature = 0.7, int $maxTokens = 4096, int $structureId = 0, bool $qualityCheck = false
    ): ChatResponse {
        $totalStart = microtime(true);
        // Step 1: Validate session
        $this->emit('session', 'Session validieren...');
        $this->startStep();
        $session = $this->sessionRepo->findByUuid($sessionUuid);
        if ($session === null) {
            $this->emit('error', 'Session nicht gefunden');
            return ChatResponse::error('Session nicht gefunden.');
        }
        $sessionId = $session->getId() ?? 0;
        $this->endStep('session_done', 'Session validiert');
        // Step 2: Validate message
        $message = trim($message);
        if ($message === '') {
            $this->emit('error', 'Keine Nachricht');
            return ChatResponse::error('Bitte gib eine Frage ein.');
        }
        // Step 3: Save user message
        $this->emit('save_user', 'User-Nachricht speichern...');
        $this->startStep();
        $this->messageRepo->save(sessionId: $sessionId, role: 'user', content: $message, model: $model);
        $this->endStep('save_user_done', 'User-Nachricht gespeichert');
        // Step 4: Auto-set title
        $currentTitle = $session->getTitle();
        if ($currentTitle === null || $currentTitle === 'Neuer Chat') {
            $this->sessionRepo->updateTitle($sessionId, mb_substr($message, 0, 50) . (mb_strlen($message) > 50 ? '...' : ''));
        }
        // Step 5: Get prompts
        $this->emit('prompts', 'Prompts laden...');
        $this->startStep();
        $stylePrompt = $this->getStylePromptFromProfile($authorProfileId);
        $systemPrompt = $this->getSystemPromptById($systemPromptId);
        $structurePrompt = $this->getStructurePrompt($structureId);
        if ($structurePrompt !== null) { $systemPrompt = ($systemPrompt ?? '') . "\n\n" . $structurePrompt; }
        $this->endStep('prompts_done', 'Prompts geladen');
        // RAG Pipeline
        $searchResults = [];
        $context = '';
        if ($collections !== []) {
            // Step 6+7: Semantic search via ContentSearchService
            $this->emit('search', 'Semantische Suche in ' . count($collections) . ' Collection(s)...');
            $this->startStep();
            $searchResults = $this->searchWithSemantics($message, $collections, $contextLimit);
            $semanticCount = count(array_filter($searchResults, static fn ($r) => isset($r['intent'])));
            $this->endStep('search_done', count($searchResults) . ' Chunks gefunden (' . $semanticCount . ' mit Semantik)');
            // Step 8: Build context with semantic metadata
            if ($searchResults !== []) {
                $this->emit('context', 'Kontext aufbauen...');
                $this->startStep();
                $context = $this->buildSemanticContext($searchResults);
                $this->endStep('context_done', 'Kontext erstellt (' . strlen($context) . ' Zeichen)');
            }
        }

        // Step 9: LLM Request
        $isOllama = str_starts_with($model, 'ollama:');
        $isClaude = str_starts_with($model, 'claude-');
        $hasContext = $context !== '';
        $this->emit('llm', 'Anfrage an ' . ($isOllama ? substr($model, 7) : $model) . '...');
        $this->startStep();
        $llmStart = microtime(true);
        try {
            if ($isClaude) {
                $userPrompt = $hasContext ? $this->claude->buildRagPrompt($message, $context) : $message;
                $effectiveSystemPrompt = $systemPrompt ?? ($hasContext ? $this->claude->getDefaultSystemPrompt() : 'Du bist ein hilfreicher Assistent. Antworte auf Deutsch, präzise und hilfreich.');
                if ($stylePrompt !== null && $stylePrompt !== '') { $effectiveSystemPrompt .= "\n\n" . $stylePrompt; }
                $llmResponse = $this->claude->ask($userPrompt, $effectiveSystemPrompt, $model, $maxTokens, $temperature);
                $answer = $llmResponse['text'];
                $usage = $llmResponse['usage'];
            } elseif ($isOllama) {
                $ollamaModel = substr($model, 7);
                $instructions = array_filter([$systemPrompt, $stylePrompt]);
                $instructionBlock = $instructions !== [] ? implode("\n\n", $instructions) . "\n\n" : '';
                $userPrompt = $hasContext ? sprintf("%sKontext:\n\n%s\n\n---\n\nFrage: %s", $instructionBlock, $context, $message) : $instructionBlock . $message;
                $answer = $this->ollama->generate($userPrompt, $ollamaModel, $temperature);
                $usage = null;
            } else {
                $this->emit('error', "Unbekanntes Modell: {$model}");
                return ChatResponse::error("Unknown model \"{$model}\".");
            }
        } catch (\RuntimeException $e) {
            $this->emit('error', 'LLM-Fehler: ' . $e->getMessage());
            return ChatResponse::error('LLM request failed: ' . $e->getMessage());
        }
        $llmDuration = (int) round((microtime(true) - $llmStart) * Constants::MS_PER_SECOND);
        $tokenInfo = $usage !== null ? " ({$usage['input_tokens']} in / {$usage['output_tokens']} out)" : '';
        $this->emit('llm_done', "Antwort erhalten{$tokenInfo}", $llmDuration);
        // Step 10: Extract sources
        $this->emit('sources', 'Quellen extrahieren...');
        $this->startStep();
        $sources = $this->extractSources($searchResults);
        $this->endStep('sources_done', count($sources) . ' Quellen extrahiert');
        // Step 11: Save assistant message
        $this->emit('save_assistant', 'Antwort speichern...');
        $this->startStep();
        $sourcesForStorage = array_map(static fn (array $s): string => json_encode($s, JSON_THROW_ON_ERROR), $sources);
        $this->messageRepo->save(
            sessionId: $sessionId, role: 'assistant', content: $answer, model: $model,
            tokensInput: $usage['input_tokens'] ?? null, tokensOutput: $usage['output_tokens'] ?? null,
            sources: $sourcesForStorage, startMicrotime: $llmStart, endMicrotime: microtime(true),
            authorProfileId: $authorProfileId > 0 ? $authorProfileId : null,
            systemPromptId: $systemPromptId > 0 ? $systemPromptId : null,
            collectionsJson: json_encode($collections), contextLimit: $contextLimit
        );
        $this->endStep('save_assistant_done', 'Antwort gespeichert');

        // Step 12: Quality check
        $qualityValidation = null;
        if ($qualityCheck) {
            $this->emit('quality', 'Qualitätsprüfung...');
            $this->startStep();
            $structureName = $structureId > 0 ? $this->getStructureName($structureId) : null;
            $qualityValidation = $this->qualityValidator->validate(
                question: $message,
                answer: $answer,
                sources: $sources,
                structureName: $structureName
            );
            $score = round($qualityValidation['score']);
            $this->endStep('quality_done', "Qualität: {$score}%");
        }

        // Final
        $totalDuration = (int) round((microtime(true) - $totalStart) * Constants::MS_PER_SECOND);
        $this->emit('complete', "Fertig in {$totalDuration}ms", $totalDuration);

        // Build response
        $result = [
            'answer' => $answer,
            'sources' => $sources,
            'usage' => $usage,
            'chunks_used' => count($searchResults),
        ];

        $response = ChatResponse::fromServiceResponse($result, (microtime(true) - $totalStart));
        if ($qualityValidation !== null) {
            $response = $response->withQualityValidation($qualityValidation);
        }

        return $response;
    }

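    /**
     * Search each Qdrant collection for the embedding, weight hits by recency and
     * authority via the ScoringService, and return the top $limit results overall.
     *
     * @param array<float>  $embedding
     * @param array<string> $collections
     */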
    private function searchMultipleCollections(array $embedding, array $collections, int $limit): array
    {
        $allResults = [];
        foreach ($collections as $collection) {
            try {
                $results = $this->qdrant->search($embedding, $collection, $limit);
                foreach ($results as &$result) { $result['payload']['_collection'] = $collection; }
                $allResults = array_merge($allResults, $results);
            } catch (\RuntimeException) { continue; }
        }
        foreach ($allResults as &$result) {
            $processedAt = $result['payload']['processed_at'] ?? null;
            $documentDate = $processedAt !== null ? new \DateTime($processedAt) : new \DateTime();
            $authorityScore = (float) ($result['payload']['authority_score'] ?? 0.5);
            $result['weighted_score'] = $this->scoring->calculateScore($result['score'], $documentDate, $authorityScore);
        }
        unset($result);
        usort($allResults, static fn ($a, $b) => ($b['weighted_score'] ?? 0.0) <=> ($a['weighted_score'] ?? 0.0));
        return array_slice($allResults, 0, $limit);
    }

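    /** Concatenate chunk contents into a source-labelled context block, capped at roughly $maxTokens * 4 characters. */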
    private function buildContext(array $searchResults, int $maxTokens = 3000): string
    {
        $contextParts = [];
        $totalChars = 0;
        $maxChars = $maxTokens * 4;
        foreach ($searchResults as $index => $result) {
            $payload = $result['payload'];
            $content = (string) ($payload['content'] ?? $payload['content_preview'] ?? '');
            $docTitle = (string) ($payload['document_title'] ?? $payload['title'] ?? 'Unbekannt');
            if ($totalChars + strlen($content) > $maxChars) { break; }
            $contextParts[] = sprintf('[Quelle %d: %s]%s%s', $index + 1, $docTitle, "\n", $content);
            $totalChars += strlen($content);
        }
        return implode("\n\n---\n\n", $contextParts);
    }

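    /** Deduplicate search results by document title and reduce them to title, score, content and collection. */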
    private function extractSources(array $searchResults): array
    {
        $sources = [];
        $seen = [];
        foreach ($searchResults as $result) {
            $payload = $result['payload'];
            $docTitle = (string) ($payload['document_title'] ?? $payload['title'] ?? '');
            if ($docTitle === '' || isset($seen[$docTitle])) { continue; }
            $source = ['title' => $docTitle, 'score' => round($result['score'], 3)];
            $content = $payload['content'] ?? $payload['content_preview'] ?? null;
            if (is_string($content) && $content !== '') { $source['content'] = $content; }
            if (isset($payload['_collection'])) { $source['collection'] = $payload['_collection']; }
            $sources[] = $source;
            $seen[$docTitle] = true;
        }
        return $sources;
    }

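    /** Build a writing-style instruction from an author-profile config entry, or null if the profile yields nothing. */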
    private function getStylePromptFromProfile(int $profileId): ?string
    {
        if ($profileId === 0) { return null; }
        $profile = $this->configRepo->findByIdAndType($profileId, 'author_profile');
        if ($profile === null) { return null; }
        $config = json_decode($profile['content'] ?? '{}', true);
        if ($config === null) { return null; }
        $parts = [];
        if (isset($config['stimme']['ton'])) { $parts[] = 'Ton: ' . $config['stimme']['ton']; }
        if (isset($config['stimme']['perspektive'])) { $parts[] = 'Perspektive: ' . $config['stimme']['perspektive']; }
        if (isset($config['stil']['fachsprache']) && $config['stil']['fachsprache']) { $parts[] = 'Verwende Fachsprache'; }
        if (isset($config['stil']['beispiele']) && $config['stil']['beispiele'] === 'häufig') { $parts[] = 'Nutze häufig Beispiele'; }
        if (isset($config['stil']['listen']) && $config['stil']['listen'] === 'bevorzugt') { $parts[] = 'Bevorzuge Listen und Bullet-Points'; }
        if (isset($config['tabus']) && is_array($config['tabus'])) { $parts[] = 'Vermeide: ' . implode(', ', $config['tabus']); }
        if ($parts === []) { return null; }
        return 'Schreibstil (' . ($profile['name'] ?? 'Profil') . '): ' . implode('. ', $parts) . '.';
    }

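    /** Load the system prompt text stored under the given config ID, or null if unavailable. */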
    private function getSystemPromptById(int $promptId): ?string
    {
        if ($promptId === 0) { return null; }
        $prompt = $this->configRepo->findByIdAndType($promptId, 'system_prompt');
        if ($prompt === null) { return null; }
        $content = json_decode($prompt['content'] ?? '{}', true);
        return $content['prompt'] ?? null;
    }

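    /** Build formatting instructions (sections, length limits, format flags) from a structure config entry, or null. */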
    private function getStructurePrompt(int $structureId): ?string
    {
        if ($structureId === 0) { return null; }
        $structure = $this->configRepo->findByIdAndType($structureId, 'structure');
        if ($structure === null) { return null; }
        $name = $structure['name'] ?? 'Struktur';
        $config = json_decode($structure['content'] ?? '{}', true);
        if ($config === null) { return null; }
        $parts = ["Formatiere deine Antwort als: {$name}"];
        if (isset($config['sections']) && is_array($config['sections'])) { $parts[] = 'Struktur: ' . implode(' → ', $config['sections']); }
        if (isset($config['max_chars'])) { $parts[] = 'Maximale Länge: ' . $config['max_chars'] . ' Zeichen'; }
        if (isset($config['min_words'])) { $parts[] = 'Mindestens ' . $config['min_words'] . ' Wörter'; }
        if (isset($config['max_words'])) { $parts[] = 'Maximal ' . $config['max_words'] . ' Wörter'; }
        if (isset($config['format']) && $config['format'] === 'qa') { $parts[] = 'FAQ mit ' . ($config['min_questions'] ?? 3) . ' Paaren'; }
        if (isset($config['hashtags']) && $config['hashtags']) { $parts[] = 'Füge passende Hashtags hinzu'; }
        if (isset($config['cta']) && $config['cta']) { $parts[] = 'Schließe mit einem Call-to-Action ab'; }
        return implode('. ', $parts) . '.';
    }

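    /** Resolve the display name of a structure config entry, or null if not found. */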
    private function getStructureName(int $structureId): ?string
    {
        $structure = $this->configRepo->findByIdAndType($structureId, 'structure');
        return $structure['name'] ?? null;
    }
}
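
For reference, a minimal caller sketch showing how the progress callback can be wired to a Server-Sent Events stream. The streamChat() helper, the model identifier, and the surrounding HTTP setup are assumptions for illustration; only setProgressCallback() and execute() come from the class above.

<?php

declare(strict_types=1);

use UseCases\Chat\StreamingChatMessageUseCase;

// Hypothetical controller helper: streamChat(), the model identifier and the surrounding
// HTTP/SSE setup are illustrative assumptions; only setProgressCallback() and execute()
// are taken from the class above.
function streamChat(StreamingChatMessageUseCase $useCase, string $sessionUuid, string $question): void
{
    // Assumes headers such as "Content-Type: text/event-stream" have already been sent.
    $useCase->setProgressCallback(static function (string $step, string $message, ?int $durationMs): void {
        // One SSE frame per progress event; $durationMs is null for step-start events.
        echo "event: progress\n";
        echo 'data: ' . json_encode(
            ['step' => $step, 'message' => $message, 'duration_ms' => $durationMs],
            JSON_THROW_ON_ERROR
        ) . "\n\n";
        flush();
    });

    // Example model ID; any "claude-*" or "ollama:*" identifier accepted by execute() works here.
    $response = $useCase->execute($sessionUuid, $question, 'claude-sonnet-4-20250514');

    // Serializing the final ChatResponse into a terminal "result" frame is left out,
    // since ChatResponse's public API is not part of the backed-up file.
}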

Other versions of this file

ID Version Type Size Date
2118 29 modified 12.9 KB 2025-12-29 09:09
2106 28 modified 12.8 KB 2025-12-29 08:47
2105 27 modified 12.5 KB 2025-12-29 08:46
2104 26 modified 12.2 KB 2025-12-29 08:46
2103 25 modified 11.9 KB 2025-12-29 08:46
2102 24 modified 11.7 KB 2025-12-29 08:45
2101 23 modified 11.6 KB 2025-12-29 08:45
2100 22 modified 11.6 KB 2025-12-29 08:45
2077 21 modified 18.7 KB 2025-12-29 00:04
2076 20 modified 18.7 KB 2025-12-29 00:03
2075 19 modified 18.7 KB 2025-12-29 00:02
2074 18 modified 18.5 KB 2025-12-29 00:02
2073 17 modified 18.6 KB 2025-12-29 00:02
2041 16 modified 18.6 KB 2025-12-28 23:25
2039 15 modified 18.6 KB 2025-12-28 23:25
2035 14 modified 18.5 KB 2025-12-28 23:25
2029 13 modified 18.0 KB 2025-12-28 23:23
2028 12 modified 15.6 KB 2025-12-28 23:22
2027 11 modified 16.0 KB 2025-12-28 23:21
2026 10 modified 16.0 KB 2025-12-28 23:21
2023 9 modified 15.9 KB 2025-12-28 23:21
1978 8 modified 16.6 KB 2025-12-28 02:37
1977 7 modified 17.0 KB 2025-12-28 02:36
1976 6 modified 17.8 KB 2025-12-28 02:35
1975 5 modified 18.3 KB 2025-12-28 02:34
1825 4 modified 18.2 KB 2025-12-27 23:16
1824 3 modified 18.2 KB 2025-12-27 23:16
1665 2 modified 18.1 KB 2025-12-27 11:23
1566 1 modified 18.1 KB 2025-12-26 20:31
