DocumentationChatUseCase.php
- Path: src/UseCases/Docs/DocumentationChatUseCase.php
- Namespace: UseCases\Docs
- Lines: 100 | Size: 2,801 bytes
- Modified: 2025-12-25 13:01:59 | Scanned: 2025-12-31 10:22:15
Code Hygiene Score: 100
- Dependencies: 100 (25%)
- LOC: 100 (20%)
- Methods: 100 (20%)
- Secrets: 100 (15%)
- Classes: 100 (10%)
- Magic Numbers: 100 (10%)
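The overall score appears to be the weighted sum of the six category scores (the listed weights add up to 100%); with every category at 100, that is 100 × (0.25 + 0.20 + 0.20 + 0.15 + 0.10 + 0.10) = 100.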
No issues found.
Dependencies 4
- constructor Infrastructure\Docs\ChunkSearchService
- constructor Infrastructure\AI\OllamaService
- use Infrastructure\AI\OllamaService
- use Infrastructure\Docs\ChunkSearchService
Classes 1
- DocumentationChatUseCase (class), line 12
Functions 4
- __construct() public, line 14
- execute() public, line 28
- buildContext() private, line 64
- buildPrompt() private, line 81
Used by 2
- DocsController.php constructor
- DocsController.php use
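Both entries point to constructor injection in DocsController. A minimal sketch of that wiring follows; only the class names and the injection itself come from the scan, while the Controllers namespace and the chat() handler are hypothetical:

<?php

declare(strict_types=1);

// Hypothetical sketch: only the class names and the constructor injection
// come from the scan; the namespace and the chat() method are assumptions.
namespace Controllers;

use UseCases\Docs\DocumentationChatUseCase;

class DocsController
{
    public function __construct(
        private DocumentationChatUseCase $chatUseCase
    ) {
    }

    /**
     * Hypothetical endpoint handler: forwards the user question to the
     * use case and returns the answer together with its sources.
     *
     * @return array{answer: string, sources: array<int, array{id: int, path: string, title: string, score: float}>}
     */
    public function chat(string $question): array
    {
        return $this->chatUseCase->execute($question);
    }
}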
Versions 4
- v4: 2025-12-25 13:01 | claude-code-hook | modified
  Claude Code pre-hook backup before edit operation
- v3: 2025-12-25 13:01 | claude-code-hook | modified
  Claude Code pre-hook backup before edit operation
- v2: 2025-12-23 08:17 | claude-code-hook | modified
  Claude Code pre-hook backup before edit operation
- v1: 2025-12-23 07:56 | claude-code-hook | modified
  Claude Code pre-hook backup before edit operation
Code
<?php

declare(strict_types=1);

namespace UseCases\Docs;

// @responsibility: RAG-based chat over the documentation
use Infrastructure\AI\OllamaService;
use Infrastructure\Docs\ChunkSearchService;

class DocumentationChatUseCase
{
    public function __construct(
        private ChunkSearchService $searchService,
        private OllamaService $ollamaService
    ) {
    }

    /**
     * Execute documentation chat.
     *
     * @param string $question User question
     * @param string $model LLM model to use
     * @param int $limit Number of context chunks
     * @return array{answer: string, sources: array<int, array{id: int, path: string, title: string, score: float}>}
     */
    public function execute(string $question, string $model = 'mistral', int $limit = 5): array
    {
        // Get relevant chunks via semantic search
        $chunks = $this->searchService->search($question, $limit);

        if (empty($chunks)) {
            return [
                'answer' => 'Leider konnte ich keine relevanten Informationen in der Dokumentation finden.',
                'sources' => [],
            ];
        }

        // Build context from chunks
        $context = $this->buildContext($chunks);

        // Generate answer using LLM
        $prompt = $this->buildPrompt($question, $context);
        $answer = $this->ollamaService->generate($prompt, $model, 0.3);

        // Map sources
        $sources = array_map(static fn (array $chunk): array => [
            'id' => $chunk['doc_id'],
            'path' => $chunk['path'],
            'title' => $chunk['title'],
            'score' => round($chunk['score'], 3),
        ], $chunks);

        return [
            'answer' => $answer,
            'sources' => $sources,
        ];
    }

    /**
     * Build context from chunks.
     */
    private function buildContext(array $chunks): string
    {
        $parts = [];

        foreach ($chunks as $chunk) {
            $part = "## {$chunk['title']}\n";
            $part .= "Pfad: {$chunk['path']}\n";
            $part .= $chunk['content'];
            $parts[] = $part;
        }

        return implode("\n\n---\n\n", $parts);
    }

    /**
     * Build chat prompt.
     */
    private function buildPrompt(string $question, string $context): string
    {
        return <<<PROMPT
        Du bist ein Dokumentations-Assistent. Beantworte die Frage basierend auf dem bereitgestellten Kontext.

        KONTEXT:
        {$context}

        FRAGE:
        {$question}

        ANLEITUNG:
        - Antworte auf Deutsch
        - Sei präzise und hilfreich
        - Wenn der Kontext die Frage nicht beantwortet, sage das ehrlich
        - Verweise auf die relevanten Abschnitte der Dokumentation
        PROMPT;
    }
}
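For reference, a minimal usage sketch. The construction of the two injected services is assumed here, not taken from the scan; the result shape follows the @return annotation above:

// Minimal usage sketch; $searchService and $ollamaService are assumed to be
// pre-built ChunkSearchService and OllamaService instances.
$useCase = new \UseCases\Docs\DocumentationChatUseCase($searchService, $ollamaService);

$result = $useCase->execute('Wie funktioniert die Chunk-Suche?', 'mistral', 5);

echo $result['answer'], PHP_EOL;

foreach ($result['sources'] as $source) {
    // Each source carries id, path, title and a relevance score rounded to 3 decimals.
    printf("- %s (%s, score %.3f)\n", $source['title'], $source['path'], $source['score']);
}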