Log #15223

ID: 15223
Timestamp: 2025-12-25 15:45:08.157931
Client: root
IP: 145.224.72.140
Model: claude-sonnet-4-20250514
Status: completed
Tokens: 827 (Input: 32, Output: 795)
Duration: 97 ms
Request time: 2025-12-25 15:45:08.157931
Response time: 2025-12-25 15:45:08.254440

Request

{
    "event": "PreToolUse",
    "tool_name": "Read",
    "tool_input": {
        "file_path": "\/var\/www\/scripts\/pipeline\/knowledge.py",
        "limit": 100
    }
}
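
The request body is the hook payload this logger recorded: a PreToolUse event fired before the Read tool fetched the first 100 lines of knowledge.py. Below is a minimal sketch of a handler for such a payload; reading the event JSON from stdin follows the usual hook convention, while log_event and the recorded fields are hypothetical stand-ins for whatever backs this log view:

import json
import sys
from datetime import datetime, timezone

def log_event(payload: dict) -> None:
    # Hypothetical sink; the real system presumably writes rows like
    # the header above (ID, client, model, token counts, duration).
    record = {
        "event": payload.get("event"),            # "PreToolUse"
        "tool_name": payload.get("tool_name"),    # "Read"
        "tool_input": payload.get("tool_input"),  # file_path, limit
        "request_time": datetime.now(timezone.utc).isoformat(),
    }
    print(json.dumps(record))

if __name__ == "__main__":
    # The hook payload arrives as a single JSON object on stdin.
    log_event(json.load(sys.stdin))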

Response

{
    "tool_response": {
        "type": "text",
        "file": {
            "filePath": "/var/www/scripts/pipeline/knowledge.py",
            "numLines": 100,
            "startLine": 1,
            "totalLines": 905
        }
    }
}

File content (lines 1-100 of 905), decoded from the "content" field:

"""
Modular knowledge-extraction system for 3-level analysis.

Levels:
1. Page (page): detailed analysis per individual page
2. Section (section): aggregation per chapter/subchapter
3. Document (document): consolidated overall view

Knowledge types:
- Entities: persons, concepts, methods, organizations
- Semantics: meaning, definition, references
- Ontology: interactions between entities
- Taxonomy: hierarchical classification

Author: AI system pipeline
"""

import json
import re
import time
from dataclasses import dataclass
from enum import Enum

import requests

from config import ANTHROPIC_API_KEY, OLLAMA_HOST
from db import db


class KnowledgeLevel(Enum):
    """Level of the knowledge extraction."""

    PAGE = "page"
    SECTION = "section"
    DOCUMENT = "document"


class KnowledgeType(Enum):
    """Type of the extracted knowledge."""

    ENTITY = "entity"
    SEMANTIC = "semantic"
    ONTOLOGY = "ontology"
    TAXONOMY = "taxonomy"


@dataclass
class ModelConfig:
    """Configuration for an LLM model."""

    provider: str  # 'ollama' or 'anthropic'
    model_name: str
    temperature: float = 0.3
    max_tokens: int = 2000


# Default model configurations
DEFAULT_MODELS = {
    "ollama": ModelConfig("ollama", "gemma3:27b-it-qat"),
    "anthropic": ModelConfig("anthropic", "claude-3-haiku-20240307"),
    "anthropic_opus": ModelConfig("anthropic", "claude-opus-4-5-20251101"),
}


class KnowledgeExtractor:
    """
    Modular knowledge extraction with database reconciliation.

    Usage:
        extractor = KnowledgeExtractor(model_config)

        # Per page
        entities = extractor.extract_entities(text, KnowledgeLevel.PAGE, page_id)
        semantics = extractor.extract_semantics(entities, text, KnowledgeLevel.PAGE, page_id)
        ontology = extractor.extract_ontology(entities, text, KnowledgeLevel.PAGE, page_id)
        taxonomy = extractor.extract_taxonomy(entities, text, KnowledgeLevel.PAGE, page_id)
    """

    def __init__(self, model_config: ModelConfig | None = None):
        """Initialize the extractor with a model configuration."""
        self.model = model_config or DEFAULT_MODELS["ollama"]
        self.anthropic_client = None

        if self.model.provider == "anthropic":
            self._init_anthropic()

    def _init_anthropic(self):
        """Initialize the Anthropic client."""
        try:
            import anthropic

            if ANTHROPIC_API_KEY:
                self.anthropic_client = anthropic.Anthropic(api_key=ANTHROPIC_API_KEY)
        except ImportError:
            db.log("WARNING", "Anthropic SDK not installed, falling back to Ollama")
            self.model = DEFAULT_MODELS["ollama"]

    def _call_llm(self, prompt: str, json_output: bool = True) -> str:
        """Call the LLM and return its response."""
        start_time = time.time()
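
The class docstring in the returned excerpt documents the intended per-page call sequence. Here is a short usage sketch assembled from that docstring; the extract_* methods themselves sit beyond line 100 of the 905-line file, so their signatures are taken on trust from the docstring, and page_text/page_id are placeholder inputs:

from knowledge import DEFAULT_MODELS, KnowledgeExtractor, KnowledgeLevel

# Falls back to DEFAULT_MODELS["ollama"] when no config is passed.
extractor = KnowledgeExtractor(DEFAULT_MODELS["anthropic"])

page_text = "..."  # placeholder: text of one page
page_id = 1        # placeholder: database ID of that page

# Per-page pass, in the order the docstring documents:
entities = extractor.extract_entities(page_text, KnowledgeLevel.PAGE, page_id)
semantics = extractor.extract_semantics(entities, page_text, KnowledgeLevel.PAGE, page_id)
ontology = extractor.extract_ontology(entities, page_text, KnowledgeLevel.PAGE, page_id)
taxonomy = extractor.extract_taxonomy(entities, page_text, KnowledgeLevel.PAGE, page_id)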