Log #4032

ID: 4032
Timestamp: 2025-12-22 09:04:20.047944
Client: root
IP: -
Model: claude-sonnet-4-20250514
Status: completed
Tokens: 1,821 (Input: 0, Output: 0)
Duration: 89 ms
Request time: -
Response time: -
Error message: -

Request

{
    "event": "PreToolUse",
    "tool_name": "Read",
    "tool_input": {
        "file_path": "\/opt\/scripts\/pipeline\/detect.py"
    }
}
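
The request payload above is what a PreToolUse hook sees before the Read tool runs. As a minimal sketch of a consumer, assuming the event JSON arrives on the hook's stdin exactly as logged (the blocking-via-exit-code convention is an illustration, not a documented contract of this system):

#!/usr/bin/env python3
"""Sketch: consume a PreToolUse event from stdin (assumptions noted above)."""
import json
import sys

event = json.load(sys.stdin)  # e.g. {"event": "PreToolUse", "tool_name": "Read", ...}

if event.get("event") == "PreToolUse" and event.get("tool_name") == "Read":
    path = event.get("tool_input", {}).get("file_path", "")
    # Illustrative policy: only allow reads under /opt/scripts
    if not path.startswith("/opt/scripts/"):
        print(f"blocked read outside /opt/scripts: {path}", file=sys.stderr)
        sys.exit(2)  # assumed convention: non-zero exit blocks the tool call

sys.exit(0)  # allow the tool call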

Response

{
    "tool_response": {
        "type": "text",
        "file": {
            "filePath": "/opt/scripts/pipeline/pipeline.py",
            "numLines": 231,
            "startLine": 1,
            "totalLines": 231
        }
    }
}

Content of /opt/scripts/pipeline/pipeline.py (lines 1-231):

#!/usr/bin/env python3
"""
KI-System Document Pipeline
Main orchestration script for document processing.

Usage:
    python pipeline.py scan      # Scan for new documents
    python pipeline.py process   # Process queued documents
    python pipeline.py embed     # Embed pending chunks
    python pipeline.py all       # Full pipeline run
    python pipeline.py file <path>  # Process single file
"""

import json
import os
import sys
import time
from pathlib import Path

from analyze import analyze_document
from chunk import chunk_by_structure
from config import MAX_RETRIES, RETRY_BACKOFF_BASE
from db import db
from detect import queue_files, scan_directory
from embed import embed_chunks, embed_pending_chunks
from extract import extract, get_full_text


def process_file(file_path):
    """Process a single file through the pipeline."""
    db.log("INFO", f"Processing: {file_path}")

    # Extract text
    extraction = extract(file_path)
    if not extraction["success"]:
        db.log("ERROR", f"Extraction failed: {extraction.get('error')}")
        return False

    # Get document info
    file_name = Path(file_path).name
    file_stat = os.stat(file_path)

    import hashlib

    with open(file_path, "rb") as f:
        file_hash = hashlib.sha256(f.read()).hexdigest()

    # Insert document
    doc_id = db.insert_document(
        file_path=file_path,
        title=file_name,
        file_type=extraction["file_type"],
        file_size=file_stat.st_size,
        file_hash=file_hash,
    )
    db.log("INFO", f"Created document: {doc_id}")

    # Chunk content
    chunks = chunk_by_structure(extraction)
    db.log("INFO", f"Created {len(chunks)} chunks")

    # Store chunks
    for i, chunk in enumerate(chunks):
        chunk_id = db.insert_chunk(
            doc_id=doc_id,
            chunk_index=i,
            content=chunk["content"],
            heading_path=json.dumps(chunk.get("heading_path", [])),
            position_start=chunk.get("position_start", 0),
            position_end=chunk.get("position_end", 0),
            metadata=json.dumps(chunk.get("metadata", {})),
        )
        chunk["db_id"] = chunk_id

    # Generate embeddings
    embedded = embed_chunks(chunks, doc_id, file_name, file_path)
    db.log("INFO", f"Embedded {embedded}/{len(chunks)} chunks")

    # Semantic analysis
    full_text = get_full_text(extraction)
    analysis = analyze_document(doc_id, full_text)
    db.log("INFO", f"Analysis complete: {analysis}")

    # Update status
    db.update_document_status(doc_id, "done")
    return True


def process_queue():
    """Process items from the queue."""
    items = db.get_pending_queue_items(limit=10)
    db.log("INFO", f"Found {len(items)} items in queue")

    for item in items:
        queue_id = item["id"]
        file_path = item["file_path"]
        retry_count = item["retry_count"]

        if retry_count >= MAX_RETRIES:
            db.update_queue_status(queue_id, "failed", "Max retries exceeded")
            continue

        db.update_queue_status(queue_id, "processing")

        try:
            success = process_file(file_path)
            if success:
                db.update_queue_status(queue_id, "done")
            else:
                raise Exception("Processing returned False")
        except Exception as e:
            error_msg = str(e)
            db.update_queue_status(queue_id, "pending", error_msg)

            # Exponential backoff
            wait_time = RETRY_BACKOFF_BASE ** (retry_count + 1)
            db.log("INFO", f"Retry {retry_count + 1} in {wait_time}s: {file_path}")
            time.sleep(wait_time)


def run_scan():
    """Scan for new documents."""
    files = scan_directory()
    print(f"Found {len(files)} files")

    if files:
        queued = queue_files(files)
        print(f"Queued {queued} files")

    return files


def run_full_pipeline():
    """Run complete pipeline: scan → process → embed."""
    print("=" * 50)
    print("KI-System Pipeline - Full Run")
    print("=" * 50)

    print("\n[1/3] Scanning for documents...")
    files = run_scan()

    if files:
        print(f"\n[2/3] Processing {len(files)} documents...")
        process_queue()
    else:
        print("\n[2/3] No new documents to process")

    print("\n[3/3] Embedding remaining chunks...")
    embedded = embed_pending_chunks()
    print(f"Embedded {embedded} chunks")

    print("\n" + "=" * 50)
    print("Pipeline complete!")


def main():
    """Main entry point."""
    if len(sys.argv) < 2:
        print(__doc__)
        return

    command = sys.argv[1].lower()

    db.connect()

    try:
        if command == "scan":
            run_scan()

        elif command == "process":
            process_queue()

        elif command == "embed":
            count = embed_pending_chunks()
            print(f"Embedded {count} chunks")

        elif command == "all":
            run_full_pipeline()

        elif command == "file" and len(sys.argv) > 2:
            file_path = sys.argv[2]
            if os.path.exists(file_path):
                success = process_file(file_path)
                print(f"Processing {'successful' if success else 'failed'}")
            else:
                print(f"File not found: {file_path}")

        elif command == "status":
            # Show pipeline status
            cursor = db.execute(
                """SELECT status, COUNT(*) as count
                   FROM pipeline_queue
                   GROUP BY status"""
            )
            results = cursor.fetchall()
            cursor.close()

            print("\nQueue Status:")
            for r in results:
                print(f"  {r['status']}: {r['count']}")

            cursor = db.execute("SELECT COUNT(*) as count FROM documents")
            doc_count = cursor.fetchone()["count"]
            cursor.close()

            cursor = db.execute("SELECT COUNT(*) as count FROM chunks")
            chunk_count = cursor.fetchone()["count"]
            cursor.close()

            cursor = db.execute("SELECT COUNT(*) as count FROM chunks WHERE qdrant_id IS NOT NULL")
            embedded_count = cursor.fetchone()["count"]
            cursor.close()

            print(f"\nDocuments: {doc_count}")
            print(f"Chunks: {chunk_count} ({embedded_count} embedded)")

        else:
            print(f"Unknown command: {command}")
            print(__doc__)

    except Exception as e:
        db.log("ERROR", f"Pipeline error: {e}")
        print(f"Error: {e}")
        raise
    finally:
        db.disconnect()


if __name__ == "__main__":
    main()
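
In process_queue above, the retry delay is computed as RETRY_BACKOFF_BASE ** (retry_count + 1), with both constants imported from config. A minimal sketch of that module with assumed values (the real config.py is not part of this log):

# config.py -- sketch; the MAX_RETRIES and RETRY_BACKOFF_BASE values are assumptions
MAX_RETRIES = 3         # attempts before an item is marked "failed"
RETRY_BACKOFF_BASE = 2  # base of the exponential backoff

# Resulting schedule, matching wait_time = RETRY_BACKOFF_BASE ** (retry_count + 1):
for retry_count in range(MAX_RETRIES):
    print(f"retry {retry_count + 1}: wait {RETRY_BACKOFF_BASE ** (retry_count + 1)}s")
# retry 1: wait 2s
# retry 2: wait 4s
# retry 3: wait 8s

Note that the sleep happens inline in the queue loop, so a failing item delays the remaining items of that run by the full backoff interval.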