{
"event": "PreToolUse",
"tool_name": "Edit",
"tool_input": {
"file_path": "\/opt\/scripts\/pipeline\/config.py",
"old_string": "OLLAMA_CHAT_MODEL = \"gpt-oss:20b\"",
"new_string": "OLLAMA_CHAT_MODEL = \"gemma3:27b-it-qat\""
}
}
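The PreToolUse event above exposes the tool name and input before the edit is applied, which is the natural point to veto changes to sensitive files. The following is a minimal sketch of such a guard, assuming the event JSON is piped to the script's stdin and that a non-zero exit status is treated as a rejection; the protected-path list and the exit-code convention are assumptions for illustration, not part of the log above.

#!/usr/bin/env python3
# Hypothetical PreToolUse guard: reject Edit calls that touch protected config files.
# Assumes the event JSON (as shown above) arrives on stdin and that the caller
# treats a non-zero exit status as "block the tool call".
import json
import sys

PROTECTED_PATHS = ("/opt/scripts/pipeline/config.py",)  # assumed policy, adjust as needed

event = json.load(sys.stdin)
if event.get("tool_name") == "Edit":
    file_path = event.get("tool_input", {}).get("file_path", "")
    if file_path in PROTECTED_PATHS:
        print(f"edit to {file_path} requires manual review", file=sys.stderr)
        sys.exit(2)  # non-zero: reject the edit
sys.exit(0)  # zero: allow the edit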
{
"tool_response": {
"filePath": "\/opt\/scripts\/pipeline\/config.py",
"oldString": "OLLAMA_CHAT_MODEL = \"gpt-oss:20b\"",
"newString": "OLLAMA_CHAT_MODEL = \"gemma3:27b-it-qat\"",
"originalFile": "\"\"\"\nKI-System Pipeline Configuration\n\"\"\"\n\nimport os\n\n# Nextcloud Settings\nNEXTCLOUD_PATH = \"\/var\/www\/nextcloud\/data\/root\/files\/Documents\"\nSUPPORTED_EXTENSIONS = [\".pdf\", \".pptx\", \".docx\", \".md\", \".txt\"]\n\n# MariaDB Settings - Content Database (chunks, entities, semantic data)\nDB_CONFIG = {\n \"host\": \"localhost\",\n \"database\": \"ki_content\",\n \"user\": \"root\",\n \"password\": \"\", # Set via environment variable DB_PASSWORD\n \"charset\": \"utf8mb4\",\n}\n\n# Logging\/System Database (for pipeline_log)\nDB_LOG_CONFIG = {\n \"host\": \"localhost\",\n \"database\": \"ki_dev\",\n \"user\": \"root\",\n \"password\": \"\",\n \"charset\": \"utf8mb4\",\n}\n\n# Protokoll Database (for LLM call logging to ki_dev.protokoll)\nDB_PROTOKOLL_CONFIG = {\n \"host\": \"localhost\",\n \"database\": \"ki_dev\",\n \"user\": \"root\",\n \"password\": \"\",\n \"charset\": \"utf8mb4\",\n}\n\n# Qdrant Settings\nQDRANT_HOST = \"localhost\"\nQDRANT_PORT = 6333\nQDRANT_COLLECTIONS = {\n \"documents\": {\"size\": 1024, \"distance\": \"Cosine\"},\n \"mail\": {\"size\": 1024, \"distance\": \"Cosine\"},\n \"entities\": {\"size\": 1024, \"distance\": \"Cosine\"},\n}\n\n# Ollama Settings (Embeddings)\nOLLAMA_HOST = \"http:\/\/localhost:11434\"\nEMBEDDING_MODEL = \"mxbai-embed-large\" # 1024-dim, max ~1600 chars per chunk\nEMBEDDING_DIMENSION = 1024\n\n# Ollama Settings (Chat\/Generation)\nOLLAMA_CHAT_MODEL = \"gpt-oss:20b\"\n\n# Anthropic Settings (Chat\/Analysis)\nANTHROPIC_MODEL = \"claude-opus-4-5-20251101\"\n\n# Chunking Settings\nCHUNK_OVERLAP_PERCENT = 10\nMIN_CHUNK_SIZE = 100\nMAX_CHUNK_SIZE = 2000\n\n# Pipeline Settings\nMAX_RETRIES = 3\nRETRY_BACKOFF_BASE = 2 # Seconds, exponential: 2, 4, 8\n\n# OCR Settings\nOCR_ENABLED = True\nOCR_LANGUAGE = \"deu\" # German\n\n# Logging\nLOG_LEVEL = \"INFO\"\n\n\ndef get_db_password():\n \"\"\"Get database password from environment or .env file.\"\"\"\n password = os.environ.get(\"DB_PASSWORD\", \"\")\n if not password:\n password = os.environ.get(\"MARIADB_ROOT_PASSWORD\", \"\")\n if not password:\n # Try to read from project .env file\n env_file = \"\/var\/www\/dev.campus.systemische-tools.de\/.env\"\n if os.path.exists(env_file):\n with open(env_file) as f:\n for line in f:\n if line.startswith(\"MARIADB_ROOT_PASSWORD=\"):\n password = line.split(\"=\", 1)[1].strip()\n break\n return password\n\n\ndef get_anthropic_key():\n \"\"\"Get Anthropic API key from environment or .env file.\"\"\"\n key = os.environ.get(\"ANTHROPIC_API_KEY\", \"\")\n if not key:\n # Try to read from project .env file\n env_file = \"\/var\/www\/dev.campus.systemische-tools.de\/.env\"\n if os.path.exists(env_file):\n with open(env_file) as f:\n for line in f:\n if line.startswith(\"ANTHROPIC_API_KEY=\"):\n key = line.split(\"=\", 1)[1].strip()\n break\n return key\n\n\n# Update DB configs with password\nDB_CONFIG[\"password\"] = get_db_password()\nDB_LOG_CONFIG[\"password\"] = get_db_password()\nDB_PROTOKOLL_CONFIG[\"password\"] = get_db_password()\nANTHROPIC_API_KEY = get_anthropic_key()\n",
"structuredPatch": [
{
"oldStart": 50,
"oldLines": 7,
"newStart": 50,
"newLines": 7,
"lines": [
" EMBEDDING_DIMENSION = 1024",
" ",
" # Ollama Settings (Chat\/Generation)",
"-OLLAMA_CHAT_MODEL = \"gpt-oss:20b\"",
"+OLLAMA_CHAT_MODEL = \"gemma3:27b-it-qat\"",
" ",
" # Anthropic Settings (Chat\/Analysis)",
" ANTHROPIC_MODEL = \"claude-opus-4-5-20251101\""
]
}
],
"userModified": false,
"replaceAll": false
}
}
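The patch only swaps the OLLAMA_CHAT_MODEL value, so the pipeline will not notice the change until its next chat/generation call. A quick way to confirm the newly configured model actually responds is to call Ollama's /api/chat endpoint with the values from config.py. The snippet below is a sketch using the requests library against OLLAMA_HOST as defined in the file above; it assumes gemma3:27b-it-qat has already been pulled on the host (e.g. via `ollama pull gemma3:27b-it-qat`).

# Sketch: verify the newly configured chat model answers via Ollama's /api/chat endpoint.
# OLLAMA_HOST and OLLAMA_CHAT_MODEL mirror the values in /opt/scripts/pipeline/config.py.
import requests

OLLAMA_HOST = "http://localhost:11434"
OLLAMA_CHAT_MODEL = "gemma3:27b-it-qat"

resp = requests.post(
    f"{OLLAMA_HOST}/api/chat",
    json={
        "model": OLLAMA_CHAT_MODEL,
        "messages": [{"role": "user", "content": "Antworte mit einem Wort: ok?"}],
        "stream": False,  # return a single JSON object instead of a stream
    },
    timeout=120,
)
resp.raise_for_status()
print(resp.json()["message"]["content"])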