{
  "event": "PreToolUse",
  "tool_name": "Read",
  "tool_input": {
    "file_path": "/var/www/migration/content-pipeline/src/analyzers/semantic_analyzer.py",
    "offset": 85,
    "limit": 40
  }
}
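
The record above is the hook's input: the event name, the tool about to run, and that tool's arguments. Below is a minimal sketch of a hook script consuming an event shaped like this one. It assumes the JSON arrives on stdin and that a non-zero exit vetoes the tool call; both the delivery channel and the exit-code convention are assumptions here, and the path policy is purely hypothetical.

    import json
    import sys

    # Parse the hook event from stdin (assumed delivery channel).
    event = json.load(sys.stdin)

    if event.get("event") == "PreToolUse" and event.get("tool_name") == "Read":
        path = event.get("tool_input", {}).get("file_path", "")
        # Hypothetical policy: only permit reads under the migration tree.
        if not path.startswith("/var/www/migration/"):
            print(f"blocked read of {path}", file=sys.stderr)
            sys.exit(2)  # assumed convention: non-zero exit blocks the tool call

    sys.exit(0)
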
{
  "tool_response": {
    "type": "text",
    "file": {
      "filePath": "/var/www/migration/content-pipeline/src/analyzers/semantic_analyzer.py",
      "content": "        # Parse response using DRY utility\n        result = parse_semantic_response(response_text)\n\n        # Check if we got valid results\n        if result.get(\"summary\") or result.get(\"keywords\"):\n            cursor = db.execute(\n                \"\"\"INSERT INTO chunk_semantics\n                (chunk_id, summary, keywords, sentiment, topics, language, analyzed_at, analysis_model)\n                VALUES (%s, %s, %s, %s, %s, 'de', NOW(), %s)\n                ON DUPLICATE KEY UPDATE\n                    summary = VALUES(summary), keywords = VALUES(keywords),\n                    sentiment = VALUES(sentiment), topics = VALUES(topics),\n                    analyzed_at = NOW(), analysis_model = VALUES(analysis_model)\"\"\",\n                (\n                    chunk_id,\n                    result.get(\"summary\", \"\"),\n                    json.dumps(result.get(\"keywords\", []), ensure_ascii=False),\n                    result.get(\"sentiment\", \"neutral\"),\n                    json.dumps(result.get(\"topics\", []), ensure_ascii=False),\n                    model_name,\n                ),\n            )\n            db.commit()\n            cursor.close()\n            return result\n\n        return None\n\n    except Exception as e:\n        db.log(\"ERROR\", f\"Chunk semantics analysis failed: {e}\")\n        return None\n\n\ndef analyze_chunks_semantics(document_id: int, client=None, progress=None) -> int:\n    \"\"\"Analyze all chunks of a document for semantics.\"\"\"\n    cursor = db.execute(\"SELECT id, content FROM chunks WHERE document_id = %s ORDER BY chunk_index\", (document_id,))\n    chunks = cursor.fetchall()\n    cursor.close()\n\n    total = len(chunks)",
      "numLines": 40,
      "startLine": 85,
      "totalLines": 142
    }
  }
}
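
The response metadata (numLines, startLine, totalLines) describes a 40-line window into a 142-line file. Below is a minimal sketch of that offset/limit pagination, assuming offset is 1-based since offset 85 in the request maps to startLine 85 in the response; read_slice is a hypothetical helper, not the tool's actual implementation.

    def read_slice(path: str, offset: int, limit: int) -> dict:
        """Return a window of at most `limit` lines starting at 1-based line `offset`."""
        with open(path, encoding="utf-8") as f:
            lines = f.readlines()
        # Slice the requested window; a short final window yields numLines < limit.
        window = lines[offset - 1 : offset - 1 + limit]
        return {
            "filePath": path,
            "content": "".join(window),
            "numLines": len(window),
            "startLine": offset,
            "totalLines": len(lines),
        }
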