{
  "event": "PreToolUse",
  "tool_name": "Read",
  "tool_input": {
    "file_path": "/var/www/scripts/pipeline/generate.py",
    "offset": 525,
    "limit": 50
  }
}
{
  "tool_response": {
    "type": "text",
    "file": {
      "filePath": "/var/www/scripts/pipeline/generate.py",
"content": " )\n\n return response_text\n\n\ndef save_version(order_id, content, version_number=1, output_format=\"markdown\"):\n \"\"\"Save content version to database.\"\"\"\n content_json = json.dumps({\"text\": content, \"format\": output_format})\n\n cursor = db.execute(\n \"\"\"INSERT INTO content_versions (order_id, version_number, content)\n VALUES (%s, %s, %s)\"\"\",\n (order_id, version_number, content_json),\n )\n db.commit()\n version_id = cursor.lastrowid\n cursor.close()\n return version_id\n\n\ndef save_sources(order_id, context):\n \"\"\"Save RAG sources to content_sources.\"\"\"\n for ctx in context:\n # Try to find chunk_id by content match\n cursor = db.execute(\"SELECT id FROM chunks WHERE content LIKE %s LIMIT 1\", (ctx[\"content\"][:100] + \"%\",))\n chunk = cursor.fetchone()\n cursor.close()\n\n if chunk:\n cursor = db.execute(\n \"\"\"INSERT IGNORE INTO content_sources (order_id, chunk_id, relevance_score)\n VALUES (%s, %s, %s)\"\"\",\n (order_id, chunk[\"id\"], ctx[\"score\"]),\n )\n db.commit()\n cursor.close()\n\n\ndef update_order_status(order_id, status):\n \"\"\"Update order status.\"\"\"\n cursor = db.execute(\"UPDATE content_orders SET status = %s, updated_at = NOW() WHERE id = %s\", (status, order_id))\n db.commit()\n cursor.close()\n\n\ndef generate_content(order_id, model=\"anthropic\", collection=\"documents\", context_limit=5):\n \"\"\"\n Main content generation function.\n\n Args:",
"numLines": 50,
"startLine": 525,
"totalLines": 987
}
}
}
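The two records above form one request/response pair: the PreToolUse event carries the Read call's tool_input (file_path, offset, limit), and the second record carries the tool_response with the file chunk that came back. Below is a minimal sketch, not part of the logged session, of one way to scan a log made of such concatenated JSON objects and list every Read request. Only the fields visible above ("event", "tool_name", "tool_input") are assumed; the log file name tool_events.log and the helper names iter_events/summarize_reads are hypothetical.

# Hypothetical helper, not part of the logged session: parse a stream of
# back-to-back JSON objects like the records above and report every Read call.
import json


def iter_events(text):
    """Yield each JSON object from a stream of concatenated objects."""
    decoder = json.JSONDecoder()
    idx = 0
    while idx < len(text):
        # Skip whitespace between objects, then decode the next one in place.
        while idx < len(text) and text[idx].isspace():
            idx += 1
        if idx >= len(text):
            break
        obj, idx = decoder.raw_decode(text, idx)
        yield obj


def summarize_reads(text):
    """Collect (file_path, offset, limit) for every logged Read request."""
    reads = []
    for event in iter_events(text):
        if event.get("event") == "PreToolUse" and event.get("tool_name") == "Read":
            tool_input = event.get("tool_input", {})
            reads.append((tool_input.get("file_path"), tool_input.get("offset"), tool_input.get("limit")))
    return reads


if __name__ == "__main__":
    with open("tool_events.log") as fh:  # hypothetical file name for a log like the one above
        for path, offset, limit in summarize_reads(fh.read()):
            print(f"Read {path} (offset={offset}, limit={limit})")

Run against the two records above, this would print a single line for the Read of /var/www/scripts/pipeline/generate.py at offset 525 with a 50-line limit.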