Protokoll #33600

ID: 33600
Zeitstempel: 2026-04-21 02:52:24.849651
Client: root
IP: 62.246.34.195
Modell: claude-sonnet-4-20250514
Status: completed
Tokens: 1,467 (Input: 728, Output: 739)
Dauer: 61 ms
Request-Zeit: 2026-04-21 02:52:24.849651
Response-Zeit: 2026-04-21 02:52:24.910214

Request

{
    "event": "PreToolUse",
    "tool_name": "Write",
    "tool_input": {
        "file_path": "\/var\/www\/dev.campus.systemische-tools.de\/crawler\/src\/infrastructure\/queue\/db_url_queue.py",
        "content": "from __future__ import annotations\n\nfrom datetime import datetime, timezone\n\nfrom src.domain.ports.url_queue_port import UrlQueuePort\nfrom src.domain.value_objects.normalized_url import NormalizedUrl\nfrom src.infrastructure.persistence.connection import MariaDbConnectionFactory\n\n_ENQUEUE = (\n    \"INSERT INTO url_queue (crawl_id, url_hash, url, enqueued_at, status)\"\n    \" VALUES (%s, %s, %s, %s, 'pending')\"\n    \" ON DUPLICATE KEY UPDATE id=id\"\n)\n_CLAIM_SELECT = (\n    \"SELECT id, url_hash, url FROM url_queue\"\n    \" WHERE crawl_id=%s AND status='pending'\"\n    \" ORDER BY enqueued_at LIMIT 1 FOR UPDATE\"\n)\n_CLAIM_UPDATE = \"UPDATE url_queue SET status='in_progress' WHERE id=%s\"\n_PENDING_COUNT = (\n    \"SELECT COUNT(*) AS c FROM url_queue WHERE crawl_id=%s AND status='pending'\"\n)\n_MARK_DONE = (\n    \"UPDATE url_queue SET status='done' WHERE crawl_id=%s AND url_hash=%s\"\n)\n\n\nclass DbUrlQueue(UrlQueuePort):\n    def __init__(self, connections: MariaDbConnectionFactory) -> None:\n        self._connections = connections\n\n    def enqueue(self, crawl_id: int, url: NormalizedUrl) -> bool:\n        with self._connections.session() as conn:\n            cur = conn.cursor()\n            cur.execute(\n                _ENQUEUE,\n                (crawl_id, url.url_hash, url.url, datetime.now(tz=timezone.utc)),\n            )\n            return cur.rowcount > 0\n\n    def dequeue(self, crawl_id: int) -> NormalizedUrl | None:\n        with self._connections.session() as conn:\n            cur = conn.cursor(dictionary=True)\n            cur.execute(_CLAIM_SELECT, (crawl_id,))\n            row = cur.fetchone()\n            if row is None:\n                return None\n            cur.execute(_CLAIM_UPDATE, (row[\"id\"],))\n            return _rebuild_url(row[\"url\"], row[\"url_hash\"])\n\n    def pending_count(self, crawl_id: int) -> int:\n        with self._connections.session() as conn:\n            cur = 
conn.cursor(dictionary=True)\n            cur.execute(_PENDING_COUNT, (crawl_id,))\n            row = cur.fetchone()\n            return int(row[\"c\"]) if row else 0\n\n    def mark_done(self, crawl_id: int, url: NormalizedUrl) -> None:\n        with self._connections.session() as conn:\n            conn.cursor().execute(_MARK_DONE, (crawl_id, url.url_hash))\n\n\ndef _rebuild_url(url: str, url_hash: str) -> NormalizedUrl:\n    from src.domain.value_objects.normalized_url import normalize\n\n    rebuilt = normalize(url)\n    if rebuilt.url_hash == url_hash:\n        return rebuilt\n    return NormalizedUrl(\n        raw=url, scheme=rebuilt.scheme, host=rebuilt.host, port=rebuilt.port,\n        path=rebuilt.path, query=rebuilt.query, url_hash=url_hash,\n    )\n"
    }
}

Response

{
    "tool_response": {
        "type": "create",
        "filePath": "\/var\/www\/dev.campus.systemische-tools.de\/crawler\/src\/infrastructure\/queue\/db_url_queue.py",
        "content": "from __future__ import annotations\n\nfrom datetime import datetime, timezone\n\nfrom src.domain.ports.url_queue_port import UrlQueuePort\nfrom src.domain.value_objects.normalized_url import NormalizedUrl\nfrom src.infrastructure.persistence.connection import MariaDbConnectionFactory\n\n_ENQUEUE = (\n    \"INSERT INTO url_queue (crawl_id, url_hash, url, enqueued_at, status)\"\n    \" VALUES (%s, %s, %s, %s, 'pending')\"\n    \" ON DUPLICATE KEY UPDATE id=id\"\n)\n_CLAIM_SELECT = (\n    \"SELECT id, url_hash, url FROM url_queue\"\n    \" WHERE crawl_id=%s AND status='pending'\"\n    \" ORDER BY enqueued_at LIMIT 1 FOR UPDATE\"\n)\n_CLAIM_UPDATE = \"UPDATE url_queue SET status='in_progress' WHERE id=%s\"\n_PENDING_COUNT = (\n    \"SELECT COUNT(*) AS c FROM url_queue WHERE crawl_id=%s AND status='pending'\"\n)\n_MARK_DONE = (\n    \"UPDATE url_queue SET status='done' WHERE crawl_id=%s AND url_hash=%s\"\n)\n\n\nclass DbUrlQueue(UrlQueuePort):\n    def __init__(self, connections: MariaDbConnectionFactory) -> None:\n        self._connections = connections\n\n    def enqueue(self, crawl_id: int, url: NormalizedUrl) -> bool:\n        with self._connections.session() as conn:\n            cur = conn.cursor()\n            cur.execute(\n                _ENQUEUE,\n                (crawl_id, url.url_hash, url.url, datetime.now(tz=timezone.utc)),\n            )\n            return cur.rowcount > 0\n\n    def dequeue(self, crawl_id: int) -> NormalizedUrl | None:\n        with self._connections.session() as conn:\n            cur = conn.cursor(dictionary=True)\n            cur.execute(_CLAIM_SELECT, (crawl_id,))\n            row = cur.fetchone()\n            if row is None:\n                return None\n            cur.execute(_CLAIM_UPDATE, (row[\"id\"],))\n            return _rebuild_url(row[\"url\"], row[\"url_hash\"])\n\n    def pending_count(self, crawl_id: int) -> int:\n        with self._connections.session() as conn:\n            cur = 
conn.cursor(dictionary=True)\n            cur.execute(_PENDING_COUNT, (crawl_id,))\n            row = cur.fetchone()\n            return int(row[\"c\"]) if row else 0\n\n    def mark_done(self, crawl_id: int, url: NormalizedUrl) -> None:\n        with self._connections.session() as conn:\n            conn.cursor().execute(_MARK_DONE, (crawl_id, url.url_hash))\n\n\ndef _rebuild_url(url: str, url_hash: str) -> NormalizedUrl:\n    from src.domain.value_objects.normalized_url import normalize\n\n    rebuilt = normalize(url)\n    if rebuilt.url_hash == url_hash:\n        return rebuilt\n    return NormalizedUrl(\n        raw=url, scheme=rebuilt.scheme, host=rebuilt.host, port=rebuilt.port,\n        path=rebuilt.path, query=rebuilt.query, url_hash=url_hash,\n    )\n",
        "structuredPatch": [],
        "originalFile": null,
        "userModified": false
    }
}
← Vorheriger Zur Liste Nächster →