Protokoll #33553

ID: 33553
Zeitstempel: 2026-04-21 02:43:00.583679
Client: root
IP: 62.246.34.195
Modell: claude-sonnet-4-20250514
Status: completed
Tokens: 1,463 (Input: 726, Output: 737)
Dauer: 72 ms
Request-Zeit: 2026-04-21 02:43:00.583679
Response-Zeit: 2026-04-21 02:43:00.655638

Request

{
    "event": "PreToolUse",
    "tool_name": "Write",
    "tool_input": {
        "file_path": "\/var\/www\/dev.campus.systemische-tools.de\/crawler\/tests\/unit\/application\/test_finalize_crawl.py",
        "content": "from __future__ import annotations\n\nfrom datetime import datetime, timedelta\n\nimport pytest\n\nfrom src.application.use_cases.finalize_crawl import FinalizeCrawlUseCase\nfrom src.domain.entities.crawl import Crawl\nfrom src.domain.entities.crawl_error import CrawlError\nfrom src.domain.value_objects.enums import CrawlMode, CrawlStatus, TriggerSource\nfrom tests.unit.application.fakes import FakeClock, FakeCrawlErrorRepo, FakeCrawlRepo\n\n\ndef _running_crawl(repo: FakeCrawlRepo, started: datetime) -> int:\n    crawl = repo.save(\n        Crawl(\n            id=None,\n            base_url=\"https:\/\/x\/\",\n            mode=CrawlMode.FAST,\n            started_at=started,\n            finished_at=None,\n            status=CrawlStatus.RUNNING,\n            trigger_source=TriggerSource.CLI,\n            config={},\n        ),\n    )\n    assert crawl.id is not None\n    return crawl.id\n\n\ndef test_finalize_marks_completed_and_counts_errors() -> None:\n    crawls = FakeCrawlRepo()\n    errors = FakeCrawlErrorRepo()\n    start = datetime(2026, 4, 21, 12, 0, 0)\n    cid = _running_crawl(crawls, start)\n    errors.save(CrawlError(id=None, crawl_id=cid, url=\"u\", error_type=\"dns\",\n                           error_msg=\"boom\", occurred_at=start))\n\n    result = FinalizeCrawlUseCase(\n        crawls=crawls,\n        errors=errors,\n        clock=FakeClock(start + timedelta(minutes=5)),\n    ).execute(crawl_id=cid, status=CrawlStatus.COMPLETED, total_urls=42)\n\n    assert result.status is CrawlStatus.COMPLETED\n    assert result.total_urls == 42\n    assert result.total_errors == 1\n    assert result.finished_at == start + timedelta(minutes=5)\n\n\ndef test_finalize_idempotent_for_already_finished() -> None:\n    crawls = FakeCrawlRepo()\n    errors = FakeCrawlErrorRepo()\n    start = datetime(2026, 4, 21, 12, 0, 0)\n    cid = _running_crawl(crawls, start)\n    first = FinalizeCrawlUseCase(\n        crawls=crawls, errors=errors, clock=FakeClock(start + timedelta(minutes=1)),\n    ).execute(crawl_id=cid, status=CrawlStatus.COMPLETED, total_urls=1)\n\n    second = FinalizeCrawlUseCase(\n        crawls=crawls, errors=errors, clock=FakeClock(start + timedelta(minutes=2)),\n    ).execute(crawl_id=cid, status=CrawlStatus.FAILED, total_urls=99)\n\n    assert second.finished_at == first.finished_at\n    assert second.status is CrawlStatus.COMPLETED\n\n\ndef test_finalize_missing_crawl_raises() -> None:\n    start = datetime(2026, 4, 21, 12, 0, 0)\n    with pytest.raises(LookupError):\n        FinalizeCrawlUseCase(\n            crawls=FakeCrawlRepo(), errors=FakeCrawlErrorRepo(), clock=FakeClock(start),\n        ).execute(crawl_id=999, status=CrawlStatus.COMPLETED, total_urls=0)\n"
    }
}

Response

{
    "tool_response": {
        "type": "create",
        "filePath": "\/var\/www\/dev.campus.systemische-tools.de\/crawler\/tests\/unit\/application\/test_finalize_crawl.py",
        "content": "from __future__ import annotations\n\nfrom datetime import datetime, timedelta\n\nimport pytest\n\nfrom src.application.use_cases.finalize_crawl import FinalizeCrawlUseCase\nfrom src.domain.entities.crawl import Crawl\nfrom src.domain.entities.crawl_error import CrawlError\nfrom src.domain.value_objects.enums import CrawlMode, CrawlStatus, TriggerSource\nfrom tests.unit.application.fakes import FakeClock, FakeCrawlErrorRepo, FakeCrawlRepo\n\n\ndef _running_crawl(repo: FakeCrawlRepo, started: datetime) -> int:\n    crawl = repo.save(\n        Crawl(\n            id=None,\n            base_url=\"https:\/\/x\/\",\n            mode=CrawlMode.FAST,\n            started_at=started,\n            finished_at=None,\n            status=CrawlStatus.RUNNING,\n            trigger_source=TriggerSource.CLI,\n            config={},\n        ),\n    )\n    assert crawl.id is not None\n    return crawl.id\n\n\ndef test_finalize_marks_completed_and_counts_errors() -> None:\n    crawls = FakeCrawlRepo()\n    errors = FakeCrawlErrorRepo()\n    start = datetime(2026, 4, 21, 12, 0, 0)\n    cid = _running_crawl(crawls, start)\n    errors.save(CrawlError(id=None, crawl_id=cid, url=\"u\", error_type=\"dns\",\n                           error_msg=\"boom\", occurred_at=start))\n\n    result = FinalizeCrawlUseCase(\n        crawls=crawls,\n        errors=errors,\n        clock=FakeClock(start + timedelta(minutes=5)),\n    ).execute(crawl_id=cid, status=CrawlStatus.COMPLETED, total_urls=42)\n\n    assert result.status is CrawlStatus.COMPLETED\n    assert result.total_urls == 42\n    assert result.total_errors == 1\n    assert result.finished_at == start + timedelta(minutes=5)\n\n\ndef test_finalize_idempotent_for_already_finished() -> None:\n    crawls = FakeCrawlRepo()\n    errors = FakeCrawlErrorRepo()\n    start = datetime(2026, 4, 21, 12, 0, 0)\n    cid = _running_crawl(crawls, start)\n    first = FinalizeCrawlUseCase(\n        crawls=crawls, errors=errors, clock=FakeClock(start + timedelta(minutes=1)),\n    ).execute(crawl_id=cid, status=CrawlStatus.COMPLETED, total_urls=1)\n\n    second = FinalizeCrawlUseCase(\n        crawls=crawls, errors=errors, clock=FakeClock(start + timedelta(minutes=2)),\n    ).execute(crawl_id=cid, status=CrawlStatus.FAILED, total_urls=99)\n\n    assert second.finished_at == first.finished_at\n    assert second.status is CrawlStatus.COMPLETED\n\n\ndef test_finalize_missing_crawl_raises() -> None:\n    start = datetime(2026, 4, 21, 12, 0, 0)\n    with pytest.raises(LookupError):\n        FinalizeCrawlUseCase(\n            crawls=FakeCrawlRepo(), errors=FakeCrawlErrorRepo(), clock=FakeClock(start),\n        ).execute(crawl_id=999, status=CrawlStatus.COMPLETED, total_urls=0)\n",
        "structuredPatch": [],
        "originalFile": null,
        "userModified": false
    }
}
← Vorheriger Zur Liste Nächster →