{
"event": "PreToolUse",
"tool_name": "Edit",
"tool_input": {
"file_path": "\/var\/www\/dev.campus.systemische-tools.de\/crawler\/tests\/e2e\/test_crawl_fixture.py",
"old_string": " factory = MariaDbConnectionFactory(load_db_settings(\"crawler\"))",
"new_string": " factory = MariaDbConnectionFactory(load_db_settings(\"crawler_test\"))",
"replace_all": true
}
}
{
"tool_response": {
"filePath": "\/var\/www\/dev.campus.systemische-tools.de\/crawler\/tests\/e2e\/test_crawl_fixture.py",
"oldString": " factory = MariaDbConnectionFactory(load_db_settings(\"crawler\"))",
"newString": " factory = MariaDbConnectionFactory(load_db_settings(\"crawler_test\"))",
"originalFile": "from __future__ import annotations\n\nimport asyncio\nimport socket\nfrom collections.abc import AsyncGenerator\n\nimport pytest\nimport pytest_asyncio\nfrom aiohttp import web\n\nfrom src.domain.value_objects.enums import CrawlMode, TriggerSource\nfrom src.infrastructure.config.db_config import load_db_settings\nfrom src.infrastructure.persistence.connection import MariaDbConnectionFactory\nfrom src.interfaces.cli.commands.crawl import CrawlCliInput, run_crawl\nfrom src.interfaces.cli.wiring import Container\nfrom tests.fixtures.fixture_server import build_app\n\n_TRUNCATE_ORDER = (\n \"url_queue\",\n \"external_checks\",\n \"errors\",\n \"js_errors\",\n \"hreflang\",\n \"structured_data\",\n \"resources\",\n \"images\",\n \"links\",\n \"page_headers\",\n \"pages\",\n \"urls\",\n \"crawls\",\n)\n\n\ndef _free_port() -> int:\n with socket.socket() as s:\n s.bind((\"127.0.0.1\", 0))\n return int(s.getsockname()[1])\n\n\n@pytest_asyncio.fixture\nasync def fixture_server() -> AsyncGenerator[str, None]:\n port = _free_port()\n runner = web.AppRunner(build_app())\n await runner.setup()\n site = web.TCPSite(runner, \"127.0.0.1\", port)\n await site.start()\n try:\n yield f\"http:\/\/127.0.0.1:{port}\/\"\n finally:\n await runner.cleanup()\n\n\n@pytest.fixture(autouse=True)\ndef _clean_db() -> None:\n factory = MariaDbConnectionFactory(load_db_settings(\"crawler\"))\n with factory.session() as conn:\n cur = conn.cursor()\n cur.execute(\"SET FOREIGN_KEY_CHECKS=0\")\n for table in _TRUNCATE_ORDER:\n cur.execute(f\"TRUNCATE TABLE {table}\")\n cur.execute(\"SET FOREIGN_KEY_CHECKS=1\")\n\n\n@pytest.mark.asyncio\nasync def test_end_to_end_crawl_against_fixture(fixture_server: str) -> None:\n factory = MariaDbConnectionFactory(load_db_settings(\"crawler\"))\n container = Container(factory)\n command = CrawlCliInput(\n base_url=fixture_server,\n mode=CrawlMode.FAST,\n trigger=TriggerSource.CLI,\n concurrency=4,\n max_urls=20,\n )\n\n crawl_id = await asyncio.wait_for(run_crawl(container, command), timeout=30)\n\n with factory.session() as conn:\n cur = conn.cursor(dictionary=True)\n cur.execute(\"SELECT COUNT(*) AS c FROM pages WHERE crawl_id=%s\", (crawl_id,))\n page_count = int(cur.fetchone()[\"c\"])\n cur.execute(\n \"SELECT quality_flags FROM pages WHERE crawl_id=%s\",\n (crawl_id,),\n )\n flags_blobs = [r[\"quality_flags\"] for r in cur.fetchall()]\n assert page_count >= 5\n assert any(\"h1_multiple\" in b for b in flags_blobs)\n",
"structuredPatch": [
{
"oldStart": 53,
"oldLines": 7,
"newStart": 53,
"newLines": 7,
"lines": [
" ",
" @pytest.fixture(autouse=True)",
" def _clean_db() -> None:",
"- factory = MariaDbConnectionFactory(load_db_settings(\"crawler\"))",
"+ factory = MariaDbConnectionFactory(load_db_settings(\"crawler_test\"))",
" with factory.session() as conn:",
" cur = conn.cursor()",
" cur.execute(\"SET FOREIGN_KEY_CHECKS=0\")"
]
},
{
"oldStart": 64,
"oldLines": 7,
"newStart": 64,
"newLines": 7,
"lines": [
" ",
" @pytest.mark.asyncio",
" async def test_end_to_end_crawl_against_fixture(fixture_server: str) -> None:",
"- factory = MariaDbConnectionFactory(load_db_settings(\"crawler\"))",
"+ factory = MariaDbConnectionFactory(load_db_settings(\"crawler_test\"))",
" container = Container(factory)",
" command = CrawlCliInput(",
" base_url=fixture_server,"
]
}
],
"userModified": false,
"replaceAll": true
}
}