ollama_service.py

Code Hygiene Score: 97

Issues: 1

Line  Type          Description
33    magic_number  Magic number found: 1000
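
One way to clear this finding would be to name the limit where it is defined. A minimal sketch against the code below; NUM_PREDICT_LIMIT is an illustrative name and not part of the file:

# Illustrative only: the constant name is an assumption, not part of ollama_service.py.
NUM_PREDICT_LIMIT = 1000  # cap on tokens Ollama may generate per request

options = {"temperature": 0.3, "num_predict": NUM_PREDICT_LIMIT}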

Dependencies: 6

Classes: 1

Code

"""
Ollama API Service for LLM communication.
"""

import json
import re
import sys

import requests

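# Make the shared pipeline modules (config, db) importable from this script.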
sys.path.insert(0, "/var/www/scripts/pipeline")

from config import OLLAMA_HOST
from db import db

ANALYSIS_MODEL = "gemma3:27b-it-qat"


class OllamaService:
    """Ollama API Wrapper - Single Responsibility: LLM Kommunikation."""

    def __init__(self, host: str = OLLAMA_HOST, model: str = ANALYSIS_MODEL):
        self.host = host
        self.model = model

    def generate(self, prompt: str, json_format: bool = True) -> dict | None:
        """Generiere Antwort von Ollama."""
        try:
            payload = {
                "model": self.model,
                "prompt": prompt,
                "stream": False,
                "options": {"temperature": 0.3, "num_predict": 1000},
            }
            if json_format:
                payload["format"] = "json"

            response = requests.post(f"{self.host}/api/generate", json=payload, timeout=120)
            response.raise_for_status()

            text = response.json().get("response", "{}")
            if json_format:
                return self._parse_json(text)
            return {"text": text}
        except Exception as e:
            db.log("ERROR", f"Ollama error: {e}")
            return None

    def _parse_json(self, text: str) -> dict | None:
        """Parse JSON aus Antwort."""
        try:
            return json.loads(text)
        except json.JSONDecodeError:
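            # Fallback: extract the outermost {...} span and retry, in case the
            # model wrapped the JSON in extra text.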
            match = re.search(r"\{[\s\S]*\}", text)
            if match:
                try:
                    return json.loads(match.group())
                except json.JSONDecodeError:
                    pass
            return None
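
A minimal usage sketch, assuming an Ollama instance is reachable at OLLAMA_HOST; the prompt and the result handling are illustrative only, not part of the file:

# Hypothetical caller; host and model default to OLLAMA_HOST and ANALYSIS_MODEL.
service = OllamaService()
result = service.generate('Summarize this commit message as JSON with a "summary" key.')
if result is None:
    # None means the request failed (already logged via db.log) or the JSON could not be parsed.
    print("Ollama request failed or returned unparseable JSON")
else:
    print(result)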