<?php
declare(strict_types=1);
namespace Infrastructure\AI;
/**
* Central configuration for available AI models.
* Used by Chat, Content Studio, and other AI-powered features.
*/
final class ModelConfig
{
    /**
     * Registry of selectable models, keyed by model ID with a
     * human-readable display label as value.
     *
     * @var array<string, string>
     */
    public const MODELS = [
        // Hosted Anthropic Claude models
        'claude-opus-4-5-20251101' => 'Claude Opus 4.5',
        'claude-sonnet-4-20250514' => 'Claude Sonnet 4',
        // Models served by a local Ollama instance
        'ollama:gemma3:4b-it-qat' => 'Gemma 3 (lokal)',
        'ollama:mistral:latest' => 'Mistral (lokal)',
        'ollama:llama3.2:latest' => 'Llama 3.2 (lokal)',
        'ollama:gpt-oss:20b' => 'GPT-OSS 20B (lokal)',
    ];

    /**
     * Model used when the caller did not pick one (or picked an
     * invalid one); a local Ollama model keeps costs down.
     */
    public const DEFAULT_MODEL = 'ollama:gemma3:4b-it-qat';

    /**
     * Return the full model registry.
     *
     * @return array<string, string> model ID => display label
     */
    public static function getAll(): array
    {
        return self::MODELS;
    }

    /**
     * Resolve the display label for a model ID; an unknown ID falls
     * back to the raw ID itself so the UI never shows an empty label.
     */
    public static function getLabel(string $modelId): string
    {
        return array_key_exists($modelId, self::MODELS)
            ? self::MODELS[$modelId]
            : $modelId;
    }

    /**
     * Check whether a model ID is acceptable: either listed in the
     * registry, or belonging to a known provider namespace.
     */
    public static function isValid(string $modelId): bool
    {
        if (array_key_exists($modelId, self::MODELS)) {
            return true;
        }

        // Unlisted IDs are still accepted when they target a known
        // provider prefix (Anthropic Claude or local Ollama).
        foreach (['claude-', 'ollama:'] as $prefix) {
            if (str_starts_with($modelId, $prefix)) {
                return true;
            }
        }

        return false;
    }

    /**
     * Return the given model ID unchanged when valid, otherwise the
     * configured default model.
     */
    public static function validate(string $modelId): string
    {
        if (self::isValid($modelId)) {
            return $modelId;
        }

        return self::DEFAULT_MODEL;
    }

    /**
     * Whether the model runs locally via Ollama (such IDs carry the
     * "ollama:" prefix).
     */
    public static function isLocal(string $modelId): bool
    {
        return str_starts_with($modelId, 'ollama:');
    }
}