<?php
declare(strict_types=1);
namespace Infrastructure\AI;
// @responsibility: Legacy interface for model configuration (delegates to ModelRegistry)
final class ModelConfig
{
    /**
     * Shared registry accessor — every public method funnels through here.
     */
    private static function registry(): ModelRegistry
    {
        return ModelRegistry::getInstance();
    }

    /**
     * All available chat models.
     *
     * @return array<string, string> [model_key => display_name]
     */
    public static function getAll(): array
    {
        return self::registry()->getChatModels();
    }

    /**
     * Models capable of vision (image) input.
     *
     * @return array<string, string> [model_key => display_name]
     */
    public static function getVisionModels(): array
    {
        return self::registry()->getVisionModels();
    }

    /**
     * Human-readable display label for a model ID.
     */
    public static function getLabel(string $modelId): string
    {
        return self::registry()->getLabel($modelId);
    }

    /**
     * Whether a model ID is acceptable.
     *
     * Registry-known IDs pass; otherwise any 'claude-' or 'ollama:'
     * prefixed ID is accepted as a deliberate fallback.
     */
    public static function isValid(string $modelId): bool
    {
        return self::registry()->isValid($modelId)
            || str_starts_with($modelId, 'claude-')
            || str_starts_with($modelId, 'ollama:');
    }

    /**
     * Return the given model ID if valid, else the default chat model.
     */
    public static function validate(string $modelId): string
    {
        if (self::isValid($modelId)) {
            return $modelId;
        }

        return self::getDefaultModel();
    }

    /**
     * Whether the model runs locally via Ollama ('ollama:' prefix).
     */
    public static function isLocal(string $modelId): bool
    {
        return str_starts_with($modelId, 'ollama:');
    }

    /**
     * Default chat model key.
     */
    public static function getDefaultModel(): string
    {
        return self::registry()->getDefaultChatModel();
    }

    /**
     * Default vision model key.
     */
    public static function getDefaultVisionModel(): string
    {
        return self::registry()->getDefaultVisionModel();
    }

    /**
     * Invalidate the cached model list (e.g., after an Ollama sync).
     */
    public static function clearCache(): void
    {
        ModelRegistry::clearCache();
    }

    /**
     * Trigger a model sync from Ollama.
     *
     * @return array Sync results
     */
    public static function syncFromOllama(): array
    {
        return self::registry()->syncFromOllama();
    }
}