diff --git a/app/Services/AIAnalysisService.php b/app/Services/AIAnalysisService.php
index 9035f47..012a66d 100644
--- a/app/Services/AIAnalysisService.php
+++ b/app/Services/AIAnalysisService.php
@@ -92,11 +92,12 @@ class AIAnalysisService
 
         // For now, I'll use a mocked response or try to use a generic endpoint if configured.
         // I'll check if the user has an Ollama endpoint.
         $ollamaUrl = config('services.ollama.url', 'http://localhost:11434/api/generate');
+        $ollamaModel = config('services.ollama.model', 'mistral');
 
         try {
             $response = Http::timeout(120)->post($ollamaUrl, [
-                'model' => 'mistral',
+                'model' => $ollamaModel,
                 'prompt' => $prompt,
                 'stream' => false,
                 'format' => 'json'