Fix default Ollama model to llama3.1

This commit is contained in:
shamoon
2026-01-14 15:36:01 -08:00
parent 948c664dcf
commit 94a5af66eb
2 changed files with 2 additions and 2 deletions

View File

@@ -23,7 +23,7 @@ class AIClient:
def get_llm(self) -> Ollama | OpenAI:
if self.settings.llm_backend == "ollama":
return Ollama(
-                model=self.settings.llm_model or "llama3",
+                model=self.settings.llm_model or "llama3.1",
base_url=self.settings.llm_endpoint or "http://localhost:11434",
request_timeout=120,
)