Fix default llama3.1
```diff
@@ -1873,7 +1873,7 @@ using the OpenAI API. This setting is required to be set to use the AI features.
 
 #### [`PAPERLESS_AI_LLM_MODEL=<str>`](#PAPERLESS_AI_LLM_MODEL) {#PAPERLESS_AI_LLM_MODEL}
 
 : The model to use for the AI backend, i.e. "gpt-3.5-turbo", "gpt-4" or any of the models supported by the
-current backend. If not supplied, defaults to "gpt-3.5-turbo" for OpenAI and "llama3" for Ollama.
+current backend. If not supplied, defaults to "gpt-3.5-turbo" for OpenAI and "llama3.1" for Ollama.
 
 Defaults to None.
```
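For context when applying this change: the documented option is supplied like any other Paperless environment variable. Below is a minimal, hypothetical `.env` sketch; only `PAPERLESS_AI_LLM_MODEL` appears in this diff, and the backend/endpoint variable names are assumptions modelled on the `settings.llm_backend` / `settings.llm_endpoint` fields referenced in the code change further down.

```
# Hypothetical .env snippet, not part of this commit.
# Omit PAPERLESS_AI_LLM_MODEL entirely to fall back to the backend default
# ("llama3.1" for Ollama, "gpt-3.5-turbo" for OpenAI, per the docs above).
PAPERLESS_AI_LLM_MODEL=llama3.1
# Assumed variable names below, mirroring settings.llm_backend / settings.llm_endpoint:
PAPERLESS_AI_LLM_BACKEND=ollama
PAPERLESS_AI_LLM_ENDPOINT=http://localhost:11434
```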
```diff
@@ -23,7 +23,7 @@ class AIClient:
     def get_llm(self) -> Ollama | OpenAI:
         if self.settings.llm_backend == "ollama":
             return Ollama(
-                model=self.settings.llm_model or "llama3",
+                model=self.settings.llm_model or "llama3.1",
                 base_url=self.settings.llm_endpoint or "http://localhost:11434",
                 request_timeout=120,
             )
```
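A side note on the fallback itself: because the default is applied with `or`, any falsy `llm_model` value (an unset `None` or an empty string) now resolves to `llama3.1`. A small standalone Python sketch of that behaviour follows; the helper name is illustrative and not part of the paperless-ngx codebase.

```python
# Standalone sketch of the fallback semantics used in get_llm().
# The helper is hypothetical; only the "or" expression mirrors the changed line.

def resolve_ollama_model(configured: str | None) -> str:
    # `configured or "llama3.1"` returns the default whenever the configured
    # value is falsy (None or ""), matching the line changed in this commit.
    return configured or "llama3.1"


assert resolve_ollama_model(None) == "llama3.1"       # setting not supplied
assert resolve_ollama_model("") == "llama3.1"         # supplied but empty
assert resolve_ollama_model("mistral") == "mistral"   # explicit model wins
```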