Changeup logging
This commit is contained in:
parent 7e3ec32580
commit f3c7c95c69
@@ -7,9 +7,20 @@ logger = logging.getLogger("paperless.ai.client")
 
 
 def run_llm_query(prompt: str) -> str:
-    if settings.LLM_BACKEND == "ollama":
-        return _run_ollama_query(prompt)
-    return _run_openai_query(prompt)
+    logger.debug(
+        "Running LLM query against %s with model %s",
+        settings.LLM_BACKEND,
+        settings.LLM_MODEL,
+    )
+    match settings.LLM_BACKEND:
+        case "openai":
+            result = _run_openai_query(prompt)
+        case "ollama":
+            result = _run_ollama_query(prompt)
+        case _:
+            raise ValueError(f"Unsupported LLM backend: {settings.LLM_BACKEND}")
+    logger.debug("LLM query result: %s", result)
+    return result
 
 
 def _run_ollama_query(prompt: str) -> str:
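For context, a minimal self-contained sketch of how the reworked dispatch behaves. The settings object and the two backend functions below are illustrative stand-ins only (the real _run_openai_query / _run_ollama_query live elsewhere in paperless.ai.client), and note that match/case requires Python 3.10 or newer:

import logging
from types import SimpleNamespace

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("paperless.ai.client")

# Stand-in for django.conf.settings; only the two relevant values.
settings = SimpleNamespace(LLM_BACKEND="ollama", LLM_MODEL="llama3")

def _run_openai_query(prompt: str) -> str:
    return "openai response"  # hypothetical stub

def _run_ollama_query(prompt: str) -> str:
    return "ollama response"  # hypothetical stub

def run_llm_query(prompt: str) -> str:
    # Log which backend/model handles the query before dispatching.
    logger.debug(
        "Running LLM query against %s with model %s",
        settings.LLM_BACKEND,
        settings.LLM_MODEL,
    )
    match settings.LLM_BACKEND:  # structural match, Python 3.10+
        case "openai":
            result = _run_openai_query(prompt)
        case "ollama":
            result = _run_ollama_query(prompt)
        case _:
            # Unknown backends now fail loudly instead of silently
            # falling through to OpenAI as in the old if/else version.
            raise ValueError(f"Unsupported LLM backend: {settings.LLM_BACKEND}")
    logger.debug("LLM query result: %s", result)
    return result

print(run_llm_query("classify this"))  # prints "ollama response"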
@@ -15,6 +15,9 @@ def get_ai_document_classification(document: Document) -> dict:
     filename = document.filename or ""
     content = document.content or ""
 
+    # Limit the content to 10k characters
+    content = content[:10000]
+
     prompt = f"""
 You are a document classification assistant. Based on the content below, return a JSON object suggesting the following classification fields:
 - title: A descriptive title for the document
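The new cap is a plain string slice, presumably to keep the classification prompt within the backend model's context budget. Slicing past the end of a string never raises, so short documents pass through unchanged; a quick sketch:

short = "short document"
assert short[:10000] == short  # shorter than the cap: unchanged

long_text = "x" * 50_000
assert len(long_text[:10000]) == 10_000  # longer: truncated to exactly 10k chars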
@@ -33,9 +36,7 @@ def get_ai_document_classification(document: Document) -> dict:
     """
 
     try:
-        logger.debug(f"LLM classification prompt: {prompt}")
         result = run_llm_query(prompt)
-        logger.debug(f"LLM classification result: {result}")
         suggestions = parse_llm_classification_response(result)
         return suggestions or {}
     except Exception:
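With these two per-call debug lines removed, the prompt text is no longer logged at all, and the result is logged once, centrally, inside run_llm_query (first hunk). A sketch of how one could still surface that output while debugging, assuming nothing beyond the standard library:

import logging

# Route DEBUG records from the AI client logger to stderr.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(name)s %(levelname)s %(message)s"))

ai_logger = logging.getLogger("paperless.ai.client")
ai_logger.addHandler(handler)
ai_logger.setLevel(logging.DEBUG)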