Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-05-23 12:58:18 -05:00)
Fix openai api key, config settings saving
parent 51c47707bb
commit 7ab15cda96
@@ -56,7 +56,7 @@ export const ConfigCategory = {
 export const LLMEmbeddingBackendConfig = {
   OPENAI: 'openai',
-  LOCAL: 'local',
+  HUGGINGFACE: 'huggingface',
 }
 
 export const LLMBackendConfig = {
@@ -28,7 +28,7 @@ class AIClient:
         elif self.settings.llm_backend == "openai":
             return OpenAI(
                 model=self.settings.llm_model or "gpt-3.5-turbo",
-                api_key=self.settings.openai_api_key,
+                api_key=self.settings.llm_api_key,
             )
         else:
             raise ValueError(f"Unsupported LLM backend: {self.settings.llm_backend}")
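The only functional change in this hunk is the attribute name: the OpenAI branch now reads the shared llm_api_key setting instead of an OpenAI-specific openai_api_key field. A minimal sketch of the idea, using a hypothetical AISettings dataclass and plain dicts in place of the real llama-index client classes (only "gpt-3.5-turbo" and the field names come from the diff; the rest is illustrative):

from dataclasses import dataclass


@dataclass
class AISettings:
    # Hypothetical stand-in for the project's AI config object.
    llm_backend: str = "openai"
    llm_model: str | None = None
    llm_api_key: str | None = None


def build_llm(settings: AISettings) -> dict:
    # Dispatch on the configured backend; only the OpenAI branch needs the key.
    if settings.llm_backend == "ollama":
        return {"backend": "ollama", "model": settings.llm_model}
    elif settings.llm_backend == "openai":
        # After this commit the key comes from the backend-agnostic field.
        return {
            "backend": "openai",
            "model": settings.llm_model or "gpt-3.5-turbo",
            "api_key": settings.llm_api_key,
        }
    raise ValueError(f"Unsupported LLM backend: {settings.llm_backend}")


print(build_llm(AISettings(llm_api_key="sk-test")))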
@@ -203,6 +203,11 @@ class ApplicationConfigurationSerializer(serializers.ModelSerializer):
             data["barcode_tag_mapping"] = None
         if "language" in data and data["language"] == "":
             data["language"] = None
+        if "llm_api_key" in data and data["llm_api_key"] is not None:
+            if data["llm_api_key"] == "":
+                data["llm_api_key"] = None
+            elif len(data["llm_api_key"].replace("*", "")) == 0:
+                del data["llm_api_key"]
         return super().run_validation(data)
 
     def update(self, instance, validated_data):
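The serializer addition is the part that fixes saving the configuration. Judging from the asterisk check, the frontend appears to echo the stored key back as a masked placeholder, so the new branch treats an empty string as "clear the key" and an all-asterisk value as "keep the existing key". A standalone sketch of just that rule; the real logic runs inside run_validation() of a DRF ModelSerializer, and normalize_llm_api_key is an illustrative name, not the project's:

def normalize_llm_api_key(data: dict) -> dict:
    # Mirrors the new run_validation() branch for the llm_api_key field.
    if "llm_api_key" in data and data["llm_api_key"] is not None:
        if data["llm_api_key"] == "":
            # Explicit empty string: the user cleared the field, store None.
            data["llm_api_key"] = None
        elif len(data["llm_api_key"].replace("*", "")) == 0:
            # Nothing but asterisks: the masked placeholder came back unchanged,
            # so drop the field and leave the stored key untouched.
            del data["llm_api_key"]
    return data


print(normalize_llm_api_key({"llm_api_key": ""}))            # {'llm_api_key': None}
print(normalize_llm_api_key({"llm_api_key": "********"}))    # {}
print(normalize_llm_api_key({"llm_api_key": "sk-new-key"}))  # passed through unchanged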
@@ -45,7 +45,7 @@ def test_get_llm_ollama(mock_ai_config, mock_ollama_llm):
 def test_get_llm_openai(mock_ai_config, mock_openai_llm):
     mock_ai_config.llm_backend = "openai"
     mock_ai_config.llm_model = "test_model"
-    mock_ai_config.openai_api_key = "test_api_key"
+    mock_ai_config.llm_api_key = "test_api_key"
 
     client = AIClient()
 
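The test follows the rename: the mocked config now sets llm_api_key, and the patched OpenAI constructor is expected to receive that value. A self-contained sketch of the same assertion pattern with unittest.mock; build_openai_client and the fixture-free layout are illustrative, not the project's actual test helpers:

from unittest import mock


def build_openai_client(settings, openai_cls):
    # Same shape as the OpenAI branch in AIClient, with the class injected.
    return openai_cls(
        model=settings.llm_model or "gpt-3.5-turbo",
        api_key=settings.llm_api_key,
    )


def test_get_llm_openai():
    config = mock.Mock()
    config.llm_backend = "openai"
    config.llm_model = "test_model"
    config.llm_api_key = "test_api_key"

    fake_openai = mock.Mock()
    build_openai_client(config, fake_openai)
    fake_openai.assert_called_once_with(model="test_model", api_key="test_api_key")


if __name__ == "__main__":
    test_get_llm_openai()
    print("ok")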