Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-10-02 01:42:50 -05:00)
Fix this migration
@@ -1,4 +1,4 @@
-# Generated by Django 5.1.8 on 2025-04-30 02:38
+# Generated by Django 5.2.6 on 2025-09-30 17:43
 
 from django.db import migrations
 from django.db import models
@@ -25,6 +25,7 @@ class Migration(migrations.Migration):
             field=models.CharField(
                 blank=True,
                 max_length=128,
+                null=True,
                 verbose_name="Sets the LLM API key",
             ),
         ),
@@ -35,6 +36,7 @@ class Migration(migrations.Migration):
                 blank=True,
                 choices=[("openai", "OpenAI"), ("ollama", "Ollama")],
                 max_length=32,
+                null=True,
                 verbose_name="Sets the LLM backend",
             ),
         ),
@@ -45,6 +47,7 @@ class Migration(migrations.Migration):
                 blank=True,
                 choices=[("openai", "OpenAI"), ("huggingface", "Huggingface")],
                 max_length=32,
+                null=True,
                 verbose_name="Sets the LLM embedding backend",
             ),
         ),
@@ -54,25 +57,28 @@ class Migration(migrations.Migration):
             field=models.CharField(
                 blank=True,
                 max_length=32,
+                null=True,
                 verbose_name="Sets the LLM embedding model",
             ),
         ),
-        migrations.AddField(
-            model_name="applicationconfiguration",
-            name="llm_model",
-            field=models.CharField(
-                blank=True,
-                max_length=32,
-                verbose_name="Sets the LLM model",
-            ),
-        ),
         migrations.AddField(
             model_name="applicationconfiguration",
             name="llm_endpoint",
             field=models.CharField(
                 blank=True,
                 max_length=128,
+                null=True,
                 verbose_name="Sets the LLM endpoint, optional",
             ),
         ),
+        migrations.AddField(
+            model_name="applicationconfiguration",
+            name="llm_model",
+            field=models.CharField(
+                blank=True,
+                max_length=32,
+                null=True,
+                verbose_name="Sets the LLM model",
+            ),
+        ),
     ]
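The hunks above rework the migration so that every new LLM setting column is nullable, and the llm_model field is removed from its old position and re-added after llm_endpoint with null=True. For reference, a minimal sketch of how the re-added llm_model operation would read once this change is applied, reconstructed from the diff; the dependencies list is an assumed placeholder, not taken from the source:

# Sketch only: reconstructed from the hunks above; dependencies are assumed.
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = []  # assumed placeholder; the real migration points at its predecessor

    operations = [
        migrations.AddField(
            model_name="applicationconfiguration",
            name="llm_model",
            field=models.CharField(
                blank=True,
                max_length=32,
                null=True,
                verbose_name="Sets the LLM model",
            ),
        ),
    ]

The hunks below make the matching change on the ApplicationConfiguration model itself, so the model definition and the migration agree.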
@@ -292,6 +292,7 @@ class ApplicationConfiguration(AbstractSingletonModel):
     llm_embedding_backend = models.CharField(
         verbose_name=_("Sets the LLM embedding backend"),
         blank=True,
+        null=True,
         max_length=32,
         choices=LLMEmbeddingBackend.choices,
     )
@@ -299,12 +300,14 @@ class ApplicationConfiguration(AbstractSingletonModel):
     llm_embedding_model = models.CharField(
         verbose_name=_("Sets the LLM embedding model"),
         blank=True,
+        null=True,
         max_length=32,
     )
 
     llm_backend = models.CharField(
         verbose_name=_("Sets the LLM backend"),
         blank=True,
+        null=True,
         max_length=32,
         choices=LLMBackend.choices,
     )
@@ -312,18 +315,21 @@ class ApplicationConfiguration(AbstractSingletonModel):
     llm_model = models.CharField(
         verbose_name=_("Sets the LLM model"),
         blank=True,
+        null=True,
         max_length=32,
     )
 
     llm_api_key = models.CharField(
         verbose_name=_("Sets the LLM API key"),
         blank=True,
+        null=True,
         max_length=128,
     )
 
     llm_endpoint = models.CharField(
         verbose_name=_("Sets the LLM endpoint, optional"),
         blank=True,
+        null=True,
         max_length=128,
     )
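On these CharFields, blank=True only relaxes validation in forms and serializers; it is null=True that lets the underlying database column store NULL, presumably so that unset LLM options can be stored as NULL rather than forced to empty strings. A minimal illustration with a hypothetical standalone model (not part of paperless-ngx):

# Hypothetical example model, for illustration only.
from django.db import models


class ExampleConfig(models.Model):
    # blank=True: the field may be left empty in forms/serializers.
    # null=True: the database column may hold NULL instead of "".
    api_key = models.CharField(blank=True, null=True, max_length=128)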