Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-09-08 21:23:44 -05:00
Basic start of update endpoint
@@ -113,6 +113,12 @@ class ConsumerPluginMixin:
         self.filename = self.metadata.filename or self.input_doc.original_file.name
 
+        if input_doc.head_version_id:
+            self.log.debug(f"Document head version id: {input_doc.head_version_id}")
+            head_version = Document.objects.get(pk=input_doc.head_version_id)
+            version_index = head_version.versions.count()
+            self.filename += f"_v{version_index}"
+
     def _send_progress(
         self,
         current_progress: int,
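For illustration, a minimal sketch of the filename suffixing added above (the file name and version count are invented; note that the suffix lands after the file extension):

    # Sketch: the head document already has two stored versions, so
    # head_version.versions.count() returns 2 and an uploaded
    # "scan.pdf" ends up stored under the name "scan.pdf_v2".
    filename = "scan.pdf"
    version_index = 2  # head_version.versions.count()
    filename += f"_v{version_index}"
    assert filename == "scan.pdf_v2"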
@@ -470,12 +476,28 @@ class ConsumerPlugin(
         try:
             with transaction.atomic():
                 # store the document.
-                document = self._store(
-                    text=text,
-                    date=date,
-                    page_count=page_count,
-                    mime_type=mime_type,
-                )
+                if self.input_doc.head_version_id:
+                    # If this is a new version of an existing document, we need
+                    # to make sure we're not creating a new document, but updating
+                    # the existing one.
+                    original_document = Document.objects.get(
+                        pk=self.input_doc.head_version_id,
+                    )
+                    self.log.debug("Saving record for updated version to database")
+                    original_document.pk = None
+                    original_document.head_version = Document.objects.get(
+                        pk=self.input_doc.head_version_id,
+                    )
+                    original_document.modified = timezone.now()
+                    original_document.save()
+                    document = original_document
+                else:
+                    document = self._store(
+                        text=text,
+                        date=date,
+                        page_count=page_count,
+                        mime_type=mime_type,
+                    )
 
                 # If we get here, it was successful. Proceed with post-consume
                 # hooks. If they fail, nothing will get changed.
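The version branch above relies on Django's clone-by-clearing-pk idiom: resetting pk to None before save() makes the ORM issue an INSERT, so the row fetched for the head document becomes a brand-new record for the new version. A standalone sketch of that idiom (simplified; the primary key value is hypothetical):

    from django.utils import timezone

    from documents.models import Document

    head_id = 123  # hypothetical pk of the head document
    new_version = Document.objects.get(pk=head_id)
    new_version.pk = None                  # cleared pk -> save() inserts a new row
    new_version.head_version_id = head_id  # the new row points back at its head document
    new_version.modified = timezone.now()
    new_version.save()                     # new_version now refers to the freshly created row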
@@ -156,6 +156,7 @@ class ConsumableDocument:
 
     source: DocumentSource
     original_file: Path
+    head_version_id: int | None = None
    mailrule_id: int | None = None
     mime_type: str = dataclasses.field(init=False, default=None)
 
@@ -1740,6 +1740,15 @@ class PostDocumentSerializer(serializers.Serializer):
         return created.date()
 
 
+class DocumentVersionSerializer(serializers.Serializer):
+    document = serializers.FileField(
+        label="Document",
+        write_only=True,
+    )
+
+    validate_document = PostDocumentSerializer().validate_document
+
+
 class BulkDownloadSerializer(DocumentListSerializer):
     content = serializers.ChoiceField(
         choices=["archive", "originals", "both"],
@@ -145,13 +145,17 @@ def consume_file(
     if overrides is None:
         overrides = DocumentMetadataOverrides()
 
-    plugins: list[type[ConsumeTaskPlugin]] = [
-        ConsumerPreflightPlugin,
-        CollatePlugin,
-        BarcodePlugin,
-        WorkflowTriggerPlugin,
-        ConsumerPlugin,
-    ]
+    plugins: list[type[ConsumeTaskPlugin]] = (
+        [ConsumerPreflightPlugin, ConsumerPlugin]
+        if input_doc.head_version_id is not None
+        else [
+            ConsumerPreflightPlugin,
+            CollatePlugin,
+            BarcodePlugin,
+            WorkflowTriggerPlugin,
+            ConsumerPlugin,
+        ]
+    )
 
     with (
         ProgressManager(
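With this change, a task whose input document carries a head_version_id runs only the preflight and consumer plugins, skipping collation, barcode handling, and workflow triggers. A rough sketch of queueing such a task so that the reduced pipeline is selected (mirrors the view code later in this commit; temp_file_path and existing_doc are placeholders):

    from documents.data_models import ConsumableDocument
    from documents.data_models import DocumentSource
    from documents.tasks import consume_file

    input_doc = ConsumableDocument(
        source=DocumentSource.ApiUpload,
        original_file=temp_file_path,     # path to the uploaded replacement file
        head_version_id=existing_doc.pk,  # marks the upload as a new version
    )
    consume_file.delay(input_doc)  # head_version_id is set -> only preflight + consumer plugins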
@@ -147,6 +147,7 @@ from documents.serialisers import CustomFieldSerializer
 from documents.serialisers import DocumentListSerializer
 from documents.serialisers import DocumentSerializer
 from documents.serialisers import DocumentTypeSerializer
+from documents.serialisers import DocumentVersionSerializer
 from documents.serialisers import NotesSerializer
 from documents.serialisers import PostDocumentSerializer
 from documents.serialisers import RunTaskViewSerializer
@@ -1096,6 +1097,56 @@ class DocumentViewSet(
                 "Error emailing document, check logs for more detail.",
             )
 
+    @action(methods=["post"], detail=True)
+    def update_version(self, request, pk=None):
+        serializer = DocumentVersionSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+
+        try:
+            doc = Document.objects.select_related("owner").get(pk=pk)
+            if request.user is not None and not has_perms_owner_aware(
+                request.user,
+                "change_document",
+                doc,
+            ):
+                return HttpResponseForbidden("Insufficient permissions")
+        except Document.DoesNotExist:
+            raise Http404
+
+        try:
+            doc_name, doc_data = serializer.validated_data.get("document")
+
+            t = int(mktime(datetime.now().timetuple()))
+
+            settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
+
+            temp_file_path = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / Path(
+                pathvalidate.sanitize_filename(doc_name),
+            )
+
+            temp_file_path.write_bytes(doc_data)
+
+            os.utime(temp_file_path, times=(t, t))
+
+            input_doc = ConsumableDocument(
+                source=DocumentSource.ApiUpload,
+                original_file=temp_file_path,
+                head_version_id=doc.pk,
+            )
+
+            async_task = consume_file.delay(
+                input_doc,
+            )
+            logger.debug(
+                f"Updated document {doc.id} with new version",
+            )
+            return Response(async_task.id)
+        except Exception as e:
+            logger.warning(f"An error occurred updating document: {e!s}")
+            return HttpResponseServerError(
+                "Error updating document, check logs for more detail.",
+            )
+
 
 @extend_schema_view(
     list=extend_schema(