Mirror of https://github.com/paperless-ngx/paperless-ngx.git
Compare commits: fix-11679...feature-be (3 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 016bccdcdf |  |
|  | 92deebddd4 |  |
|  | c7efcee3d6 |  |
@@ -13,7 +13,6 @@ from pikepdf import Page
 from pikepdf import PasswordError
 from pikepdf import Pdf
 
-from documents.consumer import ConsumerPreflightPlugin
 from documents.converters import convert_from_tiff_to_pdf
 from documents.data_models import ConsumableDocument
 from documents.data_models import DocumentMetadataOverrides
@@ -187,22 +186,9 @@ class BarcodePlugin(ConsumeTaskPlugin):
 
         # Update/overwrite an ASN if possible
         # After splitting, as otherwise each split document gets the same ASN
-        if (
-            self.settings.barcode_enable_asn
-            and not self.metadata.skip_asn
-            and (located_asn := self.asn) is not None
-        ):
+        if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
             logger.info(f"Found ASN in barcode: {located_asn}")
             self.metadata.asn = located_asn
-            # (Re-)run the preflight ASN check
-            preflight_plugin = ConsumerPreflightPlugin(
-                input_doc=self.input_doc,
-                metadata=self.metadata,
-                status_mgr=self.status_mgr,
-                base_tmp_dir=self.base_tmp_dir,
-                task_id=self.task_id,
-            )
-            preflight_plugin.pre_check_asn_value()
 
     def cleanup(self) -> None:
         self.temp_dir.cleanup()
@@ -7,7 +7,6 @@ from pathlib import Path
 from typing import TYPE_CHECKING
 from typing import Literal
 
-from celery import chain
 from celery import chord
 from celery import group
 from celery import shared_task
@@ -38,6 +37,42 @@ if TYPE_CHECKING:
 logger: logging.Logger = logging.getLogger("paperless.bulk_edit")
 
 
+@shared_task(bind=True)
+def restore_archive_serial_numbers_task(
+    self,
+    backup: dict[int, int],
+    *args,
+    **kwargs,
+) -> None:
+    restore_archive_serial_numbers(backup)
+
+
+def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
+    """
+    Clears ASNs on documents that are about to be replaced so new documents
+    can be assigned ASNs without uniqueness collisions. Returns a backup map
+    of doc_id -> previous ASN for potential restoration.
+    """
+    qs = Document.objects.filter(
+        id__in=doc_ids,
+        archive_serial_number__isnull=False,
+    ).only("pk", "archive_serial_number")
+    backup = dict(qs.values_list("pk", "archive_serial_number"))
+    qs.update(archive_serial_number=None)
+    logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
+    return backup
+
+
+def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
+    """
+    Restores ASNs using the provided backup map, intended for
+    rollback when replacement consumption fails.
+    """
+    for doc_id, asn in backup.items():
+        Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
+    logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
+
+
 def set_correspondent(
     doc_ids: list[int],
     correspondent: Correspondent,
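Taken together, these two helpers form a release/restore round trip: release the ASNs up front, keep the returned backup map, and put the ASNs back if the replacement step fails. A minimal sketch of the intended flow, assuming a configured Django environment; `replace_documents` is a hypothetical stand-in for the consumption step:

    from documents.bulk_edit import (
        release_archive_serial_numbers,
        restore_archive_serial_numbers,
    )

    doc_ids = [10, 11, 12]                            # illustrative ids
    backup = release_archive_serial_numbers(doc_ids)  # e.g. {10: 101, 12: 103}
    try:
        replace_documents(doc_ids)                # hypothetical consumption step
    except Exception:
        restore_archive_serial_numbers(backup)    # undo the release on failure
        raise

Documents that never had an ASN simply do not appear in the backup map, so restoring them is a no-op.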
@@ -386,6 +421,7 @@ def merge(
 
     merged_pdf = pikepdf.new()
     version: str = merged_pdf.pdf_version
+    handoff_asn: int | None = None
     # use doc_ids to preserve order
     for doc_id in doc_ids:
         doc = qs.get(id=doc_id)
@@ -401,6 +437,8 @@ def merge(
             version = max(version, pdf.pdf_version)
             merged_pdf.pages.extend(pdf.pages)
             affected_docs.append(doc.id)
+            if handoff_asn is None and doc.archive_serial_number is not None:
+                handoff_asn = doc.archive_serial_number
         except Exception as e:
             logger.exception(
                 f"Error merging document {doc.id}, it will not be included in the merge: {e}",
@@ -426,6 +464,8 @@ def merge(
                 DocumentMetadataOverrides.from_document(metadata_document)
             )
             overrides.title = metadata_document.title + " (merged)"
+            if metadata_document.archive_serial_number is not None:
+                handoff_asn = metadata_document.archive_serial_number
         else:
             overrides = DocumentMetadataOverrides()
     else:
@@ -433,8 +473,9 @@ def merge(
 
     if user is not None:
         overrides.owner_id = user.id
-    # Avoid copying or detecting ASN from merged PDFs to prevent collision
-    overrides.skip_asn = True
+
+    if delete_originals and handoff_asn is not None:
+        overrides.asn = handoff_asn
 
     logger.info("Adding merged document to the task queue.")
 
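The precedence here is: the first merged document that carries an ASN donates it, an explicitly chosen metadata document with an ASN overrides that, and the ASN is only handed off at all when the originals are deleted, since otherwise it would collide with the still-existing source document. A toy illustration of the same logic outside the ORM (plain dicts stand in for Document rows):

    docs = [{"id": 1, "asn": None}, {"id": 2, "asn": 42}, {"id": 3, "asn": 77}]

    handoff_asn = None
    for doc in docs:
        if handoff_asn is None and doc["asn"] is not None:
            handoff_asn = doc["asn"]        # first document with an ASN: 42

    metadata_doc = docs[2]                  # user-picked metadata document
    if metadata_doc["asn"] is not None:
        handoff_asn = metadata_doc["asn"]   # metadata document wins: 77

    delete_originals = True
    if delete_originals and handoff_asn is not None:
        print(f"merged document inherits ASN {handoff_asn}")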
@@ -447,12 +488,20 @@ def merge(
     )
 
     if delete_originals:
+        backup = release_archive_serial_numbers(affected_docs)
         logger.info(
             "Queueing removal of original documents after consumption of merged document",
         )
-        chain(consume_task, delete.si(affected_docs)).delay()
-    else:
-        consume_task.delay()
+        try:
+            consume_task.apply_async(
+                link=[delete.si(affected_docs)],
+                link_error=[restore_archive_serial_numbers_task.s(backup)],
+            )
+        except Exception:
+            restore_archive_serial_numbers(backup)
+            raise
+    else:
+        consume_task.delay()
 
     return "OK"
 
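For context on the queueing change: `link` signatures run after the task succeeds, `link_error` signatures run if it fails, and Celery invokes error callbacks with arguments of its own (the failed task's id, or the request, exception, and traceback, depending on the callback's parameters); the permissive `*args, **kwargs` on `restore_archive_serial_numbers_task` is what tolerates that. A standalone sketch of the pattern with toy tasks, using immutable signatures (`.si`) so the callbacks receive exactly the arguments they are given:

    from celery import Celery

    app = Celery("sketch", broker="memory://", backend="cache+memory://")

    @app.task
    def consume() -> str:
        raise RuntimeError("consumption failed")

    @app.task
    def delete_originals(doc_ids: list[int]) -> None:
        print("deleting", doc_ids)            # runs only if consume() succeeds

    @app.task
    def restore(backup: dict[int, int]) -> None:
        print("restoring ASNs from", backup)  # runs only if consume() fails

    # .si(...) pins the callback arguments regardless of what Celery would
    # otherwise prepend or append.
    consume.apply_async(
        link=[delete_originals.si([10, 11])],
        link_error=[restore.si({10: 101, 11: 102})],
    )

The surrounding try/except additionally covers the synchronous failure mode: if `apply_async` itself raises (for example, the broker is down), the ASNs are restored immediately instead of waiting for an errback that will never fire.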
@@ -508,10 +557,20 @@ def split(
         )
 
     if delete_originals:
+        backup = release_archive_serial_numbers([doc.id])
         logger.info(
             "Queueing removal of original document after consumption of the split documents",
         )
-        chord(header=consume_tasks, body=delete.si([doc.id])).delay()
+        try:
+            chord(
+                header=consume_tasks,
+                body=delete.si([doc.id]),
+            ).apply_async(
+                link_error=[restore_archive_serial_numbers_task.s(backup)],
+            )
+        except Exception:
+            restore_archive_serial_numbers(backup)
+            raise
     else:
         group(consume_tasks).delay()
 
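The split path queues a chord instead of a single task: the header group of per-page consume tasks must all complete before the body deletes the original document. The `link_error` option on the chord is intended to route a failure in that pipeline to the restore task rather than letting the released ASN be lost. A toy sketch of the shape (names are illustrative, not the project's):

    from celery import Celery, chord

    app = Celery("sketch", broker="memory://", backend="cache+memory://")

    @app.task
    def consume_page(page: int) -> int:
        return page                          # stand-in for one split document

    @app.task
    def delete_original(doc_ids: list[int]) -> None:
        print("deleting original", doc_ids)  # body: runs after the whole header

    @app.task
    def restore(backup: dict[int, int]) -> None:
        print("restoring ASNs from", backup)

    chord(
        header=[consume_page.s(p) for p in (1, 2, 3)],
        body=delete_original.si([10]),       # .si: ignore the header results
    ).apply_async(
        link_error=[restore.si({10: 101})],
    )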
@@ -614,7 +673,8 @@ def edit_pdf(
     )
     if user is not None:
         overrides.owner_id = user.id
-
+    if delete_original and len(pdf_docs) == 1:
+        overrides.asn = doc.archive_serial_number
     for idx, pdf in enumerate(pdf_docs, start=1):
         filepath: Path = (
             Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
@@ -633,7 +693,17 @@ def edit_pdf(
         )
 
     if delete_original:
-        chord(header=consume_tasks, body=delete.si([doc.id])).delay()
+        backup = release_archive_serial_numbers([doc.id])
+        try:
+            chord(
+                header=consume_tasks,
+                body=delete.si([doc.id]),
+            ).apply_async(
+                link_error=[restore_archive_serial_numbers_task.s(backup)],
+            )
+        except Exception:
+            restore_archive_serial_numbers(backup)
+            raise
     else:
         group(consume_tasks).delay()
 
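At this point the same release/queue/restore sequence appears three times, in merge, split, and edit_pdf. One way to keep the rollback in a single place would be a small context manager; this helper is not part of the diff, just a sketch assuming the two module-level functions behave as defined above:

    from contextlib import contextmanager

    @contextmanager
    def released_asns(doc_ids: list[int]):
        # Hypothetical helper: release ASNs for doc_ids and restore them if
        # the body of the with-block raises before the work is queued.
        backup = release_archive_serial_numbers(doc_ids)
        try:
            yield backup
        except Exception:
            restore_archive_serial_numbers(backup)
            raise

    # Usage in the merge path would then read:
    #     with released_asns(affected_docs) as backup:
    #         consume_task.apply_async(
    #             link=[delete.si(affected_docs)],
    #             link_error=[restore_archive_serial_numbers_task.s(backup)],
    #         )

Note this only collapses the synchronous half of the rollback; failures inside the workers still rely on the `link_error` signature.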
@@ -696,7 +696,7 @@ class ConsumerPlugin(
                 pk=self.metadata.storage_path_id,
             )
 
-        if self.metadata.asn is not None and not self.metadata.skip_asn:
+        if self.metadata.asn is not None:
             document.archive_serial_number = self.metadata.asn
 
         if self.metadata.owner_id:
@@ -812,8 +812,8 @@ class ConsumerPreflightPlugin(
         """
        Check that if override_asn is given, it is unique and within a valid range
         """
-        if self.metadata.skip_asn or self.metadata.asn is None:
-            # if skip is set or ASN is None
+        if self.metadata.asn is None:
+            # if ASN is None
             return
         # Validate the range is above zero and less than uint32_t max
         # otherwise, Whoosh can't handle it in the index
@@ -30,7 +30,6 @@ class DocumentMetadataOverrides:
     change_users: list[int] | None = None
     change_groups: list[int] | None = None
     custom_fields: dict | None = None
-    skip_asn: bool = False
 
     def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
         """
||||
@@ -50,8 +49,6 @@ class DocumentMetadataOverrides:
|
||||
self.storage_path_id = other.storage_path_id
|
||||
if other.owner_id is not None:
|
||||
self.owner_id = other.owner_id
|
||||
if other.skip_asn:
|
||||
self.skip_asn = True
|
||||
|
||||
# merge
|
||||
if self.tag_ids is None:
|
||||
|
||||
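For orientation, `update` copies a scalar field from the incoming overrides only when that field is actually set, and merges list-valued fields such as tags; with `skip_asn` gone there is no boolean left to propagate. A small illustration (field values are made up, and the exact merge result for lists is an assumption based on the `# merge` branch above):

    from documents.data_models import DocumentMetadataOverrides

    base = DocumentMetadataOverrides(title="Scan", owner_id=1, tag_ids=[1, 2])
    incoming = DocumentMetadataOverrides(owner_id=2, tag_ids=[3])

    base.update(incoming)
    print(base.owner_id)  # 2: incoming set it, so it overwrites
    print(base.title)     # "Scan": incoming left it as None, so it is kept
    print(base.tag_ids)   # tag lists are merged, e.g. [1, 2, 3]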
@@ -11,7 +11,6 @@ from django.test import override_settings
 
 from documents import tasks
 from documents.barcodes import BarcodePlugin
-from documents.consumer import ConsumerError
 from documents.data_models import ConsumableDocument
 from documents.data_models import DocumentMetadataOverrides
 from documents.data_models import DocumentSource
@@ -94,41 +93,6 @@ class TestBarcode(
 
         self.assertDictEqual(separator_page_numbers, {1: False})
 
-    @override_settings(CONSUMER_ENABLE_ASN_BARCODE=True)
-    def test_asn_barcode_duplicate_in_trash_fails(self):
-        """
-        GIVEN:
-            - A document with ASN barcode 123 is in the trash
-        WHEN:
-            - A file with the same barcode ASN is consumed
-        THEN:
-            - The ASN check is re-run and consumption fails
-        """
-        test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-123.pdf"
-
-        first_doc = Document.objects.create(
-            title="First ASN 123",
-            content="",
-            checksum="asn123first",
-            mime_type="application/pdf",
-            archive_serial_number=123,
-        )
-
-        first_doc.delete()
-
-        dupe_asn = settings.SCRATCH_DIR / "barcode-39-asn-123-second.pdf"
-        shutil.copy(test_file, dupe_asn)
-
-        with mock.patch("documents.tasks.ProgressManager", DummyProgressManager):
-            with self.assertRaisesRegex(ConsumerError, r"ASN 123.*trash"):
-                tasks.consume_file(
-                    ConsumableDocument(
-                        source=DocumentSource.ConsumeFolder,
-                        original_file=dupe_asn,
-                    ),
-                    None,
-                )
-
     @override_settings(
         CONSUMER_BARCODE_TIFF_SUPPORT=True,
     )
@@ -602,23 +602,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             expected_filename,
         )
         self.assertEqual(consume_file_args[1].title, None)
-        self.assertTrue(consume_file_args[1].skip_asn)
+        # No metadata_document_id, delete_originals False, so ASN should be None
+        self.assertIsNone(consume_file_args[1].asn)
 
         # With metadata_document_id overrides
         result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(consume_file_args[1].title, "B (merged)")
         self.assertEqual(consume_file_args[1].created, self.doc2.created)
-        self.assertTrue(consume_file_args[1].skip_asn)
 
         self.assertEqual(result, "OK")
 
     @mock.patch("documents.bulk_edit.delete.si")
     @mock.patch("documents.tasks.consume_file.s")
-    @mock.patch("documents.bulk_edit.chain")
     def test_merge_and_delete_originals(
         self,
-        mock_chain,
         mock_consume_file,
         mock_delete_documents,
     ):
@@ -632,6 +630,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             - Document deletion task should be called
         """
         doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
+        self.doc1.archive_serial_number = 101
+        self.doc2.archive_serial_number = 102
+        self.doc3.archive_serial_number = 103
+        self.doc1.save()
+        self.doc2.save()
+        self.doc3.save()
 
         result = bulk_edit.merge(doc_ids, delete_originals=True)
         self.assertEqual(result, "OK")
@@ -642,7 +646,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
         mock_consume_file.assert_called()
         mock_delete_documents.assert_called()
-        mock_chain.assert_called_once()
+        consume_sig = mock_consume_file.return_value
+        consume_sig.apply_async.assert_called_once()
 
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(
@@ -650,7 +655,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             expected_filename,
         )
         self.assertEqual(consume_file_args[1].title, None)
-        self.assertTrue(consume_file_args[1].skip_asn)
+        self.assertEqual(consume_file_args[1].asn, 101)
 
         delete_documents_args, _ = mock_delete_documents.call_args
         self.assertEqual(
@@ -658,6 +663,13 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             doc_ids,
         )
 
+        self.doc1.refresh_from_db()
+        self.doc2.refresh_from_db()
+        self.doc3.refresh_from_db()
+        self.assertIsNone(self.doc1.archive_serial_number)
+        self.assertIsNone(self.doc2.archive_serial_number)
+        self.assertIsNone(self.doc3.archive_serial_number)
+
     @mock.patch("documents.tasks.consume_file.s")
     def test_merge_with_archive_fallback(self, mock_consume_file):
         """
@@ -726,6 +738,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         self.assertEqual(mock_consume_file.call_count, 2)
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(consume_file_args[1].title, "B (split 2)")
+        self.assertIsNone(consume_file_args[1].asn)
 
         self.assertEqual(result, "OK")
 
@@ -750,6 +763,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         """
         doc_ids = [self.doc2.id]
         pages = [[1, 2], [3]]
+        self.doc2.archive_serial_number = 200
+        self.doc2.save()
 
         result = bulk_edit.split(doc_ids, pages, delete_originals=True)
         self.assertEqual(result, "OK")
@@ -767,6 +782,9 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             doc_ids,
         )
 
+        self.doc2.refresh_from_db()
+        self.assertIsNone(self.doc2.archive_serial_number)
+
     @mock.patch("documents.tasks.consume_file.delay")
     @mock.patch("pikepdf.Pdf.save")
     def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
@@ -967,10 +985,16 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_chord.return_value.delay.return_value = None
         doc_ids = [self.doc2.id]
         operations = [{"page": 1}, {"page": 2}]
+        self.doc2.archive_serial_number = 250
+        self.doc2.save()
 
         result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
         self.assertEqual(result, "OK")
         mock_chord.assert_called_once()
+        consume_file_args, _ = mock_consume_file.call_args
+        self.assertEqual(consume_file_args[1].asn, 250)
+        self.doc2.refresh_from_db()
+        self.assertIsNone(self.doc2.archive_serial_number)
 
     @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
     def test_edit_pdf_with_update_document(self, mock_update_document):
@@ -412,14 +412,6 @@ class TestConsumer(
         self.assertEqual(document.archive_serial_number, 123)
         self._assert_first_last_send_progress()
 
-    def testMetadataOverridesSkipAsnPropagation(self):
-        overrides = DocumentMetadataOverrides()
-        incoming = DocumentMetadataOverrides(skip_asn=True)
-
-        overrides.update(incoming)
-
-        self.assertTrue(overrides.skip_asn)
-
     def testOverrideTitlePlaceholders(self):
         c = Correspondent.objects.create(name="Correspondent Name")
         dt = DocumentType.objects.create(name="DocType Name")