Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-12 21:44:21 -06:00)

Compare commits: feature-be ... dependabot

3 commits: 9a88301aec, 4347ba1f9c, 7b666e7569

@@ -16,7 +16,6 @@ from pikepdf import Pdf
 from documents.converters import convert_from_tiff_to_pdf
 from documents.data_models import ConsumableDocument
 from documents.data_models import DocumentMetadataOverrides
-from documents.models import Document
 from documents.models import Tag
 from documents.plugins.base import ConsumeTaskPlugin
 from documents.plugins.base import StopConsumeTaskError
@@ -116,24 +115,6 @@ class BarcodePlugin(ConsumeTaskPlugin):
         self._tiff_conversion_done = False
         self.barcodes: list[Barcode] = []
 
-    def _apply_detected_asn(self, detected_asn: int) -> None:
-        """
-        Apply a detected ASN to metadata if allowed.
-        """
-        if (
-            self.metadata.skip_asn_if_exists
-            and Document.global_objects.filter(
-                archive_serial_number=detected_asn,
-            ).exists()
-        ):
-            logger.info(
-                f"Found ASN in barcode {detected_asn} but skipping because it already exists.",
-            )
-            return
-
-        logger.info(f"Found ASN in barcode: {detected_asn}")
-        self.metadata.asn = detected_asn
-
     def run(self) -> None:
         # Some operations may use PIL, override pixel setting if needed
         maybe_override_pixel_limit()
@@ -205,8 +186,13 @@ class BarcodePlugin(ConsumeTaskPlugin):
 
         # Update/overwrite an ASN if possible
         # After splitting, as otherwise each split document gets the same ASN
-        if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
-            self._apply_detected_asn(located_asn)
+        if (
+            self.settings.barcode_enable_asn
+            and not self.metadata.skip_asn
+            and (located_asn := self.asn) is not None
+        ):
+            logger.info(f"Found ASN in barcode: {located_asn}")
+            self.metadata.asn = located_asn
 
     def cleanup(self) -> None:
         self.temp_dir.cleanup()

@@ -7,6 +7,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING
 from typing import Literal
 
+from celery import chain
 from celery import chord
 from celery import group
 from celery import shared_task
@@ -37,42 +38,6 @@ if TYPE_CHECKING:
 logger: logging.Logger = logging.getLogger("paperless.bulk_edit")
 
 
-@shared_task(bind=True)
-def restore_archive_serial_numbers_task(
-    self,
-    backup: dict[int, int],
-    *args,
-    **kwargs,
-) -> None:
-    restore_archive_serial_numbers(backup)
-
-
-def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
-    """
-    Clears ASNs on documents that are about to be replaced so new documents
-    can be assigned ASNs without uniqueness collisions. Returns a backup map
-    of doc_id -> previous ASN for potential restoration.
-    """
-    qs = Document.objects.filter(
-        id__in=doc_ids,
-        archive_serial_number__isnull=False,
-    ).only("pk", "archive_serial_number")
-    backup = dict(qs.values_list("pk", "archive_serial_number"))
-    qs.update(archive_serial_number=None)
-    logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
-    return backup
-
-
-def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
-    """
-    Restores ASNs using the provided backup map, intended for
-    rollback when replacement consumption fails.
-    """
-    for doc_id, asn in backup.items():
-        Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
-    logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
-
-
 def set_correspondent(
     doc_ids: list[int],
     correspondent: Correspondent,
@@ -421,7 +386,6 @@ def merge(
 
     merged_pdf = pikepdf.new()
     version: str = merged_pdf.pdf_version
-    handoff_asn: int | None = None
     # use doc_ids to preserve order
    for doc_id in doc_ids:
         doc = qs.get(id=doc_id)
@@ -437,8 +401,6 @@ def merge(
                 version = max(version, pdf.pdf_version)
                 merged_pdf.pages.extend(pdf.pages)
             affected_docs.append(doc.id)
-            if handoff_asn is None and doc.archive_serial_number is not None:
-                handoff_asn = doc.archive_serial_number
         except Exception as e:
             logger.exception(
                 f"Error merging document {doc.id}, it will not be included in the merge: {e}",
@@ -464,8 +426,6 @@ def merge(
                 DocumentMetadataOverrides.from_document(metadata_document)
             )
             overrides.title = metadata_document.title + " (merged)"
-            if metadata_document.archive_serial_number is not None:
-                handoff_asn = metadata_document.archive_serial_number
         else:
             overrides = DocumentMetadataOverrides()
     else:
@@ -473,11 +433,8 @@ def merge(
 
     if user is not None:
         overrides.owner_id = user.id
-    if not delete_originals:
-        overrides.skip_asn_if_exists = True
+    # Avoid copying or detecting ASN from merged PDFs to prevent collision
+    overrides.skip_asn = True
 
-    if delete_originals and handoff_asn is not None:
-        overrides.asn = handoff_asn
-
     logger.info("Adding merged document to the task queue.")
 
@@ -490,20 +447,12 @@ def merge(
     )
 
     if delete_originals:
-        backup = release_archive_serial_numbers(affected_docs)
         logger.info(
             "Queueing removal of original documents after consumption of merged document",
         )
-        try:
-            consume_task.apply_async(
-                link=[delete.si(affected_docs)],
-                link_error=[restore_archive_serial_numbers_task.s(backup)],
-            )
-        except Exception:
-            restore_archive_serial_numbers(backup)
-            raise
+        chain(consume_task, delete.si(affected_docs)).delay()
     else:
         consume_task.delay()
 
     return "OK"
 
@@ -545,8 +494,6 @@ def split(
                 overrides.title = f"{doc.title} (split {idx + 1})"
                 if user is not None:
                     overrides.owner_id = user.id
-                if not delete_originals:
-                    overrides.skip_asn_if_exists = True
                 logger.info(
                     f"Adding split document with pages {split_doc} to the task queue.",
                 )
@@ -561,20 +508,10 @@ def split(
                 )
 
         if delete_originals:
-            backup = release_archive_serial_numbers([doc.id])
             logger.info(
                 "Queueing removal of original document after consumption of the split documents",
             )
-            try:
-                chord(
-                    header=consume_tasks,
-                    body=delete.si([doc.id]),
-                ).apply_async(
-                    link_error=[restore_archive_serial_numbers_task.s(backup)],
-                )
-            except Exception:
-                restore_archive_serial_numbers(backup)
-                raise
+            chord(header=consume_tasks, body=delete.si([doc.id])).delay()
        else:
             group(consume_tasks).delay()
 
@@ -677,10 +614,7 @@ def edit_pdf(
     )
     if user is not None:
         overrides.owner_id = user.id
-    if not delete_original:
-        overrides.skip_asn_if_exists = True
-    if delete_original and len(pdf_docs) == 1:
-        overrides.asn = doc.archive_serial_number
+
     for idx, pdf in enumerate(pdf_docs, start=1):
         filepath: Path = (
             Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
@@ -699,17 +633,7 @@ def edit_pdf(
         )
 
     if delete_original:
-        backup = release_archive_serial_numbers([doc.id])
-        try:
-            chord(
-                header=consume_tasks,
-                body=delete.si([doc.id]),
-            ).apply_async(
-                link_error=[restore_archive_serial_numbers_task.s(backup)],
-            )
-        except Exception:
-            restore_archive_serial_numbers(backup)
-            raise
+        chord(header=consume_tasks, body=delete.si([doc.id])).delay()
     else:
         group(consume_tasks).delay()
 

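For orientation, a minimal Celery sketch of the two primitives this branch switches to in place of the apply_async(link=..., link_error=...) wiring removed above. The task names, arguments, and broker here are hypothetical stand-ins, not code from this repository:

    from celery import Celery, chain, chord

    app = Celery("sketch", broker="memory://")

    @app.task
    def consume(path: str) -> str:
        # stand-in for the real consume task
        return path

    @app.task
    def delete(doc_ids: list[int]) -> None:
        # stand-in for the real delete task
        print(f"deleting {doc_ids}")

    # chain: delete runs only after consume has finished (the merge case above);
    # .si() is an immutable signature, so delete ignores consume's return value.
    chain(consume.s("/tmp/merged.pdf"), delete.si([1, 2, 3])).delay()

    # chord: delete runs once every task in the header group has finished
    # (the split and edit_pdf cases above).
    chord(header=[consume.s(f"/tmp/part_{n}.pdf") for n in (1, 2)], body=delete.si([1])).delay()

Compared with the removed error-handling path, the chain/chord forms leave failure handling to Celery itself: if an upstream task fails, the downstream delete simply never runs.
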
@@ -696,7 +696,7 @@ class ConsumerPlugin(
                     pk=self.metadata.storage_path_id,
                 )
 
-            if self.metadata.asn is not None:
+            if self.metadata.asn is not None and not self.metadata.skip_asn:
                 document.archive_serial_number = self.metadata.asn
 
             if self.metadata.owner_id:
@@ -812,8 +812,8 @@ class ConsumerPreflightPlugin(
         """
         Check that if override_asn is given, it is unique and within a valid range
         """
-        if self.metadata.asn is None:
-            # if ASN is None
+        if self.metadata.skip_asn or self.metadata.asn is None:
+            # if skip is set or ASN is None
             return
         # Validate the range is above zero and less than uint32_t max
         # otherwise, Whoosh can't handle it in the index

@@ -30,7 +30,7 @@ class DocumentMetadataOverrides:
     change_users: list[int] | None = None
     change_groups: list[int] | None = None
     custom_fields: dict | None = None
-    skip_asn_if_exists: bool = False
+    skip_asn: bool = False
 
     def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
         """
@@ -50,8 +50,8 @@ class DocumentMetadataOverrides:
             self.storage_path_id = other.storage_path_id
         if other.owner_id is not None:
             self.owner_id = other.owner_id
-        if other.skip_asn_if_exists:
-            self.skip_asn_if_exists = True
+        if other.skip_asn:
+            self.skip_asn = True
 
         # merge
         if self.tag_ids is None:

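Read together with the consumer and barcode hunks, the renamed flag is a plain pass-through: merge sets it unconditionally, update() keeps it sticky, and the consumer then leaves the ASN unset. A tiny illustration using only names from these hunks:

    overrides = DocumentMetadataOverrides()
    overrides.update(DocumentMetadataOverrides(skip_asn=True))
    assert overrides.skip_asn  # stays True once any merged-in override sets it
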
@@ -580,30 +580,34 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
         ),
     )
     def get_children(self, obj):
-        filter_q = self.context.get("document_count_filter")
-        request = self.context.get("request")
-        if filter_q is None:
-            user = getattr(request, "user", None) if request else None
-            filter_q = get_document_count_filter_for_user(user)
-            self.context["document_count_filter"] = filter_q
-
-        children_queryset = (
-            obj.get_children_queryset()
-            .select_related("owner")
-            .annotate(document_count=Count("documents", filter=filter_q))
-        )
-
-        view = self.context.get("view")
-        ordering = (
-            OrderingFilter().get_ordering(request, children_queryset, view)
-            if request and view
-            else None
-        )
-        ordering = ordering or (Lower("name"),)
-        children_queryset = children_queryset.order_by(*ordering)
+        children_map = self.context.get("children_map")
+        if children_map is not None:
+            children = children_map.get(obj.pk, [])
+        else:
+            filter_q = self.context.get("document_count_filter")
+            request = self.context.get("request")
+            if filter_q is None:
+                user = getattr(request, "user", None) if request else None
+                filter_q = get_document_count_filter_for_user(user)
+                self.context["document_count_filter"] = filter_q
+
+            children = (
+                obj.get_children_queryset()
+                .select_related("owner")
+                .annotate(document_count=Count("documents", filter=filter_q))
+            )
+
+            view = self.context.get("view")
+            ordering = (
+                OrderingFilter().get_ordering(request, children, view)
+                if request and view
+                else None
+            )
+            ordering = ordering or (Lower("name"),)
+            children = children.order_by(*ordering)
 
         serializer = TagSerializer(
-            children_queryset,
+            children,
             many=True,
             user=self.user,
             full_perms=self.full_perms,

@@ -603,21 +603,23 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             expected_filename,
         )
         self.assertEqual(consume_file_args[1].title, None)
-        # No metadata_document_id, delete_originals False, so ASN should be None
-        self.assertIsNone(consume_file_args[1].asn)
+        self.assertTrue(consume_file_args[1].skip_asn)
 
         # With metadata_document_id overrides
         result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(consume_file_args[1].title, "B (merged)")
         self.assertEqual(consume_file_args[1].created, self.doc2.created)
+        self.assertTrue(consume_file_args[1].skip_asn)
 
         self.assertEqual(result, "OK")
 
     @mock.patch("documents.bulk_edit.delete.si")
     @mock.patch("documents.tasks.consume_file.s")
+    @mock.patch("documents.bulk_edit.chain")
     def test_merge_and_delete_originals(
         self,
+        mock_chain,
         mock_consume_file,
         mock_delete_documents,
     ):
@@ -631,12 +633,6 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             - Document deletion task should be called
         """
         doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
-        self.doc1.archive_serial_number = 101
-        self.doc2.archive_serial_number = 102
-        self.doc3.archive_serial_number = 103
-        self.doc1.save()
-        self.doc2.save()
-        self.doc3.save()
 
         result = bulk_edit.merge(doc_ids, delete_originals=True)
         self.assertEqual(result, "OK")
@@ -647,8 +643,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
 
         mock_consume_file.assert_called()
         mock_delete_documents.assert_called()
-        consume_sig = mock_consume_file.return_value
-        consume_sig.apply_async.assert_called_once()
+        mock_chain.assert_called_once()
 
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(
@@ -656,7 +651,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             expected_filename,
         )
         self.assertEqual(consume_file_args[1].title, None)
-        self.assertEqual(consume_file_args[1].asn, 101)
+        self.assertTrue(consume_file_args[1].skip_asn)
 
         delete_documents_args, _ = mock_delete_documents.call_args
         self.assertEqual(
@@ -664,92 +659,6 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             doc_ids,
         )
 
-        self.doc1.refresh_from_db()
-        self.doc2.refresh_from_db()
-        self.doc3.refresh_from_db()
-        self.assertIsNone(self.doc1.archive_serial_number)
-        self.assertIsNone(self.doc2.archive_serial_number)
-        self.assertIsNone(self.doc3.archive_serial_number)
-
-    @mock.patch("documents.bulk_edit.delete.si")
-    @mock.patch("documents.tasks.consume_file.s")
-    def test_merge_and_delete_originals_restore_on_failure(
-        self,
-        mock_consume_file,
-        mock_delete_documents,
-    ):
-        """
-        GIVEN:
-            - Existing documents
-        WHEN:
-            - Merge action with deleting documents is called with 1 document
-            - Error occurs when queuing consume file task
-        THEN:
-            - Archive serial numbers are restored
-        """
-        doc_ids = [self.doc1.id]
-        self.doc1.archive_serial_number = 111
-        self.doc1.save()
-        sig = mock.Mock()
-        sig.apply_async.side_effect = Exception("boom")
-        mock_consume_file.return_value = sig
-
-        with self.assertRaises(Exception):
-            bulk_edit.merge(doc_ids, delete_originals=True)
-
-        self.doc1.refresh_from_db()
-        self.assertEqual(self.doc1.archive_serial_number, 111)
-
-    @mock.patch("documents.bulk_edit.delete.si")
-    @mock.patch("documents.tasks.consume_file.s")
-    def test_merge_and_delete_originals_metadata_handoff(
-        self,
-        mock_consume_file,
-        mock_delete_documents,
-    ):
-        """
-        GIVEN:
-            - Existing documents with ASNs
-        WHEN:
-            - Merge with delete_originals=True and metadata_document_id set
-        THEN:
-            - Handoff ASN uses metadata document ASN
-        """
-        doc_ids = [self.doc1.id, self.doc2.id]
-        self.doc1.archive_serial_number = 101
-        self.doc2.archive_serial_number = 202
-        self.doc1.save()
-        self.doc2.save()
-
-        result = bulk_edit.merge(
-            doc_ids,
-            metadata_document_id=self.doc2.id,
-            delete_originals=True,
-        )
-        self.assertEqual(result, "OK")
-
-        consume_file_args, _ = mock_consume_file.call_args
-        self.assertEqual(consume_file_args[1].asn, 202)
-
-    def test_restore_archive_serial_numbers_task(self):
-        """
-        GIVEN:
-            - Existing document with no archive serial number
-        WHEN:
-            - Restore archive serial number task is called with backup data
-        THEN:
-            - Document archive serial number is restored
-        """
-        self.doc1.archive_serial_number = 444
-        self.doc1.save()
-        Document.objects.filter(pk=self.doc1.id).update(archive_serial_number=None)
-
-        backup = {self.doc1.id: 444}
-        bulk_edit.restore_archive_serial_numbers_task(backup)
-
-        self.doc1.refresh_from_db()
-        self.assertEqual(self.doc1.archive_serial_number, 444)
-
     @mock.patch("documents.tasks.consume_file.s")
     def test_merge_with_archive_fallback(self, mock_consume_file):
         """
@@ -818,7 +727,6 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         self.assertEqual(mock_consume_file.call_count, 2)
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(consume_file_args[1].title, "B (split 2)")
-        self.assertIsNone(consume_file_args[1].asn)
 
         self.assertEqual(result, "OK")
 
@@ -843,8 +751,6 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         """
         doc_ids = [self.doc2.id]
         pages = [[1, 2], [3]]
-        self.doc2.archive_serial_number = 200
-        self.doc2.save()
 
         result = bulk_edit.split(doc_ids, pages, delete_originals=True)
         self.assertEqual(result, "OK")
@@ -862,42 +768,6 @@ class TestPDFActions(DirectoriesMixin, TestCase):
             doc_ids,
         )
 
-        self.doc2.refresh_from_db()
-        self.assertIsNone(self.doc2.archive_serial_number)
-
-    @mock.patch("documents.bulk_edit.delete.si")
-    @mock.patch("documents.tasks.consume_file.s")
-    @mock.patch("documents.bulk_edit.chord")
-    def test_split_restore_on_failure(
-        self,
-        mock_chord,
-        mock_consume_file,
-        mock_delete_documents,
-    ):
-        """
-        GIVEN:
-            - Existing documents
-        WHEN:
-            - Split action with deleting documents is called with 1 document and 2 page groups
-            - Error occurs when queuing chord task
-        THEN:
-            - Archive serial numbers are restored
-        """
-        doc_ids = [self.doc2.id]
-        pages = [[1, 2]]
-        self.doc2.archive_serial_number = 222
-        self.doc2.save()
-
-        sig = mock.Mock()
-        sig.apply_async.side_effect = Exception("boom")
-        mock_chord.return_value = sig
-
-        result = bulk_edit.split(doc_ids, pages, delete_originals=True)
-        self.assertEqual(result, "OK")
-
-        self.doc2.refresh_from_db()
-        self.assertEqual(self.doc2.archive_serial_number, 222)
-
     @mock.patch("documents.tasks.consume_file.delay")
     @mock.patch("pikepdf.Pdf.save")
     def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
@@ -1098,49 +968,10 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_chord.return_value.delay.return_value = None
         doc_ids = [self.doc2.id]
         operations = [{"page": 1}, {"page": 2}]
-        self.doc2.archive_serial_number = 250
-        self.doc2.save()
 
         result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
         self.assertEqual(result, "OK")
         mock_chord.assert_called_once()
-        consume_file_args, _ = mock_consume_file.call_args
-        self.assertEqual(consume_file_args[1].asn, 250)
-        self.doc2.refresh_from_db()
-        self.assertIsNone(self.doc2.archive_serial_number)
-
-    @mock.patch("documents.bulk_edit.delete.si")
-    @mock.patch("documents.tasks.consume_file.s")
-    @mock.patch("documents.bulk_edit.chord")
-    def test_edit_pdf_restore_on_failure(
-        self,
-        mock_chord,
-        mock_consume_file,
-        mock_delete_documents,
-    ):
-        """
-        GIVEN:
-            - Existing document
-        WHEN:
-            - edit_pdf is called with delete_original=True
-            - Error occurs when queuing chord task
-        THEN:
-            - Archive serial numbers are restored
-        """
-        doc_ids = [self.doc2.id]
-        operations = [{"page": 1}]
-        self.doc2.archive_serial_number = 333
-        self.doc2.save()
-
-        sig = mock.Mock()
-        sig.apply_async.side_effect = Exception("boom")
-        mock_chord.return_value = sig
-
-        with self.assertRaises(Exception):
-            bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
-
-        self.doc2.refresh_from_db()
-        self.assertEqual(self.doc2.archive_serial_number, 333)
-
     @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
     def test_edit_pdf_with_update_document(self, mock_update_document):

@@ -14,7 +14,6 @@ from django.test import override_settings
 from django.utils import timezone
 from guardian.core import ObjectPermissionChecker
 
-from documents.barcodes import BarcodePlugin
 from documents.consumer import ConsumerError
 from documents.data_models import DocumentMetadataOverrides
 from documents.data_models import DocumentSource
@@ -413,6 +412,14 @@ class TestConsumer(
         self.assertEqual(document.archive_serial_number, 123)
         self._assert_first_last_send_progress()
 
+    def testMetadataOverridesSkipAsnPropagation(self):
+        overrides = DocumentMetadataOverrides()
+        incoming = DocumentMetadataOverrides(skip_asn=True)
+
+        overrides.update(incoming)
+
+        self.assertTrue(overrides.skip_asn)
+
     def testOverrideTitlePlaceholders(self):
         c = Correspondent.objects.create(name="Correspondent Name")
         dt = DocumentType.objects.create(name="DocType Name")
@@ -1233,46 +1240,3 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
             r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.",
         ):
             consumer.run_post_consume_script(doc)
-
-
-class TestMetadataOverrides(TestCase):
-    def test_update_skip_asn_if_exists(self):
-        base = DocumentMetadataOverrides()
-        incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
-        base.update(incoming)
-        self.assertTrue(base.skip_asn_if_exists)
-
-
-class TestBarcodeApplyDetectedASN(TestCase):
-    """
-    GIVEN:
-        - Existing Documents with ASN 123
-    WHEN:
-        - A BarcodePlugin which detected an ASN
-    THEN:
-        - If skip_asn_if_exists is set, and ASN exists, do not set ASN
-        - If skip_asn_if_exists is set, and ASN does not exist, set ASN
-    """
-
-    def test_apply_detected_asn_skips_existing_when_flag_set(self):
-        doc = Document.objects.create(
-            checksum="X1",
-            title="D1",
-            archive_serial_number=123,
-        )
-        metadata = DocumentMetadataOverrides(skip_asn_if_exists=True)
-        plugin = BarcodePlugin(
-            input_doc=mock.Mock(),
-            metadata=metadata,
-            status_mgr=mock.Mock(),
-            base_tmp_dir=tempfile.gettempdir(),
-            task_id="test-task",
-        )
-
-        plugin._apply_detected_asn(123)
-        self.assertIsNone(plugin.metadata.asn)
-
-        doc.hard_delete()
-
-        plugin._apply_detected_asn(123)
-        self.assertEqual(plugin.metadata.asn, 123)

@@ -448,8 +448,43 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
     def get_serializer_context(self):
         context = super().get_serializer_context()
         context["document_count_filter"] = self.get_document_count_filter()
+        if hasattr(self, "_children_map"):
+            context["children_map"] = self._children_map
         return context
 
+    def list(self, request, *args, **kwargs):
+        """
+        Build a children map once to avoid per-parent queries in the serializer.
+        """
+        queryset = self.filter_queryset(self.get_queryset())
+        ordering = OrderingFilter().get_ordering(request, queryset, self) or (
+            Lower("name"),
+        )
+        queryset = queryset.order_by(*ordering)
+
+        all_tags = list(queryset)
+        descendant_pks = {pk for tag in all_tags for pk in tag.get_descendants_pks()}
+
+        if descendant_pks:
+            filter_q = self.get_document_count_filter()
+            children_source = (
+                Tag.objects.filter(pk__in=descendant_pks | {t.pk for t in all_tags})
+                .select_related("owner")
+                .annotate(document_count=Count("documents", filter=filter_q))
+                .order_by(*ordering)
+            )
+        else:
+            children_source = all_tags
+
+        children_map = {}
+        for tag in children_source:
+            children_map.setdefault(tag.tn_parent_id, []).append(tag)
+        self._children_map = children_map
+
+        page = self.paginate_queryset(queryset)
+        serializer = self.get_serializer(page, many=True)
+        return self.get_paginated_response(serializer.data)
+
     def perform_update(self, serializer):
         old_parent = self.get_object().get_parent()
         tag = serializer.save()

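The list() override above precomputes one parent-to-children index per request, so the serializer's get_children never has to issue a query per parent tag. A generic sketch of that grouping step, with a hypothetical parent_id attribute standing in for the tn_parent_id field used above:

    from collections import defaultdict

    def build_children_map(nodes):
        # One pass over already-fetched rows; each parent's children then
        # become a dictionary lookup instead of a per-parent database query.
        children_map = defaultdict(list)
        for node in nodes:
            children_map[node.parent_id].append(node)
        return dict(children_map)
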
@@ -2,7 +2,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: paperless-ngx\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2026-01-08 21:50+0000\n"
+"POT-Creation-Date: 2026-01-12 21:04+0000\n"
 "PO-Revision-Date: 2022-02-17 04:17\n"
 "Last-Translator: \n"
 "Language-Team: English\n"
@@ -1219,35 +1219,35 @@ msgstr ""
 msgid "workflow runs"
 msgstr ""
 
-#: documents/serialisers.py:642
+#: documents/serialisers.py:646
 msgid "Invalid color."
 msgstr ""
 
-#: documents/serialisers.py:1846
+#: documents/serialisers.py:1850
 #, python-format
 msgid "File type %(type)s not supported"
 msgstr ""
 
-#: documents/serialisers.py:1890
+#: documents/serialisers.py:1894
 #, python-format
 msgid "Custom field id must be an integer: %(id)s"
 msgstr ""
 
-#: documents/serialisers.py:1897
+#: documents/serialisers.py:1901
 #, python-format
 msgid "Custom field with id %(id)s does not exist"
 msgstr ""
 
-#: documents/serialisers.py:1914 documents/serialisers.py:1924
+#: documents/serialisers.py:1918 documents/serialisers.py:1928
 msgid ""
 "Custom fields must be a list of integers or an object mapping ids to values."
 msgstr ""
 
-#: documents/serialisers.py:1919
+#: documents/serialisers.py:1923
 msgid "Some custom fields don't exist or were specified twice."
 msgstr ""
 
-#: documents/serialisers.py:2034
+#: documents/serialisers.py:2038
 msgid "Invalid variable detected."
 msgstr ""
 

uv.lock (generated, 36 changed lines)

@@ -104,9 +104,9 @@ dependencies = [
     { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/44/7b/8115cd713e2caa5e44def85f2b7ebd02a74ae74d7113ba20bdd41fd6dd80/azure_ai_documentintelligence-1.0.2.tar.gz", hash = "sha256:4d75a2513f2839365ebabc0e0e1772f5601b3a8c9a71e75da12440da13b63484", size = 170940 }
+sdist = { url = "https://files.pythonhosted.org/packages/44/7b/8115cd713e2caa5e44def85f2b7ebd02a74ae74d7113ba20bdd41fd6dd80/azure_ai_documentintelligence-1.0.2.tar.gz", hash = "sha256:4d75a2513f2839365ebabc0e0e1772f5601b3a8c9a71e75da12440da13b63484", size = 170940, upload-time = "2025-03-27T02:46:20.606Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/d9/75/c9ec040f23082f54ffb1977ff8f364c2d21c79a640a13d1c1809e7fd6b1a/azure_ai_documentintelligence-1.0.2-py3-none-any.whl", hash = "sha256:e1fb446abbdeccc9759d897898a0fe13141ed29f9ad11fc705f951925822ed59", size = 106005 },
+    { url = "https://files.pythonhosted.org/packages/d9/75/c9ec040f23082f54ffb1977ff8f364c2d21c79a640a13d1c1809e7fd6b1a/azure_ai_documentintelligence-1.0.2-py3-none-any.whl", hash = "sha256:e1fb446abbdeccc9759d897898a0fe13141ed29f9ad11fc705f951925822ed59", size = 106005, upload-time = "2025-03-27T02:46:22.356Z" },
 ]
 
 [[package]]
@@ -118,9 +118,9 @@ dependencies = [
     { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/75/aa/7c9db8edd626f1a7d99d09ef7926f6f4fb34d5f9fa00dc394afdfe8e2a80/azure_core-1.33.0.tar.gz", hash = "sha256:f367aa07b5e3005fec2c1e184b882b0b039910733907d001c20fb08ebb8c0eb9", size = 295633 }
+sdist = { url = "https://files.pythonhosted.org/packages/75/aa/7c9db8edd626f1a7d99d09ef7926f6f4fb34d5f9fa00dc394afdfe8e2a80/azure_core-1.33.0.tar.gz", hash = "sha256:f367aa07b5e3005fec2c1e184b882b0b039910733907d001c20fb08ebb8c0eb9", size = 295633, upload-time = "2025-04-03T23:51:02.058Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/07/b7/76b7e144aa53bd206bf1ce34fa75350472c3f69bf30e5c8c18bc9881035d/azure_core-1.33.0-py3-none-any.whl", hash = "sha256:9b5b6d0223a1d38c37500e6971118c1e0f13f54951e6893968b38910bc9cda8f", size = 207071 },
+    { url = "https://files.pythonhosted.org/packages/07/b7/76b7e144aa53bd206bf1ce34fa75350472c3f69bf30e5c8c18bc9881035d/azure_core-1.33.0-py3-none-any.whl", hash = "sha256:9b5b6d0223a1d38c37500e6971118c1e0f13f54951e6893968b38910bc9cda8f", size = 207071, upload-time = "2025-04-03T23:51:03.806Z" },
 ]
 
 [[package]]
@@ -359,15 +359,15 @@ wheels = [
 
 [[package]]
 name = "channels"
-version = "4.3.1"
+version = "4.3.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/12/a0/46450fcf9e56af18a6b0440ba49db6635419bb7bc84142c35f4143b1a66c/channels-4.3.1.tar.gz", hash = "sha256:97413ffd674542db08e16a9ef09cd86ec0113e5f8125fbd33cf0854adcf27cdb", size = 26896, upload-time = "2025-08-01T13:25:19.952Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/74/92/b18d4bb54d14986a8b35215a1c9e6a7f9f4d57ca63ac9aee8290ebb4957d/channels-4.3.2.tar.gz", hash = "sha256:f2bb6bfb73ad7fb4705041d07613c7b4e69528f01ef8cb9fb6c21d9295f15667", size = 27023, upload-time = "2025-11-20T15:13:05.102Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/89/1c/eae1c2a8c195760376e7f65d0bdcc3e966695d29cfbe5c54841ce5c71408/channels-4.3.1-py3-none-any.whl", hash = "sha256:b091d4b26f91d807de3e84aead7ba785314f27eaf5bac31dd51b1c956b883859", size = 31286, upload-time = "2025-08-01T13:25:18.845Z" },
+    { url = "https://files.pythonhosted.org/packages/16/34/c32915288b7ef482377b6adc401192f98c6a99b3a145423d3b8aed807898/channels-4.3.2-py3-none-any.whl", hash = "sha256:fef47e9055a603900cf16cef85f050d522d9ac4b3daccf24835bd9580705c176", size = 31313, upload-time = "2025-11-20T15:13:02.357Z" },
 ]
 
 [[package]]
@@ -867,14 +867,14 @@ wheels = [
 
 [[package]]
 name = "django-soft-delete"
-version = "1.0.21"
+version = "1.0.22"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/da/bf/13996c18bffee3bbcf294830c1737bfb5564164b8319c51e6714b6bdf783/django_soft_delete-1.0.21.tar.gz", hash = "sha256:542bd4650d2769105a4363ea7bb7fbdb3c28429dbaa66417160f8f4b5dc689d5", size = 21153, upload-time = "2025-09-17T08:46:30.476Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/d1/c990b731676f93bd4594dee4b5133df52f5d0eee1eb8a969b4030014ac54/django_soft_delete-1.0.22.tar.gz", hash = "sha256:32d0bb95f180c28a40163e78a558acc18901fd56011f91f8ee735c171a6d4244", size = 21982, upload-time = "2025-10-25T13:11:46.199Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/fa/e6/8f4fed14499c63e35ca33cf9f424ad2e14e963ec5545594d7c7dc2f710f4/django_soft_delete-1.0.21-py3-none-any.whl", hash = "sha256:dd91e671d9d431ff96f4db727ce03e7fbb4008ae4541b1d162d5d06cc9becd2a", size = 18681, upload-time = "2025-09-17T08:46:29.272Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/c2/fca2bf69b7ca7e18aed9ac059e89f1043663e207a514e8fb652450e49631/django_soft_delete-1.0.22-py3-none-any.whl", hash = "sha256:81973c541d21452d249151085d617ebbfb5ec463899f47cd6b1306677481e94c", size = 19221, upload-time = "2025-10-25T13:11:44.755Z" },
 ]
 
 [[package]]
@@ -913,11 +913,11 @@ wheels = [
 
 [[package]]
 name = "django-treenode"
-version = "0.23.2"
+version = "0.23.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/75/f3/274b84607fd64c0844e98659985f964190a46c2460f2523a446c4a946216/django_treenode-0.23.2.tar.gz", hash = "sha256:3c5a6ff5e0c83e34da88749f602b3013dd1ab0527f51952c616e3c21bf265d52", size = 26700, upload-time = "2025-09-04T21:16:53.497Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/25/58/86edbbd1075bb8bc0962c6feb13bc06822405a10fea8352ad73ab2babdd9/django_treenode-0.23.3.tar.gz", hash = "sha256:714c825d5b925a3d2848d0709f29973941ea41a606b8e2b64cbec46010a8cce3", size = 27812, upload-time = "2025-12-01T23:01:24.847Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/00/61/e17d3dee5c6bb24b8faf0c101e17f9a8cafeba6384166176e066c80e8cbb/django_treenode-0.23.2-py3-none-any.whl", hash = "sha256:9363cb50f753654a9acfad6ec4df2a664a5f89dfdf8b55ffd964f27461bef85e", size = 21879, upload-time = "2025-09-04T21:16:51.811Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/52/696db237167483324ef38d8d090fb0fcc33dbb70ebe66c75868005fb7c75/django_treenode-0.23.3-py3-none-any.whl", hash = "sha256:8072e1ac688c1ed3ab95a98a797c5e965380de5228a389d60a4ef8b9a6449387", size = 22014, upload-time = "2025-12-01T23:01:23.266Z" },
 ]
 
 [[package]]
@@ -1064,11 +1064,11 @@ wheels = [
 
 [[package]]
 name = "filelock"
-version = "3.20.0"
+version = "3.20.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" },
 ]
 
 [[package]]
@@ -1483,9 +1483,9 @@ wheels = [
 name = "isodate"
 version = "0.7.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 }
+sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 },
+    { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" },
 ]