Compare commits


3 Commits

Author      SHA1         Message                                    Date
Trenton H   0df0f3a21f   And the rest of it                         2026-01-16 15:28:35 -08:00
Trenton H   d81488d054   Latest 5.2 Django                          2026-01-16 15:25:53 -08:00
Trenton H   8dc4f34743   Takes care of allauth, granian, psycopg    2026-01-16 15:24:43 -08:00
21 changed files with 2159 additions and 1954 deletions

View File

@@ -37,7 +37,7 @@ repos:
           - json
   # See https://github.com/prettier/prettier/issues/15742 for the fork reason
   - repo: https://github.com/rbubley/mirrors-prettier
-    rev: 'v3.6.2'
+    rev: 'v3.8.0'
     hooks:
       - id: prettier
         types_or:
@@ -49,7 +49,7 @@ repos:
           - 'prettier-plugin-organize-imports@4.1.0'
   # Python hooks
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.5
+    rev: v0.14.13
     hooks:
       - id: ruff-check
       - id: ruff-format
@@ -76,7 +76,7 @@ repos:
     hooks:
       - id: shellcheck
   - repo: https://github.com/google/yamlfmt
-    rev: v0.20.0
+    rev: v0.21.0
     hooks:
       - id: yamlfmt
         exclude: "^src-ui/pnpm-lock.yaml"

View File

@@ -1146,9 +1146,8 @@ via the consumption directory, you can disable the consumer to save resources.
 #### [`PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>`](#PAPERLESS_CONSUMER_DELETE_DUPLICATES) {#PAPERLESS_CONSUMER_DELETE_DUPLICATES}

-:   As of version 3.0 Paperless-ngx allows duplicate documents to be consumed by default, _except_ when
-    this setting is enabled. When enabled, Paperless will check if a document with the same hash already
-    exists in the system and delete the duplicate file from the consumption directory without consuming it.
+:   When the consumer detects a duplicate document, it will not touch
+    the original document. This default behavior can be changed here.

     Defaults to false.
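The setting is consumed as a boolean environment flag. A minimal sketch of how such a flag could be parsed into the `CONSUMER_DELETE_DUPLICATES` setting that the consumer later checks; the helper name and accepted spellings are assumptions, not paperless-ngx's actual settings code:

```python
# Hypothetical sketch: turning PAPERLESS_CONSUMER_DELETE_DUPLICATES into the
# Django setting the preflight plugin consults. Helper and accepted values
# are illustrative assumptions.
import os

def _parse_bool(key: str, default: str = "no") -> bool:
    return os.getenv(key, default).strip().lower() in {"1", "yes", "y", "true", "t"}

CONSUMER_DELETE_DUPLICATES = _parse_bool("PAPERLESS_CONSUMER_DELETE_DUPLICATES")
```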

View File

@@ -19,15 +19,15 @@ dependencies = [
     "azure-ai-documentintelligence>=1.0.2",
     "babel>=2.17",
     "bleach~=6.3.0",
-    "celery[redis]~=5.5.1",
+    "celery[redis]~=5.6.2",
     "channels~=4.2",
     "channels-redis~=4.2",
     "concurrent-log-handler~=0.9.25",
     "dateparser~=1.2",
     # WARNING: django does not use semver.
     # Only patch versions are guaranteed to not introduce breaking changes.
-    "django~=5.2.5",
-    "django-allauth[mfa,socialaccount]~=65.12.1",
+    "django==5.2.10",
+    "django-allauth[mfa,socialaccount]~=65.13.0",
     "django-auditlog~=3.4.1",
     "django-cachalot~=2.8.0",
     "django-celery-results~=2.6.0",
@@ -80,7 +80,7 @@ dependencies = [
     "torch~=2.9.1",
     "tqdm~=4.67.1",
     "watchdog~=6.0",
-    "whitenoise~=6.9",
+    "whitenoise~=6.11",
     "whoosh-reloaded>=2.7.5",
     "zxing-cpp~=2.3.0",
 ]
@@ -89,13 +89,13 @@ optional-dependencies.mariadb = [
     "mysqlclient~=2.2.7",
 ]
 optional-dependencies.postgres = [
-    "psycopg[c,pool]==3.2.12",
+    "psycopg[c,pool]==3.3",
     # Direct dependency for proper resolution of the pre-built wheels
-    "psycopg-c==3.2.12",
+    "psycopg-c==3.3",
     "psycopg-pool==3.3",
 ]
 optional-dependencies.webserver = [
-    "granian[uvloop]~=2.5.1",
+    "granian[uvloop]~=2.6.0",
 ]

 [dependency-groups]
@@ -152,7 +152,7 @@ typing = [
 ]

 [tool.uv]
-required-version = ">=0.5.14"
+required-version = ">=0.9.0"
 package = false
 environments = [
     "sys_platform == 'darwin'",
@@ -162,8 +162,8 @@ environments = [
 [tool.uv.sources]
 # Markers are chosen to select these almost exclusively when building the Docker image
 psycopg-c = [
-    { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
-    { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
+    { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
+    { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
 ]
 zxing-cpp = [
     { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
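The Django pin moves from a compatible-release specifier to an exact pin, consistent with the non-semver warning kept in the comment above it. A quick check of what each specifier admits, using the `packaging` library (assumed available in a dev environment; pip vendors it):

```python
# Compare the old and new pyproject specifiers for django.
from packaging.specifiers import SpecifierSet

old_spec = SpecifierSet("~=5.2.5")   # compatible release: any 5.2.x >= 5.2.5
new_spec = SpecifierSet("==5.2.10")  # exact pin

print("5.2.11" in old_spec)  # True  - future patch releases slip in
print("5.2.11" in new_spec)  # False - only 5.2.10 resolves
print("5.3.0" in old_spec)   # False - ~=5.2.5 never crosses the minor version
```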

View File

@@ -97,12 +97,6 @@
             <br/><em>(<ng-container i18n>click for full output</ng-container>)</em>
           }
         </ng-template>
-        @if (task.duplicate_documents?.length > 0) {
-          <div class="small text-warning-emphasis d-flex align-items-center gap-1">
-            <i-bs class="lh-1" width="1em" height="1em" name="exclamation-triangle"></i-bs>
-            <span i18n>Duplicate(s) detected</span>
-          </div>
-        }
       </td>
     }
     <td class="d-lg-none">

View File

@@ -28,7 +28,7 @@
   </button>
 </ng-template>
 <ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
-  <div class="tag-option-row d-flex align-items-center" [class.w-auto]="!getTag(item.id)?.parent">
+  <div class="tag-option-row d-flex align-items-center">
     @if (item.id && tags) {
       @if (getTag(item.id)?.parent) {
         <i-bs name="list-nested" class="me-1"></i-bs>

View File

@@ -23,7 +23,7 @@
 // Dropdown hierarchy reveal for ng-select options
 ::ng-deep .ng-dropdown-panel .ng-option {
-  overflow-x: scroll !important;
+  overflow-x: scroll;

   .tag-option-row {
     font-size: 1rem;

View File

@@ -370,37 +370,6 @@
           </ng-template>
         </li>
       }
-      @if (document?.duplicate_documents?.length) {
-        <li [ngbNavItem]="DocumentDetailNavIDs.Duplicates">
-          <a class="text-nowrap" ngbNavLink i18n>
-            Duplicates
-            <span class="badge text-bg-secondary ms-1">{{ document.duplicate_documents.length }}</span>
-          </a>
-          <ng-template ngbNavContent>
-            <div class="d-flex flex-column gap-2">
-              <div class="fst-italic" i18n>Duplicate documents detected:</div>
-              <div class="list-group">
-                @for (duplicate of document.duplicate_documents; track duplicate.id) {
-                  <a
-                    class="list-group-item list-group-item-action d-flex justify-content-between align-items-center"
-                    [routerLink]="['/documents', duplicate.id, 'details']"
-                    [class.disabled]="duplicate.deleted_at"
-                  >
-                    <span class="d-flex align-items-center gap-2">
-                      <span>{{ duplicate.title || ('#' + duplicate.id) }}</span>
-                      @if (duplicate.deleted_at) {
-                        <span class="badge text-bg-secondary" i18n>In trash</span>
-                      }
-                    </span>
-                    <span class="text-secondary">#{{ duplicate.id }}</span>
-                  </a>
-                }
-              </div>
-            </div>
-          </ng-template>
-        </li>
-      }
     </ul>
     <div [ngbNavOutlet]="nav" class="mt-3"></div>

View File

@@ -301,16 +301,16 @@ describe('DocumentDetailComponent', () => {
       .spyOn(openDocumentsService, 'openDocument')
       .mockReturnValueOnce(of(true))
     fixture.detectChanges()
-    expect(component.activeNavID).toEqual(component.DocumentDetailNavIDs.Notes)
+    expect(component.activeNavID).toEqual(5) // DocumentDetailNavIDs.Notes
   })

   it('should change url on tab switch', () => {
     initNormally()
     const navigateSpy = jest.spyOn(router, 'navigate')
-    component.nav.select(component.DocumentDetailNavIDs.Notes)
+    component.nav.select(5)
     component.nav.navChange.next({
       activeId: 1,
-      nextId: component.DocumentDetailNavIDs.Notes,
+      nextId: 5,
       preventDefault: () => {},
     })
     fixture.detectChanges()
@@ -352,18 +352,6 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })

-  it('should fall back to details tab when duplicates tab is active but no duplicates', () => {
-    initNormally()
-    component.activeNavID = component.DocumentDetailNavIDs.Duplicates
-    const noDupDoc = { ...doc, duplicate_documents: [] }
-    component.updateComponent(noDupDoc)
-    expect(component.activeNavID).toEqual(
-      component.DocumentDetailNavIDs.Details
-    )
-  })
-
   it('should load already-opened document via param', () => {
     initNormally()
     jest.spyOn(documentService, 'get').mockReturnValueOnce(of(doc))
@@ -379,38 +367,6 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })

-  it('should update cached open document duplicates when reloading an open doc', () => {
-    const openDoc = { ...doc, duplicate_documents: [{ id: 1, title: 'Old' }] }
-    const updatedDuplicates = [
-      { id: 2, title: 'Newer duplicate', deleted_at: null },
-    ]
-    jest
-      .spyOn(activatedRoute, 'paramMap', 'get')
-      .mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
-    jest.spyOn(documentService, 'get').mockReturnValue(
-      of({
-        ...doc,
-        modified: new Date('2024-01-02T00:00:00Z'),
-        duplicate_documents: updatedDuplicates,
-      })
-    )
-    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
-    const saveSpy = jest.spyOn(openDocumentsService, 'save')
-    jest.spyOn(openDocumentsService, 'openDocument').mockReturnValue(of(true))
-    jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
-      of({
-        count: customFields.length,
-        all: customFields.map((f) => f.id),
-        results: customFields,
-      })
-    )
-    fixture.detectChanges()
-    expect(openDoc.duplicate_documents).toEqual(updatedDuplicates)
-    expect(saveSpy).toHaveBeenCalled()
-  })
-
   it('should disable form if user cannot edit', () => {
     currentUserHasObjectPermissions = false
     initNormally()

View File

@@ -8,7 +8,7 @@ import {
   FormsModule,
   ReactiveFormsModule,
 } from '@angular/forms'
-import { ActivatedRoute, Router, RouterModule } from '@angular/router'
+import { ActivatedRoute, Router } from '@angular/router'
 import {
   NgbDateStruct,
   NgbDropdownModule,
@@ -124,7 +124,6 @@ enum DocumentDetailNavIDs {
   Notes = 5,
   Permissions = 6,
   History = 7,
-  Duplicates = 8,
 }

 enum ContentRenderType {
@@ -182,7 +181,6 @@ export enum ZoomSetting {
     NgxBootstrapIconsModule,
     PdfViewerModule,
     TextAreaComponent,
-    RouterModule,
   ],
 })
 export class DocumentDetailComponent
@@ -287,10 +285,10 @@ export class DocumentDetailComponent
       if (
         element &&
         element.nativeElement.offsetParent !== null &&
-        this.nav?.activeId == DocumentDetailNavIDs.Preview
+        this.nav?.activeId == 4
       ) {
         // its visible
-        setTimeout(() => this.nav?.select(DocumentDetailNavIDs.Details))
+        setTimeout(() => this.nav?.select(1))
       }
     }
@@ -456,11 +454,6 @@ export class DocumentDetailComponent
     const openDocument = this.openDocumentService.getOpenDocument(
       this.documentId
     )
-    // update duplicate documents if present
-    if (openDocument && doc?.duplicate_documents) {
-      openDocument.duplicate_documents = doc.duplicate_documents
-      this.openDocumentService.save()
-    }
     const useDoc = openDocument || doc
     if (openDocument) {
       if (
@@ -711,13 +704,6 @@ export class DocumentDetailComponent
     }
     this.title = this.documentTitlePipe.transform(doc.title)
     this.prepareForm(doc)
-    if (
-      this.activeNavID === DocumentDetailNavIDs.Duplicates &&
-      !doc?.duplicate_documents?.length
-    ) {
-      this.activeNavID = DocumentDetailNavIDs.Details
-    }
   }

   get customFieldFormFields(): FormArray {

View File

@@ -159,8 +159,6 @@ export interface Document extends ObjectWithPermissions {
   page_count?: number

-  duplicate_documents?: Document[]
-
   // Frontend only
   __changedFields?: string[]
 }

View File

@@ -1,4 +1,3 @@
-import { Document } from './document'
 import { ObjectWithId } from './object-with-id'

 export enum PaperlessTaskType {
@@ -43,7 +42,5 @@ export interface PaperlessTask extends ObjectWithId {
   related_document?: number

-  duplicate_documents?: Document[]
-
   owner?: number
 }

View File

@@ -785,45 +785,19 @@ class ConsumerPreflightPlugin(
             Q(checksum=checksum) | Q(archive_checksum=checksum),
         )
         if existing_doc.exists():
-            existing_doc = existing_doc.order_by("-created")
-            duplicates_in_trash = existing_doc.filter(deleted_at__isnull=False)
-            log_msg = (
-                f"Consuming duplicate {self.filename}: "
-                f"{existing_doc.count()} existing document(s) share the same content."
-            )
-            if duplicates_in_trash.exists():
-                log_msg += " Note: at least one existing document is in the trash."
-            self.log.warning(log_msg)
+            msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
+            log_msg = f"Not consuming {self.filename}: It is a duplicate of {existing_doc.get().title} (#{existing_doc.get().pk})."
+
+            if existing_doc.first().deleted_at is not None:
+                msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
+                log_msg += " Note: existing document is in the trash."

             if settings.CONSUMER_DELETE_DUPLICATES:
-                duplicate = existing_doc.first()
-                duplicate_label = (
-                    duplicate.title
-                    or duplicate.original_filename
-                    or (Path(duplicate.filename).name if duplicate.filename else None)
-                    or str(duplicate.pk)
-                )
                 Path(self.input_doc.original_file).unlink()
-
-                failure_msg = (
-                    f"Not consuming {self.filename}: "
-                    f"It is a duplicate of {duplicate_label} (#{duplicate.pk})"
-                )
-                status_msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
-                if duplicates_in_trash.exists():
-                    status_msg = (
-                        ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
-                    )
-                    failure_msg += " Note: existing document is in the trash."
-                self._fail(
-                    status_msg,
-                    failure_msg,
-                )
+            self._fail(
+                msg,
+                log_msg,
+            )

     def pre_check_directories(self):
         """

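The restored preflight rejects any file whose hash matches an existing document's `checksum` or `archive_checksum`. A standalone sketch of that test; the hashing helper is illustrative, with MD5 inferred from the model's 32-character checksum field rather than stated in this diff:

```python
# Simplified stand-in for the preflight duplicate check. The real plugin
# queries Document with Q(checksum=...) | Q(archive_checksum=...); MD5 is an
# inference from the 32-hex-char checksum column.
import hashlib
from pathlib import Path

def file_checksum(path: Path) -> str:
    return hashlib.md5(path.read_bytes()).hexdigest()  # 32 hex chars

def is_duplicate(path: Path, known_checksums: set[str]) -> bool:
    return file_checksum(path) in known_checksums
```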
View File

@@ -6,7 +6,7 @@ from django.db import models

 class Migration(migrations.Migration):
     dependencies = [
-        ("documents", "1075_workflowaction_order"),
+        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
     ]

     operations = [

View File

@@ -12,7 +12,7 @@ def populate_action_order(apps, schema_editor):

 class Migration(migrations.Migration):
     dependencies = [
-        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
+        ("documents", "1075_alter_paperlesstask_task_name"),
     ]

     operations = [

View File

@@ -1,23 +0,0 @@
-# Generated by Django 5.2.7 on 2026-01-14 17:45
-
-from django.db import migrations
-from django.db import models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("documents", "1076_alter_paperlesstask_task_name"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="document",
-            name="checksum",
-            field=models.CharField(
-                editable=False,
-                max_length=32,
-                verbose_name="checksum",
-                help_text="The checksum of the original document.",
-            ),
-        ),
-    ]

View File

@@ -212,6 +212,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
         _("checksum"),
         max_length=32,
         editable=False,
+        unique=True,
         help_text=_("The checksum of the original document."),
     )
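Re-adding `unique=True` (paired with deleting the migration above that had dropped it) restores a database-level guard on top of the consumer's preflight check. Illustrative only, with made-up field values:

```python
# With unique=True back on Document.checksum, a second row with the same
# checksum is rejected by the database regardless of the consumer code path.
from django.db import IntegrityError
from documents.models import Document

Document.objects.create(
    title="first", content="", mime_type="application/pdf", checksum="a" * 32
)
try:
    Document.objects.create(
        title="second", content="", mime_type="application/pdf", checksum="a" * 32
    )
except IntegrityError:
    print("duplicate checksum rejected by the unique constraint")
```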

View File

@@ -148,29 +148,13 @@ def get_document_count_filter_for_user(user):
     )


-def get_objects_for_user_owner_aware(
-    user,
-    perms,
-    Model,
-    *,
-    include_deleted=False,
-) -> QuerySet:
-    """
-    Returns objects the user owns, are unowned, or has explicit perms.
-    When include_deleted is True, soft-deleted items are also included.
-    """
-    manager = (
-        Model.global_objects
-        if include_deleted and hasattr(Model, "global_objects")
-        else Model.objects
-    )
-    objects_owned = manager.filter(owner=user)
-    objects_unowned = manager.filter(owner__isnull=True)
+def get_objects_for_user_owner_aware(user, perms, Model) -> QuerySet:
+    objects_owned = Model.objects.filter(owner=user)
+    objects_unowned = Model.objects.filter(owner__isnull=True)
     objects_with_perms = get_objects_for_user(
         user=user,
         perms=perms,
-        klass=manager.all(),
+        klass=Model,
         accept_global_perms=False,
     )
     return objects_owned | objects_unowned | objects_with_perms
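The helper returns to its pre-duplicates shape: a union of owned, unowned, and explicitly-permitted objects, with soft-deleted rows no longer reachable through it. A hypothetical call site:

```python
# Hypothetical usage of the restored helper; function name here is made up.
from documents.models import Document
from documents.permissions import get_objects_for_user_owner_aware

def viewable_documents(user):
    # union of owned | unowned | explicit object perms; excludes trashed rows
    return get_objects_for_user_owner_aware(user, "documents.view_document", Document)
```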

View File

@@ -23,7 +23,6 @@ from django.core.validators import MinValueValidator
 from django.core.validators import RegexValidator
 from django.core.validators import integer_validator
 from django.db.models import Count
-from django.db.models import Q
 from django.db.models.functions import Lower
 from django.utils.crypto import get_random_string
 from django.utils.dateparse import parse_datetime
@@ -73,7 +72,6 @@ from documents.models import WorkflowTrigger
 from documents.parsers import is_mime_type_supported
 from documents.permissions import get_document_count_filter_for_user
 from documents.permissions import get_groups_with_only_permission
-from documents.permissions import get_objects_for_user_owner_aware
 from documents.permissions import set_permissions_for_object
 from documents.regex import validate_regex_pattern
 from documents.templating.filepath import validate_filepath_template_and_render
@@ -1016,29 +1014,6 @@
         return ret


-def _get_viewable_duplicates(document: Document, user: User | None):
-    checksums = {document.checksum}
-    if document.archive_checksum:
-        checksums.add(document.archive_checksum)
-    duplicates = Document.global_objects.filter(
-        Q(checksum__in=checksums) | Q(archive_checksum__in=checksums),
-    ).exclude(pk=document.pk)
-    duplicates = duplicates.order_by("-created")
-    allowed = get_objects_for_user_owner_aware(
-        user,
-        "documents.view_document",
-        Document,
-        include_deleted=True,
-    )
-    return duplicates.filter(id__in=allowed.values_list("id", flat=True))
-
-
-class DuplicateDocumentSummarySerializer(serializers.Serializer):
-    id = serializers.IntegerField()
-    title = serializers.CharField()
-    deleted_at = serializers.DateTimeField(allow_null=True)
-
-
 @extend_schema_serializer(
     deprecate_fields=["created_date"],
 )
@@ -1056,7 +1031,6 @@
     archived_file_name = SerializerMethodField()
     created_date = serializers.DateField(required=False)
     page_count = SerializerMethodField()
-    duplicate_documents = SerializerMethodField()

     notes = NotesSerializer(many=True, required=False, read_only=True)
@@ -1082,16 +1056,6 @@
     def get_page_count(self, obj) -> int | None:
         return obj.page_count

-    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
-    def get_duplicate_documents(self, obj):
-        view = self.context.get("view")
-        if view and getattr(view, "action", None) != "retrieve":
-            return []
-        request = self.context.get("request")
-        user = request.user if request else None
-        duplicates = _get_viewable_duplicates(obj, user)
-        return list(duplicates.values("id", "title", "deleted_at"))
-
     def get_original_file_name(self, obj) -> str | None:
         return obj.original_filename
@@ -1269,7 +1233,6 @@
             "archive_serial_number",
             "original_file_name",
             "archived_file_name",
-            "duplicate_documents",
             "owner",
             "permissions",
             "user_can_change",
@@ -2131,12 +2094,10 @@ class TasksViewSerializer(OwnedObjectSerializer):
             "result",
             "acknowledged",
             "related_document",
-            "duplicate_documents",
             "owner",
         )

     related_document = serializers.SerializerMethodField()
-    duplicate_documents = serializers.SerializerMethodField()
     created_doc_re = re.compile(r"New document id (\d+) created")
     duplicate_doc_re = re.compile(r"It is a duplicate of .* \(#(\d+)\)")
@@ -2161,17 +2122,6 @@
         return result

-    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
-    def get_duplicate_documents(self, obj):
-        related_document = self.get_related_document(obj)
-        request = self.context.get("request")
-        user = request.user if request else None
-        document = Document.global_objects.filter(pk=related_document).first()
-        if not related_document or not user or not document:
-            return []
-        duplicates = _get_viewable_duplicates(document, user)
-        return list(duplicates.values("id", "title", "deleted_at"))
-

 class RunTaskViewSerializer(serializers.Serializer):
     task_name = serializers.ChoiceField(
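With the `duplicate_documents` method field gone, the duplicate link-up in the tasks API rests entirely on the two regexes the serializer keeps. A standalone check of what they extract, using strings that appear in this diff's tests:

```python
# The serializer's two result-string regexes, demonstrated in isolation.
import re

created_doc_re = re.compile(r"New document id (\d+) created")
duplicate_doc_re = re.compile(r"It is a duplicate of .* \(#(\d+)\)")

ok = created_doc_re.search("Success. New document id 42 created")
dup = duplicate_doc_re.search(
    "Not consuming task_one.pdf: It is a duplicate of task_one_existing.pdf (#1234)."
)
print(ok.group(1))   # "42"
print(dup.group(1))  # "1234"
```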

View File

@@ -7,7 +7,6 @@ from django.contrib.auth.models import User
 from rest_framework import status
 from rest_framework.test import APITestCase

-from documents.models import Document
 from documents.models import PaperlessTask
 from documents.tests.utils import DirectoriesMixin
 from documents.views import TasksViewSet
@@ -259,7 +258,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
             status=celery.states.FAILURE,
-            result="test.pdf: Unexpected error during ingestion.",
+            result="test.pdf: Not consuming test.pdf: It is a duplicate.",
         )

         response = self.client.get(self.ENDPOINT)
@@ -271,7 +270,7 @@
         self.assertEqual(
             returned_data["result"],
-            "test.pdf: Unexpected error during ingestion.",
+            "test.pdf: Not consuming test.pdf: It is a duplicate.",
         )

     def test_task_name_webui(self):
@@ -326,34 +325,20 @@
         self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")

-    def test_task_result_duplicate_warning_includes_count(self):
+    def test_task_result_failed_duplicate_includes_related_doc(self):
         """
         GIVEN:
-            - A celery task succeeds, but a duplicate exists
+            - A celery task failed with a duplicate error
         WHEN:
             - API call is made to get tasks
         THEN:
-            - The returned data includes duplicate warning metadata
+            - The returned data includes a related document link
         """
-        checksum = "duplicate-checksum"
-        Document.objects.create(
-            title="Existing",
-            content="",
-            mime_type="application/pdf",
-            checksum=checksum,
-        )
-        created_doc = Document.objects.create(
-            title="Created",
-            content="",
-            mime_type="application/pdf",
-            checksum=checksum,
-            archive_checksum="another-checksum",
-        )
         PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
-            status=celery.states.SUCCESS,
-            result=f"Success. New document id {created_doc.pk} created",
+            status=celery.states.FAILURE,
+            result="Not consuming task_one.pdf: It is a duplicate of task_one_existing.pdf (#1234).",
         )

         response = self.client.get(self.ENDPOINT)
@@ -363,7 +348,7 @@
         returned_data = response.data[0]

-        self.assertEqual(returned_data["related_document"], str(created_doc.pk))
+        self.assertEqual(returned_data["related_document"], "1234")

     def test_run_train_classifier_task(self):
         """

View File

@@ -485,21 +485,21 @@ class TestConsumer(
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()

-        with self.get_consumer(self.get_test_file()) as consumer:
-            consumer.run()
+        with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
+            with self.get_consumer(self.get_test_file()) as consumer:
+                consumer.run()

-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self._assert_first_last_send_progress(last_status="FAILED")

     def testDuplicates2(self):
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()

-        with self.get_consumer(self.get_test_archive_file()) as consumer:
-            consumer.run()
+        with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
+            with self.get_consumer(self.get_test_archive_file()) as consumer:
+                consumer.run()

-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self._assert_first_last_send_progress(last_status="FAILED")

     def testDuplicates3(self):
         with self.get_consumer(self.get_test_archive_file()) as consumer:
@@ -513,10 +513,9 @@
         Document.objects.all().delete()

-        with self.get_consumer(self.get_test_file()) as consumer:
-            consumer.run()
-
-        self.assertEqual(Document.objects.count(), 1)
+        with self.assertRaisesMessage(ConsumerError, "document is in the trash"):
+            with self.get_consumer(self.get_test_file()) as consumer:
+                consumer.run()

     def testAsnExists(self):
         with self.get_consumer(
@@ -719,45 +718,12 @@
         dst = self.get_test_file()
         self.assertIsFile(dst)

-        expected_message = (
-            f"{dst.name}: Not consuming {dst.name}: "
-            f"It is a duplicate of {document.title} (#{document.pk})"
-        )
-        with self.assertRaisesMessage(ConsumerError, expected_message):
+        with self.assertRaises(ConsumerError):
             with self.get_consumer(dst) as consumer:
                 consumer.run()

         self.assertIsNotFile(dst)
-        self.assertEqual(Document.objects.count(), 1)
-        self._assert_first_last_send_progress(last_status=ProgressStatusOptions.FAILED)
-
-    @override_settings(CONSUMER_DELETE_DUPLICATES=True)
-    def test_delete_duplicate_in_trash(self):
-        dst = self.get_test_file()
-        with self.get_consumer(dst) as consumer:
-            consumer.run()
-
-        # Move the existing document to trash
-        document = Document.objects.first()
-        document.delete()
-
-        dst = self.get_test_file()
-        self.assertIsFile(dst)
-
-        expected_message = (
-            f"{dst.name}: Not consuming {dst.name}: "
-            f"It is a duplicate of {document.title} (#{document.pk})"
-            f" Note: existing document is in the trash."
-        )
-        with self.assertRaisesMessage(ConsumerError, expected_message):
-            with self.get_consumer(dst) as consumer:
-                consumer.run()
-
-        self.assertIsNotFile(dst)
-        self.assertEqual(Document.global_objects.count(), 1)
-        self.assertEqual(Document.objects.count(), 0)
+        self._assert_first_last_send_progress(last_status="FAILED")

     @override_settings(CONSUMER_DELETE_DUPLICATES=False)
     def test_no_delete_duplicate(self):
@@ -777,12 +743,15 @@
         dst = self.get_test_file()
         self.assertIsFile(dst)

-        with self.get_consumer(dst) as consumer:
-            consumer.run()
+        with self.assertRaisesRegex(
+            ConsumerError,
+            r"sample\.pdf: Not consuming sample\.pdf: It is a duplicate of sample \(#\d+\)",
+        ):
+            with self.get_consumer(dst) as consumer:
+                consumer.run()

-        self.assertIsNotFile(dst)
-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self.assertIsFile(dst)
+        self._assert_first_last_send_progress(last_status="FAILED")

     @override_settings(FILENAME_FORMAT="{title}")
     @mock.patch("documents.parsers.document_consumer_declaration.send")

uv.lock (generated, 3716 changed lines)

File diff suppressed because it is too large