Compare commits


1 commit

Author   SHA1        Message             Date
shamoon  b36cfab43e  Update handlers.py  2026-01-25 22:46:25 -08:00
44 changed files with 829 additions and 1561 deletions

View File

@@ -8,7 +8,7 @@ Further documentation is provided here for some endpoints and features.
 ## Authorization
-The REST api provides five different forms of authentication.
+The REST api provides four different forms of authentication.
 1. Basic authentication
@@ -52,14 +52,6 @@ The REST api provides five different forms of authentication.
    [configuration](configuration.md#PAPERLESS_ENABLE_HTTP_REMOTE_USER_API)),
    you can authenticate against the API using Remote User auth.
-5. Headless OIDC via [`django-allauth`](https://codeberg.org/allauth/django-allauth)
-   `django-allauth` exposes API endpoints under `api/auth/` which enable tools
-   like third-party apps to authenticate with social accounts that are
-   configured. See
-   [here](advanced_usage.md#openid-connect-and-social-authentication) for more
-   information on social accounts.
 ## Searching for documents
 Full text searching is available on the `/api/documents/` endpoint. Two

View File

@@ -659,7 +659,7 @@ system. See the corresponding
 : Sync groups from the third party authentication system (e.g. OIDC) to Paperless-ngx. When enabled, users will be added or removed from groups based on their group membership in the third party authentication system. Groups must already exist in Paperless-ngx and have the same name as in the third party authentication system. Groups are updated upon logging in via the third party authentication system, see the corresponding [django-allauth documentation](https://docs.allauth.org/en/dev/socialaccount/signals.html).
-: In order to pass groups from the authentication system you will need to update your [PAPERLESS_SOCIALACCOUNT_PROVIDERS](#PAPERLESS_SOCIALACCOUNT_PROVIDERS) setting by adding a top-level "SCOPES" setting which includes "groups", or the custom groups claim configured in [`PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM`](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM) e.g.:
+: In order to pass groups from the authentication system you will need to update your [PAPERLESS_SOCIALACCOUNT_PROVIDERS](#PAPERLESS_SOCIALACCOUNT_PROVIDERS) setting by adding a top-level "SCOPES" setting which includes "groups", e.g.:
   ```json
   {"openid_connect":{"SCOPE": ["openid","profile","email","groups"]...
@@ -667,12 +667,6 @@ system. See the corresponding
   Defaults to False
-#### [`PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM=<str>`](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM) {#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM}
-: Allows you to define a custom groups claim. See [PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS) which is required for this setting to take effect.
-  Defaults to "groups"
 #### [`PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS=<comma-separated-list>`](#PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS) {#PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS}
 : A list of group names that users who signup via social accounts will be added to upon signup. Groups listed here must already exist.
@@ -1152,9 +1146,8 @@ via the consumption directory, you can disable the consumer to save resources.
 #### [`PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>`](#PAPERLESS_CONSUMER_DELETE_DUPLICATES) {#PAPERLESS_CONSUMER_DELETE_DUPLICATES}
-: As of version 3.0 Paperless-ngx allows duplicate documents to be consumed by default, _except_ when
-  this setting is enabled. When enabled, Paperless will check if a document with the same hash already
-  exists in the system and delete the duplicate file from the consumption directory without consuming it.
+: When the consumer detects a duplicate document, it will not touch
+  the original document. This default behavior can be changed here.
   Defaults to false.
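For orientation, the JSON fragment in the hunk above is the value of `PAPERLESS_SOCIALACCOUNT_PROVIDERS`, which Paperless-ngx hands to django-allauth as its `SOCIALACCOUNT_PROVIDERS` setting. A minimal sketch of the expanded structure, assuming django-allauth's usual OpenID Connect app layout; the provider id, credentials, and server URL below are illustrative placeholders, not values from this diff:

```python
# Sketch only: roughly what the documented JSON expands to in Django settings.
SOCIALACCOUNT_PROVIDERS = {
    "openid_connect": {
        # "groups" must be requested, or the group claim never reaches allauth
        "SCOPE": ["openid", "profile", "email", "groups"],
        "APPS": [
            {
                "provider_id": "my-idp",         # hypothetical id
                "name": "My Identity Provider",  # hypothetical display name
                "client_id": "paperless",        # placeholder credentials
                "secret": "change-me",
                "settings": {"server_url": "https://idp.example.com"},
            },
        ],
    },
}
```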

View File

@@ -114,16 +114,15 @@ testing = [
     "daphne",
     "factory-boy~=3.3.1",
     "imagehash",
-    "pytest~=9.0.0",
+    "pytest~=8.4.1",
     "pytest-cov~=7.0.0",
     "pytest-django~=4.11.1",
-    "pytest-env~=1.2.0",
+    "pytest-env",
     "pytest-httpx",
-    "pytest-mock~=3.15.1",
-    #"pytest-randomly~=4.0.1",
-    "pytest-rerunfailures~=16.1",
+    "pytest-mock",
+    "pytest-rerunfailures",
     "pytest-sugar",
-    "pytest-xdist~=3.8.0",
+    "pytest-xdist",
 ]
 lint = [
lint = [ lint = [
@@ -261,15 +260,11 @@ write-changes = true
 ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
 skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"
-[tool.pytest]
-minversion = "9.0"
-pythonpath = [ "src" ]
-strict_config = true
-strict_markers = true
-strict_parametrization_ids = true
-strict_xfail = true
+[tool.pytest.ini_options]
+minversion = "8.0"
+pythonpath = [
+  "src",
+]
 testpaths = [
   "src/documents/tests/",
   "src/paperless/tests/",
@@ -280,7 +275,6 @@ testpaths = [
   "src/paperless_remote/tests/",
   "src/paperless_ai/tests",
 ]
 addopts = [
   "--pythonwarnings=all",
   "--cov",
@@ -288,14 +282,11 @@ addopts = [
   "--cov-report=xml",
   "--numprocesses=auto",
   "--maxprocesses=16",
-  "--dist=loadscope",
+  "--quiet",
   "--durations=50",
-  "--durations-min=0.5",
   "--junitxml=junit.xml",
-  "-o",
-  "junit_family=legacy",
+  "-o junit_family=legacy",
 ]
 norecursedirs = [ "src/locale/", ".venv/", "src-ui/" ]
 DJANGO_SETTINGS_MODULE = "paperless.settings"

File diff suppressed because it is too large

View File

@@ -97,12 +97,6 @@
           <br/><em>(<ng-container i18n>click for full output</ng-container>)</em>
         }
       </ng-template>
-      @if (task.duplicate_documents?.length > 0) {
-        <div class="small text-warning-emphasis d-flex align-items-center gap-1">
-          <i-bs class="lh-1" width="1em" height="1em" name="exclamation-triangle"></i-bs>
-          <span i18n>Duplicate(s) detected</span>
-        </div>
-      }
     </td>
   }
   <td class="d-lg-none">

View File

@@ -1,18 +1,9 @@
 <div class="row pt-3 pb-3 pb-md-2 align-items-center">
   <div class="col-md text-truncate">
-    <h3 class="d-flex align-items-center mb-1" style="line-height: 1.4">
-      <span class="text-truncate">{{title}}</span>
-      @if (id) {
-        <span class="badge bg-primary text-primary-text-contrast ms-3 small fs-normal cursor-pointer" (click)="copyID()">
-          @if (copied) {
-            <i-bs width="1em" height="1em" name="clipboard-check"></i-bs>&nbsp;<ng-container i18n>Copied!</ng-container>
-          } @else {
-            ID: {{id}}
-          }
-        </span>
-      }
+    <h3 class="text-truncate" style="line-height: 1.4">
+      {{title}}
       @if (subTitle) {
-        <span class="h6 mb-0 mt-1 d-block d-md-inline fw-normal ms-md-3 text-truncate" style="line-height: 1.4">{{subTitle}}</span>
+        <span class="h6 mb-0 d-block d-md-inline fw-normal ms-md-3 text-truncate" style="line-height: 1.4">{{subTitle}}</span>
       }
       @if (info) {
         <button class="btn btn-sm btn-link text-muted me-auto p-0 p-md-2" title="What's this?" i18n-title type="button" [ngbPopover]="infoPopover" [autoClose]="true">

View File

@@ -1,10 +1,5 @@
 h3 {
   min-height: calc(1.325rem + 0.9vw);
-  .badge {
-    font-size: 0.65rem;
-    line-height: 1;
-  }
 }
 @media (min-width: 1200px) {

View File

@@ -1,4 +1,3 @@
-import { Clipboard } from '@angular/cdk/clipboard'
 import { ComponentFixture, TestBed } from '@angular/core/testing'
 import { Title } from '@angular/platform-browser'
 import { environment } from 'src/environments/environment'
@@ -8,7 +7,6 @@ describe('PageHeaderComponent', () => {
   let component: PageHeaderComponent
   let fixture: ComponentFixture<PageHeaderComponent>
   let titleService: Title
-  let clipboard: Clipboard
   beforeEach(async () => {
     TestBed.configureTestingModule({
@@ -17,7 +15,6 @@ describe('PageHeaderComponent', () => {
     }).compileComponents()
     titleService = TestBed.inject(Title)
-    clipboard = TestBed.inject(Clipboard)
     fixture = TestBed.createComponent(PageHeaderComponent)
     component = fixture.componentInstance
     fixture.detectChanges()
@@ -27,8 +24,7 @@ describe('PageHeaderComponent', () => {
     component.title = 'Foo'
     component.subTitle = 'Bar'
     fixture.detectChanges()
-    expect(fixture.nativeElement.textContent).toContain('Foo')
-    expect(fixture.nativeElement.textContent).toContain('Bar')
+    expect(fixture.nativeElement.textContent).toContain('Foo Bar')
   })
   it('should set html title', () => {
@@ -36,16 +32,4 @@ describe('PageHeaderComponent', () => {
     component.title = 'Foo Bar'
     expect(titleSpy).toHaveBeenCalledWith(`Foo Bar - ${environment.appTitle}`)
   })
-  it('should copy id to clipboard, reset after 3 seconds', () => {
-    jest.useFakeTimers()
-    component.id = 42 as any
-    jest.spyOn(clipboard, 'copy').mockReturnValue(true)
-    component.copyID()
-    expect(clipboard.copy).toHaveBeenCalledWith('42')
-    expect(component.copied).toBe(true)
-    jest.advanceTimersByTime(3000)
-    expect(component.copied).toBe(false)
-  })
 })

View File

@@ -1,4 +1,3 @@
-import { Clipboard } from '@angular/cdk/clipboard'
 import { Component, Input, inject } from '@angular/core'
 import { Title } from '@angular/platform-browser'
 import { NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
@@ -14,11 +13,8 @@ import { environment } from 'src/environments/environment'
 })
 export class PageHeaderComponent {
   private titleService = inject(Title)
-  private clipboard = inject(Clipboard)
-  private _title = ''
-  public copied: boolean = false
-  private copyTimeout: any
+  _title = ''
   @Input()
   set title(title: string) {
@@ -30,9 +26,6 @@ export class PageHeaderComponent {
     return this._title
   }
-  @Input()
-  id: number
   @Input()
   subTitle: string = ''
@@ -41,12 +34,4 @@ export class PageHeaderComponent {
   @Input()
   infoLink: string
-  public copyID() {
-    this.copied = this.clipboard.copy(this.id.toString())
-    clearTimeout(this.copyTimeout)
-    this.copyTimeout = setTimeout(() => {
-      this.copied = false
-    }, 3000)
-  }
 }

View File

@@ -1,4 +1,4 @@
-<pngx-page-header [(title)]="title" [id]="documentId">
+<pngx-page-header [(title)]="title">
   @if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
     @if (previewNumPages) {
       <div class="input-group input-group-sm d-none d-md-flex">
@@ -370,37 +370,6 @@
         </ng-template>
       </li>
     }
-    @if (document?.duplicate_documents?.length) {
-      <li [ngbNavItem]="DocumentDetailNavIDs.Duplicates">
-        <a class="text-nowrap" ngbNavLink i18n>
-          Duplicates
-          <span class="badge text-bg-secondary ms-1">{{ document.duplicate_documents.length }}</span>
-        </a>
-        <ng-template ngbNavContent>
-          <div class="d-flex flex-column gap-2">
-            <div class="fst-italic" i18n>Duplicate documents detected:</div>
-            <div class="list-group">
-              @for (duplicate of document.duplicate_documents; track duplicate.id) {
-                <a
-                  class="list-group-item list-group-item-action d-flex justify-content-between align-items-center"
-                  [routerLink]="['/documents', duplicate.id, 'details']"
-                  [class.disabled]="duplicate.deleted_at"
-                >
-                  <span class="d-flex align-items-center gap-2">
-                    <span>{{ duplicate.title || ('#' + duplicate.id) }}</span>
-                    @if (duplicate.deleted_at) {
-                      <span class="badge text-bg-secondary" i18n>In trash</span>
-                    }
-                  </span>
-                  <span class="text-secondary">#{{ duplicate.id }}</span>
-                </a>
-              }
-            </div>
-          </div>
-        </ng-template>
-      </li>
-    }
   </ul>
 <div [ngbNavOutlet]="nav" class="mt-3"></div>

View File

@@ -301,16 +301,16 @@ describe('DocumentDetailComponent', () => {
       .spyOn(openDocumentsService, 'openDocument')
       .mockReturnValueOnce(of(true))
     fixture.detectChanges()
-    expect(component.activeNavID).toEqual(component.DocumentDetailNavIDs.Notes)
+    expect(component.activeNavID).toEqual(5) // DocumentDetailNavIDs.Notes
   })
   it('should change url on tab switch', () => {
     initNormally()
     const navigateSpy = jest.spyOn(router, 'navigate')
-    component.nav.select(component.DocumentDetailNavIDs.Notes)
+    component.nav.select(5)
     component.nav.navChange.next({
       activeId: 1,
-      nextId: component.DocumentDetailNavIDs.Notes,
+      nextId: 5,
       preventDefault: () => {},
     })
     fixture.detectChanges()
@@ -352,18 +352,6 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })
-  it('should fall back to details tab when duplicates tab is active but no duplicates', () => {
-    initNormally()
-    component.activeNavID = component.DocumentDetailNavIDs.Duplicates
-    const noDupDoc = { ...doc, duplicate_documents: [] }
-    component.updateComponent(noDupDoc)
-    expect(component.activeNavID).toEqual(
-      component.DocumentDetailNavIDs.Details
-    )
-  })
   it('should load already-opened document via param', () => {
     initNormally()
     jest.spyOn(documentService, 'get').mockReturnValueOnce(of(doc))
@@ -379,38 +367,6 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })
-  it('should update cached open document duplicates when reloading an open doc', () => {
-    const openDoc = { ...doc, duplicate_documents: [{ id: 1, title: 'Old' }] }
-    const updatedDuplicates = [
-      { id: 2, title: 'Newer duplicate', deleted_at: null },
-    ]
-    jest
-      .spyOn(activatedRoute, 'paramMap', 'get')
-      .mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
-    jest.spyOn(documentService, 'get').mockReturnValue(
-      of({
-        ...doc,
-        modified: new Date('2024-01-02T00:00:00Z'),
-        duplicate_documents: updatedDuplicates,
-      })
-    )
-    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
-    const saveSpy = jest.spyOn(openDocumentsService, 'save')
-    jest.spyOn(openDocumentsService, 'openDocument').mockReturnValue(of(true))
-    jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
-      of({
-        count: customFields.length,
-        all: customFields.map((f) => f.id),
-        results: customFields,
-      })
-    )
-    fixture.detectChanges()
-    expect(openDoc.duplicate_documents).toEqual(updatedDuplicates)
-    expect(saveSpy).toHaveBeenCalled()
-  })
   it('should disable form if user cannot edit', () => {
     currentUserHasObjectPermissions = false
     initNormally()

View File

@@ -8,7 +8,7 @@ import {
   FormsModule,
   ReactiveFormsModule,
 } from '@angular/forms'
-import { ActivatedRoute, Router, RouterModule } from '@angular/router'
+import { ActivatedRoute, Router } from '@angular/router'
 import {
   NgbDateStruct,
   NgbDropdownModule,
@@ -124,7 +124,6 @@ enum DocumentDetailNavIDs {
   Notes = 5,
   Permissions = 6,
   History = 7,
-  Duplicates = 8,
 }
 enum ContentRenderType {
@@ -182,7 +181,6 @@ export enum ZoomSetting {
     NgxBootstrapIconsModule,
     PdfViewerModule,
     TextAreaComponent,
-    RouterModule,
   ],
 })
 export class DocumentDetailComponent
@@ -456,11 +454,6 @@ export class DocumentDetailComponent
     const openDocument = this.openDocumentService.getOpenDocument(
       this.documentId
     )
-    // update duplicate documents if present
-    if (openDocument && doc?.duplicate_documents) {
-      openDocument.duplicate_documents = doc.duplicate_documents
-      this.openDocumentService.save()
-    }
     const useDoc = openDocument || doc
     if (openDocument) {
       if (
@@ -711,13 +704,6 @@ export class DocumentDetailComponent
     }
     this.title = this.documentTitlePipe.transform(doc.title)
     this.prepareForm(doc)
-    if (
-      this.activeNavID === DocumentDetailNavIDs.Duplicates &&
-      !doc?.duplicate_documents?.length
-    ) {
-      this.activeNavID = DocumentDetailNavIDs.Details
-    }
   }
   get customFieldFormFields(): FormArray {
get customFieldFormFields(): FormArray { get customFieldFormFields(): FormArray {

View File

@@ -14,7 +14,6 @@ import { SortableDirective } from 'src/app/directives/sortable.directive'
 import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
 import { PermissionType } from 'src/app/services/permissions.service'
 import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
-import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
 import { CorrespondentEditDialogComponent } from '../../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
 import { PageHeaderComponent } from '../../common/page-header/page-header.component'
 import { ManagementListComponent } from '../management-list/management-list.component'
@@ -37,7 +36,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     NgbDropdownModule,
     NgbPaginationModule,
     NgxBootstrapIconsModule,
-    ClearableBadgeComponent,
   ],
 })
 export class CorrespondentListComponent extends ManagementListComponent<Correspondent> {

View File

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
 import { SortableDirective } from 'src/app/directives/sortable.directive'
 import { PermissionType } from 'src/app/services/permissions.service'
 import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
-import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
 import { DocumentTypeEditDialogComponent } from '../../common/edit-dialog/document-type-edit-dialog/document-type-edit-dialog.component'
 import { PageHeaderComponent } from '../../common/page-header/page-header.component'
 import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     NgbDropdownModule,
     NgbPaginationModule,
     NgxBootstrapIconsModule,
-    ClearableBadgeComponent,
   ],
 })
 export class DocumentTypeListComponent extends ManagementListComponent<DocumentType> {

View File

@@ -1,48 +1,17 @@
 <pngx-page-header title="{{ typeNamePlural | titlecase }}" info="View, add, edit and delete {{ typeNamePlural }}." infoLink="usage/#terms-and-definitions">
-  <div ngbDropdown class="btn-group flex-fill d-sm-none">
-    <button class="btn btn-sm btn-outline-primary" id="dropdownSelectMobile" ngbDropdownToggle>
-      <i-bs name="text-indent-left"></i-bs>
-      <div class="d-none d-sm-inline">&nbsp;<ng-container i18n>Select</ng-container></div>
-      @if (selectedObjects.size > 0) {
-        <pngx-clearable-badge [selected]="selectedObjects.size > 0" [number]="selectedObjects.size" (cleared)="selectNone()"></pngx-clearable-badge><span class="visually-hidden">selected</span>
-      }
-    </button>
-    <div ngbDropdownMenu aria-labelledby="dropdownSelectMobile" class="shadow">
-      <button ngbDropdownItem (click)="selectNone()" i18n>Select none</button>
-      <button ngbDropdownItem (click)="selectPage(true)" i18n>Select page</button>
-      <button ngbDropdownItem (click)="selectAll()" i18n>Select all</button>
-    </div>
-  </div>
-  <div class="d-none d-sm-flex flex-fill me-3">
-    <div class="input-group input-group-sm">
-      <span class="input-group-text border-0" i18n>Select:</span>
-    </div>
-    <div class="btn-group btn-group-sm flex-nowrap">
-      @if (selectedObjects.size > 0) {
-        <button class="btn btn-sm btn-outline-secondary" (click)="selectNone()">
-          <i-bs name="slash-circle"></i-bs>&nbsp;<ng-container i18n>None</ng-container>
-        </button>
-      }
-      <button class="btn btn-sm btn-outline-primary" (click)="selectPage(true)">
-        <i-bs name="file-earmark-check"></i-bs>&nbsp;<ng-container i18n>Page</ng-container>
-      </button>
-      <button class="btn btn-sm btn-outline-primary" (click)="selectAll()">
-        <i-bs name="check-all"></i-bs>&nbsp;<ng-container i18n>All</ng-container>
-      </button>
-    </div>
-  </div>
+  <button class="btn btn-sm btn-outline-secondary" (click)="clearSelection()" [hidden]="selectedObjects.size === 0">
+    <i-bs name="x"></i-bs>&nbsp;<ng-container i18n>Clear selection</ng-container>
+  </button>
   <button type="button" class="btn btn-sm btn-outline-primary" (click)="setPermissions()" [disabled]="!userCanBulkEdit(PermissionAction.Change) || selectedObjects.size === 0">
     <i-bs name="person-fill-lock"></i-bs>&nbsp;<ng-container i18n>Permissions</ng-container>
   </button>
   <button type="button" class="btn btn-sm btn-outline-danger" (click)="delete()" [disabled]="!userCanBulkEdit(PermissionAction.Delete) || selectedObjects.size === 0">
     <i-bs name="trash"></i-bs>&nbsp;<ng-container i18n>Delete</ng-container>
   </button>
   <button type="button" class="btn btn-sm btn-outline-primary ms-md-5" (click)="openCreateDialog()" *pngxIfPermissions="{ action: PermissionAction.Add, type: permissionType }">
     <i-bs name="plus-circle"></i-bs>&nbsp;<ng-container i18n>Create</ng-container>
   </button>
 </pngx-page-header>
 <div class="row mb-3">
@@ -62,7 +31,7 @@
       <tr>
         <th scope="col">
           <div class="form-check m-0 ms-2 me-n2">
-            <input type="checkbox" class="form-check-input" id="all-objects" [(ngModel)]="togggleAll" [disabled]="data.length === 0" (change)="selectPage($event.target.checked); $event.stopPropagation();">
+            <input type="checkbox" class="form-check-input" id="all-objects" [(ngModel)]="togggleAll" [disabled]="data.length === 0" (click)="toggleAll($event); $event.stopPropagation();">
             <label class="form-check-label" for="all-objects"></label>
           </div>
         </th>

View File

@@ -163,7 +163,8 @@ describe('ManagementListComponent', () => {
     const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
     const reloadSpy = jest.spyOn(component, 'reloadData')
-    component.openCreateDialog()
+    const createButton = fixture.debugElement.queryAll(By.css('button'))[4]
+    createButton.triggerEventHandler('click')
     expect(modal).not.toBeUndefined()
     const editDialog = modal.componentInstance as EditDialogComponent<Tag>
@@ -186,7 +187,8 @@ describe('ManagementListComponent', () => {
     const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
     const reloadSpy = jest.spyOn(component, 'reloadData')
-    component.openEditDialog(tags[0])
+    const editButton = fixture.debugElement.queryAll(By.css('button'))[7]
+    editButton.triggerEventHandler('click')
     expect(modal).not.toBeUndefined()
     const editDialog = modal.componentInstance as EditDialogComponent<Tag>
@@ -210,7 +212,8 @@ describe('ManagementListComponent', () => {
     const deleteSpy = jest.spyOn(tagService, 'delete')
     const reloadSpy = jest.spyOn(component, 'reloadData')
-    component.openDeleteDialog(tags[0])
+    const deleteButton = fixture.debugElement.queryAll(By.css('button'))[8]
+    deleteButton.triggerEventHandler('click')
     expect(modal).not.toBeUndefined()
     const editDialog = modal.componentInstance as ConfirmDialogComponent
@@ -227,21 +230,6 @@ describe('ManagementListComponent', () => {
     expect(reloadSpy).toHaveBeenCalled()
   })
-  it('should use the all list length for collection size when provided', fakeAsync(() => {
-    jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
-      of({
-        count: 1,
-        all: [1, 2, 3],
-        results: tags.slice(0, 1),
-      })
-    )
-    component.reloadData()
-    tick(100)
-    expect(component.collectionSize).toBe(3)
-  }))
   it('should support quick filter for objects', () => {
     const expectedUrl = documentListViewService.getQuickFilterUrl([
       { rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
@@ -276,84 +264,19 @@ describe('ManagementListComponent', () => {
     expect(component.page).toEqual(1)
   })
-  it('should support toggle select page in vew', () => {
+  it('should support toggle all items in view', () => {
     expect(component.selectedObjects.size).toEqual(0)
-    const selectPageSpy = jest.spyOn(component, 'selectPage')
+    const toggleAllSpy = jest.spyOn(component, 'toggleAll')
     const checkButton = fixture.debugElement.queryAll(
       By.css('input.form-check-input')
     )[0]
-    checkButton.nativeElement.dispatchEvent(new Event('change'))
+    checkButton.nativeElement.dispatchEvent(new Event('click'))
     checkButton.nativeElement.checked = true
-    checkButton.nativeElement.dispatchEvent(new Event('change'))
-    expect(selectPageSpy).toHaveBeenCalled()
+    checkButton.nativeElement.dispatchEvent(new Event('click'))
+    expect(toggleAllSpy).toHaveBeenCalled()
     expect(component.selectedObjects.size).toEqual(tags.length)
   })
-  it('selectNone should clear selection and reset toggle flag', () => {
-    component.selectedObjects = new Set([tags[0].id, tags[1].id])
-    component.togggleAll = true
-    component.selectNone()
-    expect(component.selectedObjects.size).toBe(0)
-    expect(component.togggleAll).toBe(false)
-  })
-  it('selectPage should select current page items or clear selection', () => {
-    component.selectPage(true)
-    expect(component.selectedObjects).toEqual(new Set(tags.map((t) => t.id)))
-    expect(component.togggleAll).toBe(true)
-    component.togggleAll = true
-    component.selectPage(false)
-    expect(component.selectedObjects.size).toBe(0)
-    expect(component.togggleAll).toBe(false)
-  })
-  it('selectAll should use all IDs when collection size exists', () => {
-    ;(component as any).allIDs = [1, 2, 3, 4]
-    component.collectionSize = 4
-    component.selectAll()
-    expect(component.selectedObjects).toEqual(new Set([1, 2, 3, 4]))
-    expect(component.togggleAll).toBe(true)
-  })
-  it('selectAll should clear selection when collection size is zero', () => {
-    component.selectedObjects = new Set([1])
-    component.collectionSize = 0
-    component.togggleAll = true
-    component.selectAll()
-    expect(component.selectedObjects.size).toBe(0)
-    expect(component.togggleAll).toBe(false)
-  })
-  it('toggleSelected should toggle object selection and update toggle state', () => {
-    component.toggleSelected(tags[0])
-    expect(component.selectedObjects.has(tags[0].id)).toBe(true)
-    expect(component.togggleAll).toBe(false)
-    component.toggleSelected(tags[1])
-    component.toggleSelected(tags[2])
-    expect(component.togggleAll).toBe(true)
-    component.toggleSelected(tags[1])
-    expect(component.selectedObjects.has(tags[1].id)).toBe(false)
-    expect(component.togggleAll).toBe(false)
-  })
-  it('areAllPageItemsSelected should return false when page has no selectable items', () => {
-    component.data = []
-    component.selectedObjects.clear()
-    expect((component as any).areAllPageItemsSelected()).toBe(false)
-    component.data = tags
-  })
   it('should support bulk edit permissions', () => {
     const bulkEditPermsSpy = jest.spyOn(tagService, 'bulk_edit_objects')
     component.toggleSelected(tags[0])

View File

@@ -84,7 +84,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   public data: T[] = []
   private unfilteredData: T[] = []
-  private allIDs: number[] = []
   public page = 1
@@ -172,8 +171,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
       tap((c) => {
         this.unfilteredData = c.results
         this.data = this.filterData(c.results)
-        this.collectionSize = c.all?.length ?? c.count
-        this.allIDs = c.all
+        this.collectionSize = c.count
       }),
       delay(100)
     )
@@ -302,6 +300,16 @@ export abstract class ManagementListComponent<T extends MatchingModel>
     return ownsAll
   }
+  toggleAll(event: PointerEvent) {
+    const checked = (event.target as HTMLInputElement).checked
+    this.togggleAll = checked
+    if (checked) {
+      this.selectedObjects = new Set(this.getSelectableIDs(this.data))
+    } else {
+      this.clearSelection()
+    }
+  }
   protected getSelectableIDs(objects: T[]): number[] {
     return objects.map((o) => o.id)
   }
@@ -311,38 +319,10 @@ export abstract class ManagementListComponent<T extends MatchingModel>
     this.selectedObjects.clear()
   }
-  selectNone() {
-    this.clearSelection()
-  }
-  selectPage(select: boolean) {
-    if (select) {
-      this.selectedObjects = new Set(this.getSelectableIDs(this.data))
-      this.togggleAll = this.areAllPageItemsSelected()
-    } else {
-      this.clearSelection()
-    }
-  }
-  selectAll() {
-    if (!this.collectionSize) {
-      this.clearSelection()
-      return
-    }
-    this.selectedObjects = new Set(this.allIDs)
-    this.togggleAll = this.areAllPageItemsSelected()
-  }
   toggleSelected(object) {
     this.selectedObjects.has(object.id)
       ? this.selectedObjects.delete(object.id)
       : this.selectedObjects.add(object.id)
-    this.togggleAll = this.areAllPageItemsSelected()
-  }
-  protected areAllPageItemsSelected(): boolean {
-    const ids = this.getSelectableIDs(this.data)
-    return ids.length > 0 && ids.every((id) => this.selectedObjects.has(id))
   }
   setPermissions() {
setPermissions() { setPermissions() {

View File

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
 import { SortableDirective } from 'src/app/directives/sortable.directive'
 import { PermissionType } from 'src/app/services/permissions.service'
 import { StoragePathService } from 'src/app/services/rest/storage-path.service'
-import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
 import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
 import { PageHeaderComponent } from '../../common/page-header/page-header.component'
 import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
     NgbDropdownModule,
     NgbPaginationModule,
     NgxBootstrapIconsModule,
-    ClearableBadgeComponent,
   ],
 })
 export class StoragePathListComponent extends ManagementListComponent<StoragePath> {

View File

@@ -138,12 +138,16 @@ describe('TagListComponent', () => {
     }
     component.data = [parent as any]
-    component.selectPage(true)
+    const selectEvent = { target: { checked: true } } as unknown as PointerEvent
+    component.toggleAll(selectEvent)
     expect(component.selectedObjects.has(10)).toBe(true)
     expect(component.selectedObjects.has(11)).toBe(true)
-    component.selectPage(false)
+    const deselectEvent = {
+      target: { checked: false },
+    } as unknown as PointerEvent
+    component.toggleAll(deselectEvent)
     expect(component.selectedObjects.size).toBe(0)
   })
 })

View File

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
 import { SortableDirective } from 'src/app/directives/sortable.directive'
 import { PermissionType } from 'src/app/services/permissions.service'
 import { TagService } from 'src/app/services/rest/tag.service'
-import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
 import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
 import { PageHeaderComponent } from '../../common/page-header/page-header.component'
 import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
    NgbDropdownModule,
    NgbPaginationModule,
    NgxBootstrapIconsModule,
-   ClearableBadgeComponent,
  ],
 })
 export class TagListComponent extends ManagementListComponent<Tag> {

View File

@@ -159,8 +159,6 @@ export interface Document extends ObjectWithPermissions {
   page_count?: number
-  duplicate_documents?: Document[]
   // Frontend only
   __changedFields?: string[]
 }

View File

@@ -1,4 +1,3 @@
-import { Document } from './document'
 import { ObjectWithId } from './object-with-id'
 export enum PaperlessTaskType {
@@ -43,7 +42,5 @@ export interface PaperlessTask extends ObjectWithId {
   related_document?: number
-  duplicate_documents?: Document[]
   owner?: number
 }

View File

@@ -779,45 +779,19 @@ class ConsumerPreflightPlugin(
             Q(checksum=checksum) | Q(archive_checksum=checksum),
         )
         if existing_doc.exists():
-            existing_doc = existing_doc.order_by("-created")
-            duplicates_in_trash = existing_doc.filter(deleted_at__isnull=False)
-            log_msg = (
-                f"Consuming duplicate {self.filename}: "
-                f"{existing_doc.count()} existing document(s) share the same content."
-            )
-            if duplicates_in_trash.exists():
-                log_msg += " Note: at least one existing document is in the trash."
-            self.log.warning(log_msg)
-            if settings.CONSUMER_DELETE_DUPLICATES:
-                duplicate = existing_doc.first()
-                duplicate_label = (
-                    duplicate.title
-                    or duplicate.original_filename
-                    or (Path(duplicate.filename).name if duplicate.filename else None)
-                    or str(duplicate.pk)
-                )
-                Path(self.input_doc.original_file).unlink()
-                failure_msg = (
-                    f"Not consuming {self.filename}: "
-                    f"It is a duplicate of {duplicate_label} (#{duplicate.pk})"
-                )
-                status_msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
-                if duplicates_in_trash.exists():
-                    status_msg = (
-                        ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
-                    )
-                    failure_msg += " Note: existing document is in the trash."
-                self._fail(
-                    status_msg,
-                    failure_msg,
-                )
+            msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
+            log_msg = f"Not consuming {self.filename}: It is a duplicate of {existing_doc.get().title} (#{existing_doc.get().pk})."
+            if existing_doc.first().deleted_at is not None:
+                msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
+                log_msg += " Note: existing document is in the trash."
+            if settings.CONSUMER_DELETE_DUPLICATES:
+                Path(self.input_doc.original_file).unlink()
+            self._fail(
+                msg,
+                log_msg,
+            )
     def pre_check_directories(self):
         """

View File

@@ -602,7 +602,7 @@ def rewrite_natural_date_keywords(query_string: str) -> str:
         case "this year":
             start = datetime(local_now.year, 1, 1, 0, 0, 0, tzinfo=tz)
-            end = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)
+            end = datetime.combine(today, time.max, tzinfo=tz)
         case "previous week":
             days_since_monday = local_now.weekday()
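The one-line change above moves the upper bound of the "this year" range. A runnable sketch of the two bounds, with stand-ins for the `local_now`, `today`, and `tz` values that the surrounding function defines but this hunk does not show:

```python
from datetime import datetime, time
from zoneinfo import ZoneInfo

tz = ZoneInfo("UTC")          # stand-in for the user's timezone
local_now = datetime.now(tz)  # stand-in for the function's local_now
today = local_now.date()      # stand-in for the function's today

# Removed behavior: the range ran to the end of the calendar year, so
# future-dated documents within the year matched "this year".
end_removed = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)

# Restored behavior: the range is clamped to the end of the current day.
end_restored = datetime.combine(today, time.max, tzinfo=tz)
```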

View File

@@ -1,23 +0,0 @@
-# Generated by Django 5.2.7 on 2026-01-14 17:45
-from django.db import migrations
-from django.db import models
-class Migration(migrations.Migration):
-    dependencies = [
-        ("documents", "0005_workflowtrigger_filter_has_any_correspondents_and_more"),
-    ]
-    operations = [
-        migrations.AlterField(
-            model_name="document",
-            name="checksum",
-            field=models.CharField(
-                editable=False,
-                max_length=32,
-                verbose_name="checksum",
-                help_text="The checksum of the original document.",
-            ),
-        ),
-    ]

View File

@@ -1,25 +0,0 @@
-# Generated by Django 5.2.6 on 2026-01-24 07:33
-import django.db.models.functions.text
-from django.db import migrations
-from django.db import models
-class Migration(migrations.Migration):
-    dependencies = [
-        ("documents", "0006_alter_document_checksum_unique"),
-    ]
-    operations = [
-        migrations.AddField(
-            model_name="document",
-            name="content_length",
-            field=models.GeneratedField(
-                db_persist=True,
-                expression=django.db.models.functions.text.Length("content"),
-                null=False,
-                help_text="Length of the content field in characters. Automatically maintained by the database for faster statistics computation.",
-                output_field=models.PositiveIntegerField(default=0),
-            ),
-        ),
-    ]

View File

@@ -20,9 +20,7 @@ if settings.AUDIT_LOG_ENABLED:
     from auditlog.registry import auditlog
 from django.db.models import Case
-from django.db.models import PositiveIntegerField
 from django.db.models.functions import Cast
-from django.db.models.functions import Length
 from django.db.models.functions import Substr
 from django_softdelete.models import SoftDeleteModel
@@ -194,15 +192,6 @@ class Document(SoftDeleteModel, ModelWithOwner):
         ),
     )
-    content_length = models.GeneratedField(
-        expression=Length("content"),
-        output_field=PositiveIntegerField(default=0),
-        db_persist=True,
-        null=False,
-        serialize=False,
-        help_text="Length of the content field in characters. Automatically maintained by the database for faster statistics computation.",
-    )
     mime_type = models.CharField(_("mime type"), max_length=256, editable=False)
     tags = models.ManyToManyField(
@@ -216,6 +205,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
         _("checksum"),
         max_length=32,
         editable=False,
+        unique=True,
         help_text=_("The checksum of the original document."),
     )
@@ -956,7 +946,7 @@ if settings.AUDIT_LOG_ENABLED:
     auditlog.register(
         Document,
         m2m_fields={"tags"},
-        exclude_fields=["content_length", "modified"],
+        exclude_fields=["modified"],
     )
     auditlog.register(Correspondent)
     auditlog.register(Tag)
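With `unique=True` back on the checksum field, a second document with the same content hash is rejected by the database itself; the consumer's preflight check reports the duplicate before a save is ever attempted. A sketch of what the constraint enforces (field values are illustrative):

```python
from django.db import IntegrityError, transaction

from documents.models import Document

try:
    with transaction.atomic():
        Document.objects.create(
            title="copy",
            mime_type="application/pdf",
            # digest already stored on another row -> constraint violation
            checksum="d41d8cd98f00b204e9800998ecf8427e",
        )
except IntegrityError:
    # consumption-time duplicate handling fails the task instead
    ...
```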

View File

@@ -148,29 +148,13 @@ def get_document_count_filter_for_user(user):
     )
-def get_objects_for_user_owner_aware(
-    user,
-    perms,
-    Model,
-    *,
-    include_deleted=False,
-) -> QuerySet:
-    """
-    Returns objects the user owns, are unowned, or has explicit perms.
-    When include_deleted is True, soft-deleted items are also included.
-    """
-    manager = (
-        Model.global_objects
-        if include_deleted and hasattr(Model, "global_objects")
-        else Model.objects
-    )
-    objects_owned = manager.filter(owner=user)
-    objects_unowned = manager.filter(owner__isnull=True)
+def get_objects_for_user_owner_aware(user, perms, Model) -> QuerySet:
+    objects_owned = Model.objects.filter(owner=user)
+    objects_unowned = Model.objects.filter(owner__isnull=True)
     objects_with_perms = get_objects_for_user(
         user=user,
         perms=perms,
-        klass=manager.all(),
+        klass=Model,
         accept_global_perms=False,
     )
     return objects_owned | objects_unowned | objects_with_perms
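A usage sketch of the simplified helper above: it unions objects the user owns, unowned objects, and objects carrying explicit per-object permissions. Since the `include_deleted` flag is gone, soft-deleted rows are no longer returned. The `user` variable is assumed to be in scope:

```python
from documents.models import Document
from documents.permissions import get_objects_for_user_owner_aware

# Everything `user` may view: owned, unowned, or explicitly permitted.
viewable_docs = get_objects_for_user_owner_aware(
    user,
    "documents.view_document",
    Document,
)
```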

View File

@@ -23,7 +23,6 @@ from django.core.validators import MinValueValidator
 from django.core.validators import RegexValidator
 from django.core.validators import integer_validator
 from django.db.models import Count
-from django.db.models import Q
 from django.db.models.functions import Lower
 from django.utils.crypto import get_random_string
 from django.utils.dateparse import parse_datetime
@@ -73,7 +72,6 @@ from documents.models import WorkflowTrigger
 from documents.parsers import is_mime_type_supported
 from documents.permissions import get_document_count_filter_for_user
 from documents.permissions import get_groups_with_only_permission
-from documents.permissions import get_objects_for_user_owner_aware
 from documents.permissions import set_permissions_for_object
 from documents.regex import validate_regex_pattern
 from documents.templating.filepath import validate_filepath_template_and_render
@@ -84,9 +82,6 @@ from documents.validators import url_validator
 if TYPE_CHECKING:
     from collections.abc import Iterable
-    from django.db.models.query import QuerySet
 logger = logging.getLogger("paperless.serializers")
@@ -1019,32 +1014,6 @@ class NotesSerializer(serializers.ModelSerializer):
         return ret
-def _get_viewable_duplicates(
-    document: Document,
-    user: User | None,
-) -> QuerySet[Document]:
-    checksums = {document.checksum}
-    if document.archive_checksum:
-        checksums.add(document.archive_checksum)
-    duplicates = Document.global_objects.filter(
-        Q(checksum__in=checksums) | Q(archive_checksum__in=checksums),
-    ).exclude(pk=document.pk)
-    duplicates = duplicates.order_by("-created")
-    allowed = get_objects_for_user_owner_aware(
-        user,
-        "documents.view_document",
-        Document,
-        include_deleted=True,
-    )
-    return duplicates.filter(id__in=allowed)
-class DuplicateDocumentSummarySerializer(serializers.Serializer):
-    id = serializers.IntegerField()
-    title = serializers.CharField()
-    deleted_at = serializers.DateTimeField(allow_null=True)
 @extend_schema_serializer(
     deprecate_fields=["created_date"],
 )
@@ -1062,7 +1031,6 @@ class DocumentSerializer(
     archived_file_name = SerializerMethodField()
     created_date = serializers.DateField(required=False)
     page_count = SerializerMethodField()
-    duplicate_documents = SerializerMethodField()
     notes = NotesSerializer(many=True, required=False, read_only=True)
@@ -1088,16 +1056,6 @@ class DocumentSerializer(
     def get_page_count(self, obj) -> int | None:
         return obj.page_count
-    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
-    def get_duplicate_documents(self, obj):
-        view = self.context.get("view")
-        if view and getattr(view, "action", None) != "retrieve":
-            return []
-        request = self.context.get("request")
-        user = request.user if request else None
-        duplicates = _get_viewable_duplicates(obj, user)
-        return list(duplicates.values("id", "title", "deleted_at"))
     def get_original_file_name(self, obj) -> str | None:
         return obj.original_filename
@@ -1275,7 +1233,6 @@ class DocumentSerializer(
             "archive_serial_number",
             "original_file_name",
             "archived_file_name",
-            "duplicate_documents",
             "owner",
             "permissions",
             "user_can_change",
@@ -2137,12 +2094,10 @@ class TasksViewSerializer(OwnedObjectSerializer):
             "result",
             "acknowledged",
             "related_document",
-            "duplicate_documents",
             "owner",
         )
     related_document = serializers.SerializerMethodField()
-    duplicate_documents = serializers.SerializerMethodField()
     created_doc_re = re.compile(r"New document id (\d+) created")
     duplicate_doc_re = re.compile(r"It is a duplicate of .* \(#(\d+)\)")
@@ -2167,17 +2122,6 @@ class TasksViewSerializer(OwnedObjectSerializer):
         return result
-    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
-    def get_duplicate_documents(self, obj):
-        related_document = self.get_related_document(obj)
-        request = self.context.get("request")
-        user = request.user if request else None
-        document = Document.global_objects.filter(pk=related_document).first()
-        if not related_document or not user or not document:
-            return []
-        duplicates = _get_viewable_duplicates(document, user)
-        return list(duplicates.values("id", "title", "deleted_at"))
 class RunTaskViewSerializer(serializers.Serializer):
     task_name = serializers.ChoiceField(

View File

@@ -19,6 +19,7 @@ from django.db import DatabaseError
 from django.db import close_old_connections
 from django.db import connections
 from django.db import models
+from django.db import transaction
 from django.db.models import Q
 from django.dispatch import receiver
 from django.utils import timezone
@@ -453,62 +454,94 @@ def update_filename_and_move_files(
         # This will in turn cause this logic to move the file where it belongs.
         return
-    with FileLock(settings.MEDIA_LOCK):
-        try:
-            # If this was waiting for the lock, the filename or archive_filename
-            # of this document may have been updated. This happens if multiple updates
-            # get queued from the UI for the same document
-            # So freshen up the data before doing anything
-            instance.refresh_from_db()
-            old_filename = instance.filename
-            old_source_path = instance.source_path
-            candidate_filename = generate_filename(instance)
-            candidate_source_path = (
-                settings.ORIGINALS_DIR / candidate_filename
-            ).resolve()
-            if candidate_filename == Path(old_filename):
-                new_filename = Path(old_filename)
-            elif (
-                candidate_source_path.exists()
-                and candidate_source_path != old_source_path
-            ):
-                # Only fall back to unique search when there is an actual conflict
-                new_filename = generate_unique_filename(instance)
-            else:
-                new_filename = candidate_filename
-            # Need to convert to string to be able to save it to the db
-            instance.filename = str(new_filename)
-            move_original = old_filename != instance.filename
-            old_archive_filename = instance.archive_filename
-            old_archive_path = instance.archive_path
-            if instance.has_archive_version:
-                archive_candidate = generate_filename(instance, archive_filename=True)
-                archive_candidate_path = (
-                    settings.ARCHIVE_DIR / archive_candidate
-                ).resolve()
-                if archive_candidate == Path(old_archive_filename):
-                    new_archive_filename = Path(old_archive_filename)
-                elif (
-                    archive_candidate_path.exists()
-                    and archive_candidate_path != old_archive_path
-                ):
-                    new_archive_filename = generate_unique_filename(
-                        instance,
-                        archive_filename=True,
-                    )
-                else:
-                    new_archive_filename = archive_candidate
-                instance.archive_filename = str(new_archive_filename)
-                move_archive = old_archive_filename != instance.archive_filename
-            else:
-                move_archive = False
+    move_original = False
+    move_archive = False
+    old_filename = None
+    old_archive_filename = None
+    old_source_path = None
+    old_archive_path = None
+    try:
+        with transaction.atomic():
+            Document.global_objects.select_for_update().get(pk=instance.pk)
+            with FileLock(settings.MEDIA_LOCK):
+                # If this was waiting for the lock, the filename or archive_filename
+                # of this document may have been updated. This happens if multiple updates
+                # get queued from the UI for the same document
+                # So freshen up the data before doing anything
+                instance.refresh_from_db()
+                old_filename = instance.filename
+                old_source_path = instance.source_path
+                candidate_filename = generate_filename(instance)
+                candidate_source_path = (
+                    settings.ORIGINALS_DIR / candidate_filename
+                ).resolve()
+                if candidate_filename == Path(old_filename):
+                    new_filename = Path(old_filename)
+                elif (
+                    candidate_source_path.exists()
+                    and candidate_source_path != old_source_path
+                ):
+                    # Only fall back to unique search when there is an actual conflict
+                    new_filename = generate_unique_filename(instance)
+                else:
+                    new_filename = candidate_filename
+                # Need to convert to string to be able to save it to the db
+                instance.filename = str(new_filename)
+                move_original = old_filename != instance.filename
+                old_archive_filename = instance.archive_filename
+                old_archive_path = instance.archive_path
+                if instance.has_archive_version:
+                    archive_candidate = generate_filename(
+                        instance,
+                        archive_filename=True,
+                    )
+                    archive_candidate_path = (
+                        settings.ARCHIVE_DIR / archive_candidate
+                    ).resolve()
+                    if archive_candidate == Path(old_archive_filename):
+                        new_archive_filename = Path(old_archive_filename)
+                    elif (
+                        archive_candidate_path.exists()
+                        and archive_candidate_path != old_archive_path
+                    ):
+                        new_archive_filename = generate_unique_filename(
+                            instance,
+                            archive_filename=True,
+                        )
+                    else:
+                        new_archive_filename = archive_candidate
+                    instance.archive_filename = str(new_archive_filename)
+                    move_archive = old_archive_filename != instance.archive_filename
+                else:
+                    move_archive = False
+                if move_original:
+                    validate_move(
+                        instance,
+                        old_source_path,
+                        instance.source_path,
+                        settings.ORIGINALS_DIR,
+                    )
+                    create_source_path_directory(instance.source_path)
+                    shutil.move(old_source_path, instance.source_path)
+                if move_archive:
+                    validate_move(
+                        instance,
+                        old_archive_path,
+                        instance.archive_path,
+                        settings.ARCHIVE_DIR,
+                    )
+                    create_source_path_directory(instance.archive_path)
+                    shutil.move(old_archive_path, instance.archive_path)
         if not move_original and not move_archive:
             # Just update modified. Also, don't save() here to prevent infinite recursion.
@@ -517,26 +550,6 @@ def update_filename_and_move_files(
             )
             return

-        if move_original:
-            validate_move(
-                instance,
-                old_source_path,
-                instance.source_path,
-                settings.ORIGINALS_DIR,
-            )
-            create_source_path_directory(instance.source_path)
-            shutil.move(old_source_path, instance.source_path)
-
-        if move_archive:
-            validate_move(
-                instance,
-                old_archive_path,
-                instance.archive_path,
-                settings.ARCHIVE_DIR,
-            )
-            create_source_path_directory(instance.archive_path)
-            shutil.move(old_archive_path, instance.archive_path)
-
         # Don't save() here to prevent infinite recursion.
         Document.global_objects.filter(pk=instance.pk).update(
             filename=instance.filename,
@@ -546,22 +559,24 @@ def update_filename_and_move_files(
         # Clear any caching for this document. Slightly overkill, but not terrible
         clear_document_caches(instance.pk)

     except (OSError, DatabaseError, CannotMoveFilesException) as e:
         logger.warning(f"Exception during file handling: {e}")
         # This happens when either:
         # - moving the files failed due to file system errors
         # - saving to the database failed due to database errors
         # In both cases, we need to revert to the original state.

-        if move_original or move_archive:
-            # Try to move files to their original location.
-            try:
-                if move_original and instance.source_path.is_file():
-                    logger.info("Restoring previous original path")
-                    shutil.move(instance.source_path, old_source_path)
-                if move_archive and instance.archive_path.is_file():
-                    logger.info("Restoring previous archive path")
-                    shutil.move(instance.archive_path, old_archive_path)
-            except Exception:
-                # This is fine, since:
+        # Try to move files to their original location.
+        try:
+            with FileLock(settings.MEDIA_LOCK):
+                if move_original and instance.source_path.is_file():
+                    logger.info("Restoring previous original path")
+                    shutil.move(instance.source_path, old_source_path)
+                if move_archive and instance.archive_path.is_file():
+                    logger.info("Restoring previous archive path")
+                    shutil.move(instance.archive_path, old_archive_path)
+        except Exception:
+            # This is fine, since:
@@ -574,23 +589,29 @@ def update_filename_and_move_files(
             # anyway.
             pass

         # restore old values on the instance
-        instance.filename = old_filename
-        instance.archive_filename = old_archive_filename
+        if old_filename is not None:
+            instance.filename = old_filename
+        if old_archive_filename is not None:
+            instance.archive_filename = old_archive_filename

     # finally, remove any empty sub folders. This will do nothing if
     # something has failed above.
-    if not old_source_path.is_file():
+    if old_source_path and not old_source_path.is_file():
         delete_empty_directories(
             Path(old_source_path).parent,
             root=settings.ORIGINALS_DIR,
         )

-    if instance.has_archive_version and not old_archive_path.is_file():
-        delete_empty_directories(
-            Path(old_archive_path).parent,
-            root=settings.ARCHIVE_DIR,
-        )
+    if (
+        instance.has_archive_version
+        and old_archive_path
+        and not old_archive_path.is_file()
+    ):
+        delete_empty_directories(
+            Path(old_archive_path).parent,
+            root=settings.ARCHIVE_DIR,
+        )


 @shared_task
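The reworked handler above takes a database row lock (`select_for_update` inside `transaction.atomic()`) before the media `FileLock`, so queued renames for the same document are serialized and the file move plus database update succeed or are rolled back together. A minimal sketch of that pattern, with `Document`, `generate_filename`, and the lock path as hypothetical stand-ins for the real Paperless-ngx pieces:

```python
# Sketch only: pairs a row lock (select_for_update) with a file lock so
# concurrent rename requests for the same document queue up one at a time.
import shutil
from pathlib import Path

from django.db import transaction
from filelock import FileLock


def generate_filename(doc) -> str:
    # Hypothetical naming rule; the real one is driven by FILENAME_FORMAT.
    return f"{doc.title}.pdf"


def rename_document(document_pk: int, media_lock: str) -> None:
    from myapp.models import Document  # hypothetical app and model

    with transaction.atomic():
        # Row lock: a second worker blocks here until this transaction ends.
        doc = Document.objects.select_for_update().get(pk=document_pk)
        with FileLock(media_lock):
            old_path = Path(doc.source_path)  # hypothetical field
            new_path = old_path.with_name(generate_filename(doc))
            if new_path == old_path:
                return
            new_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.move(old_path, new_path)
            try:
                doc.filename = new_path.name
                doc.save(update_fields=["filename"])
            except Exception:
                # Database write failed: restore the file before re-raising.
                shutil.move(new_path, old_path)
                raise
```

Ordering matters here: taking the row lock before touching the filesystem keeps a second worker from observing the media directory mid-move.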

View File

@@ -131,10 +131,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIn("content", results_full[0]) self.assertIn("content", results_full[0])
self.assertIn("id", results_full[0]) self.assertIn("id", results_full[0])
# Content length is used internally for performance reasons.
# No need to expose this field.
self.assertNotIn("content_length", results_full[0])
response = self.client.get("/api/documents/?fields=id", format="json") response = self.client.get("/api/documents/?fields=id", format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"] results = response.data["results"]

View File

@@ -7,7 +7,6 @@ from django.contrib.auth.models import User
 from rest_framework import status
 from rest_framework.test import APITestCase

-from documents.models import Document
 from documents.models import PaperlessTask
 from documents.tests.utils import DirectoriesMixin
 from documents.views import TasksViewSet
@@ -259,7 +258,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
             status=celery.states.FAILURE,
-            result="test.pdf: Unexpected error during ingestion.",
+            result="test.pdf: Not consuming test.pdf: It is a duplicate.",
         )

         response = self.client.get(self.ENDPOINT)
@@ -271,7 +270,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(
             returned_data["result"],
-            "test.pdf: Unexpected error during ingestion.",
+            "test.pdf: Not consuming test.pdf: It is a duplicate.",
         )

     def test_task_name_webui(self):
@@ -326,34 +325,20 @@ class TestTasks(DirectoriesMixin, APITestCase):
self.assertEqual(returned_data["task_file_name"], "anothertest.pdf") self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")
def test_task_result_duplicate_warning_includes_count(self): def test_task_result_failed_duplicate_includes_related_doc(self):
""" """
GIVEN: GIVEN:
- A celery task succeeds, but a duplicate exists - A celery task failed with a duplicate error
WHEN: WHEN:
- API call is made to get tasks - API call is made to get tasks
THEN: THEN:
- The returned data includes duplicate warning metadata - The returned data includes a related document link
""" """
checksum = "duplicate-checksum"
Document.objects.create(
title="Existing",
content="",
mime_type="application/pdf",
checksum=checksum,
)
created_doc = Document.objects.create(
title="Created",
content="",
mime_type="application/pdf",
checksum=checksum,
archive_checksum="another-checksum",
)
PaperlessTask.objects.create( PaperlessTask.objects.create(
task_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf", task_file_name="task_one.pdf",
status=celery.states.SUCCESS, status=celery.states.FAILURE,
result=f"Success. New document id {created_doc.pk} created", result="Not consuming task_one.pdf: It is a duplicate of task_one_existing.pdf (#1234).",
) )
response = self.client.get(self.ENDPOINT) response = self.client.get(self.ENDPOINT)
@@ -363,7 +348,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         returned_data = response.data[0]

-        self.assertEqual(returned_data["related_document"], str(created_doc.pk))
+        self.assertEqual(returned_data["related_document"], "1234")

     def test_run_train_classifier_task(self):
         """

View File

@@ -485,21 +485,21 @@ class TestConsumer(
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()

-        with self.get_consumer(self.get_test_file()) as consumer:
-            consumer.run()
+        with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
+            with self.get_consumer(self.get_test_file()) as consumer:
+                consumer.run()

-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self._assert_first_last_send_progress(last_status="FAILED")

     def testDuplicates2(self):
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()

-        with self.get_consumer(self.get_test_archive_file()) as consumer:
-            consumer.run()
+        with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
+            with self.get_consumer(self.get_test_archive_file()) as consumer:
+                consumer.run()

-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self._assert_first_last_send_progress(last_status="FAILED")

     def testDuplicates3(self):
         with self.get_consumer(self.get_test_archive_file()) as consumer:
@@ -513,10 +513,9 @@ class TestConsumer(
         Document.objects.all().delete()

-        with self.get_consumer(self.get_test_file()) as consumer:
-            consumer.run()
-
-        self.assertEqual(Document.objects.count(), 1)
+        with self.assertRaisesMessage(ConsumerError, "document is in the trash"):
+            with self.get_consumer(self.get_test_file()) as consumer:
+                consumer.run()

     def testAsnExists(self):
         with self.get_consumer(
@@ -719,45 +718,12 @@ class TestConsumer(
         dst = self.get_test_file()
         self.assertIsFile(dst)

-        expected_message = (
-            f"{dst.name}: Not consuming {dst.name}: "
-            f"It is a duplicate of {document.title} (#{document.pk})"
-        )
-
-        with self.assertRaisesMessage(ConsumerError, expected_message):
+        with self.assertRaises(ConsumerError):
             with self.get_consumer(dst) as consumer:
                 consumer.run()

         self.assertIsNotFile(dst)
-        self.assertEqual(Document.objects.count(), 1)
-        self._assert_first_last_send_progress(last_status=ProgressStatusOptions.FAILED)
-
-    @override_settings(CONSUMER_DELETE_DUPLICATES=True)
-    def test_delete_duplicate_in_trash(self):
-        dst = self.get_test_file()
-        with self.get_consumer(dst) as consumer:
-            consumer.run()
-
-        # Move the existing document to trash
-        document = Document.objects.first()
-        document.delete()
-
-        dst = self.get_test_file()
-        self.assertIsFile(dst)
-
-        expected_message = (
-            f"{dst.name}: Not consuming {dst.name}: "
-            f"It is a duplicate of {document.title} (#{document.pk})"
-            f" Note: existing document is in the trash."
-        )
-
-        with self.assertRaisesMessage(ConsumerError, expected_message):
-            with self.get_consumer(dst) as consumer:
-                consumer.run()
-
-        self.assertIsNotFile(dst)
-        self.assertEqual(Document.global_objects.count(), 1)
-        self.assertEqual(Document.objects.count(), 0)
+        self._assert_first_last_send_progress(last_status="FAILED")

     @override_settings(CONSUMER_DELETE_DUPLICATES=False)
     def test_no_delete_duplicate(self):
@@ -777,12 +743,15 @@ class TestConsumer(
         dst = self.get_test_file()
         self.assertIsFile(dst)

-        with self.get_consumer(dst) as consumer:
-            consumer.run()
+        with self.assertRaisesRegex(
+            ConsumerError,
+            r"sample\.pdf: Not consuming sample\.pdf: It is a duplicate of sample \(#\d+\)",
+        ):
+            with self.get_consumer(dst) as consumer:
+                consumer.run()

-        self.assertIsNotFile(dst)
-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self.assertIsFile(dst)
+        self._assert_first_last_send_progress(last_status="FAILED")

     @override_settings(FILENAME_FORMAT="{title}")
     @mock.patch("documents.parsers.document_consumer_declaration.send")

View File

@@ -224,18 +224,17 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         THEN:
             - The collated file gets put into foo/bar
         """
-        # TODO: parameterize this instead
         for path in [
             Path("foo") / "bar" / "double-sided",
             Path("double-sided") / "foo" / "bar",
         ]:
-            with self.subTest(path=str(path)):
+            with self.subTest(path=path):
                 # Ensure we get fresh directories for each run
                 self.tearDown()
                 self.setUp()
                 self.create_staging_file()
-                self.consume_file("double-sided-odd.pdf", Path(path) / "foo.pdf")
+                self.consume_file("double-sided-odd.pdf", path / "foo.pdf")
                 self.assertIsFile(
                     self.dirs.consumption_dir / "foo" / "bar" / "foo-collated.pdf",
                 )

View File

@@ -180,7 +180,7 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
         (
             "added:this year",
             datetime(2025, 7, 15, 12, 0, 0, tzinfo=timezone.utc),
-            ("added:[20250101", "TO 20251231"),
+            ("added:[20250101", "TO 20250715"),
         ),
         (
             "added:previous year",

View File

@@ -241,10 +241,6 @@ class TestExportImport(
                 checksum = hashlib.md5(f.read()).hexdigest()
             self.assertEqual(checksum, element["fields"]["checksum"])

-            # Generated field "content_length" should not be exported,
-            # it is automatically computed during import.
-            self.assertNotIn("content_length", element["fields"])
-
             if document_exporter.EXPORTER_ARCHIVE_NAME in element:
                 fname = (
                     self.target / element[document_exporter.EXPORTER_ARCHIVE_NAME]

View File

@@ -35,6 +35,7 @@ from django.db.models import Model
 from django.db.models import Q
 from django.db.models import Sum
 from django.db.models import When
+from django.db.models.functions import Length
 from django.db.models.functions import Lower
 from django.db.models.manager import Manager
 from django.http import FileResponse
@@ -478,11 +479,11 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
         if descendant_pks:
             filter_q = self.get_document_count_filter()
-            children_source = list(
+            children_source = (
                 Tag.objects.filter(pk__in=descendant_pks | {t.pk for t in all_tags})
                 .select_related("owner")
                 .annotate(document_count=Count("documents", filter=filter_q))
-                .order_by(*ordering),
+                .order_by(*ordering)
             )
         else:
             children_source = all_tags
@@ -494,11 +495,7 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
         page = self.paginate_queryset(queryset)
         serializer = self.get_serializer(page, many=True)
-        response = self.get_paginated_response(serializer.data)
-        if descendant_pks:
-            # Include children in the "all" field, if needed
-            response.data["all"] = [tag.pk for tag in children_source]
-        return response
+        return self.get_paginated_response(serializer.data)

     def perform_update(self, serializer):
         old_parent = self.get_object().get_parent()
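The first hunk keeps `children_source` as a lazy queryset instead of materializing it with `list(...)`, so pagination only evaluates the requested page; the second drops the extra `all` key from the paginated payload. Roughly, under a hypothetical minimal tag model:

```python
# Sketch: annotate per-tag document counts lazily; models are hypothetical.
from django.db.models import Count, Q


def tags_with_counts(tag_qs, filter_q: Q):
    # No list(): the query runs only when a page is actually sliced.
    return (
        tag_qs.select_related("owner")
        .annotate(document_count=Count("documents", filter=filter_q))
        .order_by("name")  # assumed ordering for illustration
    )
```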
@@ -2325,19 +2322,23 @@ class StatisticsView(GenericAPIView):
         user = request.user if request.user is not None else None

         documents = (
-            Document.objects.all()
-            if user is None
-            else get_objects_for_user_owner_aware(
-                user,
-                "documents.view_document",
-                Document,
+            (
+                Document.objects.all()
+                if user is None
+                else get_objects_for_user_owner_aware(
+                    user,
+                    "documents.view_document",
+                    Document,
+                )
             )
+            .only("mime_type", "content")
+            .prefetch_related("tags")
         )
         tags = (
             Tag.objects.all()
             if user is None
             else get_objects_for_user_owner_aware(user, "documents.view_tag", Tag)
-        ).only("id", "is_inbox_tag")
+        )
         correspondent_count = (
             Correspondent.objects.count()
             if user is None
@@ -2366,33 +2367,31 @@ class StatisticsView(GenericAPIView):
             ).count()
         )

-        inbox_tag_pks = list(
-            tags.filter(is_inbox_tag=True).values_list("pk", flat=True),
-        )
-
-        documents_inbox = (
-            documents.filter(tags__id__in=inbox_tag_pks).values("id").distinct().count()
-            if inbox_tag_pks
-            else None
-        )
-
-        # Single SQL request for document stats and mime type counts
-        mime_type_stats = list(
-            documents.values("mime_type")
-            .annotate(
-                mime_type_count=Count("id"),
-                mime_type_chars=Sum("content_length"),
-            )
-            .order_by("-mime_type_count"),
-        )
-
-        # Calculate totals from grouped results
-        documents_total = sum(row["mime_type_count"] for row in mime_type_stats)
-        character_count = sum(row["mime_type_chars"] or 0 for row in mime_type_stats)
-        document_file_type_counts = [
-            {"mime_type": row["mime_type"], "mime_type_count": row["mime_type_count"]}
-            for row in mime_type_stats
-        ]
+        documents_total = documents.count()
+
+        inbox_tags = tags.filter(is_inbox_tag=True)
+
+        documents_inbox = (
+            documents.filter(tags__id__in=inbox_tags).distinct().count()
+            if inbox_tags.exists()
+            else None
+        )
+
+        document_file_type_counts = (
+            documents.values("mime_type")
+            .annotate(mime_type_count=Count("mime_type"))
+            .order_by("-mime_type_count")
+            if documents_total > 0
+            else []
+        )
+
+        character_count = (
+            documents.annotate(
+                characters=Length("content"),
+            )
+            .aggregate(Sum("characters"))
+            .get("characters__sum")
+        )

         current_asn = Document.objects.aggregate(
             Max("archive_serial_number", default=0),
@@ -2405,9 +2404,11 @@ class StatisticsView(GenericAPIView):
"documents_total": documents_total, "documents_total": documents_total,
"documents_inbox": documents_inbox, "documents_inbox": documents_inbox,
"inbox_tag": ( "inbox_tag": (
inbox_tag_pks[0] if inbox_tag_pks else None inbox_tags.first().pk if inbox_tags.exists() else None
), # backwards compatibility ), # backwards compatibility
"inbox_tags": (inbox_tag_pks if inbox_tag_pks else None), "inbox_tags": (
[tag.pk for tag in inbox_tags] if inbox_tags.exists() else None
),
"document_file_type_counts": document_file_type_counts, "document_file_type_counts": document_file_type_counts,
"character_count": character_count, "character_count": character_count,
"tag_count": len(tags), "tag_count": len(tags),

File diff suppressed because it is too large

View File

@@ -3,15 +3,12 @@ from urllib.parse import quote
 from allauth.account.adapter import DefaultAccountAdapter
 from allauth.core import context
-from allauth.headless.tokens.sessions import SessionTokenStrategy
 from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
 from django.conf import settings
 from django.contrib.auth.models import Group
 from django.contrib.auth.models import User
 from django.forms import ValidationError
-from django.http import HttpRequest
 from django.urls import reverse
-from rest_framework.authtoken.models import Token

 from documents.models import Document
 from paperless.signals import handle_social_account_updated
@@ -162,11 +159,3 @@ class CustomSocialAccountAdapter(DefaultSocialAccountAdapter):
             exception,
             extra_context,
         )
-
-
-class DrfTokenStrategy(SessionTokenStrategy):
-    def create_access_token(self, request: HttpRequest) -> str | None:
-        if not request.user.is_authenticated:
-            return None
-        token, _ = Token.objects.get_or_create(user=request.user)
-        return token.key

View File

@@ -345,7 +345,6 @@ INSTALLED_APPS = [
"allauth.account", "allauth.account",
"allauth.socialaccount", "allauth.socialaccount",
"allauth.mfa", "allauth.mfa",
"allauth.headless",
"drf_spectacular", "drf_spectacular",
"drf_spectacular_sidecar", "drf_spectacular_sidecar",
"treenode", "treenode",
@@ -540,12 +539,6 @@ SOCIALACCOUNT_PROVIDERS = json.loads(
 )
 SOCIAL_ACCOUNT_DEFAULT_GROUPS = __get_list("PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS")
 SOCIAL_ACCOUNT_SYNC_GROUPS = __get_boolean("PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS")
-SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM: Final[str] = os.getenv(
-    "PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM",
-    "groups",
-)
-
-HEADLESS_TOKEN_STRATEGY = "paperless.adapter.DrfTokenStrategy"

 MFA_TOTP_ISSUER = "Paperless-ngx"

View File

@@ -40,19 +40,15 @@ def handle_social_account_updated(sender, request, sociallogin, **kwargs):
     extra_data = sociallogin.account.extra_data or {}

     social_account_groups = extra_data.get(
-        settings.SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM,
+        "groups",
         [],
     )  # pre-allauth 65.11.0 structure
     if not social_account_groups:
         # allauth 65.11.0+ nests claims under `userinfo`/`id_token`
         social_account_groups = (
-            extra_data.get("userinfo", {}).get(
-                settings.SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM,
-            )
-            or extra_data.get("id_token", {}).get(
-                settings.SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM,
-            )
+            extra_data.get("userinfo", {}).get("groups")
+            or extra_data.get("id_token", {}).get("groups")
             or []
         )
     if settings.SOCIAL_ACCOUNT_SYNC_GROUPS and social_account_groups is not None:
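After the revert, the claim name is the fixed string `"groups"` again, checked first at the top level of `extra_data` and then under the nested allauth 65.11.0+ structures. The same lookup order as a plain-dict sketch:

```python
# Plain-dict sketch of the claim lookup; mirrors the handler above but is
# not the project's actual function.
def extract_groups(extra_data: dict) -> list:
    groups = extra_data.get("groups", [])  # pre-allauth 65.11.0 structure
    if not groups:
        # allauth 65.11.0+ nests claims under `userinfo`/`id_token`
        groups = (
            extra_data.get("userinfo", {}).get("groups")
            or extra_data.get("id_token", {}).get("groups")
            or []
        )
    return groups


assert extract_groups({"userinfo": {"groups": ["admins"]}}) == ["admins"]
```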

View File

@@ -4,7 +4,6 @@ from allauth.account.adapter import get_adapter
 from allauth.core import context
 from allauth.socialaccount.adapter import get_adapter as get_social_adapter
 from django.conf import settings
-from django.contrib.auth.models import AnonymousUser
 from django.contrib.auth.models import Group
 from django.contrib.auth.models import User
 from django.forms import ValidationError
@@ -12,9 +11,6 @@ from django.http import HttpRequest
 from django.test import TestCase
 from django.test import override_settings
 from django.urls import reverse
-from rest_framework.authtoken.models import Token
-
-from paperless.adapter import DrfTokenStrategy


 class TestCustomAccountAdapter(TestCase):
@@ -185,74 +181,3 @@ class TestCustomSocialAccountAdapter(TestCase):
         self.assertTrue(
             any("Test authentication error" in message for message in log_cm.output),
         )
-
-
-class TestDrfTokenStrategy(TestCase):
-    def test_create_access_token_creates_new_token(self):
-        """
-        GIVEN:
-            - A user with no existing DRF token
-        WHEN:
-            - create_access_token is called
-        THEN:
-            - A new token is created and its key is returned
-        """
-        user = User.objects.create_user("testuser")
-        request = HttpRequest()
-        request.user = user
-
-        strategy = DrfTokenStrategy()
-        token_key = strategy.create_access_token(request)
-
-        # Verify a token was created
-        self.assertIsNotNone(token_key)
-        self.assertTrue(Token.objects.filter(user=user).exists())
-
-        # Verify the returned key matches the created token
-        token = Token.objects.get(user=user)
-        self.assertEqual(token_key, token.key)
-
-    def test_create_access_token_returns_existing_token(self):
-        """
-        GIVEN:
-            - A user with an existing DRF token
-        WHEN:
-            - create_access_token is called again
-        THEN:
-            - The same token key is returned (no new token created)
-        """
-        user = User.objects.create_user("testuser")
-        existing_token = Token.objects.create(user=user)
-
-        request = HttpRequest()
-        request.user = user
-
-        strategy = DrfTokenStrategy()
-        token_key = strategy.create_access_token(request)
-
-        # Verify the existing token key is returned
-        self.assertEqual(token_key, existing_token.key)
-
-        # Verify only one token exists (no duplicate created)
-        self.assertEqual(Token.objects.filter(user=user).count(), 1)
-
-    def test_create_access_token_returns_none_for_unauthenticated_user(self):
-        """
-        GIVEN:
-            - An unauthenticated request
-        WHEN:
-            - create_access_token is called
-        THEN:
-            - None is returned and no token is created
-        """
-        request = HttpRequest()
-        request.user = AnonymousUser()
-
-        strategy = DrfTokenStrategy()
-        token_key = strategy.create_access_token(request)
-
-        self.assertIsNone(token_key)
-        self.assertEqual(Token.objects.count(), 0)

View File

@@ -228,7 +228,6 @@ urlpatterns = [
             ],
         ),
     ),
-    re_path("^auth/headless/", include("allauth.headless.urls")),
     re_path(
         "^$",  # Redirect to the API swagger view
         RedirectView.as_view(url="schema/view/"),

uv.lock (generated)
View File

@@ -3152,15 +3152,15 @@ dev = [
{ name = "mkdocs-material", specifier = "~=9.7.0" }, { name = "mkdocs-material", specifier = "~=9.7.0" },
{ name = "pre-commit", specifier = "~=4.5.1" }, { name = "pre-commit", specifier = "~=4.5.1" },
{ name = "pre-commit-uv", specifier = "~=4.2.0" }, { name = "pre-commit-uv", specifier = "~=4.2.0" },
{ name = "pytest", specifier = "~=9.0.0" }, { name = "pytest", specifier = "~=8.4.1" },
{ name = "pytest-cov", specifier = "~=7.0.0" }, { name = "pytest-cov", specifier = "~=7.0.0" },
{ name = "pytest-django", specifier = "~=4.11.1" }, { name = "pytest-django", specifier = "~=4.11.1" },
{ name = "pytest-env", specifier = "~=1.2.0" }, { name = "pytest-env" },
{ name = "pytest-httpx" }, { name = "pytest-httpx" },
{ name = "pytest-mock", specifier = "~=3.15.1" }, { name = "pytest-mock" },
{ name = "pytest-rerunfailures", specifier = "~=16.1" }, { name = "pytest-rerunfailures" },
{ name = "pytest-sugar" }, { name = "pytest-sugar" },
{ name = "pytest-xdist", specifier = "~=3.8.0" }, { name = "pytest-xdist" },
{ name = "ruff", specifier = "~=0.14.0" }, { name = "ruff", specifier = "~=0.14.0" },
] ]
docs = [ docs = [
@@ -3176,15 +3176,15 @@ testing = [
{ name = "daphne" }, { name = "daphne" },
{ name = "factory-boy", specifier = "~=3.3.1" }, { name = "factory-boy", specifier = "~=3.3.1" },
{ name = "imagehash" }, { name = "imagehash" },
{ name = "pytest", specifier = "~=9.0.0" }, { name = "pytest", specifier = "~=8.4.1" },
{ name = "pytest-cov", specifier = "~=7.0.0" }, { name = "pytest-cov", specifier = "~=7.0.0" },
{ name = "pytest-django", specifier = "~=4.11.1" }, { name = "pytest-django", specifier = "~=4.11.1" },
{ name = "pytest-env", specifier = "~=1.2.0" }, { name = "pytest-env" },
{ name = "pytest-httpx" }, { name = "pytest-httpx" },
{ name = "pytest-mock", specifier = "~=3.15.1" }, { name = "pytest-mock" },
{ name = "pytest-rerunfailures", specifier = "~=16.1" }, { name = "pytest-rerunfailures" },
{ name = "pytest-sugar" }, { name = "pytest-sugar" },
{ name = "pytest-xdist", specifier = "~=3.8.0" }, { name = "pytest-xdist" },
] ]
typing = [ typing = [
{ name = "celery-types" }, { name = "celery-types" },
@@ -3841,7 +3841,7 @@ wheels = [
 [[package]]
 name = "pytest"
-version = "9.0.2"
+version = "8.4.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
@@ -3851,9 +3851,9 @@ dependencies = [
{ name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
] ]
[[package]] [[package]]
@@ -3897,15 +3897,15 @@ wheels = [
 [[package]]
 name = "pytest-httpx"
-version = "0.36.0"
+version = "0.35.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/5574834da9499066fa1a5ea9c336f94dba2eae02298d36dab192fcf95c86/pytest_httpx-0.36.0.tar.gz", hash = "sha256:9edb66a5fd4388ce3c343189bc67e7e1cb50b07c2e3fc83b97d511975e8a831b", size = 56793, upload-time = "2025-12-02T16:34:57.414Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1f/89/5b12b7b29e3d0af3a4b9c071ee92fa25a9017453731a38f08ba01c280f4c/pytest_httpx-0.35.0.tar.gz", hash = "sha256:d619ad5d2e67734abfbb224c3d9025d64795d4b8711116b1a13f72a251ae511f", size = 54146, upload-time = "2024-11-28T19:16:54.237Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/ed/026d467c1853dd83102411a78126b4842618e86c895f93528b0528c7a620/pytest_httpx-0.35.0-py3-none-any.whl", hash = "sha256:ee11a00ffcea94a5cbff47af2114d34c5b231c326902458deed73f9c459fd744", size = 19442, upload-time = "2024-11-28T19:16:52.787Z" },
 ]

 [[package]]
@@ -5108,13 +5108,13 @@ dependencies = [
{ name = "typing-extensions", marker = "sys_platform == 'darwin'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin'" },
] ]
wheels = [ wheels = [
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:bf1e68cfb935ae2046374ff02a7aa73dda70351b46342846f557055b3a540bf0" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a52952a8c90a422c14627ea99b9826b7557203b46b4d0772d3ca5c7699692425" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:287242dd1f830846098b5eca847f817aa5c6015ea57ab4c1287809efea7b77eb" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8924d10d36eac8fe0652a060a03fc2ae52980841850b9a1a2ddb0f27a4f181cd" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:bcee64ae7aa65876ceeae6dcaebe75109485b213528c74939602208a20706e3f" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:defadbeb055cfcf5def58f70937145aecbd7a4bc295238ded1d0e85ae2cf0e1d" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:886f84b181f766f53265ba0a1d503011e60f53fff9d569563ef94f24160e1072" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl" },
] ]
[[package]] [[package]]
@@ -5138,20 +5138,20 @@ dependencies = [
{ name = "typing-extensions", marker = "sys_platform == 'linux'" }, { name = "typing-extensions", marker = "sys_platform == 'linux'" },
] ]
wheels = [ wheels = [
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:10866c8a48c4aa5ae3f48538dc8a055b99c57d9c6af2bf5dd715374d9d6ddca3" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7210713b66943fdbfcc237b2e782871b649123ac5d29f548ce8c85be4223ab38" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0e611cfb16724e62252b67d31073bc5c490cb83e92ecdc1192762535e0e44487" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:3de2adb9b4443dc9210ef1f1b16da3647ace53553166d6360bbbd7edd6f16e4d" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3bf9b442a51a2948e41216a76d7ab00f0694cfcaaa51b6f9bcab57b7f89843e6" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7417d8c565f219d3455654cb431c6d892a3eb40246055e14d645422de13b9ea1" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:3e532e553b37ee859205a9b2d1c7977fd6922f53bbb1b9bfdd5bdc00d1a60ed4" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:39b3dff6d8fba240ae0d1bede4ca11c2531ae3b47329206512d99e17907ff74b" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:01b1884f724977a20c7da2f640f1c7b37f4a2c117a7f4a6c1c0424d14cb86322" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:031a597147fa81b1e6d79ccf1ad3ccc7fafa27941d6cf26ff5caaa384fb20e92" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:65010ab4aacce6c9a1ddfc935f986c003ca8638ded04348fd326c3e74346237c" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:88adf5157db5da1d54b1c9fe4a6c1d20ceef00e75d854e206a87dbf69e3037dc" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3ac2b8df2c55430e836dcda31940d47f1f5f94b8731057b6f20300ebea394dd9" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5b688445f928f13563b7418b17c57e97bf955ab559cf73cd8f2b961f8572dbb3" }, { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl" },
] ]
[[package]] [[package]]