Compare commits


12 Commits

Author SHA1 Message Date
shamoon e6da2a94d1 coverage, unify error tests 2025-09-15 01:23:04 -07:00
shamoon 8d0581177e Update serialisers.py 2025-09-15 00:19:48 -07:00
shamoon 3ef0b89e6c typo 2025-09-15 00:01:19 -07:00
shamoon 42463d68a0 simplify 2025-09-14 23:56:36 -07:00
shamoon 4c2e361762 Update views.py 2025-09-14 23:54:35 -07:00
shamoon 10c254e96d Maybe handle this backwards compat 2025-09-14 22:57:04 -07:00
shamoon 90b2f694c0 Update api.md 2025-09-14 22:22:53 -07:00
shamoon c02907ff37 Update serialisers.py 2025-09-14 22:21:54 -07:00
shamoon a2d89e7633 Actually lets unify it 2025-09-14 22:19:36 -07:00
shamoon 1d6cdf7b1d Use json str, normalize keys 2025-09-14 21:37:37 -07:00
shamoon 5a8b470673 Add negative test 2025-09-14 19:47:57 -07:00
shamoon b2f1c5a6af Enhancement: support custom field values with post document endpoint 2025-09-14 19:38:52 -07:00
65 changed files with 798 additions and 2290 deletions
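The headline change in this range is commit b2f1c5a6af, which lets callers supply custom field values directly on the document upload endpoint. A minimal usage sketch follows; the api.md diff is not shown below, so the payload shape is inferred from the commit messages ("Use json str, normalize keys") and the field id/value here are hypothetical:

```python
import requests

# Sketch only: the custom_fields key format is an assumption based on the
# commit messages, not on the (unshown) api.md diff.
with open("invoice.pdf", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/api/documents/post_document/",
        headers={"Authorization": "Token <api-token>"},
        files={"document": f},
        data={
            # Custom field values passed as a JSON string, keyed by field id
            # (per "Use json str, normalize keys"); ids are hypothetical.
            "custom_fields": '{"3": "2025-09-15"}',
        },
    )
resp.raise_for_status()
print(resp.text)  # the response identifies the queued consume task
```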


@@ -49,6 +49,7 @@ services:
- ./data:/usr/src/paperless/paperless-ngx/data
- ./media:/usr/src/paperless/paperless-ngx/media
- ./consume:/usr/src/paperless/paperless-ngx/consume
- ~/.gitconfig:/usr/src/paperless/.gitconfig:ro
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_TIKA_ENABLED: 1

.gitignore

@@ -107,6 +107,3 @@ celerybeat-schedule*
/.devcontainer/data/
/.devcontainer/media/
/.devcontainer/redisdata/
# ignore pnpm package store folder created when setting up the devcontainer
.pnpm-store/


@@ -4,7 +4,7 @@
repos:
# General hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
rev: v5.0.0
hooks:
- id: check-docstring-first
- id: check-json
@@ -49,7 +49,7 @@ repos:
- 'prettier-plugin-organize-imports@4.1.0'
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.0
rev: v0.12.2
hooks:
- id: ruff-check
- id: ruff-format
@@ -72,7 +72,7 @@ repos:
args:
- "--tab"
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: "v0.11.0.1"
rev: "v0.10.0.1"
hooks:
- id: shellcheck
- repo: https://github.com/google/yamlfmt


@@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.8.17-python3.12-bookworm-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.8.15-python3.12-bookworm-slim AS s6-overlay-base
WORKDIR /usr/src/s6


@@ -470,14 +470,9 @@ To get started:
2. VS Code will prompt you with "Reopen in container". Do so and wait for the environment to start.
3. In case your host operating system is Windows:
- The Source Control view in Visual Studio Code might show: "The detected Git repository is potentially unsafe as the folder is owned by someone other than the current user." Use "Manage Unsafe Repositories" to fix this.
- Git might have detected modifications for all files, because Windows is using CRLF line endings. Run `git checkout .` in the container's terminal to fix this issue.
4. Initialize the project by running the task **Project Setup: Run all Init Tasks**. This
3. Initialize the project by running the task **Project Setup: Run all Init Tasks**. This
will initialize the database tables and create a superuser. Then you can compile the front end
for production or run the frontend in debug mode.
5. The project is ready for debugging: start either the fullstack debug or the individual debug
4. The project is ready for debugging: start either the fullstack debug or the individual debug
processes. To spin up the project without debugging, run the task **Project Start: Run all Services**


@@ -92,16 +92,6 @@ and more. These areas allow you to view, add, edit, delete and manage permission
for these objects. You can also manage saved views, mail accounts, mail rules,
workflows and more from the management sections.
### Nested Tags
Paperless-ngx v2.19 introduces support for nested tags, allowing you to create a
hierarchy of tags, which may be useful for organizing your documents. Tags can
have a 'parent' tag, creating a tree-like structure, to a maximum depth of 5. When
a tag is added to a document, all of its parent tags are also added automatically;
similarly, when a tag is removed from a document, all of its child tags are
also removed. Additionally, assigning a parent to an existing tag will automatically
update all documents that have this tag assigned, adding the parent tag as well.
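
As an editor's illustration, the propagation rules above can be captured in a short, self-contained sketch. This is plain Python for exposition only, not the Paperless-ngx implementation (which lives in the Django models and tasks, e.g. `add_nested_tags` and `update_document_parent_tags` elsewhere in this diff); all names below are illustrative:

```python
from dataclasses import dataclass, field

MAX_NESTING_DEPTH = 5  # root = depth 1, per the docs above

@dataclass(eq=False)  # identity-based hashing so tags can live in sets
class Tag:
    name: str
    parent: "Tag | None" = None
    children: list["Tag"] = field(default_factory=list)

    def ancestors(self) -> list["Tag"]:
        chain, node = [], self.parent
        while node is not None:
            chain.append(node)
            node = node.parent
        return chain

    def descendants(self) -> list["Tag"]:
        out: list[Tag] = []
        for child in self.children:
            out.append(child)
            out.extend(child.descendants())
        return out

    def depth(self) -> int:
        return 1 + len(self.ancestors())

def add_tag(doc_tags: set[Tag], tag: Tag) -> None:
    """Adding a tag also adds all of its parent tags."""
    assert tag.depth() <= MAX_NESTING_DEPTH
    doc_tags.add(tag)
    doc_tags.update(tag.ancestors())

def remove_tag(doc_tags: set[Tag], tag: Tag) -> None:
    """Removing a tag also removes all of its child tags, recursively."""
    doc_tags.discard(tag)
    doc_tags.difference_update(tag.descendants())

# Example hierarchy: Finance > Invoices
finance = Tag("Finance")
invoices = Tag("Invoices", parent=finance)
finance.children.append(invoices)

doc: set[Tag] = set()
add_tag(doc, invoices)     # doc now holds Invoices *and* Finance
remove_tag(doc, finance)   # removing the parent clears its children too
assert doc == set()
```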
## Adding documents to Paperless-ngx
Once you've got Paperless setup, you need to start feeding documents


@@ -36,7 +36,6 @@ dependencies = [
"django-guardian~=3.1.2",
"django-multiselectfield~=1.0.1",
"django-soft-delete~=1.0.18",
"django-treenode>=0.23.2",
"djangorestframework~=3.16",
"djangorestframework-guardian~=0.4.0",
"drf-spectacular~=0.28",
@@ -51,7 +50,7 @@ dependencies = [
"jinja2~=3.1.5",
"langdetect~=1.0.9",
"nltk~=3.9.1",
"ocrmypdf~=16.11.0",
"ocrmypdf~=16.10.0",
"pathvalidate~=3.3.1",
"pdf2image~=1.17.0",
"psycopg-pool",
@@ -117,7 +116,7 @@ testing = [
lint = [
"pre-commit~=4.3.0",
"pre-commit-uv~=4.1.3",
"ruff~=0.13.0",
"ruff~=0.12.2",
]
typing = [

File diff suppressed because it is too large.


@@ -12,8 +12,6 @@
<pngx-input-color i18n-title title="Color" formControlName="color" [error]="error?.color"></pngx-input-color>
<pngx-input-select i18n-title title="Parent" formControlName="parent" [items]="tags" [allowNull]="true" [error]="error?.parent"></pngx-input-select>
<pngx-input-check i18n-title title="Inbox tag" formControlName="is_inbox_tag" i18n-hint hint="Inbox tags are automatically assigned to all consumed documents."></pngx-input-check>
<pngx-input-select i18n-title title="Matching algorithm" [items]="getMatchingAlgorithms()" formControlName="matching_algorithm"></pngx-input-select>
@if (patternRequired) {


@@ -35,16 +35,11 @@ import { TextComponent } from '../../input/text/text.component'
],
})
export class TagEditDialogComponent extends EditDialogComponent<Tag> {
tags: Tag[]
constructor() {
super()
this.service = inject(TagService)
this.userService = inject(UserService)
this.settingsService = inject(SettingsService)
this.service.listAll().subscribe((result) => {
this.tags = result.results
})
}
getCreateTitle() {
@@ -60,7 +55,6 @@ export class TagEditDialogComponent extends EditDialogComponent<Tag> {
name: new FormControl(''),
color: new FormControl(randomColor()),
is_inbox_tag: new FormControl(false),
parent: new FormControl(null),
matching_algorithm: new FormControl(DEFAULT_MATCHING_ALGORITHM),
match: new FormControl(''),
is_insensitive: new FormControl(true),


@@ -114,13 +114,6 @@ export class FilterableDropdownSelectionModel {
b.id == NEGATIVE_NULL_FILTER_VALUE)
) {
return 1
}
// Preserve hierarchical order when provided (e.g., Tags)
const ao = (a as any)['orderIndex']
const bo = (b as any)['orderIndex']
if (ao !== undefined && bo !== undefined) {
return ao - bo
} else if (
this.getNonTemporary(a.id) == ToggleableItemState.NotSelected &&
this.getNonTemporary(b.id) != ToggleableItemState.NotSelected


@@ -15,17 +15,12 @@
<i-bs width="1em" height="1em" name="x"></i-bs>
}
</div>
<div class="me-1 name-cell" [style.--depth]="isTag ? getDepth() + 1 : 1">
@if (isTag && getDepth() > 0) {
<div class="indicator"></div>
<div class="me-1">
@if (isTag) {
<pngx-tag [tag]="item" [clickable]="false"></pngx-tag>
} @else {
<small>{{item.name}}</small>
}
<div>
@if (isTag) {
<pngx-tag [tag]="item" [clickable]="false"></pngx-tag>
} @else {
<small>{{item.name}}</small>
}
</div>
</div>
@if (!hideCount) {
<div class="badge bg-light text-dark rounded-pill ms-auto me-1">{{currentCount}}</div>


@@ -2,19 +2,3 @@
min-width: 1em;
min-height: 1em;
}
.name-cell {
padding-left: calc(calc(var(--depth) - 2) * 1rem);
display: flex;
align-items: center;
.indicator {
display: inline-block;
width: .8rem;
height: .8rem;
border-left: 1px solid var(--bs-secondary);
border-bottom: 1px solid var(--bs-secondary);
margin-right: .25rem;
margin-left: .5rem;
}
}


@@ -1,7 +1,6 @@
import { Component, EventEmitter, Input, Output } from '@angular/core'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { MatchingModel } from 'src/app/data/matching-model'
import { Tag } from 'src/app/data/tag'
import { TagComponent } from '../../tag/tag.component'
export enum ToggleableItemState {
@@ -46,10 +45,6 @@ export class ToggleableDropdownButtonComponent {
return 'is_inbox_tag' in this.item
}
getDepth(): number {
return (this.item as Tag).depth ?? 0
}
get currentCount(): number {
return this.count ?? this.item.document_count
}


@@ -7,14 +7,13 @@
<div class="input-group flex-nowrap">
<ng-select #tagSelect name="tags" [items]="tags" bindLabel="name" bindValue="id" [(ngModel)]="value"
[disabled]="disabled"
[multiple]="multiple"
[multiple]="true"
[closeOnSelect]="false"
[clearSearchOnAdd]="true"
[hideSelected]="tags.length > 0"
[addTag]="allowCreate ? createTagRef : false"
addTagText="Add tag"
i18n-addTagText
(add)="onAdd($event)"
(change)="onChange(value)">
<ng-template ng-label-tmp let-item="item">
@@ -26,20 +25,9 @@
</button>
</ng-template>
<ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
<div class="tag-option-row d-flex align-items-center">
<div class="tag-wrap">
@if (item.id && tags) {
@if (getTag(item.id)?.parent) {
<i-bs name="list-nested" class="me-1"></i-bs>
<span class="hierarchy-reveal d-flex align-items-center">
<span class="parents d-flex align-items-center">
@for (p of getParentChain(item.id); track p.id) {
<span class="badge me-1" [style.background]="p.color" [style.color]="p.text_color">{{p.name}}</span>
<i-bs name="chevron-right" width=".8em" height=".8em" class="me-1"></i-bs>
}
</span>
</span>
}
<pngx-tag class="current-tag d-flex" [tag]="getTag(item.id)"></pngx-tag>
<pngx-tag class="me-2" [tag]="getTag(item.id)"></pngx-tag>
}
</div>
</ng-template>


@@ -20,33 +20,3 @@
}
}
}
// Dropdown hierarchy reveal for ng-select options
::ng-deep .ng-dropdown-panel .ng-option {
overflow-x: scroll;
.tag-option-row {
font-size: 1rem;
width: max-content;
}
.hierarchy-reveal {
overflow: hidden;
max-width: 0;
transition: max-width 200ms ease;
}
.parents .badge {
white-space: nowrap;
}
}
::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
max-width: 1000px;
}
::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-indicator,
::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
background: transparent;
}


@@ -177,59 +177,4 @@ describe('TagsComponent', () => {
component.onFilterDocuments()
expect(emitSpy).toHaveBeenCalledWith([tags[2]])
})
it('should remove all descendants from selection', () => {
const c: Tag = { id: 4, name: 'c' }
const b: Tag = { id: 3, name: 'b', children: [c] }
const a: Tag = { id: 2, name: 'a' }
const root: Tag = { id: 1, name: 'root', children: [a, b] }
const inputIDs = [2, 3, 4, 99]
const result = (component as any).removeChildren(inputIDs, root)
expect(result).toEqual([99])
})
it('should append all parents recursively', () => {
const root: Tag = { id: 1, name: 'root' }
const mid: Tag = { id: 2, name: 'mid', parent: 1 }
const leaf: Tag = { id: 3, name: 'leaf', parent: 2 }
component.tags = [root, mid, leaf]
component.value = []
component.onAdd(leaf)
expect(component.value).toEqual([2, 1])
// Calling onAdd on a root should not change value
component.onAdd(root)
expect(component.value).toEqual([2, 1])
})
it('should return ancestors from root to parent using getParentChain', () => {
const root: Tag = { id: 1, name: 'root' }
const mid: Tag = { id: 2, name: 'mid', parent: 1 }
const leaf: Tag = { id: 3, name: 'leaf', parent: 2 }
component.tags = [root, mid, leaf]
expect(component.getParentChain(3).map((t) => t.id)).toEqual([1, 2])
expect(component.getParentChain(2).map((t) => t.id)).toEqual([1])
expect(component.getParentChain(1).map((t) => t.id)).toEqual([])
// Non-existent id
expect(component.getParentChain(999).map((t) => t.id)).toEqual([])
})
it('should handle cyclic parents via guard in getParentChain', () => {
const one: Tag = { id: 1, name: 'one', parent: 2 }
const two: Tag = { id: 2, name: 'two', parent: 1 }
component.tags = [one, two]
const chain = component.getParentChain(1)
// Guard avoids infinite loop; chain contains both nodes once
expect(chain.map((t) => t.id)).toEqual([1, 2])
})
it('should stop when parent does not exist in getParentChain', () => {
const lone: Tag = { id: 5, name: 'lone', parent: 999 }
component.tags = [lone]
expect(component.getParentChain(5)).toEqual([])
})
})


@@ -100,9 +100,6 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
@Input()
horizontal: boolean = false
@Input()
multiple: boolean = true
@Output()
filterDocuments = new EventEmitter<Tag[]>()
@@ -127,40 +124,13 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
let index = this.value.indexOf(tagID)
if (index > -1) {
const tag = this.getTag(tagID)
// remove tag
let oldValue = this.value
oldValue.splice(index, 1)
// remove children
oldValue = this.removeChildren(oldValue, tag)
this.value = [...oldValue]
this.onChange(this.value)
}
}
private removeChildren(tagIDs: number[], tag: Tag) {
if (tag.children?.length) {
const childIDs = tag.children.map((child) => child.id)
tagIDs = tagIDs.filter((id) => !childIDs.includes(id))
for (const child of tag.children) {
tagIDs = this.removeChildren(tagIDs, child)
}
}
return tagIDs
}
public onAdd(tag: Tag) {
if (tag.parent) {
// add all parents recursively
const parent = this.getTag(tag.parent)
this.value = [...this.value, parent.id]
this.onAdd(parent)
}
}
createTag(name: string = null, add: boolean = false) {
var modal = this.modalService.open(TagEditDialogComponent, {
backdrop: 'static',
@@ -196,7 +166,6 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
addTag(id) {
this.value = [...this.value, id]
this.onAdd(this.getTag(id))
this.onChange(this.value)
}
@@ -211,20 +180,4 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
this.tags.filter((t) => this.value.includes(t.id))
)
}
getParentChain(id: number): Tag[] {
// Returns ancestors from root → immediate parent for a tag id
const chain: Tag[] = []
let current = this.getTag(id)
const guard = new Set<number>()
while (current?.parent) {
if (guard.has(current.parent)) break
guard.add(current.parent)
const parent = this.getTag(current.parent)
if (!parent) break
chain.unshift(parent)
current = parent
}
return chain
}
}


@@ -1,8 +1,4 @@
@if (tag) {
@if (showParents && tag.parent) {
<pngx-tag [tagID]="tag.parent" [clickable]="clickable" [linkTitle]="linkTitle"></pngx-tag>
&nbsp;&gt;&nbsp;
}
@if (!clickable) {
<span class="badge" [style.background]="tag.color" [style.color]="tag.text_color">{{tag.name}}</span>
}


@@ -50,7 +50,4 @@ export class TagComponent {
@Input()
clickable: boolean = false
@Input()
showParents: boolean = false
}


@@ -1,30 +1,7 @@
<pngx-page-header [(title)]="title">
@if (document?.versions?.length > 0) {
<div class="btn-group" ngbDropdown role="group">
<div class="btn-group" ngbDropdown role="group">
<button class="btn btn-sm btn-outline-secondary dropdown-toggle" ngbDropdownToggle [disabled]="!hasVersions">
<i-bs name="layers"></i-bs>
<span class="d-none d-lg-inline ps-1" i18n>Version</span>
</button>
<div class="dropdown-menu shadow" ngbDropdownMenu>
@for (vid of document.versions; track vid) {
<button ngbDropdownItem (click)="selectVersion(vid)">
<span i18n>Version</span> {{vid}}
@if (selectedVersionId === vid) { <span>&nbsp;</span> }
</button>
}
</div>
</div>
<input #versionFileInput type="file" class="visually-hidden" (change)="onVersionFileSelected($event)" />
<button class="btn btn-sm btn-outline-secondary" title="Upload new version" i18n-title (click)="versionFileInput.click()" [disabled]="!userIsOwner || !userCanEdit">
<i-bs name="file-earmark-plus"></i-bs><span class="visually-hidden" i18n>Upload new version</span>
</button>
</div>
}
@if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
@if (previewNumPages) {
<div class="input-group input-group-sm ms-2 d-none d-md-flex">
<div class="input-group input-group-sm d-none d-md-flex">
<div class="input-group-text" i18n>Page</div>
<input class="form-control flex-grow-0 w-auto" type="number" min="1" [max]="previewNumPages" [(ngModel)]="previewCurrentPage" />
<div class="input-group-text" i18n>of {{previewNumPages}}</div>


@@ -222,8 +222,6 @@ export class DocumentDetailComponent
titleSubject: Subject<string> = new Subject()
previewUrl: string
thumbUrl: string
// Versioning: which document ID to use for file preview/download
selectedVersionId: number
previewText: string
previewLoaded: boolean = false
tiffURL: string
@@ -272,7 +270,6 @@ export class DocumentDetailComponent
public readonly DataType = DataType
@ViewChild('nav') nav: NgbNav
@ViewChild('versionFileInput') versionFileInput
@ViewChild('pdfPreview') set pdfPreview(element) {
// this gets called when component added or removed from DOM
if (
@@ -405,10 +402,7 @@ export class DocumentDetailComponent
}
private loadDocument(documentId: number): void {
this.selectedVersionId = documentId
this.previewUrl = this.documentsService.getPreviewUrl(
this.selectedVersionId
)
this.previewUrl = this.documentsService.getPreviewUrl(documentId)
this.http
.get(this.previewUrl, { responseType: 'text' })
.pipe(
@@ -423,7 +417,7 @@ export class DocumentDetailComponent
err.message ?? err.toString()
}`),
})
this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
this.thumbUrl = this.documentsService.getThumbUrl(documentId)
this.documentsService
.get(documentId)
.pipe(
@@ -644,10 +638,6 @@ export class DocumentDetailComponent
updateComponent(doc: Document) {
this.document = doc
// Default selected version is the newest version
this.selectedVersionId = doc.versions?.length
? Math.max(...doc.versions)
: doc.id
this.requiresPassword = false
this.updateFormForCustomFields()
if (this.archiveContentRenderType === ContentRenderType.TIFF) {
@@ -712,36 +702,6 @@ export class DocumentDetailComponent
this.prepareForm(doc)
}
get hasVersions(): boolean {
return this.document?.versions?.length > 1
}
// Update file preview and download target to a specific version (by document id)
selectVersion(versionId: number) {
this.selectedVersionId = versionId
this.previewUrl = this.documentsService.getPreviewUrl(
this.documentId,
false,
this.selectedVersionId
)
this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
// For text previews, refresh content
this.http
.get(this.previewUrl, { responseType: 'text' })
.pipe(
first(),
takeUntil(this.unsubscribeNotifier),
takeUntil(this.docChangeNotifier)
)
.subscribe({
next: (res) => (this.previewText = res.toString()),
error: (err) =>
(this.previewText = $localize`An error occurred loading content: ${
err.message ?? err.toString()
}`),
})
}
get customFieldFormFields(): FormArray {
return this.documentForm.get('custom_fields') as FormArray
}
@@ -1089,36 +1049,10 @@ export class DocumentDetailComponent
})
}
onVersionFileSelected(event: Event) {
const input = event.target as HTMLInputElement
if (!input?.files || input.files.length === 0) return
const file = input.files[0]
// Reset input to allow re-selection of the same file later
input.value = ''
this.documentsService
.uploadVersion(this.documentId, file)
.pipe(first())
.subscribe({
next: () => {
this.toastService.showInfo(
$localize`Uploading new version. Processing will happen in the background.`
)
// Refresh metadata to reflect that versions changed (when ready)
this.openDocumentService.refreshDocument(this.documentId)
},
error: (error) => {
this.toastService.showError(
$localize`Error uploading new version`,
error
)
},
})
}
download(original: boolean = false) {
this.downloading = true
const downloadUrl = this.documentsService.getDownloadUrl(
this.selectedVersionId || this.documentId,
this.documentId,
original
)
this.http


@@ -1204,7 +1204,7 @@ describe('BulkEditorComponent', () => {
expect(tagListAllSpy).toHaveBeenCalled()
expect(tagSelectionModelToggleSpy).toHaveBeenCalledWith(newTag.id)
expect(component.tagSelectionModel.items).toMatchObject(
expect(component.tagSelectionModel.items).toEqual(
[{ id: null, name: 'Not assigned' }].concat(tags.results as any)
)
})


@@ -37,7 +37,6 @@ import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { SettingsService } from 'src/app/services/settings.service'
import { ToastService } from 'src/app/services/toast.service'
import { flattenTags } from 'src/app/utils/flatten-tags'
import { MergeConfirmDialogComponent } from '../../common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component'
import { RotateConfirmDialogComponent } from '../../common/confirm-dialog/rotate-confirm-dialog/rotate-confirm-dialog.component'
import { CorrespondentEditDialogComponent } from '../../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
@@ -165,10 +164,7 @@ export class BulkEditorComponent
this.tagService
.listAll()
.pipe(first())
.subscribe(
(result) =>
(this.tagSelectionModel.items = flattenTags(result.results))
)
.subscribe((result) => (this.tagSelectionModel.items = result.results))
}
if (
this.permissionService.currentUserCan(
@@ -652,7 +648,7 @@ export class BulkEditorComponent
)
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(({ newTag, tags }) => {
this.tagSelectionModel.items = flattenTags(tags.results)
this.tagSelectionModel.items = tags.results
this.tagSelectionModel.toggle(newTag.id)
})
}


@@ -589,7 +589,7 @@ describe('FilterEditorComponent', () => {
expect(component.tagSelectionModel.logicalOperator).toEqual(
LogicalOperator.And
)
expect(component.tagSelectionModel.getSelectedItems()).toMatchObject(tags)
expect(component.tagSelectionModel.getSelectedItems()).toEqual(tags)
// coverage
component.filterRules = [
{
@@ -615,7 +615,7 @@ describe('FilterEditorComponent', () => {
expect(component.tagSelectionModel.logicalOperator).toEqual(
LogicalOperator.Or
)
expect(component.tagSelectionModel.getSelectedItems()).toMatchObject(tags)
expect(component.tagSelectionModel.getSelectedItems()).toEqual(tags)
// coverage
component.filterRules = [
{
@@ -652,7 +652,7 @@ describe('FilterEditorComponent', () => {
expect(component.tagSelectionModel.logicalOperator).toEqual(
LogicalOperator.And
)
expect(component.tagSelectionModel.getExcludedItems()).toMatchObject(tags)
expect(component.tagSelectionModel.getExcludedItems()).toEqual(tags)
// coverage
component.filterRules = [
{


@@ -97,7 +97,6 @@ import {
CustomFieldQueryExpression,
} from 'src/app/utils/custom-field-query-element'
import { filterRulesDiffer } from 'src/app/utils/filter-rules'
import { flattenTags } from 'src/app/utils/flatten-tags'
import {
CustomFieldQueriesModel,
CustomFieldsQueryDropdownComponent,
@@ -1135,7 +1134,7 @@ export class FilterEditorComponent
) {
this.loadingCountTotal++
this.tagService.listAll().subscribe((result) => {
this.tagSelectionModel.items = flattenTags(result.results)
this.tagSelectionModel.items = result.results
this.maybeCompleteLoading()
})
}


@@ -1,4 +1,4 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { NgClass, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import {
@@ -30,7 +30,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
FormsModule,
ReactiveFormsModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,


@@ -1,4 +1,4 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { NgClass, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import {
@@ -28,7 +28,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
FormsModule,
ReactiveFormsModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,


@@ -54,7 +54,61 @@
</tr>
}
@for (object of data; track object) {
<ng-container [ngTemplateOutlet]="objectRow" [ngTemplateOutletContext]="{ object: object, depth: 0 }"></ng-container>
<tr (click)="toggleSelected(object); $event.stopPropagation();" class="data-row fade" [class.show]="show">
<td>
<div class="form-check m-0 ms-2 me-n2">
<input type="checkbox" class="form-check-input" id="{{typeName}}{{object.id}}" [checked]="selectedObjects.has(object.id)" (click)="toggleSelected(object); $event.stopPropagation();">
<label class="form-check-label" for="{{typeName}}{{object.id}}"></label>
</div>
</td>
<td scope="row"><button class="btn btn-link ms-0 ps-0 text-start" (click)="userCanEdit(object) ? openEditDialog(object) : null; $event.stopPropagation()">{{ object.name }}</button> </td>
<td scope="row" class="d-none d-sm-table-cell">{{ getMatching(object) }}</td>
<td scope="row">{{ object.document_count }}</td>
@for (column of extraColumns; track column) {
<td scope="row" [ngClass]="{ 'd-none d-sm-table-cell' : column.hideOnMobile }">
@if (column.rendersHtml) {
<div [innerHtml]="column.valueFn.call(null, object) | safeHtml"></div>
} @else if (column.monospace) {
<span class="font-monospace">{{ column.valueFn.call(null, object) }}</span>
} @else {
{{ column.valueFn.call(null, object) }}
}
</td>
}
<td scope="row">
<div class="btn-toolbar gap-2">
<div class="btn-group d-block d-sm-none">
<div ngbDropdown container="body" class="d-inline-block">
<button type="button" class="btn btn-link" id="actionsMenuMobile" (click)="$event.stopPropagation()" ngbDropdownToggle>
<i-bs name="three-dots-vertical"></i-bs>
</button>
<div ngbDropdownMenu aria-labelledby="actionsMenuMobile">
<button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
<button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
@if (object.document_count > 0) {
<button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ object.document_count }})</button>
}
</div>
</div>
</div>
<div class="btn-group d-none d-sm-inline-block">
<button class="btn btn-sm btn-outline-secondary" (click)="openEditDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" [disabled]="!userCanEdit(object)">
<i-bs width="1em" height="1em" name="pencil"></i-bs>&nbsp;<ng-container i18n>Edit</ng-container>
</button>
<button class="btn btn-sm btn-outline-danger" (click)="openDeleteDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" [disabled]="!userCanDelete(object)">
<i-bs width="1em" height="1em" name="trash"></i-bs>&nbsp;<ng-container i18n>Delete</ng-container>
</button>
</div>
@if (object.document_count > 0) {
<div class="btn-group d-none d-sm-inline-block">
<button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
<i-bs width="1em" height="1em" name="filter"></i-bs>&nbsp;<ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ object.document_count }}</span>
</button>
</div>
}
</div>
</td>
</tr>
}
</tbody>
</table>
@@ -75,72 +129,3 @@
}
</div>
}
<ng-template #objectRow let-object="object" let-depth="depth">
<tr (click)="toggleSelected(object); $event.stopPropagation();" class="data-row fade" [class.show]="show">
<td>
<div class="form-check m-0 ms-2 me-n2">
<input type="checkbox" class="form-check-input" id="{{typeName}}{{object.id}}" [checked]="selectedObjects.has(object.id)" (click)="toggleSelected(object); $event.stopPropagation();">
<label class="form-check-label" for="{{typeName}}{{object.id}}"></label>
</div>
</td>
<td scope="row" class="name-cell" style="--depth: {{depth}}">
@if (depth > 0) {
<div class="indicator"></div>
}
<button class="btn btn-link ms-0 ps-0 text-start" (click)="userCanEdit(object) ? openEditDialog(object) : null; $event.stopPropagation()">{{ object.name }}</button>
</td>
<td scope="row" class="d-none d-sm-table-cell">{{ getMatching(object) }}</td>
<td scope="row">{{ getDocumentCount(object) }}</td>
@for (column of extraColumns; track column) {
<td scope="row" [ngClass]="{ 'd-none d-sm-table-cell' : column.hideOnMobile }">
@if (column.rendersHtml) {
<div [innerHtml]="column.valueFn.call(null, object) | safeHtml"></div>
} @else if (column.monospace) {
<span class="font-monospace">{{ column.valueFn.call(null, object) }}</span>
} @else {
{{ column.valueFn.call(null, object) }}
}
</td>
}
<td scope="row">
<div class="btn-toolbar gap-2">
<div class="btn-group d-block d-sm-none">
<div ngbDropdown container="body" class="d-inline-block">
<button type="button" class="btn btn-link" id="actionsMenuMobile" (click)="$event.stopPropagation()" ngbDropdownToggle>
<i-bs name="three-dots-vertical"></i-bs>
</button>
<div ngbDropdownMenu aria-labelledby="actionsMenuMobile">
<button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
<button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
@if (getDocumentCount(object) > 0) {
<button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ getDocumentCount(object) }})</button>
}
</div>
</div>
</div>
<div class="btn-group d-none d-sm-inline-block">
<button class="btn btn-sm btn-outline-secondary" (click)="openEditDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" [disabled]="!userCanEdit(object)">
<i-bs width="1em" height="1em" name="pencil"></i-bs>&nbsp;<ng-container i18n>Edit</ng-container>
</button>
<button class="btn btn-sm btn-outline-danger" (click)="openDeleteDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" [disabled]="!userCanDelete(object)">
<i-bs width="1em" height="1em" name="trash"></i-bs>&nbsp;<ng-container i18n>Delete</ng-container>
</button>
</div>
@if (getDocumentCount(object) > 0) {
<div class="btn-group d-none d-sm-inline-block">
<button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
<i-bs width="1em" height="1em" name="filter"></i-bs>&nbsp;<ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
</button>
</div>
}
</div>
</td>
</tr>
@if (object.children && object.children.length > 0) {
@for (child of object.children; track child) {
<ng-container [ngTemplateOutlet]="objectRow" [ngTemplateOutletContext]="{ object: child, depth: depth + 1 }"></ng-container>
}
}
</ng-template>


@@ -10,17 +10,3 @@ tbody tr:last-child td {
.form-check {
min-height: 0;
}
td.name-cell {
padding-left: calc(calc(var(--depth) - 1) * 1.1rem);
.indicator {
display: inline-block;
width: .8rem;
height: .8rem;
border-left: 1px solid var(--bs-secondary);
border-bottom: 1px solid var(--bs-secondary);
margin-right: .25rem;
margin-left: .5rem;
}
}


@@ -79,7 +79,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
@ViewChildren(SortableDirective) headers: QueryList<SortableDirective>
public data: T[] = []
private unfilteredData: T[] = []
public page = 1
@@ -133,18 +132,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
this.reloadData()
}
protected filterData(data: T[]): T[] {
return data
}
getDocumentCount(object: MatchingModel): number {
return (
object.document_count ??
this.unfilteredData.find((d) => d.id == object.id)?.document_count ??
0
)
}
reloadData(extraParams: { [key: string]: any } = null) {
this.loading = true
this.clearSelection()
@@ -161,8 +148,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
.pipe(
takeUntil(this.unsubscribeNotifier),
tap((c) => {
this.unfilteredData = c.results
this.data = this.filterData(c.results)
this.data = c.results
this.collectionSize = c.count
}),
delay(100)


@@ -1,4 +1,4 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { NgClass, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import {
@@ -30,7 +30,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
FormsModule,
ReactiveFormsModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,


@@ -1,4 +1,4 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { NgClass, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import {
@@ -30,7 +30,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
FormsModule,
ReactiveFormsModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
@@ -60,8 +59,4 @@ export class TagListComponent extends ManagementListComponent<Tag> {
getDeleteMessage(object: Tag) {
return $localize`Do you really want to delete the tag "${object.name}"?`
}
filterData(data: Tag[]) {
return data.filter((tag) => !tag.parent)
}
}


@@ -159,10 +159,6 @@ export interface Document extends ObjectWithPermissions {
page_count?: number
// Versioning
head_version?: number
versions?: number[]
// Frontend only
__changedFields?: string[]
}


@@ -6,12 +6,4 @@ export interface Tag extends MatchingModel {
text_color?: string
is_inbox_tag?: boolean
parent?: number // Tag ID
children?: Tag[] // read-only
// UI-only: computed depth and order for hierarchical dropdowns
depth?: number
orderIndex?: number
}


@@ -163,19 +163,12 @@ export class DocumentService extends AbstractPaperlessService<Document> {
})
}
getPreviewUrl(
id: number,
original: boolean = false,
versionID: number = null
): string {
getPreviewUrl(id: number, original: boolean = false): string {
let url = new URL(this.getResourceUrl(id, 'preview'))
if (this._searchQuery) url.hash = `#search="${this.searchQuery}"`
if (original) {
url.searchParams.append('original', 'true')
}
if (versionID) {
url.searchParams.append('version', versionID.toString())
}
return url.toString()
}
@@ -191,16 +184,6 @@ export class DocumentService extends AbstractPaperlessService<Document> {
return url
}
uploadVersion(documentId: number, file: File) {
const formData = new FormData()
formData.append('document', file, file.name)
return this.http.post(
this.getResourceUrl(documentId, 'update_version'),
formData,
{ reportProgress: true, observe: 'events' }
)
}
getNextAsn(): Observable<number> {
return this.http.get<number>(this.getResourceUrl(null, 'next_asn'))
}


@@ -1,63 +0,0 @@
import type { Tag } from '../data/tag'
import { flattenTags } from './flatten-tags'
describe('flattenTags', () => {
it('returns empty array for empty input', () => {
expect(flattenTags([])).toEqual([])
})
it('orders roots and children by name (case-insensitive, numeric) and sets depth/orderIndex', () => {
const input: Tag[] = [
{ id: 11, name: 'A-root' },
{ id: 10, name: 'B-root' },
{ id: 101, name: 'Child 10', parent: 11 },
{ id: 102, name: 'child 2', parent: 11 },
{ id: 201, name: 'beta', parent: 10 },
{ id: 202, name: 'Alpha', parent: 10 },
{ id: 103, name: 'Sub 1', parent: 102 },
]
const flat = flattenTags(input)
const names = flat.map((t) => t.name)
expect(names).toEqual([
'A-root',
'child 2',
'Sub 1',
'Child 10',
'B-root',
'Alpha',
'beta',
])
expect(flat.map((t) => t.depth)).toEqual([0, 1, 2, 1, 0, 1, 1])
expect(flat.map((t) => t.orderIndex)).toEqual([0, 1, 2, 3, 4, 5, 6])
// Children are rebuilt
const aRoot = flat.find((t) => t.name === 'A-root')
expect(new Set(aRoot.children?.map((c) => c.name))).toEqual(
new Set(['child 2', 'Child 10'])
)
const bRoot = flat.find((t) => t.name === 'B-root')
expect(new Set(bRoot.children?.map((c) => c.name))).toEqual(
new Set(['Alpha', 'beta'])
)
const child2 = flat.find((t) => t.name === 'child 2')
expect(new Set(child2.children?.map((c) => c.name))).toEqual(
new Set(['Sub 1'])
)
})
it('excludes orphaned nodes (with missing parent)', () => {
const input: Tag[] = [
{ id: 1, name: 'Root' },
{ id: 2, name: 'Child', parent: 1 },
{ id: 3, name: 'Orphan', parent: 999 }, // missing parent
]
const flat = flattenTags(input)
expect(flat.map((t) => t.name)).toEqual(['Root', 'Child'])
})
})


@@ -1,35 +0,0 @@
import { Tag } from '../data/tag'
export function flattenTags(all: Tag[]): Tag[] {
const map = new Map<number, Tag>(
all.map((t) => [t.id, { ...t, children: [] }])
)
// rebuild children
for (const t of map.values()) {
if (t.parent) {
const p = map.get(t.parent)
p?.children.push(t)
}
}
const roots = Array.from(map.values()).filter((t) => !t.parent)
const sortByName = (a: Tag, b: Tag) =>
a.name.localeCompare(b.name, undefined, {
sensitivity: 'base',
numeric: true,
})
const ordered: Tag[] = []
let idx = 0
const walk = (node: Tag, depth: number) => {
node.depth = depth
node.orderIndex = idx++
ordered.push(node)
if (node.children?.length) {
for (const child of [...node.children].sort(sortByName)) {
walk(child, depth + 1)
}
}
}
roots.sort(sortByName)
roots.forEach((r) => walk(r, 0))
return ordered
}


@@ -55,7 +55,6 @@ import {
checkLg,
chevronDoubleLeft,
chevronDoubleRight,
chevronRight,
clipboard,
clipboardCheck,
clipboardCheckFill,
@@ -78,7 +77,6 @@ import {
fileEarmarkFill,
fileEarmarkLock,
fileEarmarkMinus,
fileEarmarkPlus,
fileEarmarkRichtext,
fileText,
files,
@@ -95,9 +93,7 @@ import {
house,
infoCircle,
journals,
layers,
link,
listNested,
listTask,
listUl,
microsoft,
@@ -269,7 +265,6 @@ const icons = {
checkLg,
chevronDoubleLeft,
chevronDoubleRight,
chevronRight,
clipboard,
clipboardCheck,
clipboardCheckFill,
@@ -292,7 +287,6 @@ const icons = {
fileEarmarkFill,
fileEarmarkLock,
fileEarmarkMinus,
fileEarmarkPlus,
fileEarmarkRichtext,
files,
fileText,
@@ -309,9 +303,7 @@ const icons = {
house,
infoCircle,
journals,
layers,
link,
listNested,
listTask,
listUl,
microsoft,


@@ -1,7 +1,6 @@
from django.conf import settings
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from treenode.admin import TreeNodeModelAdmin
from documents.models import Correspondent
from documents.models import CustomField
@@ -15,7 +14,6 @@ from documents.models import SavedViewFilterRule
from documents.models import ShareLink
from documents.models import StoragePath
from documents.models import Tag
from documents.tasks import update_document_parent_tags
if settings.AUDIT_LOG_ENABLED:
from auditlog.admin import LogEntryAdmin
@@ -28,25 +26,12 @@ class CorrespondentAdmin(GuardedModelAdmin):
list_editable = ("match", "matching_algorithm")
class TagAdmin(GuardedModelAdmin, TreeNodeModelAdmin):
class TagAdmin(GuardedModelAdmin):
list_display = ("name", "color", "match", "matching_algorithm")
list_filter = ("matching_algorithm",)
list_editable = ("color", "match", "matching_algorithm")
search_fields = ("color", "name")
def save_model(self, request, obj, form, change):
old_parent = None
if change and obj.pk:
tag = Tag.objects.get(pk=obj.pk)
old_parent = tag.get_parent() if tag else None
super().save_model(request, obj, form, change)
# sync parent tags on documents if changed
new_parent = obj.get_parent()
if new_parent and old_parent != new_parent:
update_document_parent_tags(obj, new_parent)
class DocumentTypeAdmin(GuardedModelAdmin):
list_display = ("name", "match", "matching_algorithm")


@@ -1,5 +1,7 @@
from __future__ import annotations
import hashlib
import itertools
import logging
import tempfile
from pathlib import Path
@@ -11,7 +13,6 @@ from celery import chord
from celery import group
from celery import shared_task
from django.conf import settings
from django.db import transaction
from django.db.models import Q
from django.utils import timezone
@@ -24,7 +25,6 @@ from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
from documents.models import Tag
from documents.permissions import set_permissions_for_object
from documents.plugins.helpers import DocumentsStatusManager
from documents.tasks import bulk_update_documents
@@ -96,45 +96,31 @@ def set_document_type(doc_ids: list[int], document_type: DocumentType) -> Litera
def add_tag(doc_ids: list[int], tag: int) -> Literal["OK"]:
tag_obj = Tag.objects.get(pk=tag)
tags_to_add = [tag_obj, *tag_obj.get_ancestors()]
qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag)).only("pk")
affected_docs = list(qs.values_list("pk", flat=True))
DocumentTagRelationship = Document.tags.through
to_create = []
affected_docs: set[int] = set()
for t in tags_to_add:
qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=t.id)).only("pk")
doc_ids_missing_tag = list(qs.values_list("pk", flat=True))
affected_docs.update(doc_ids_missing_tag)
to_create.extend(
DocumentTagRelationship(document_id=doc, tag_id=t.id)
for doc in doc_ids_missing_tag
)
DocumentTagRelationship.objects.bulk_create(
[DocumentTagRelationship(document_id=doc, tag_id=tag) for doc in affected_docs],
)
if to_create:
DocumentTagRelationship.objects.bulk_create(to_create)
if affected_docs:
bulk_update_documents.delay(document_ids=list(affected_docs))
bulk_update_documents.delay(document_ids=affected_docs)
return "OK"
def remove_tag(doc_ids: list[int], tag: int) -> Literal["OK"]:
tag_obj = Tag.objects.get(pk=tag)
tag_ids = [tag_obj.id, *tag_obj.get_descendants_pks()]
qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag)).only("pk")
affected_docs = list(qs.values_list("pk", flat=True))
DocumentTagRelationship = Document.tags.through
qs = DocumentTagRelationship.objects.filter(
document_id__in=doc_ids,
tag_id__in=tag_ids,
)
affected_docs = list(qs.values_list("document_id", flat=True).distinct())
qs.delete()
if affected_docs:
bulk_update_documents.delay(document_ids=affected_docs)
DocumentTagRelationship.objects.filter(
Q(document_id__in=affected_docs) & Q(tag_id=tag),
).delete()
bulk_update_documents.delay(document_ids=affected_docs)
return "OK"
@@ -146,57 +132,23 @@ def modify_tags(
) -> Literal["OK"]:
qs = Document.objects.filter(id__in=doc_ids).only("pk")
affected_docs = list(qs.values_list("pk", flat=True))
DocumentTagRelationship = Document.tags.through
# add with all ancestors
expanded_add_tags: set[int] = set()
add_tag_objects = Tag.objects.filter(pk__in=add_tags)
for t in add_tag_objects:
expanded_add_tags.add(int(t.id))
expanded_add_tags.update(int(pk) for pk in t.get_ancestors_pks())
DocumentTagRelationship.objects.filter(
document_id__in=affected_docs,
tag_id__in=remove_tags,
).delete()
# remove with all descendants
expanded_remove_tags: set[int] = set()
remove_tag_objects = Tag.objects.filter(pk__in=remove_tags)
for t in remove_tag_objects:
expanded_remove_tags.add(int(t.id))
expanded_remove_tags.update(int(pk) for pk in t.get_descendants_pks())
DocumentTagRelationship.objects.bulk_create(
[
DocumentTagRelationship(document_id=doc, tag_id=tag)
for (doc, tag) in itertools.product(affected_docs, add_tags)
],
ignore_conflicts=True,
)
try:
with transaction.atomic():
if expanded_remove_tags:
DocumentTagRelationship.objects.filter(
document_id__in=affected_docs,
tag_id__in=expanded_remove_tags,
).delete()
to_create = []
if expanded_add_tags:
existing_pairs = set(
DocumentTagRelationship.objects.filter(
document_id__in=affected_docs,
tag_id__in=expanded_add_tags,
).values_list("document_id", "tag_id"),
)
to_create = [
DocumentTagRelationship(document_id=doc, tag_id=tag)
for doc in affected_docs
for tag in expanded_add_tags
if (doc, tag) not in existing_pairs
]
if to_create:
DocumentTagRelationship.objects.bulk_create(
to_create,
ignore_conflicts=True,
)
if affected_docs:
bulk_update_documents.delay(document_ids=affected_docs)
except Exception as e:
logger.error(f"Error modifying tags: {e}")
return "ERROR"
bulk_update_documents.delay(document_ids=affected_docs)
return "OK"
@@ -332,8 +284,10 @@ def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",
)
qs = Document.objects.filter(id__in=doc_ids)
affected_docs: list[int] = []
import pikepdf
rotate_tasks = []
for doc in qs:
if doc.mime_type != "application/pdf":
logger.warning(
@@ -341,34 +295,28 @@ def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
)
continue
try:
# Write rotated output to a temp file and create a new version via consume pipeline
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
/ f"{doc.id}_rotated.pdf"
)
with pikepdf.open(doc.source_path) as pdf:
with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
for page in pdf.pages:
page.rotate(degrees, relative=True)
pdf.remove_unreferenced_resources()
pdf.save(filepath)
# Preserve metadata/permissions via overrides; mark as new version
overrides = DocumentMetadataOverrides().from_document(doc)
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=filepath,
head_version_id=doc.id,
),
overrides,
)
logger.info(
f"Queued new rotated version for document {doc.id} by {degrees} degrees",
)
pdf.save()
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
doc.save()
rotate_tasks.append(
update_document_content_maybe_archive_file.s(
document_id=doc.id,
),
)
logger.info(
f"Rotated document {doc.id} by {degrees} degrees",
)
affected_docs.append(doc.id)
except Exception as e:
logger.exception(f"Error rotating document {doc.id}: {e}")
if len(affected_docs) > 0:
bulk_update_task = bulk_update_documents.si(document_ids=affected_docs)
chord(header=rotate_tasks, body=bulk_update_task).delay()
return "OK"
@@ -531,31 +479,19 @@ def delete_pages(doc_ids: list[int], pages: list[int]) -> Literal["OK"]:
import pikepdf
try:
# Produce edited PDF to a temp file and create a new version
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
/ f"{doc.id}_pages_deleted.pdf"
)
with pikepdf.open(doc.source_path) as pdf:
with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
offset = 1 # pages are 1-indexed
for page_num in pages:
pdf.pages.remove(pdf.pages[page_num - offset])
offset += 1 # remove() changes the index of the pages
pdf.remove_unreferenced_resources()
pdf.save(filepath)
overrides = DocumentMetadataOverrides().from_document(doc)
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=filepath,
head_version_id=doc.id,
),
overrides,
)
logger.info(
f"Queued new version for document {doc.id} after deleting pages {pages}",
)
pdf.save()
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
if doc.page_count is not None:
doc.page_count = doc.page_count - len(pages)
doc.save()
update_document_content_maybe_archive_file.delay(document_id=doc.id)
logger.info(f"Deleted pages {pages} from document {doc.id}")
except Exception as e:
logger.exception(f"Error deleting pages from document {doc.id}: {e}")
@@ -607,29 +543,17 @@ def edit_pdf(
dst.pages[-1].rotate(op["rotate"], relative=True)
if update_document:
# Create a new version from the edited PDF rather than replacing in-place
temp_path = doc.source_path.with_suffix(".tmp.pdf")
pdf = pdf_docs[0]
pdf.remove_unreferenced_resources()
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
/ f"{doc.id}_edited.pdf"
)
pdf.save(filepath)
overrides = (
DocumentMetadataOverrides().from_document(doc)
if include_metadata
else DocumentMetadataOverrides()
)
if user is not None:
overrides.owner_id = user.id
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=filepath,
head_version_id=doc.id,
),
overrides,
)
# save the edited PDF to a temporary file in case of errors
pdf.save(temp_path)
# replace the original document with the edited one
temp_path.replace(doc.source_path)
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
doc.page_count = len(pdf.pages)
doc.save()
update_document_content_maybe_archive_file.delay(document_id=doc.id)
else:
consume_tasks = []
overrides = (


@@ -14,51 +14,6 @@ from documents.classifier import DocumentClassifier
from documents.models import Document
def _resolve_effective_doc(pk: int, request) -> Document | None:
"""
Resolve which Document row should be considered for caching keys:
- If a version is requested, use that version
- If pk is a head doc, use its newest child version if present, else the head.
- Else, pk is a version, use that version.
Returns None if resolution fails (treat as no-cache).
"""
try:
request_doc = Document.objects.only("id", "head_version_id").get(pk=pk)
except Document.DoesNotExist:
return None
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.only("id").get(id=request_doc.head_version_id)
)
version_param = (
request.query_params.get("version")
if hasattr(request, "query_params")
else None
)
if version_param:
try:
version_id = int(version_param)
candidate = Document.objects.only("id", "head_version_id").get(
id=version_id,
)
if candidate.id != head_doc.id and candidate.head_version_id != head_doc.id:
return None
return candidate
except Exception:
return None
# Default behavior: if pk is a head doc, prefer its newest child version
if request_doc.head_version_id is None:
latest = head_doc.versions.only("id").order_by("id").last()
return latest or head_doc
# pk is already a version
return request_doc
def suggestions_etag(request, pk: int) -> str | None:
"""
Returns an optional string for the ETag, allowing browser caching of
@@ -116,10 +71,11 @@ def metadata_etag(request, pk: int) -> str | None:
Metadata is extracted from the original file, so use its checksum as the
ETag
"""
doc = _resolve_effective_doc(pk, request)
if doc is None:
try:
doc = Document.objects.only("checksum").get(pk=pk)
return doc.checksum
except Document.DoesNotExist: # pragma: no cover
return None
return doc.checksum
return None
@@ -129,10 +85,11 @@ def metadata_last_modified(request, pk: int) -> datetime | None:
not the modification of the original file, but of the database object, but we might as well
err on the side of caution
"""
doc = _resolve_effective_doc(pk, request)
if doc is None:
try:
doc = Document.objects.only("modified").get(pk=pk)
return doc.modified
except Document.DoesNotExist: # pragma: no cover
return None
return doc.modified
return None
@@ -140,15 +97,15 @@ def preview_etag(request, pk: int) -> str | None:
"""
ETag for the document preview, using the original or archive checksum, depending on the request
"""
doc = _resolve_effective_doc(pk, request)
if doc is None:
try:
doc = Document.objects.only("checksum", "archive_checksum").get(pk=pk)
use_original = (
"original" in request.query_params
and request.query_params["original"] == "true"
)
return doc.checksum if use_original else doc.archive_checksum
except Document.DoesNotExist: # pragma: no cover
return None
use_original = (
hasattr(request, "query_params")
and "original" in request.query_params
and request.query_params["original"] == "true"
)
return doc.checksum if use_original else doc.archive_checksum
return None
@@ -157,10 +114,11 @@ def preview_last_modified(request, pk: int) -> datetime | None:
Uses the document's modified time to set the Last-Modified header. Not strictly
speaking correct, but close enough and quick
"""
doc = _resolve_effective_doc(pk, request)
if doc is None:
try:
doc = Document.objects.only("modified").get(pk=pk)
return doc.modified
except Document.DoesNotExist: # pragma: no cover
return None
return doc.modified
return None
@@ -170,13 +128,10 @@ def thumbnail_last_modified(request, pk: int) -> datetime | None:
Cache should be (slightly?) faster than filesystem
"""
try:
doc = _resolve_effective_doc(pk, request)
if doc is None:
return None
doc = Document.objects.only("storage_type").get(pk=pk)
if not doc.thumbnail_path.exists():
return None
# Use the effective document id for cache key
doc_key = get_thumbnail_modified_key(doc.id)
doc_key = get_thumbnail_modified_key(pk)
cache_hit = cache.get(doc_key)
if cache_hit is not None:


@@ -113,12 +113,6 @@ class ConsumerPluginMixin:
self.filename = self.metadata.filename or self.input_doc.original_file.name
if input_doc.head_version_id:
self.log.debug(f"Document head version id: {input_doc.head_version_id}")
head_version = Document.objects.get(pk=input_doc.head_version_id)
version_index = head_version.versions.count()
self.filename += f"_v{version_index}"
def _send_progress(
self,
current_progress: int,
@@ -476,44 +470,12 @@ class ConsumerPlugin(
try:
with transaction.atomic():
# store the document.
if self.input_doc.head_version_id:
# If this is a new version of an existing document, we need
# to make sure we're not creating a new document, but updating
# the existing one.
original_document = Document.objects.get(
pk=self.input_doc.head_version_id,
)
self.log.debug("Saving record for updated version to database")
original_document.pk = None
original_document.head_version = Document.objects.get(
pk=self.input_doc.head_version_id,
)
file_for_checksum = (
self.unmodified_original
if self.unmodified_original is not None
else self.working_copy
)
original_document.checksum = hashlib.md5(
file_for_checksum.read_bytes(),
).hexdigest()
original_document.content = text
original_document.page_count = page_count
original_document.mime_type = mime_type
original_document.original_filename = self.filename
# Clear unique file path fields so they can be generated uniquely later
original_document.filename = None
original_document.archive_filename = None
original_document.archive_checksum = None
original_document.modified = timezone.now()
original_document.save()
document = original_document
else:
document = self._store(
text=text,
date=date,
page_count=page_count,
mime_type=mime_type,
)
document = self._store(
text=text,
date=date,
page_count=page_count,
mime_type=mime_type,
)
# If we get here, it was successful. Proceed with post-consume
# hooks. If they fail, nothing will get changed.
@@ -727,7 +689,7 @@ class ConsumerPlugin(
if self.metadata.tag_ids:
for tag_id in self.metadata.tag_ids:
document.add_nested_tags([Tag.objects.get(pk=tag_id)])
document.tags.add(Tag.objects.get(pk=tag_id))
if self.metadata.storage_path_id:
document.storage_path = StoragePath.objects.get(


@@ -156,7 +156,6 @@ class ConsumableDocument:
source: DocumentSource
original_file: Path
head_version_id: int | None = None
mailrule_id: int | None = None
mime_type: str = dataclasses.field(init=False, default=None)


@@ -17,7 +17,7 @@ def move_sender_strings_to_sender_model(apps, schema_editor):
if document.sender:
(
DOCUMENT_SENDER_MAP[document.pk],
_,
created,
) = sender_model.objects.get_or_create(
name=document.sender,
defaults={"slug": slugify(document.sender)},


@@ -1,159 +0,0 @@
# Generated by Django 5.2.6 on 2025-09-12 18:42
import django.core.validators
import django.db.models.deletion
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1070_customfieldinstance_value_long_text_and_more"),
]
operations = [
migrations.AddField(
model_name="tag",
name="tn_ancestors_count",
field=models.PositiveIntegerField(
default=0,
editable=False,
verbose_name="Ancestors count",
),
),
migrations.AddField(
model_name="tag",
name="tn_ancestors_pks",
field=models.TextField(
blank=True,
default="",
editable=False,
verbose_name="Ancestors pks",
),
),
migrations.AddField(
model_name="tag",
name="tn_children_count",
field=models.PositiveIntegerField(
default=0,
editable=False,
verbose_name="Children count",
),
),
migrations.AddField(
model_name="tag",
name="tn_children_pks",
field=models.TextField(
blank=True,
default="",
editable=False,
verbose_name="Children pks",
),
),
migrations.AddField(
model_name="tag",
name="tn_depth",
field=models.PositiveIntegerField(
default=0,
editable=False,
validators=[
django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(10),
],
verbose_name="Depth",
),
),
migrations.AddField(
model_name="tag",
name="tn_descendants_count",
field=models.PositiveIntegerField(
default=0,
editable=False,
verbose_name="Descendants count",
),
),
migrations.AddField(
model_name="tag",
name="tn_descendants_pks",
field=models.TextField(
blank=True,
default="",
editable=False,
verbose_name="Descendants pks",
),
),
migrations.AddField(
model_name="tag",
name="tn_index",
field=models.PositiveIntegerField(
default=0,
editable=False,
verbose_name="Index",
),
),
migrations.AddField(
model_name="tag",
name="tn_level",
field=models.PositiveIntegerField(
default=1,
editable=False,
validators=[
django.core.validators.MinValueValidator(1),
django.core.validators.MaxValueValidator(10),
],
verbose_name="Level",
),
),
migrations.AddField(
model_name="tag",
name="tn_order",
field=models.PositiveIntegerField(
default=0,
editable=False,
verbose_name="Order",
),
),
migrations.AddField(
model_name="tag",
name="tn_parent",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="tn_children",
to="documents.tag",
verbose_name="Parent",
),
),
migrations.AddField(
model_name="tag",
name="tn_priority",
field=models.PositiveIntegerField(
default=0,
validators=[
django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(9999999999),
],
verbose_name="Priority",
),
),
migrations.AddField(
model_name="tag",
name="tn_siblings_count",
field=models.PositiveIntegerField(
default=0,
editable=False,
verbose_name="Siblings count",
),
),
migrations.AddField(
model_name="tag",
name="tn_siblings_pks",
field=models.TextField(
blank=True,
default="",
editable=False,
verbose_name="Siblings pks",
),
),
]

View File

@@ -1,26 +0,0 @@
# Generated by Django 5.1.6 on 2025-02-26 17:08
import django.db.models.deletion
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1071_tag_tn_ancestors_count_tag_tn_ancestors_pks_and_more"),
]
operations = [
migrations.AddField(
model_name="document",
name="head_version",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="versions",
to="documents.document",
verbose_name="head version of document",
),
),
]
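The migration above adds a self-referential foreign key; with related_name="versions" both directions are reachable. Illustrative queries, assuming the field exists on Document:

head = Document.objects.get(pk=1)
head.versions.all()            # all version rows pointing at this head
latest = head.versions.latest("id")
latest.head_version == head    # True; on_delete=CASCADE removes versions with the head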

View File

@@ -7,14 +7,12 @@ from celery import states
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator
from django.core.validators import MinValueValidator
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from multiselectfield import MultiSelectField
from treenode.models import TreeNodeModel
if settings.AUDIT_LOG_ENABLED:
from auditlog.registry import auditlog
@@ -98,10 +96,8 @@ class Correspondent(MatchingModel):
verbose_name_plural = _("correspondents")
class Tag(MatchingModel, TreeNodeModel):
class Tag(MatchingModel):
color = models.CharField(_("color"), max_length=7, default="#a6cee3")
# Maximum allowed nesting depth for tags (root = 1, max depth = 5)
MAX_NESTING_DEPTH: Final[int] = 5
is_inbox_tag = models.BooleanField(
_("is inbox tag"),
@@ -112,30 +108,10 @@ class Tag(MatchingModel, TreeNodeModel):
),
)
class Meta(MatchingModel.Meta, TreeNodeModel.Meta):
class Meta(MatchingModel.Meta):
verbose_name = _("tag")
verbose_name_plural = _("tags")
def clean(self):
# Prevent self-parenting and assigning a descendant as parent
parent = self.get_parent()
if parent == self:
raise ValidationError({"parent": _("Cannot set itself as parent.")})
if parent and self.pk is not None and self.is_ancestor_of(parent):
raise ValidationError({"parent": _("Cannot set parent to a descendant.")})
# Enforce maximum nesting depth
new_parent_depth = 0
if parent:
new_parent_depth = parent.get_ancestors_count() + 1
height = 0 if self.pk is None else self.get_depth()
deepest_new_depth = (new_parent_depth + 1) + height
if deepest_new_depth > self.MAX_NESTING_DEPTH:
raise ValidationError(_("Maximum nesting depth exceeded."))
return super().clean()
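# Worked example of the depth rule above: with MAX_NESTING_DEPTH = 5, moving a
# subtree of height 1 under a parent that has 2 ancestors gives
# new_parent_depth = 2 + 1 = 3 and deepest_new_depth = (3 + 1) + 1 = 5, which
# is still allowed; one level more would fail validation. A pure-Python sketch:
def would_exceed_max_depth(parent_ancestors: int, subtree_height: int, limit: int = 5) -> bool:
    new_parent_depth = parent_ancestors + 1            # depth of the re-parented node's parent
    return (new_parent_depth + 1) + subtree_height > limit

assert would_exceed_max_depth(2, 1) is False
assert would_exceed_max_depth(3, 1) is True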
class DocumentType(MatchingModel):
class Meta(MatchingModel.Meta):
@@ -313,15 +289,6 @@ class Document(SoftDeleteModel, ModelWithOwner):
),
)
head_version = models.ForeignKey(
"self",
blank=True,
null=True,
related_name="versions",
on_delete=models.CASCADE,
verbose_name=_("head version of document"),
)
class Meta:
ordering = ("-created",)
verbose_name = _("document")
@@ -431,15 +398,6 @@ class Document(SoftDeleteModel, ModelWithOwner):
def created_date(self):
return self.created
def add_nested_tags(self, tags):
tag_ids = set()
for tag in tags:
tag_ids.add(tag.id)
tag_ids.update(tag.get_ancestors_pks())
tags_to_add = self.tags.model.objects.filter(id__in=tag_ids)
self.tags.add(*tags_to_add)
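# Usage sketch for add_nested_tags above: given root -> child -> grandchild,
# document.add_nested_tags([grandchild]) attaches grandchild plus both of its
# ancestors in one pass (ancestor pks come from treenode's get_ancestors_pks()).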
class SavedView(ModelWithOwner):
class DisplayMode(models.TextChoices):

View File

@@ -13,7 +13,6 @@ from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.validators import DecimalValidator
from django.core.validators import MaxLengthValidator
from django.core.validators import RegexValidator
@@ -541,32 +540,6 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
text_color = serializers.SerializerMethodField()
# map to treenode's tn_parent
parent = serializers.PrimaryKeyRelatedField(
queryset=Tag.objects.all(),
allow_null=True,
required=False,
source="tn_parent",
)
@extend_schema_field(
field=serializers.ListSerializer(
child=serializers.PrimaryKeyRelatedField(
queryset=Tag.objects.all(),
),
),
)
def get_children(self, obj):
serializer = TagSerializer(
obj.get_children(),
many=True,
context=self.context,
)
return serializer.data
# children as nested Tag objects
children = serializers.SerializerMethodField()
class Meta:
model = Tag
fields = (
@@ -584,8 +557,6 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
"permissions",
"user_can_change",
"set_permissions",
"parent",
"children",
)
def validate_color(self, color):
@@ -594,36 +565,6 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
raise serializers.ValidationError(_("Invalid color."))
return color
def validate(self, attrs):
# Validate when changing parent
parent = attrs.get(
"tn_parent",
self.instance.get_parent() if self.instance else None,
)
if self.instance:
# Temporarily set parent on the instance if updating and use model clean()
original_parent = self.instance.get_parent()
try:
# Temporarily set tn_parent in-memory to validate clean()
self.instance.tn_parent = parent
self.instance.clean()
except ValidationError as e:
logger.debug("Tag parent validation failed: %s", e)
raise e
finally:
self.instance.tn_parent = original_parent
else:
# For new instances, create a transient Tag and validate
temp = Tag(tn_parent=parent)
try:
temp.clean()
except ValidationError as e:
logger.debug("Tag parent validation failed: %s", e)
raise serializers.ValidationError({"parent": _("Invalid parent tag.")})
return super().validate(attrs)
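# The validate() above reuses the model's clean() by temporarily mutating the
# in-memory instance; the finally block guarantees the original parent is
# restored even when validation raises. A generic sketch of the pattern:
#     original = obj.field
#     try:
#         obj.field = candidate
#         obj.clean()
#     finally:
#         obj.field = original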
class CorrespondentField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
@@ -974,8 +915,6 @@ class DocumentSerializer(
page_count = SerializerMethodField()
notes = NotesSerializer(many=True, required=False, read_only=True)
head_version = serializers.PrimaryKeyRelatedField(read_only=True)
versions = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
custom_fields = CustomFieldInstanceSerializer(
many=True,
@@ -1018,10 +957,6 @@ class DocumentSerializer(
request.version if request else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)
if doc.get("versions") is not None:
doc["versions"] = sorted(doc["versions"], reverse=True)
doc["versions"].append(doc["id"])
if api_version < 9:
# provide created as a datetime for backwards compatibility
from django.utils import timezone
@@ -1093,28 +1028,6 @@ class DocumentSerializer(
custom_field_instance.field,
doc_id,
)
if "tags" in validated_data:
# Respect tag hierarchy on updates:
# - Adding a child adds its ancestors
# - Removing a parent removes all its descendants
prev_tags = set(instance.tags.all())
requested_tags = set(validated_data["tags"])
# Tags being removed in this update and all descendants
removed_tags = prev_tags - requested_tags
blocked_tags = set(removed_tags)
for t in removed_tags:
blocked_tags.update(t.get_descendants())
# Add all parent tags
final_tags = set(requested_tags)
for t in requested_tags:
final_tags.update(t.get_ancestors())
# Drop removed parents and their descendants
final_tags.difference_update(blocked_tags)
validated_data["tags"] = list(final_tags)
if validated_data.get("remove_inbox_tags"):
tag_ids_being_added = (
[
@@ -1190,8 +1103,6 @@ class DocumentSerializer(
"remove_inbox_tags",
"page_count",
"mime_type",
"head_version",
"versions",
)
list_serializer_class = OwnedObjectListSerializer
@@ -1875,15 +1786,6 @@ class PostDocumentSerializer(serializers.Serializer):
return created.date()
class DocumentVersionSerializer(serializers.Serializer):
document = serializers.FileField(
label="Document",
write_only=True,
)
validate_document = PostDocumentSerializer().validate_document
class BulkDownloadSerializer(DocumentListSerializer):
content = serializers.ChoiceField(
choices=["archive", "originals", "both"],

View File

@@ -71,7 +71,7 @@ def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs):
else:
tags = Tag.objects.all()
inbox_tags = tags.filter(is_inbox_tag=True)
document.add_nested_tags(inbox_tags)
document.tags.add(*inbox_tags)
def _suggestion_printer(
@@ -260,7 +260,7 @@ def set_tags(
extra={"group": logging_group},
)
document.add_nested_tags(relevant_tags)
document.tags.add(*relevant_tags)
def set_storage_path(
@@ -767,17 +767,14 @@ def run_workflows(
def assignment_action():
if action.assign_tags.exists():
tag_ids_to_add: set[int] = set()
for tag in action.assign_tags.all():
tag_ids_to_add.add(tag.pk)
tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
if not use_overrides:
doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)
doc_tag_ids.extend(action.assign_tags.values_list("pk", flat=True))
else:
if overrides.tag_ids is None:
overrides.tag_ids = []
overrides.tag_ids = list(set(overrides.tag_ids) | tag_ids_to_add)
overrides.tag_ids.extend(
action.assign_tags.values_list("pk", flat=True),
)
if action.assign_correspondent:
if not use_overrides:
@@ -920,17 +917,14 @@ def run_workflows(
else:
overrides.tag_ids = None
else:
tag_ids_to_remove: set[int] = set()
for tag in action.remove_tags.all():
tag_ids_to_remove.add(tag.pk)
tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
if not use_overrides:
doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]
for tag in action.remove_tags.filter(
pk__in=document.tags.values_list("pk", flat=True),
):
doc_tag_ids.remove(tag.pk)
elif overrides.tag_ids:
overrides.tag_ids = [
t for t in overrides.tag_ids if t not in tag_ids_to_remove
]
for tag in action.remove_tags.filter(pk__in=overrides.tag_ids):
overrides.tag_ids.remove(tag.pk)
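# Mirror image of the assignment branch above: removal expands over
# get_descendants_pks() so stripping a parent tag also strips every child,
# whether the ids live on the document itself or on the overrides object.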
if not use_overrides and (
action.remove_all_correspondents

View File

@@ -145,17 +145,13 @@ def consume_file(
if overrides is None:
overrides = DocumentMetadataOverrides()
plugins: list[type[ConsumeTaskPlugin]] = (
[ConsumerPreflightPlugin, ConsumerPlugin]
if input_doc.head_version_id is not None
else [
ConsumerPreflightPlugin,
CollatePlugin,
BarcodePlugin,
WorkflowTriggerPlugin,
ConsumerPlugin,
]
)
plugins: list[type[ConsumeTaskPlugin]] = [
ConsumerPreflightPlugin,
CollatePlugin,
BarcodePlugin,
WorkflowTriggerPlugin,
ConsumerPlugin,
]
with (
ProgressManager(
@@ -519,51 +515,3 @@ def check_scheduled_workflows():
workflow_to_run=workflow,
document=document,
)
def update_document_parent_tags(tag: Tag, new_parent: Tag) -> None:
"""
When a tag's parent changes, ensure all documents containing the tag also have
the parent tag (and its ancestors) applied.
"""
doc_tag_relationship = Document.tags.through
doc_ids: list[int] = list(
Document.objects.filter(tags=tag).values_list("pk", flat=True),
)
if not doc_ids:
return
parent_ids = [new_parent.id, *new_parent.get_ancestors_pks()]
parent_ids = list(dict.fromkeys(parent_ids))
existing_pairs = set(
doc_tag_relationship.objects.filter(
document_id__in=doc_ids,
tag_id__in=parent_ids,
).values_list("document_id", "tag_id"),
)
to_create: list = []
affected: set[int] = set()
for doc_id in doc_ids:
for parent_id in parent_ids:
if (doc_id, parent_id) in existing_pairs:
continue
to_create.append(
doc_tag_relationship(document_id=doc_id, tag_id=parent_id),
)
affected.add(doc_id)
if to_create:
doc_tag_relationship.objects.bulk_create(
to_create,
ignore_conflicts=True,
)
if affected:
bulk_update_documents.delay(document_ids=list(affected))
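update_document_parent_tags above writes M2M rows through the auto-generated join model rather than calling tags.add() per document. A minimal sketch of that idiom (missing_pairs is an assumed precomputed list of absent (document_id, tag_id) pairs):

Through = Document.tags.through  # the auto-generated join table model
Through.objects.bulk_create(
    [Through(document_id=d, tag_id=t) for d, t in missing_pairs],
    ignore_conflicts=True,  # duplicate (document, tag) rows are skipped, not errors
)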

View File

@@ -1,5 +1,4 @@
import types
from unittest.mock import patch
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
@@ -8,9 +7,7 @@ from django.utils import timezone
from documents import index
from documents.admin import DocumentAdmin
from documents.admin import TagAdmin
from documents.models import Document
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin
from paperless.admin import PaperlessUserAdmin
@@ -73,24 +70,6 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
self.assertEqual(self.doc_admin.created_(doc), "2020-04-12")
class TestTagAdmin(DirectoriesMixin, TestCase):
def setUp(self) -> None:
super().setUp()
self.tag_admin = TagAdmin(model=Tag, admin_site=AdminSite())
@patch("documents.tasks.bulk_update_documents")
def test_parent_tags_get_added(self, mock_bulk_update):
document = Document.objects.create(title="test")
parent = Tag.objects.create(name="parent")
child = Tag.objects.create(name="child")
document.tags.add(child)
child.tn_parent = parent
self.tag_admin.save_model(None, child, None, change=True)
document.refresh_from_db()
self.assertIn(parent, document.tags.all())
class TestPaperlessAdmin(DirectoriesMixin, TestCase):
def setUp(self) -> None:
super().setUp()

View File

@@ -839,7 +839,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called()
_, kwargs = m.call_args
args, kwargs = m.call_args
self.assertEqual(kwargs["merge"], False)
response = self.client.post(
@@ -857,7 +857,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called()
_, kwargs = m.call_args
args, kwargs = m.call_args
self.assertEqual(kwargs["merge"], True)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")

View File

@@ -1529,7 +1529,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
input_doc, overrides = self.get_last_consume_delay_call_args()
new_overrides, _ = run_workflows(
new_overrides, msg = run_workflows(
trigger_type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
document=input_doc,
logging_group=None,
@@ -1638,7 +1638,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_called_once()
input_doc, _ = self.get_last_consume_delay_call_args()
input_doc, overrides = self.get_last_consume_delay_call_args()
self.assertEqual(input_doc.source, WorkflowTrigger.DocumentSourceChoices.WEB_UI)

View File

@@ -74,7 +74,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 3)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
def test_unset_correspondent(self):
@@ -82,7 +82,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
def test_set_document_type(self):
@@ -93,7 +93,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 3)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
def test_unset_document_type(self):
@@ -101,7 +101,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
def test_set_document_storage_path(self):
@@ -123,7 +123,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertEqual(Document.objects.filter(storage_path=None).count(), 4)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
@@ -154,7 +154,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)
self.async_task.assert_called()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
@@ -166,7 +166,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 4)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])
def test_remove_tag(self):
@@ -174,7 +174,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])
def test_modify_tags(self):
@@ -191,7 +191,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertCountEqual(list(self.doc3.tags.all()), [self.t2, tag_unrelated])
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
# TODO: doc3 should not be affected, but the query for that is rather complicated
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
@@ -248,7 +248,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
def test_modify_custom_fields_with_values(self):
@@ -325,7 +325,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
# removal of document link cf, should also remove symmetric link
@@ -787,8 +787,10 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_file.assert_not_called()
@mock.patch("documents.tasks.consume_file.delay")
def test_rotate(self, mock_consume_delay):
@mock.patch("documents.tasks.bulk_update_documents.si")
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
@mock.patch("celery.chord.delay")
def test_rotate(self, mock_chord, mock_update_document, mock_update_documents):
"""
GIVEN:
- Existing documents
@@ -799,22 +801,19 @@ class TestPDFActions(DirectoriesMixin, TestCase):
"""
doc_ids = [self.doc1.id, self.doc2.id]
result = bulk_edit.rotate(doc_ids, 90)
self.assertEqual(mock_consume_delay.call_count, 2)
for call, expected_id in zip(
mock_consume_delay.call_args_list,
doc_ids,
):
consumable, overrides = call.args
self.assertEqual(consumable.head_version_id, expected_id)
self.assertIsNotNone(overrides)
self.assertEqual(mock_update_document.call_count, 2)
mock_update_documents.assert_called_once()
mock_chord.assert_called_once()
self.assertEqual(result, "OK")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.bulk_update_documents.si")
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
@mock.patch("pikepdf.Pdf.save")
def test_rotate_with_error(
self,
mock_pdf_save,
mock_consume_delay,
mock_update_archive_file,
mock_update_documents,
):
"""
GIVEN:
@@ -833,12 +832,16 @@ class TestPDFActions(DirectoriesMixin, TestCase):
error_str = cm.output[0]
expected_str = "Error rotating document"
self.assertIn(expected_str, error_str)
mock_consume_delay.assert_not_called()
mock_update_archive_file.assert_not_called()
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.bulk_update_documents.si")
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
@mock.patch("celery.chord.delay")
def test_rotate_non_pdf(
self,
mock_consume_delay,
mock_chord,
mock_update_document,
mock_update_documents,
):
"""
GIVEN:
@@ -853,16 +856,14 @@ class TestPDFActions(DirectoriesMixin, TestCase):
output_str = cm.output[1]
expected_str = "Document 4 is not a PDF, skipping rotation"
self.assertIn(expected_str, output_str)
self.assertEqual(mock_consume_delay.call_count, 1)
consumable, overrides = mock_consume_delay.call_args[0]
self.assertEqual(consumable.head_version_id, self.doc2.id)
self.assertIsNotNone(overrides)
self.assertEqual(mock_update_document.call_count, 1)
mock_update_documents.assert_called_once()
mock_chord.assert_called_once()
self.assertEqual(result, "OK")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
@mock.patch("pikepdf.Pdf.save")
@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
def test_delete_pages(self, mock_magic, mock_pdf_save, mock_consume_delay):
def test_delete_pages(self, mock_pdf_save, mock_update_archive_file):
"""
GIVEN:
- Existing documents
@@ -870,22 +871,24 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- Delete pages action is called with 1 document and 2 pages
THEN:
- Save should be called once
- A new version should be enqueued via consume_file
- Archive file should be updated once
- The document's page_count should be reduced by the number of deleted pages
"""
doc_ids = [self.doc2.id]
initial_page_count = self.doc2.page_count
pages = [1, 3]
result = bulk_edit.delete_pages(doc_ids, pages)
mock_pdf_save.assert_called_once()
mock_consume_delay.assert_called_once()
consumable, overrides = mock_consume_delay.call_args[0]
self.assertEqual(consumable.head_version_id, self.doc2.id)
self.assertTrue(str(consumable.original_file).endswith("_pages_deleted.pdf"))
self.assertIsNotNone(overrides)
mock_update_archive_file.assert_called_once()
self.assertEqual(result, "OK")
@mock.patch("documents.tasks.consume_file.delay")
expected_page_count = initial_page_count - len(pages)
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.page_count, expected_page_count)
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
@mock.patch("pikepdf.Pdf.save")
def test_delete_pages_with_error(self, mock_pdf_save, mock_consume_delay):
def test_delete_pages_with_error(self, mock_pdf_save, mock_update_archive_file):
"""
GIVEN:
- Existing documents
@@ -894,7 +897,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- PikePDF raises an error
THEN:
- Save should be called once
- No new version should be enqueued
- Archive file should not be updated
"""
mock_pdf_save.side_effect = Exception("Error saving PDF")
doc_ids = [self.doc2.id]
@@ -905,7 +908,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
error_str = cm.output[0]
expected_str = "Error deleting pages from document"
self.assertIn(expected_str, error_str)
mock_consume_delay.assert_not_called()
mock_update_archive_file.assert_not_called()
@mock.patch("documents.bulk_edit.group")
@mock.patch("documents.tasks.consume_file.s")
@@ -965,18 +968,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(result, "OK")
mock_chord.assert_called_once()
@mock.patch("documents.tasks.consume_file.delay")
def test_edit_pdf_with_update_document(self, mock_consume_delay):
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
def test_edit_pdf_with_update_document(self, mock_update_document):
"""
GIVEN:
- A single existing PDF document
WHEN:
- edit_pdf is called with update_document=True and a single output
THEN:
- A version update is enqueued targeting the existing document
- The original document is updated in-place
- The update_document_content_maybe_archive_file task is triggered
"""
doc_ids = [self.doc2.id]
operations = [{"page": 1}, {"page": 2}]
original_checksum = self.doc2.checksum
original_page_count = self.doc2.page_count
result = bulk_edit.edit_pdf(
doc_ids,
@@ -986,11 +992,10 @@ class TestPDFActions(DirectoriesMixin, TestCase):
)
self.assertEqual(result, "OK")
mock_consume_delay.assert_called_once()
consumable, overrides = mock_consume_delay.call_args[0]
self.assertEqual(consumable.head_version_id, self.doc2.id)
self.assertTrue(str(consumable.original_file).endswith("_edited.pdf"))
self.assertIsNotNone(overrides)
self.doc2.refresh_from_db()
self.assertNotEqual(self.doc2.checksum, original_checksum)
self.assertNotEqual(self.doc2.page_count, original_page_count)
mock_update_document.assert_called_once_with(document_id=self.doc2.id)
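# Anatomy of the recurring assertion change in this file: Mock.call_args is an
# (args, kwargs) pair, so `args, kwargs = m.call_args` and
# `_, kwargs = m.call_args` are equivalent when only kwargs are inspected,
# and positional payloads remain reachable as m.call_args[0].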
@mock.patch("documents.bulk_edit.group")
@mock.patch("documents.tasks.consume_file.s")

View File

@@ -123,14 +123,14 @@ class TestRetagger(DirectoriesMixin, TestCase):
def test_add_type(self):
call_command("document_retagger", "--document_type")
d_first, d_second, _, _ = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertEqual(d_first.document_type, self.doctype_first)
self.assertEqual(d_second.document_type, self.doctype_second)
def test_add_correspondent(self):
call_command("document_retagger", "--correspondent")
d_first, d_second, _, _ = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertEqual(d_first.correspondent, self.correspondent_first)
self.assertEqual(d_second.correspondent, self.correspondent_second)
@@ -160,7 +160,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
def test_add_tags_suggest(self):
call_command("document_retagger", "--tags", "--suggest")
d_first, d_second, _, d_auto = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertEqual(d_first.tags.count(), 0)
self.assertEqual(d_second.tags.count(), 0)
@@ -168,14 +168,14 @@ class TestRetagger(DirectoriesMixin, TestCase):
def test_add_type_suggest(self):
call_command("document_retagger", "--document_type", "--suggest")
d_first, d_second, _, _ = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertIsNone(d_first.document_type)
self.assertIsNone(d_second.document_type)
def test_add_correspondent_suggest(self):
call_command("document_retagger", "--correspondent", "--suggest")
d_first, d_second, _, _ = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertIsNone(d_first.correspondent)
self.assertIsNone(d_second.correspondent)
@@ -187,7 +187,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
"--suggest",
"--base-url=http://localhost",
)
d_first, d_second, _, d_auto = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertEqual(d_first.tags.count(), 0)
self.assertEqual(d_second.tags.count(), 0)
@@ -200,7 +200,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
"--suggest",
"--base-url=http://localhost",
)
d_first, d_second, _, _ = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertIsNone(d_first.document_type)
self.assertIsNone(d_second.document_type)
@@ -212,7 +212,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
"--suggest",
"--base-url=http://localhost",
)
d_first, d_second, _, _ = self.get_updated_docs()
d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
self.assertIsNone(d_first.correspondent)
self.assertIsNone(d_second.correspondent)

View File

@@ -4,7 +4,6 @@ import shutil
from pathlib import Path
from unittest import mock
import pytest
from django.conf import settings
from django.test import override_settings
@@ -282,7 +281,6 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
migrate_to = "1012_fix_archive_files"
auto_migrate = False
@pytest.mark.skip(reason="Fails with migration tearDown util. Needs investigation.")
def test_archive_missing(self):
Document = self.apps.get_model("documents", "Document")
@@ -302,7 +300,6 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
self.performMigration,
)
@pytest.mark.skip(reason="Fails with migration tearDown util. Needs investigation.")
def test_parser_missing(self):
Document = self.apps.get_model("documents", "Document")

View File

@@ -1,205 +0,0 @@
from unittest import mock
from django.contrib.auth.models import User
from rest_framework.test import APITestCase
from documents import bulk_edit
from documents.models import Document
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.signals.handlers import run_workflows
class TestTagHierarchy(APITestCase):
def setUp(self):
self.user = User.objects.create_superuser(username="admin")
self.client.force_authenticate(user=self.user)
self.parent = Tag.objects.create(name="Parent")
self.child = Tag.objects.create(name="Child", tn_parent=self.parent)
patcher = mock.patch("documents.bulk_edit.bulk_update_documents.delay")
self.async_task = patcher.start()
self.addCleanup(patcher.stop)
self.document = Document.objects.create(
title="doc",
content="",
checksum="1",
mime_type="application/pdf",
)
def test_document_api_add_child_adds_parent(self):
self.client.patch(
f"/api/documents/{self.document.pk}/",
{"tags": [self.child.pk]},
format="json",
)
self.document.refresh_from_db()
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, self.child.pk}
def test_document_api_remove_parent_removes_children(self):
self.document.add_nested_tags([self.parent, self.child])
self.client.patch(
f"/api/documents/{self.document.pk}/",
{"tags": [self.child.pk]},
format="json",
)
self.document.refresh_from_db()
assert self.document.tags.count() == 0
def test_document_api_remove_parent_removes_child(self):
self.document.add_nested_tags([self.child])
self.client.patch(
f"/api/documents/{self.document.pk}/",
{"tags": []},
format="json",
)
self.document.refresh_from_db()
assert self.document.tags.count() == 0
def test_bulk_edit_respects_hierarchy(self):
bulk_edit.add_tag([self.document.pk], self.child.pk)
self.document.refresh_from_db()
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, self.child.pk}
bulk_edit.remove_tag([self.document.pk], self.parent.pk)
self.document.refresh_from_db()
assert self.document.tags.count() == 0
bulk_edit.modify_tags([self.document.pk], [self.child.pk], [])
self.document.refresh_from_db()
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, self.child.pk}
bulk_edit.modify_tags([self.document.pk], [], [self.parent.pk])
self.document.refresh_from_db()
assert self.document.tags.count() == 0
def test_workflow_actions(self):
workflow = Workflow.objects.create(name="wf", order=0)
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
assign_action = WorkflowAction.objects.create()
assign_action.assign_tags.add(self.child)
workflow.triggers.add(trigger)
workflow.actions.add(assign_action)
run_workflows(trigger.type, self.document)
self.document.refresh_from_db()
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, self.child.pk}
# removal
removal_action = WorkflowAction.objects.create(
type=WorkflowAction.WorkflowActionType.REMOVAL,
)
removal_action.remove_tags.add(self.parent)
workflow.actions.clear()
workflow.actions.add(removal_action)
run_workflows(trigger.type, self.document)
self.document.refresh_from_db()
assert self.document.tags.count() == 0
def test_tag_view_parent_update_adds_parent_to_docs(self):
orphan = Tag.objects.create(name="Orphan")
self.document.tags.add(orphan)
self.client.patch(
f"/api/tags/{orphan.pk}/",
{"parent": self.parent.pk},
format="json",
)
self.document.refresh_from_db()
tags = set(self.document.tags.values_list("pk", flat=True))
assert tags == {self.parent.pk, orphan.pk}
def test_cannot_set_parent_to_self(self):
tag = Tag.objects.create(name="Selfie")
resp = self.client.patch(
f"/api/tags/{tag.pk}/",
{"parent": tag.pk},
format="json",
)
assert resp.status_code == 400
assert "Cannot set itself as parent" in str(resp.data["parent"])
def test_cannot_set_parent_to_descendant(self):
a = Tag.objects.create(name="A")
b = Tag.objects.create(name="B", tn_parent=a)
c = Tag.objects.create(name="C", tn_parent=b)
# Attempt to set A's parent to C (descendant) should fail
resp = self.client.patch(
f"/api/tags/{a.pk}/",
{"parent": c.pk},
format="json",
)
assert resp.status_code == 400
assert "Cannot set parent to a descendant" in str(resp.data["parent"])
def test_max_depth_on_create(self):
a = Tag.objects.create(name="A1")
b = Tag.objects.create(name="B1", tn_parent=a)
c = Tag.objects.create(name="C1", tn_parent=b)
d = Tag.objects.create(name="D1", tn_parent=c)
# Creating E under D yields depth 5: allowed
resp_ok = self.client.post(
"/api/tags/",
{"name": "E1", "parent": d.pk},
format="json",
)
assert resp_ok.status_code in (200, 201)
e_id = (
resp_ok.data["id"] if resp_ok.status_code == 201 else resp_ok.data.get("id")
)
assert e_id is not None
# Creating F under E would yield depth 6: rejected
resp_fail = self.client.post(
"/api/tags/",
{"name": "F1", "parent": e_id},
format="json",
)
assert resp_fail.status_code == 400
assert "parent" in resp_fail.data
assert "Invalid" in str(resp_fail.data["parent"])
def test_max_depth_on_move_subtree(self):
a = Tag.objects.create(name="A2")
b = Tag.objects.create(name="B2", tn_parent=a)
c = Tag.objects.create(name="C2", tn_parent=b)
d = Tag.objects.create(name="D2", tn_parent=c)
x = Tag.objects.create(name="X2")
y = Tag.objects.create(name="Y2", tn_parent=x)
assert y.parent_pk == x.pk
# Moving X under D would make deepest node Y exceed depth 5 -> reject
resp_fail = self.client.patch(
f"/api/tags/{x.pk}/",
{"parent": d.pk},
format="json",
)
assert resp_fail.status_code == 400
assert "Maximum nesting depth exceeded" in str(
resp_fail.data["non_field_errors"],
)
# Moving X under C (depth 3) should be allowed (deepest becomes 5)
resp_ok = self.client.patch(
f"/api/tags/{x.pk}/",
{"parent": c.pk},
format="json",
)
assert resp_ok.status_code in (200, 202)
x.refresh_from_db()
assert x.parent_pk == c.id

View File

@@ -327,19 +327,6 @@ class TestMigrations(TransactionTestCase):
def setUpBeforeMigration(self, apps):
pass
def tearDown(self):
"""
Ensure the database schema is restored to the latest migration after
each migration test, so subsequent tests run against HEAD.
"""
try:
executor = MigrationExecutor(connection)
executor.loader.build_graph()
targets = executor.loader.graph.leaf_nodes()
executor.migrate(targets)
finally:
super().tearDown()
class SampleDirMixin:
SAMPLE_DIR = Path(__file__).parent / "samples"

View File

@@ -147,7 +147,6 @@ from documents.serialisers import CustomFieldSerializer
from documents.serialisers import DocumentListSerializer
from documents.serialisers import DocumentSerializer
from documents.serialisers import DocumentTypeSerializer
from documents.serialisers import DocumentVersionSerializer
from documents.serialisers import NotesSerializer
from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RunTaskViewSerializer
@@ -170,7 +169,6 @@ from documents.tasks import empty_trash
from documents.tasks import index_optimize
from documents.tasks import sanity_check
from documents.tasks import train_classifier
from documents.tasks import update_document_parent_tags
from documents.templating.filepath import validate_filepath_template_and_render
from documents.utils import get_boolean
from paperless import version
@@ -343,13 +341,6 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
filterset_class = TagFilterSet
ordering_fields = ("color", "name", "matching_algorithm", "match", "document_count")
def perform_update(self, serializer):
old_parent = self.get_object().get_parent()
tag = serializer.save()
new_parent = tag.get_parent()
if new_parent and old_parent != new_parent:
update_document_parent_tags(tag, new_parent)
@extend_schema_view(**generate_object_with_permissions_schema(DocumentTypeSerializer))
class DocumentTypeViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
@@ -568,7 +559,7 @@ class DocumentViewSet(
GenericViewSet,
):
model = Document
queryset = Document.objects.all()
queryset = Document.objects.annotate(num_notes=Count("notes"))
serializer_class = DocumentSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
@@ -597,8 +588,7 @@ class DocumentViewSet(
def get_queryset(self):
return (
Document.objects.filter(head_version__isnull=True)
.distinct()
Document.objects.distinct()
.order_by("-created")
.annotate(num_notes=Count("notes"))
.select_related("correspondent", "storage_path", "document_type", "owner")
@@ -660,55 +650,18 @@ class DocumentViewSet(
and request.query_params["original"] == "true"
)
def _resolve_file_doc(self, head_doc: Document, request):
version_param = request.query_params.get("version")
if version_param:
try:
version_id = int(version_param)
except (TypeError, ValueError):
raise NotFound("Invalid version parameter")
try:
candidate = Document.global_objects.select_related("owner").get(
id=version_id,
)
except Document.DoesNotExist:
raise Http404
if candidate.id != head_doc.id and candidate.head_version_id != head_doc.id:
raise Http404
return candidate
latest = head_doc.versions.order_by("id").last()
return latest or head_doc
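# Resolution rules implemented above, traced: an explicit ?version=<id> must be
# the head document itself or one of its versions, otherwise 404; with no
# version parameter the newest version row wins, and a document that has no
# versions serves itself (`latest or head_doc`).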
def file_response(self, pk, request, disposition):
request_doc = Document.global_objects.select_related("owner").get(id=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.global_objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
doc = Document.global_objects.select_related("owner").get(id=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
head_doc,
doc,
):
return HttpResponseForbidden("Insufficient permissions")
# If a version is explicitly requested, use it. Otherwise:
# - if pk is a head document: serve newest version
# - if pk is a version: serve that version
if "version" in request.query_params:
file_doc = self._resolve_file_doc(head_doc, request)
else:
file_doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)
return serve_file(
doc=file_doc,
doc=doc,
use_archive=not self.original_requested(request)
and file_doc.has_archive_version,
and doc.has_archive_version,
disposition=disposition,
)
@@ -743,33 +696,16 @@ class DocumentViewSet(
)
def metadata(self, request, pk=None):
try:
request_doc = Document.objects.select_related("owner").get(pk=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
doc = Document.objects.select_related("owner").get(pk=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
head_doc,
doc,
):
return HttpResponseForbidden("Insufficient permissions")
except Document.DoesNotExist:
raise Http404
# Choose the effective document (newest version by default, or explicit via ?version=)
if "version" in request.query_params:
doc = self._resolve_file_doc(head_doc, request)
else:
doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)
document_cached_metadata = get_metadata_cache(doc.pk)
archive_metadata = None
@@ -871,36 +807,8 @@ class DocumentViewSet(
)
def preview(self, request, pk=None):
try:
request_doc = Document.objects.select_related("owner").get(id=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
head_doc,
):
return HttpResponseForbidden("Insufficient permissions")
if "version" in request.query_params:
file_doc = self._resolve_file_doc(head_doc, request)
else:
file_doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)
return serve_file(
doc=file_doc,
use_archive=not self.original_requested(request)
and file_doc.has_archive_version,
disposition="inline",
)
response = self.file_response(pk, request, "inline")
return response
except (FileNotFoundError, Document.DoesNotExist):
raise Http404
@@ -909,32 +817,17 @@ class DocumentViewSet(
@method_decorator(last_modified(thumbnail_last_modified))
def thumb(self, request, pk=None):
try:
request_doc = Document.objects.select_related("owner").get(id=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
doc = Document.objects.select_related("owner").get(id=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
head_doc,
doc,
):
return HttpResponseForbidden("Insufficient permissions")
if "version" in request.query_params:
file_doc = self._resolve_file_doc(head_doc, request)
if doc.storage_type == Document.STORAGE_TYPE_GPG:
handle = GnuPG.decrypted(doc.thumbnail_file)
else:
file_doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)
if file_doc.storage_type == Document.STORAGE_TYPE_GPG:
handle = GnuPG.decrypted(file_doc.thumbnail_file)
else:
handle = file_doc.thumbnail_file
handle = doc.thumbnail_file
return HttpResponse(handle, content_type="image/webp")
except (FileNotFoundError, Document.DoesNotExist):
@@ -1202,56 +1095,6 @@ class DocumentViewSet(
"Error emailing document, check logs for more detail.",
)
@action(methods=["post"], detail=True)
def update_version(self, request, pk=None):
serializer = DocumentVersionSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
try:
doc = Document.objects.select_related("owner").get(pk=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"change_document",
doc,
):
return HttpResponseForbidden("Insufficient permissions")
except Document.DoesNotExist:
raise Http404
try:
doc_name, doc_data = serializer.validated_data.get("document")
t = int(mktime(datetime.now().timetuple()))
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
temp_file_path = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / Path(
pathvalidate.sanitize_filename(doc_name),
)
temp_file_path.write_bytes(doc_data)
os.utime(temp_file_path, times=(t, t))
input_doc = ConsumableDocument(
source=DocumentSource.ApiUpload,
original_file=temp_file_path,
head_version_id=doc.pk,
)
async_task = consume_file.delay(
input_doc,
)
logger.debug(
f"Updated document {doc.id} with new version",
)
return Response(async_task.id)
except Exception as e:
logger.warning(f"An error occurred updating document: {e!s}")
return HttpResponseServerError(
"Error updating document, check logs for more detail.",
)
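# Hedged usage sketch for the action above, assuming DRF's default URL naming
# for a detail @action (the exact route is inferred, not confirmed here):
#     requests.post(
#         f"{base}/api/documents/{doc_id}/update_version/",
#         files={"document": open("scan_v2.pdf", "rb")},
#         headers=auth_headers,
#     )  # responds with the Celery task id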
@extend_schema_view(
list=extend_schema(

File diff suppressed because it is too large

View File

@@ -334,7 +334,6 @@ INSTALLED_APPS = [
"allauth.mfa",
"drf_spectacular",
"drf_spectacular_sidecar",
"treenode",
*env_apps,
]

View File

@@ -21,7 +21,7 @@ TEST_CHANNEL_LAYERS = {
class TestWebSockets(TestCase):
async def test_no_auth(self):
communicator = WebsocketCommunicator(application, "/ws/status/")
connected, _ = await communicator.connect()
connected, subprotocol = await communicator.connect()
self.assertFalse(connected)
await communicator.disconnect()
@@ -31,7 +31,7 @@ class TestWebSockets(TestCase):
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
connected, _ = await communicator.connect()
connected, subprotocol = await communicator.connect()
self.assertTrue(connected)
message = {"type": "status_update", "data": {"task_id": "test"}}
@@ -63,7 +63,7 @@ class TestWebSockets(TestCase):
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
connected, _ = await communicator.connect()
connected, subprotocol = await communicator.connect()
self.assertTrue(connected)
await communicator.disconnect()
@@ -73,7 +73,7 @@ class TestWebSockets(TestCase):
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
connected, _ = await communicator.connect()
connected, subprotocol = await communicator.connect()
self.assertTrue(connected)
message = {"type": "status_update", "data": {"task_id": "test"}}
@@ -98,7 +98,7 @@ class TestWebSockets(TestCase):
communicator.scope["user"].is_superuser = False
communicator.scope["user"].id = 1
connected, _ = await communicator.connect()
connected, subprotocol = await communicator.connect()
self.assertTrue(connected)
# Test as owner
@@ -141,7 +141,7 @@ class TestWebSockets(TestCase):
_authenticated.return_value = True
communicator = WebsocketCommunicator(application, "/ws/status/")
connected, _ = await communicator.connect()
connected, subprotocol = await communicator.connect()
self.assertTrue(connected)
message = {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}}

View File

@@ -132,7 +132,7 @@ class RasterisedDocumentParser(DocumentParser):
def get_dpi(self, image) -> int | None:
try:
with Image.open(image) as im:
x, _ = im.info["dpi"]
x, y = im.info["dpi"]
return round(x)
except Exception as e:
self.log.warning(f"Error while getting DPI from image {image}: {e}")
@@ -141,7 +141,7 @@ class RasterisedDocumentParser(DocumentParser):
def calculate_a4_dpi(self, image) -> int | None:
try:
with Image.open(image) as im:
width, _ = im.size
width, height = im.size
# divide image width by A4 width (210mm) in inches.
dpi = int(width / (21 / 2.54))
self.log.debug(f"Estimated DPI {dpi} based on image width {width}")

uv.lock (generated)
View File

@@ -782,15 +782,15 @@ wheels = [
[[package]]
name = "django-guardian"
version = "3.1.3"
version = "3.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/81/d3/436a44c7688fce1a978224c349ba66c95bf9103d548596b7a2694fd58c03/django_guardian-3.1.3.tar.gz", hash = "sha256:12b5e66c18c97088b0adfa033ab14be68c321c170fd3ec438898271f00a71699", size = 93571, upload-time = "2025-09-10T08:36:23.928Z" }
sdist = { url = "https://files.pythonhosted.org/packages/28/ac/5a8f7301c0181ee9020e020a4fa519f9851726b8fd3c1177656c1f5a1be0/django_guardian-3.1.2.tar.gz", hash = "sha256:6fc93b55e5eacd1a062a959c5578b433d999a286742aa3e7e713c71046813538", size = 93422, upload-time = "2025-09-08T15:43:51.361Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/83/fc/6fd7b8bc7c52cbbfd1714673cfd28ff0b3fae32265c52d492ec0dee22cb8/django_guardian-3.1.3-py3-none-any.whl", hash = "sha256:90e28b40eea65c326a3a961908cc300f9e1cd69b74e88d38317a9befa167b71c", size = 127687, upload-time = "2025-09-10T08:36:22.533Z" },
{ url = "https://files.pythonhosted.org/packages/20/50/3a4a891f809c9865d30864f59f993f9535b18e3935dfad278c9682fc537f/django_guardian-3.1.2-py3-none-any.whl", hash = "sha256:6c10f88d0b7efd171ae65d7ac487a666f41eb373c6f94d80016b1a19bdfbf212", size = 127451, upload-time = "2025-09-08T15:43:49.987Z" },
]
[[package]]
@@ -851,15 +851,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/4e/38/2903676f97f7902ee31984a06756b0e8836e897f4b617e1a03be4a43eb4f/django_stubs_ext-5.2.2-py3-none-any.whl", hash = "sha256:8833bbe32405a2a0ce168d3f75a87168f61bd16939caf0e8bf173bccbd8a44c5", size = 8816, upload-time = "2025-07-17T08:34:33.715Z" },
]
[[package]]
name = "django-treenode"
version = "0.23.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/75/f3/274b84607fd64c0844e98659985f964190a46c2460f2523a446c4a946216/django_treenode-0.23.2.tar.gz", hash = "sha256:3c5a6ff5e0c83e34da88749f602b3013dd1ab0527f51952c616e3c21bf265d52", size = 26700, upload-time = "2025-09-04T21:16:53.497Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/61/e17d3dee5c6bb24b8faf0c101e17f9a8cafeba6384166176e066c80e8cbb/django_treenode-0.23.2-py3-none-any.whl", hash = "sha256:9363cb50f753654a9acfad6ec4df2a664a5f89dfdf8b55ffd964f27461bef85e", size = 21879, upload-time = "2025-09-04T21:16:51.811Z" },
]
[[package]]
name = "djangorestframework"
version = "3.16.1"
@@ -1709,7 +1700,7 @@ wheels = [
[[package]]
name = "mkdocs-material"
version = "9.6.20"
version = "9.6.19"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -1725,9 +1716,9 @@ dependencies = [
{ name = "pymdown-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/ee/6ed7fc739bd7591485c8bec67d5984508d3f2733e708f32714c21593341a/mkdocs_material-9.6.20.tar.gz", hash = "sha256:e1f84d21ec5fb730673c4259b2e0d39f8d32a3fef613e3a8e7094b012d43e790", size = 4037822, upload-time = "2025-09-15T08:48:01.816Z" }
sdist = { url = "https://files.pythonhosted.org/packages/44/94/eb0fca39b19c2251b16bc759860a50f232655c4377116fa9c0e7db11b82c/mkdocs_material-9.6.19.tar.gz", hash = "sha256:80e7b3f9acabfee9b1f68bd12c26e59c865b3d5bbfb505fd1344e970db02c4aa", size = 4038202, upload-time = "2025-09-07T17:46:40.468Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/67/d8/a31dd52e657bf12b20574706d07df8d767e1ab4340f9bfb9ce73950e5e59/mkdocs_material-9.6.20-py3-none-any.whl", hash = "sha256:b8d8c8b0444c7c06dd984b55ba456ce731f0035c5a1533cc86793618eb1e6c82", size = 9193367, upload-time = "2025-09-15T08:47:58.722Z" },
{ url = "https://files.pythonhosted.org/packages/02/23/a2551d1038bedc2771366f65ff3680bb3a89674cd7ca6140850c859f1f71/mkdocs_material-9.6.19-py3-none-any.whl", hash = "sha256:7492d2ac81952a467ca8a10cac915d6ea5c22876932f44b5a0f4f8e7d68ac06f", size = 9240205, upload-time = "2025-09-07T17:46:36.484Z" },
]
[[package]]
@@ -1991,7 +1982,7 @@ wheels = [
[[package]]
name = "ocrmypdf"
version = "16.11.0"
version = "16.10.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecation", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2004,9 +1995,9 @@ dependencies = [
{ name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/44/af/947d6abb0cb41f99971a7a4bd33684d3cee20c9e32c8f9dc90e8c5dcf21c/ocrmypdf-16.11.0.tar.gz", hash = "sha256:d89077e503238dac35c6e565925edc8d98b71e5289853c02cacbc1d0901f1be7", size = 7015068, upload-time = "2025-09-12T08:36:53.507Z" }
sdist = { url = "https://files.pythonhosted.org/packages/cd/40/cb85e6260e5a20d08195d03541b31db4296f8f4d3442ee595686f47a75b0/ocrmypdf-16.10.4.tar.gz", hash = "sha256:de749ef5f554b63d57e68d032e7cba5500cbd5030835bf24f658f7b7a04f3dc1", size = 7003649, upload-time = "2025-07-07T20:55:01.735Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d9/b2/eda3bb0939bf81d889812dd82cf37fa6f8769af8e31008bd586ba12fae09/ocrmypdf-16.11.0-py3-none-any.whl", hash = "sha256:13628294a309c85b21947b5c7bc7fcd202464517c14b71a050adc9dde85c48f7", size = 162883, upload-time = "2025-09-12T08:36:51.611Z" },
{ url = "https://files.pythonhosted.org/packages/8e/6a/53bb2b0e57f8ca8d4a021194202cc772d1ce049269e9b0cb88d1fa87a0ef/ocrmypdf-16.10.4-py3-none-any.whl", hash = "sha256:061f3165d09ffafac975cea00803802b8a75551ada9965292ea86ea382673688", size = 162559, upload-time = "2025-07-07T20:55:00.061Z" },
]
[[package]]
@@ -2051,7 +2042,6 @@ dependencies = [
{ name = "django-guardian", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-multiselectfield", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-soft-delete", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-treenode", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "djangorestframework", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "djangorestframework-guardian", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "drf-spectacular", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2188,7 +2178,6 @@ requires-dist = [
{ name = "django-guardian", specifier = "~=3.1.2" },
{ name = "django-multiselectfield", specifier = "~=1.0.1" },
{ name = "django-soft-delete", specifier = "~=1.0.18" },
{ name = "django-treenode", specifier = ">=0.23.2" },
{ name = "djangorestframework", specifier = "~=3.16" },
{ name = "djangorestframework-guardian", specifier = "~=0.4.0" },
{ name = "drf-spectacular", specifier = "~=0.28" },
@@ -2205,7 +2194,7 @@ requires-dist = [
{ name = "langdetect", specifier = "~=1.0.9" },
{ name = "mysqlclient", marker = "extra == 'mariadb'", specifier = "~=2.2.7" },
{ name = "nltk", specifier = "~=3.9.1" },
{ name = "ocrmypdf", specifier = "~=16.11.0" },
{ name = "ocrmypdf", specifier = "~=16.10.0" },
{ name = "pathvalidate", specifier = "~=3.3.1" },
{ name = "pdf2image", specifier = "~=1.17.0" },
{ name = "psycopg", extras = ["c", "pool"], marker = "extra == 'postgres'", specifier = "==3.2.9" },
@@ -2253,7 +2242,7 @@ dev = [
{ name = "pytest-rerunfailures" },
{ name = "pytest-sugar" },
{ name = "pytest-xdist" },
{ name = "ruff", specifier = "~=0.13.0" },
{ name = "ruff", specifier = "~=0.12.2" },
]
docs = [
{ name = "mkdocs-glightbox", specifier = "~=0.5.1" },
@@ -2262,7 +2251,7 @@ docs = [
lint = [
{ name = "pre-commit", specifier = "~=4.3.0" },
{ name = "pre-commit-uv", specifier = "~=4.1.3" },
{ name = "ruff", specifier = "~=0.13.0" },
{ name = "ruff", specifier = "~=0.12.2" },
]
testing = [
{ name = "daphne" },
@@ -3291,25 +3280,25 @@ wheels = [
[[package]]
name = "ruff"
version = "0.13.0"
version = "0.12.12"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
{ url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
{ url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
{ url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
{ url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
{ url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
{ url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
{ url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
{ url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
{ url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
{ url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
{ url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
{ url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
{ url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
{ url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
{ url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" },
{ url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" },
{ url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" },
{ url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" },
{ url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" },
{ url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" },
{ url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" },
{ url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" },
{ url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" },
{ url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" },
{ url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" },
{ url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" },
{ url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" },
{ url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" },
{ url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" },
]

[[package]]