Compare commits

...

38 Commits

Author SHA1 Message Date
shamoon
ece061d41d Ah, thats why this is here 2026-01-30 18:51:02 -08:00
shamoon
b9b22778fc Apply suggestions 2026-01-30 17:43:43 -08:00
shamoon
04abdade1a Remove perf script 2026-01-30 13:09:50 -08:00
shamoon
6a16d5c63e Ignore coverage for perf script 2026-01-30 12:23:56 -08:00
shamoon
e0ff7244ab Update script to always report counts 2026-01-30 08:50:02 -08:00
shamoon
e08af2f726 Simplify the superuser pathway 2026-01-30 08:34:12 -08:00
shamoon
cb2f15689f Minor refactor 2026-01-30 08:19:22 -08:00
shamoon
0a50d3bded Add correspondents to script 2026-01-30 08:11:44 -08:00
shamoon
e9a21088db More cleanup 2026-01-30 01:16:09 -08:00
shamoon
06bed0f634 Fix cf serializer 2026-01-30 01:16:09 -08:00
shamoon
bcfd80ed6f Maybe more DRY 2026-01-30 01:16:08 -08:00
shamoon
14440b9bc8 Optimize tag/custom-field counts with subqueries 2026-01-30 01:16:08 -08:00
shamoon
9962f3d0a3 Fix
[ci skip]
2026-01-30 01:01:26 -08:00
shamoon
2b0d80dc9a Performance script, available as management command
[ci skip]
2026-01-30 01:01:13 -08:00
shamoon
e4b861d76f Fix: prevent note deletion outside doc 2026-01-29 13:35:01 -08:00
shamoon
6913f9d79c Fix: fix user checks in management scripts (#11928) 2026-01-28 13:45:12 -08:00
shamoon
891f4a2faf Fix: correctly extract all ids for nested tags (#11888) 2026-01-26 09:12:03 -08:00
shamoon
2312314aa7 Performance: improve treenode inefficiencies (#11606) 2026-01-25 21:47:08 -08:00
shamoon
72e8b73108 Fix test 2026-01-25 17:08:15 -08:00
shamoon
5c9ff367e3 Fixhancement: change date calculation for 'this year' to include future documents (#11884) 2026-01-25 16:56:51 -08:00
Trenton H
94f6b8d36d Fixes the management scripts under a non-root install where the user ID is something besides 1000 (#11870) 2026-01-23 16:08:28 -08:00
shamoon
32d04e1fd3 Fix: use correct field id for overrides (#11869) 2026-01-23 15:49:22 -08:00
Trenton H
56c744fd56 Fixes the spelling of the commitish argument to the action 2026-01-23 15:49:00 -08:00
shamoon
d1aa76e4ce Narrow scope of these css rules 2026-01-20 12:30:06 -08:00
shamoon
5381bc5907 Fix: fix tag list horizontal scroll, again (#11839) 2026-01-20 12:30:06 -08:00
shamoon
771f3f150a Bump version to 2.20.5 2026-01-19 09:18:23 -08:00
shamoon
ecfeff5054 Chore: reverse migration order (#11813) 2026-01-18 11:21:35 -08:00
shamoon
37477d391e Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent (#11811) 2026-01-18 08:22:01 -08:00
Trenton H
2f1cd31e31 Adds the release-drafter commitish filtering to perhaps generate the release notes better 2026-01-16 07:42:54 -08:00
shamoon
742c136773 Fix: use explicit order field for workflow actions (#11781) 2026-01-16 07:39:00 -08:00
shamoon
3618c50b62 Bump version to 2.20.4 2026-01-13 10:01:42 -08:00
shamoon
6f4497185e Fix merge conflict 2026-01-13 10:01:41 -08:00
shamoon
e816269db5 Fix: recurring workflow to respect latest run time (#11735) 2026-01-13 09:36:53 -08:00
shamoon
d4e60e13bf Fixhancement: add error handling and retry when opening index (#11731) 2026-01-13 09:36:44 -08:00
shamoon
cb091665e2 Fix: validate cf integer values within PostgreSQL range (#11666) 2026-01-13 09:36:29 -08:00
shamoon
00bb92e3e1 Fix: support ordering by storage path name (#11661) 2026-01-13 09:36:14 -08:00
shamoon
11ec676909 Fix: propagate metadata override created value (#11659) 2026-01-13 09:36:07 -08:00
shamoon
7c457466b7 Security: prevent path traversal in storage paths 2026-01-13 09:29:48 -08:00
46 changed files with 677 additions and 125 deletions

View File

@@ -44,6 +44,7 @@ include-labels:
   - 'notable'
 exclude-labels:
   - 'skip-changelog'
+filter-by-commitish: true
 category-template: '### $TITLE'
 change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
 change-title-escapes: '\<*_&#@'

View File

@@ -617,6 +617,7 @@ jobs:
           version: ${{ steps.get_version.outputs.version }}
           prerelease: ${{ steps.get_version.outputs.prerelease }}
           publish: true # ensures release is not marked as draft
+          commitish: ${{ github.sha }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       - name: Upload release archive

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py management_command "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py management_command "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py management_command "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py convert_mariadb_uuid "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py convert_mariadb_uuid "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py convert_mariadb_uuid "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py createsuperuser "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py createsuperuser "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py createsuperuser "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py decrypt_documents "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py decrypt_documents "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py decrypt_documents "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_archiver "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_archiver "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_archiver "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,17 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_create_classifier "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_create_classifier "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_create_classifier "$@"
 else
     echo "Unknown user."
+    exit 1
+fi
+er "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    s6-setuidgid paperless python3 manage.py document_create_classifier "$@"
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_exporter "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_exporter "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_exporter "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_fuzzy_match "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_fuzzy_match "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_fuzzy_match "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_importer "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_importer "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_importer "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_index "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_index "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_index "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_renamer "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_renamer "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_renamer "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_retagger "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_retagger "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_retagger "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_sanity_checker "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_sanity_checker "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_sanity_checker "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py document_thumbnails "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py document_thumbnails "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py document_thumbnails "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py mail_fetcher "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py mail_fetcher "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py mail_fetcher "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py manage_superuser "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py manage_superuser "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py manage_superuser "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -5,10 +5,13 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ $(id -u) == 0 ]]; then
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    python3 manage.py prune_audit_logs "$@"
+elif [[ $(id -u) == 0 ]]; then
     s6-setuidgid paperless python3 manage.py prune_audit_logs "$@"
 elif [[ $(id -un) == "paperless" ]]; then
     python3 manage.py prune_audit_logs "$@"
 else
     echo "Unknown user."
+    exit 1
 fi

View File

@@ -1,6 +1,6 @@
 [project]
 name = "paperless-ngx"
-version = "2.20.3"
+version = "2.20.5"
 description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
 readme = "README.md"
 requires-python = ">=3.10"
@@ -238,7 +238,7 @@ lint.isort.force-single-line = true
 [tool.codespell]
 write-changes = true
-ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober"
+ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
 skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"

 [tool.pytest.ini_options]

View File

@@ -1,6 +1,6 @@
 {
   "name": "paperless-ngx-ui",
-  "version": "2.20.3",
+  "version": "2.20.5",
   "scripts": {
     "preinstall": "npx only-allow pnpm",
     "ng": "ng",

View File

@@ -252,7 +252,7 @@ describe('WorkflowEditDialogComponent', () => {
     expect(component.object.actions.length).toEqual(2)
   })

-  it('should update order and remove ids from actions on drag n drop', () => {
+  it('should update order on drag n drop', () => {
     const action1 = workflow.actions[0]
     const action2 = workflow.actions[1]
     component.object = workflow
@@ -261,8 +261,6 @@
       WorkflowAction[]
     >)
     expect(component.object.actions).toEqual([action2, action1])
-    expect(action1.id).toBeNull()
-    expect(action2.id).toBeNull()
   })

   it('should not include auto matching in algorithms', () => {

View File

@@ -1283,11 +1283,6 @@ export class WorkflowEditDialogComponent
     const actionField = this.actionFields.at(event.previousIndex)
     this.actionFields.removeAt(event.previousIndex)
     this.actionFields.insert(event.currentIndex, actionField)
-    // removing id will effectively re-create the actions in this order
-    this.object.actions.forEach((a) => (a.id = null))
-    this.actionFields.controls.forEach((c) =>
-      c.get('id').setValue(null, { emitEvent: false })
-    )
   }

   save(): void {

View File

@@ -28,7 +28,7 @@
   </button>
 </ng-template>
 <ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
-  <div class="tag-option-row d-flex align-items-center">
+  <div class="tag-option-row d-flex align-items-center" [class.w-auto]="!getTag(item.id)?.parent">
     @if (item.id && tags) {
       @if (getTag(item.id)?.parent) {
         <i-bs name="list-nested" class="me-1"></i-bs>

View File

@@ -22,8 +22,8 @@
 }

 // Dropdown hierarchy reveal for ng-select options
-::ng-deep .ng-dropdown-panel .ng-option {
-  overflow-x: scroll;
+:host ::ng-deep .ng-dropdown-panel .ng-option {
+  overflow-x: auto !important;

   .tag-option-row {
     font-size: 1rem;
@@ -41,12 +41,12 @@
   }
 }

-::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
-::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
+:host ::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
+:host ::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
   max-width: 1000px;
 }

-::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-indicator,
-::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
+:host ::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-indicator,
+:host ::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
   background: transparent;
 }

View File

@@ -229,6 +229,21 @@ describe('ManagementListComponent', () => {
     expect(reloadSpy).toHaveBeenCalled()
   })

+  it('should use the all list length for collection size when provided', fakeAsync(() => {
+    jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
+      of({
+        count: 1,
+        all: [1, 2, 3],
+        results: tags.slice(0, 1),
+      })
+    )
+    component.reloadData()
+    tick(100)
+    expect(component.collectionSize).toBe(3)
+  }))
+
   it('should support quick filter for objects', () => {
     const qfSpy = jest.spyOn(documentListViewService, 'quickFilter')
     const filterButton = fixture.debugElement.queryAll(By.css('button'))[9]

View File

@@ -171,7 +171,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
         tap((c) => {
           this.unfilteredData = c.results
           this.data = this.filterData(c.results)
-          this.collectionSize = c.count
+          this.collectionSize = c.all?.length ?? c.count
         }),
         delay(100)
       )

View File

@@ -6,7 +6,7 @@ export const environment = {
   apiVersion: '9', // match src/paperless/settings.py
   appTitle: 'Paperless-ngx',
   tag: 'prod',
-  version: '2.20.3',
+  version: '2.20.5',
   webSocketHost: window.location.host,
   webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
   webSocketBaseUrl: base_url.pathname + 'ws/',

View File

@@ -22,7 +22,7 @@ class DocumentMetadataOverrides:
     document_type_id: int | None = None
     tag_ids: list[int] | None = None
     storage_path_id: int | None = None
-    created: datetime.datetime | None = None
+    created: datetime.date | None = None
     asn: int | None = None
     owner_id: int | None = None
     view_users: list[int] | None = None
@@ -100,6 +100,7 @@ class DocumentMetadataOverrides:
         overrides.storage_path_id = doc.storage_path.id if doc.storage_path else None
         overrides.owner_id = doc.owner.id if doc.owner else None
         overrides.tag_ids = list(doc.tags.values_list("id", flat=True))
+        overrides.created = doc.created
         overrides.view_users = list(
             get_users_with_perms(
@@ -114,7 +115,7 @@
             ).values_list("id", flat=True),
         )
         overrides.custom_fields = {
-            custom_field.id: custom_field.value
+            custom_field.field.id: custom_field.value
             for custom_field in doc.custom_fields.all()
         }
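
Note on the `custom_field.field.id` fix above: the overrides map must be keyed by the CustomField definition's id, not by the pk of the CustomFieldInstance row holding the value. A minimal self-contained sketch (dataclasses stand in for the Django models; names mirror the diff):

from dataclasses import dataclass


@dataclass
class CustomField:
    id: int
    name: str


@dataclass
class CustomFieldInstance:
    id: int  # pk of the value row, unrelated to the field definition's pk
    field: CustomField
    value: object


field = CustomField(id=7, name="invoice_number")
row = CustomFieldInstance(id=1234, field=field, value="INV-42")

overrides = {}
# Before the fix: overrides[row.id] = row.value keyed by the row pk (1234),
# so consumers looking up by field id (7) missed the value.
overrides[row.field.id] = row.value
assert overrides == {7: "INV-42"}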

View File

@@ -10,6 +10,7 @@ from datetime import time
 from datetime import timedelta
 from datetime import timezone
 from shutil import rmtree
+from time import sleep
 from typing import TYPE_CHECKING
 from typing import Literal
@@ -32,6 +33,7 @@ from whoosh.highlight import HtmlFormatter
 from whoosh.idsets import BitSet
 from whoosh.idsets import DocIdSet
 from whoosh.index import FileIndex
+from whoosh.index import LockError
 from whoosh.index import create_in
 from whoosh.index import exists_in
 from whoosh.index import open_dir
@@ -97,11 +99,33 @@ def get_schema() -> Schema:

 def open_index(*, recreate=False) -> FileIndex:
-    try:
-        if exists_in(settings.INDEX_DIR) and not recreate:
-            return open_dir(settings.INDEX_DIR, schema=get_schema())
-    except Exception:
-        logger.exception("Error while opening the index, recreating.")
+    transient_exceptions = (FileNotFoundError, LockError)
+    max_retries = 3
+    retry_delay = 0.1
+
+    for attempt in range(max_retries + 1):
+        try:
+            if exists_in(settings.INDEX_DIR) and not recreate:
+                return open_dir(settings.INDEX_DIR, schema=get_schema())
+            break
+        except transient_exceptions as exc:
+            is_last_attempt = attempt == max_retries or recreate
+            if is_last_attempt:
+                logger.exception(
+                    "Error while opening the index after retries, recreating.",
+                )
+                break
+            logger.warning(
+                "Transient error while opening the index (attempt %s/%s): %s. Retrying.",
+                attempt + 1,
+                max_retries + 1,
+                exc,
+            )
+            sleep(retry_delay)
+        except Exception:
+            logger.exception("Error while opening the index, recreating.")
+            break

     # create_in doesn't handle corrupted indexes very well, remove the directory entirely first
     if settings.INDEX_DIR.is_dir():
@@ -578,7 +602,7 @@ def rewrite_natural_date_keywords(query_string: str) -> str:
         case "this year":
             start = datetime(local_now.year, 1, 1, 0, 0, 0, tzinfo=tz)
-            end = datetime.combine(today, time.max, tzinfo=tz)
+            end = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)
         case "previous week":
             days_since_monday = local_now.weekday()
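
Note on the retry loop above: the pattern is a bounded retry on exceptions treated as transient, falling through to the recreate path once retries are exhausted. A stripped-down, runnable sketch (open_fn and the exception tuple are placeholders, not the real whoosh calls):

import time


def open_with_retries(open_fn, transient=(FileNotFoundError,), max_retries=3, delay=0.1):
    for attempt in range(max_retries + 1):
        try:
            return open_fn()
        except transient:
            if attempt == max_retries:
                # Out of retries: let the caller rebuild from scratch.
                raise
            time.sleep(delay)


# Usage: a flaky opener that succeeds on the second call.
calls = {"n": 0}


def flaky_open():
    calls["n"] += 1
    if calls["n"] < 2:
        raise FileNotFoundError("segment missing")
    return "index"


assert open_with_retries(flaky_open) == "index"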

View File

@@ -0,0 +1,28 @@
+# Generated by Django 5.2.7 on 2026-01-14 16:53
+
+from django.db import migrations
+from django.db import models
+from django.db.models import F
+
+
+def populate_action_order(apps, schema_editor):
+    WorkflowAction = apps.get_model("documents", "WorkflowAction")
+    WorkflowAction.objects.all().update(order=F("id"))
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="workflowaction",
+            name="order",
+            field=models.PositiveIntegerField(default=0, verbose_name="order"),
+        ),
+        migrations.RunPython(
+            populate_action_order,
+            reverse_code=migrations.RunPython.noop,
+        ),
+    ]
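
Note on the data migration above: `update(order=F("id"))` backfills the new column in a single database-side UPDATE, so existing actions keep their creation order (pk order) as their initial explicit order. A plain-Python analogue of the same semantics (dicts stand in for WorkflowAction rows; illustrative only):

rows = [{"id": 3}, {"id": 1}, {"id": 2}]
for row in rows:
    row["order"] = row["id"]  # per-row effect of .update(order=F("id"))
assert [r["id"] for r in sorted(rows, key=lambda r: (r["order"], r["id"]))] == [1, 2, 3]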

View File

@@ -1294,6 +1294,8 @@ class WorkflowAction(models.Model):
         default=WorkflowActionType.ASSIGNMENT,
     )

+    order = models.PositiveIntegerField(_("order"), default=0)
+
     assign_title = models.TextField(
         _("assign title"),
         null=True,

View File

@@ -2,10 +2,17 @@ from django.contrib.auth.models import Group
 from django.contrib.auth.models import Permission
 from django.contrib.auth.models import User
 from django.contrib.contenttypes.models import ContentType
+from django.db.models import Count
+from django.db.models import IntegerField
+from django.db.models import OuterRef
 from django.db.models import Q
 from django.db.models import QuerySet
+from django.db.models import Subquery
+from django.db.models.functions import Cast
+from django.db.models.functions import Coalesce
 from guardian.core import ObjectPermissionChecker
 from guardian.models import GroupObjectPermission
+from guardian.models import UserObjectPermission
 from guardian.shortcuts import assign_perm
 from guardian.shortcuts import get_objects_for_user
 from guardian.shortcuts import get_users_with_perms
@@ -129,23 +136,93 @@ def set_permissions_for_object(permissions: dict, object, *, merge: bool = False
     )


+def _permitted_document_ids(user):
+    """
+    Return a queryset of document IDs the user may view, limited to non-deleted
+    documents. This intentionally avoids ``get_objects_for_user`` to keep the
+    subquery small and index-friendly.
+    """
+    base_docs = Document.objects.filter(deleted_at__isnull=True).only("id", "owner")
+
+    if user is None or not getattr(user, "is_authenticated", False):
+        # Just Anonymous user e.g. for drf-spectacular
+        return base_docs.filter(owner__isnull=True).values_list("id", flat=True)
+
+    if getattr(user, "is_superuser", False):
+        return base_docs.values_list("id", flat=True)
+
+    document_ct = ContentType.objects.get_for_model(Document)
+    perm_filter = {
+        "permission__codename": "view_document",
+        "permission__content_type": document_ct,
+    }
+
+    user_perm_docs = (
+        UserObjectPermission.objects.filter(user=user, **perm_filter)
+        .annotate(object_pk_int=Cast("object_pk", IntegerField()))
+        .values_list("object_pk_int", flat=True)
+    )
+    group_perm_docs = (
+        GroupObjectPermission.objects.filter(group__user=user, **perm_filter)
+        .annotate(object_pk_int=Cast("object_pk", IntegerField()))
+        .values_list("object_pk_int", flat=True)
+    )
+    permitted_documents = user_perm_docs.union(group_perm_docs)
+
+    return base_docs.filter(
+        Q(owner=user) | Q(owner__isnull=True) | Q(id__in=permitted_documents),
+    ).values_list("id", flat=True)
+
+
 def get_document_count_filter_for_user(user):
     """
     Return the Q object used to filter document counts for the given user.
+
+    The filter is expressed as an ``id__in`` against a small subquery of permitted
+    document IDs to keep the generated SQL simple and avoid large OR clauses.
     """
+    if user is None or not getattr(user, "is_authenticated", False):
+        return Q(documents__deleted_at__isnull=True, documents__owner__isnull=True)
+
     if getattr(user, "is_superuser", False):
+        # Superuser: no permission filtering needed
         return Q(documents__deleted_at__isnull=True)
-    return Q(
-        documents__deleted_at__isnull=True,
-        documents__id__in=get_objects_for_user_owner_aware(
-            user,
-            "documents.view_document",
-            Document,
-        ).values_list("id", flat=True),
-    )
+
+    permitted_ids = _permitted_document_ids(user)
+    return Q(documents__id__in=permitted_ids)
+
+
+def annotate_document_count_for_related_queryset(
+    queryset,
+    through_model,
+    source_field: str,
+    target_field: str = "document_id",
+    user=None,
+):
+    """
+    Annotate a queryset with permissions-aware document counts using a subquery
+    against a relation table.
+
+    Args:
+        queryset: base queryset to annotate (must contain pk)
+        through_model: model representing the relation (e.g., Document.tags.through
+            or CustomFieldInstance)
+        source_field: field on the relation pointing back to queryset pk
+        target_field: field on the relation pointing to Document id
+        user: the user for whom to filter permitted document ids
+    """
+    permitted_ids = _permitted_document_ids(user)
+    counts = (
+        through_model.objects.filter(
+            **{source_field: OuterRef("pk"), f"{target_field}__in": permitted_ids},
+        )
+        .values(source_field)
+        .annotate(c=Count(target_field))
+        .values("c")
+    )
+    return queryset.annotate(document_count=Coalesce(Subquery(counts[:1]), 0))


 def get_objects_for_user_owner_aware(user, perms, Model) -> QuerySet:
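
Note on `_permitted_document_ids` above: the visibility rule it encodes is owner, unowned, or explicitly granted (directly or via a group). A plain-Python analogue with sets standing in for the querysets (all names illustrative; the superuser short-circuit is omitted):

def permitted_document_ids(user, docs, user_grants, group_grants, user_groups):
    if user is None:
        return {d["id"] for d in docs if d["owner"] is None}
    granted = user_grants.get(user, set()).union(
        *(group_grants.get(g, set()) for g in user_groups.get(user, []))
    )
    return {
        d["id"]
        for d in docs
        if d["owner"] == user or d["owner"] is None or d["id"] in granted
    }


docs = [{"id": 1, "owner": "alice"}, {"id": 2, "owner": None}, {"id": 3, "owner": "bob"}]
assert permitted_document_ids("alice", docs, {"alice": {3}}, {}, {"alice": []}) == {1, 2, 3}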

View File

@@ -18,6 +18,8 @@ from django.core.exceptions import ValidationError
 from django.core.validators import DecimalValidator
 from django.core.validators import EmailValidator
 from django.core.validators import MaxLengthValidator
+from django.core.validators import MaxValueValidator
+from django.core.validators import MinValueValidator
 from django.core.validators import RegexValidator
 from django.core.validators import integer_validator
 from django.db.models import Count
@@ -578,6 +580,10 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
         ),
     )

     def get_children(self, obj):
+        children_map = self.context.get("children_map")
+        if children_map is not None:
+            children = children_map.get(obj.pk, [])
+        else:
             filter_q = self.context.get("document_count_filter")
             request = self.context.get("request")
             if filter_q is None:
@@ -585,7 +591,7 @@
                 user = request.user if request else None
                 filter_q = get_document_count_filter_for_user(user)
                 self.context["document_count_filter"] = filter_q
-            children_queryset = (
+            children = (
                 obj.get_children_queryset()
                 .select_related("owner")
                 .annotate(document_count=Count("documents", filter=filter_q))
@@ -593,15 +599,15 @@
             )
             view = self.context.get("view")
             ordering = (
-                OrderingFilter().get_ordering(request, children_queryset, view)
+                OrderingFilter().get_ordering(request, children, view)
                 if request and view
                 else None
             )
             ordering = ordering or (Lower("name"),)
-            children_queryset = children_queryset.order_by(*ordering)
+            children = children.order_by(*ordering)

         serializer = TagSerializer(
-            children_queryset,
+            children,
             many=True,
             user=self.user,
             full_perms=self.full_perms,
@@ -693,6 +699,9 @@ class StoragePathField(serializers.PrimaryKeyRelatedField):

 class CustomFieldSerializer(serializers.ModelSerializer):
     def __init__(self, *args, **kwargs):
+        # Ignore args passed by permissions mixin
+        kwargs.pop("user", None)
+        kwargs.pop("full_perms", None)
         context = kwargs.get("context")
         self.api_version = int(
             context.get("request").version
@@ -875,6 +884,13 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
             uri_validator(data["value"])
         elif field.data_type == CustomField.FieldDataType.INT:
             integer_validator(data["value"])
+            try:
+                value_int = int(data["value"])
+            except (TypeError, ValueError):
+                raise serializers.ValidationError("Enter a valid integer.")
+            # Keep values within the PostgreSQL integer range
+            MinValueValidator(-2147483648)(value_int)
+            MaxValueValidator(2147483647)(value_int)
         elif (
             field.data_type == CustomField.FieldDataType.MONETARY
             and data["value"] != ""
@@ -2553,7 +2569,8 @@ class WorkflowSerializer(serializers.ModelSerializer):
                     set_triggers.append(trigger_instance)

         if actions is not None and actions is not serializers.empty:
-            for action in actions:
+            for index, action in enumerate(actions):
+                action["order"] = index
                 assign_tags = action.pop("assign_tags", None)
                 assign_view_users = action.pop("assign_view_users", None)
                 assign_view_groups = action.pop("assign_view_groups", None)
@@ -2680,6 +2697,16 @@
         return instance

+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        actions = instance.actions.order_by("order", "pk")
+        data["actions"] = WorkflowActionSerializer(
+            actions,
+            many=True,
+            context=self.context,
+        ).data
+        return data
+

 class TrashSerializer(SerializerWithPerms):
     documents = serializers.ListField(
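
Note on the INT bounds check above: PostgreSQL's integer column is 4 bytes, so anything outside [-2**31, 2**31 - 1] would overflow at the database layer; the serializer now rejects it during validation instead. The check in isolation, runnable without Django:

PG_INT_MIN, PG_INT_MAX = -(2**31), 2**31 - 1


def validate_pg_int(value) -> int:
    number = int(value)  # raises TypeError/ValueError for non-integers
    if not PG_INT_MIN <= number <= PG_INT_MAX:
        raise ValueError(f"{number} is outside the PostgreSQL integer range")
    return number


assert validate_pg_int("2147483647") == PG_INT_MAX
try:
    validate_pg_int(2**31)  # the overflow value the new API test PATCHes
except ValueError:
    pass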

View File

@@ -418,7 +418,15 @@ def update_filename_and_move_files(
             return
         instance = instance.document

-    def validate_move(instance, old_path: Path, new_path: Path):
+    def validate_move(instance, old_path: Path, new_path: Path, root: Path):
+        if not new_path.is_relative_to(root):
+            msg = (
+                f"Document {instance!s}: Refusing to move file outside root {root}: "
+                f"{new_path}."
+            )
+            logger.warning(msg)
+            raise CannotMoveFilesException(msg)
+
         if not old_path.is_file():
             # Can't do anything if the old file does not exist anymore.
             msg = f"Document {instance!s}: File {old_path} doesn't exist."
@@ -507,12 +515,22 @@
                 return

             if move_original:
-                validate_move(instance, old_source_path, instance.source_path)
+                validate_move(
+                    instance,
+                    old_source_path,
+                    instance.source_path,
+                    settings.ORIGINALS_DIR,
+                )
                 create_source_path_directory(instance.source_path)
                 shutil.move(old_source_path, instance.source_path)

             if move_archive:
-                validate_move(instance, old_archive_path, instance.archive_path)
+                validate_move(
+                    instance,
+                    old_archive_path,
+                    instance.archive_path,
+                    settings.ARCHIVE_DIR,
+                )
                 create_source_path_directory(instance.archive_path)
                 shutil.move(old_archive_path, instance.archive_path)
@@ -751,7 +769,7 @@ def run_workflows(
         if matching.document_matches_workflow(document, workflow, trigger_type):
             action: WorkflowAction
-            for action in workflow.actions.all():
+            for action in workflow.actions.order_by("order", "pk"):
                 message = f"Applying {action} from {workflow}"
                 if not use_overrides:
                     logger.info(message, extra={"group": logging_group})
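
Note on the `root` parameter added to validate_move above: `Path.is_relative_to` (Python 3.9+) is a pure path-containment test, so a rename that would land outside ORIGINALS_DIR or ARCHIVE_DIR now raises before any file is moved. Illustrative paths:

from pathlib import PurePosixPath

root = PurePosixPath("/data/originals")
assert PurePosixPath("/data/originals/2026/doc.pdf").is_relative_to(root)
assert not PurePosixPath("/etc/passwd").is_relative_to(root)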

View File

@@ -493,7 +493,7 @@ def check_scheduled_workflows():
                         trigger.schedule_is_recurring
                         and workflow_runs.exists()
                         and (
-                            workflow_runs.last().run_at
+                            workflow_runs.first().run_at
                             > now
                             - datetime.timedelta(
                                 days=trigger.schedule_recurring_interval_days,
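
Note on `last()` → `first()` above: this assumes the workflow_runs queryset is ordered newest-first (e.g. a descending run_at ordering), so `.first()` yields the most recent run, which is what the recurring-interval comparison must test against. A runnable sketch of the off-by-one the old code had:

from datetime import datetime, timedelta

runs_newest_first = [
    datetime(2026, 1, 13, 9, 0),  # one hour ago
    datetime(2026, 1, 11, 10, 0),  # two days ago
]
now = datetime(2026, 1, 13, 10, 0)
interval = timedelta(days=1)

latest = runs_newest_first[0]  # what .first() selects
oldest = runs_newest_first[-1]  # what .last() selected before the fix
assert latest > now - interval  # inside the interval: must not run again
assert not (oldest > now - interval)  # the old check made the workflow look due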

View File

@@ -262,6 +262,17 @@ def get_custom_fields_context(
     return field_data


+def _is_safe_relative_path(value: str) -> bool:
+    if value == "":
+        return True
+    path = PurePath(value)
+    if path.is_absolute() or path.drive:
+        return False
+    return ".." not in path.parts
+
+
 def validate_filepath_template_and_render(
     template_string: str,
     document: Document | None = None,
@@ -309,6 +320,12 @@
         )

         rendered_template = template.render(context)

+        if not _is_safe_relative_path(rendered_template):
+            logger.warning(
+                "Template rendered an unsafe path (absolute or containing traversal).",
+            )
+            return None
+
         # We're good!
         return rendered_template
     except UndefinedError:
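
Note on `_is_safe_relative_path` above: PurePath never touches the filesystem, so this is a string-level check that the rendered template is relative and free of `..` segments. The helper in isolation (pinned to PurePosixPath here for deterministic, cross-platform asserts):

from pathlib import PurePosixPath


def is_safe_relative_path(value: str) -> bool:
    if value == "":
        return True
    path = PurePosixPath(value)
    if path.is_absolute() or path.drive:
        return False
    return ".." not in path.parts


assert is_safe_relative_path("invoices/2026/doc.pdf")
assert not is_safe_relative_path("../../../../../tmp/proof")
assert not is_safe_relative_path("/etc/passwd")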

View File

@@ -1664,6 +1664,44 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

         self.consume_file_mock.assert_not_called()

+    def test_patch_document_integer_custom_field_out_of_range(self):
+        """
+        GIVEN:
+            - An integer custom field
+            - A document
+        WHEN:
+            - Patching the document with an integer value exceeding PostgreSQL's range
+        THEN:
+            - HTTP 400 is returned (validation catches the overflow)
+            - No custom field instance is created
+        """
+        cf_int = CustomField.objects.create(
+            name="intfield",
+            data_type=CustomField.FieldDataType.INT,
+        )
+        doc = Document.objects.create(
+            title="Doc",
+            checksum="123",
+            mime_type="application/pdf",
+        )
+
+        response = self.client.patch(
+            f"/api/documents/{doc.pk}/",
+            {
+                "custom_fields": [
+                    {
+                        "field": cf_int.pk,
+                        "value": 2**31,  # overflow for PostgreSQL integer fields
+                    },
+                ],
+            },
+            format="json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertIn("custom_fields", response.data)
+        self.assertEqual(CustomFieldInstance.objects.count(), 0)
+
     def test_upload_with_webui_source(self):
         """
         GIVEN: A document with a source file

View File

@@ -219,6 +219,30 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(StoragePath.objects.count(), 1)

+    def test_api_create_storage_path_rejects_traversal(self):
+        """
+        GIVEN:
+            - API request to create a storage paths
+            - Storage path attempts directory traversal
+        WHEN:
+            - API is called
+        THEN:
+            - Correct HTTP 400 response
+            - No storage path is created
+        """
+        response = self.client.post(
+            self.ENDPOINT,
+            json.dumps(
+                {
+                    "name": "Traversal path",
+                    "path": "../../../../../tmp/proof",
+                },
+            ),
+            content_type="application/json",
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertEqual(StoragePath.objects.count(), 1)
+
     def test_api_storage_path_placeholders(self):
         """
         GIVEN:

View File

@@ -581,7 +581,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         - Consume file should be called
         """
         doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
-        metadata_document_id = self.doc1.id
+        metadata_document_id = self.doc2.id
         user = User.objects.create(username="test_user")

         result = bulk_edit.merge(
@@ -606,7 +606,8 @@
         # With metadata_document_id overrides
         result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
         consume_file_args, _ = mock_consume_file.call_args
-        self.assertEqual(consume_file_args[1].title, "A (merged)")
+        self.assertEqual(consume_file_args[1].title, "B (merged)")
+        self.assertEqual(consume_file_args[1].created, self.doc2.created)

         self.assertEqual(result, "OK")

View File

@@ -1,6 +1,7 @@
 from datetime import datetime
 from unittest import mock

+from django.conf import settings
 from django.contrib.auth.models import User
 from django.test import SimpleTestCase
 from django.test import TestCase
@@ -179,7 +180,7 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
         (
             "added:this year",
             datetime(2025, 7, 15, 12, 0, 0, tzinfo=timezone.utc),
-            ("added:[20250101", "TO 20250715"),
+            ("added:[20250101", "TO 20251231"),
         ),
         (
             "added:previous year",
@@ -251,3 +252,120 @@
         result = self._rewrite_with_now("added:today", fixed_now)
         # Should convert to UTC properly
         self.assertIn("added:[20250719", result)
+
+
+class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
+    def _assert_recreate_called(self, mock_create_in):
+        mock_create_in.assert_called_once()
+        path_arg, schema_arg = mock_create_in.call_args.args
+        self.assertEqual(path_arg, settings.INDEX_DIR)
+        self.assertEqual(schema_arg.__class__.__name__, "Schema")
+
+    def test_transient_missing_segment_does_not_force_recreate(self):
+        """
+        GIVEN:
+            - Index directory exists
+        WHEN:
+            - open_index is called
+            - Opening the index raises FileNotFoundError once due to a
+              transient missing segment
+        THEN:
+            - Index is opened successfully on retry
+            - Index is not recreated
+        """
+        file_marker = settings.INDEX_DIR / "file_marker.txt"
+        file_marker.write_text("keep")
+        expected_index = object()
+
+        with (
+            mock.patch("documents.index.exists_in", return_value=True),
+            mock.patch(
+                "documents.index.open_dir",
+                side_effect=[FileNotFoundError("missing"), expected_index],
+            ) as mock_open_dir,
+            mock.patch(
+                "documents.index.create_in",
+            ) as mock_create_in,
+            mock.patch(
+                "documents.index.rmtree",
+            ) as mock_rmtree,
+        ):
+            ix = index.open_index()
+
+        self.assertIs(ix, expected_index)
+        self.assertGreaterEqual(mock_open_dir.call_count, 2)
+        mock_rmtree.assert_not_called()
+        mock_create_in.assert_not_called()
+        self.assertEqual(file_marker.read_text(), "keep")
+
+    def test_transient_errors_exhaust_retries_and_recreate(self):
+        """
+        GIVEN:
+            - Index directory exists
+        WHEN:
+            - open_index is called
+            - Opening the index raises FileNotFoundError multiple times due to
+              transient missing segments
+        THEN:
+            - Index is recreated after retries are exhausted
+        """
+        recreated_index = object()
+
+        with (
+            self.assertLogs("paperless.index", level="ERROR") as cm,
+            mock.patch("documents.index.exists_in", return_value=True),
+            mock.patch(
+                "documents.index.open_dir",
+                side_effect=FileNotFoundError("missing"),
+            ) as mock_open_dir,
+            mock.patch("documents.index.rmtree") as mock_rmtree,
+            mock.patch(
+                "documents.index.create_in",
+                return_value=recreated_index,
+            ) as mock_create_in,
+        ):
+            ix = index.open_index()
+
+        self.assertIs(ix, recreated_index)
+        self.assertEqual(mock_open_dir.call_count, 4)
+        mock_rmtree.assert_called_once_with(settings.INDEX_DIR)
+        self._assert_recreate_called(mock_create_in)
+        self.assertIn(
+            "Error while opening the index after retries, recreating.",
+            cm.output[0],
+        )
+
+    def test_non_transient_error_recreates_index(self):
+        """
+        GIVEN:
+            - Index directory exists
+        WHEN:
+            - open_index is called
+            - Opening the index raises a "non-transient" error
+        THEN:
+            - Index is recreated
+        """
+        recreated_index = object()
+
+        with (
+            self.assertLogs("paperless.index", level="ERROR") as cm,
+            mock.patch("documents.index.exists_in", return_value=True),
+            mock.patch(
+                "documents.index.open_dir",
+                side_effect=RuntimeError("boom"),
+            ),
+            mock.patch("documents.index.rmtree") as mock_rmtree,
+            mock.patch(
+                "documents.index.create_in",
+                return_value=recreated_index,
+            ) as mock_create_in,
+        ):
+            ix = index.open_index()
+
+        self.assertIs(ix, recreated_index)
+        mock_rmtree.assert_called_once_with(settings.INDEX_DIR)
+        self._assert_recreate_called(mock_create_in)
+        self.assertIn(
+            "Error while opening the index, recreating.",
+            cm.output[0],
+        )

View File

@@ -2094,6 +2094,68 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertIsNone(doc.owner)

+    def test_workflow_scheduled_recurring_respects_latest_run(self):
+        """
+        GIVEN:
+            - Scheduled workflow marked as recurring with a 1-day interval
+            - Document that matches the trigger
+            - Two prior runs exist: one 2 days ago and one 1 hour ago
+        WHEN:
+            - Scheduled workflows are checked again
+        THEN:
+            - Workflow does not run because the most recent run is inside the interval
+        """
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
+            schedule_is_recurring=True,
+            schedule_recurring_interval_days=1,
+        )
+        action = WorkflowAction.objects.create(
+            assign_title="Doc assign owner",
+            assign_owner=self.user2,
+        )
+        w = Workflow.objects.create(
+            name="Workflow 1",
+            order=0,
+        )
+        w.triggers.add(trigger)
+        w.actions.add(action)
+        w.save()
+
+        doc = Document.objects.create(
+            title="sample test",
+            correspondent=self.c,
+            original_filename="sample.pdf",
+            created=timezone.now().date() - timedelta(days=3),
+        )
+
+        WorkflowRun.objects.create(
+            workflow=w,
+            document=doc,
+            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            run_at=timezone.now() - timedelta(days=2),
+        )
+        WorkflowRun.objects.create(
+            workflow=w,
+            document=doc,
+            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            run_at=timezone.now() - timedelta(hours=1),
+        )
+
+        tasks.check_scheduled_workflows()
+
+        doc.refresh_from_db()
+        self.assertIsNone(doc.owner)
+        self.assertEqual(
+            WorkflowRun.objects.filter(
+                workflow=w,
+                document=doc,
+                type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            ).count(),
+            2,
+        )
+
     def test_workflow_scheduled_trigger_negative_offset_customfield(self):
         """
         GIVEN:

View File

@@ -32,7 +32,6 @@ from django.db.models import Count
 from django.db.models import IntegerField
 from django.db.models import Max
 from django.db.models import Model
-from django.db.models import Q
 from django.db.models import Sum
 from django.db.models import When
 from django.db.models.functions import Length
@@ -128,6 +127,7 @@ from documents.matching import match_storage_paths
 from documents.matching import match_tags
 from documents.models import Correspondent
 from documents.models import CustomField
+from documents.models import CustomFieldInstance
 from documents.models import Document
 from documents.models import DocumentType
 from documents.models import Note
@@ -147,6 +147,7 @@ from documents.permissions import PaperlessAdminPermissions
 from documents.permissions import PaperlessNotePermissions
 from documents.permissions import PaperlessObjectPermissions
 from documents.permissions import ViewDocumentsPermissions
+from documents.permissions import annotate_document_count_for_related_queryset
 from documents.permissions import get_document_count_filter_for_user
 from documents.permissions import get_objects_for_user_owner_aware
 from documents.permissions import has_perms_owner_aware
@@ -370,22 +371,37 @@ class PermissionsAwareDocumentCountMixin(BulkPermissionMixin, PassUserMixin):
     Mixin to add document count to queryset, permissions-aware if needed
     """

+    # Default is simple relation path, override for through-table/count specialization.
+    document_count_through = None
+    document_count_source_field = None
+
     def get_document_count_filter(self):
         request = getattr(self, "request", None)
         user = getattr(request, "user", None) if request else None
         return get_document_count_filter_for_user(user)

     def get_queryset(self):
+        base_qs = super().get_queryset()
+
+        # Use optimized through-table counting when configured.
+        if self.document_count_through:
+            user = getattr(getattr(self, "request", None), "user", None)
+            return annotate_document_count_for_related_queryset(
+                base_qs,
+                through_model=self.document_count_through,
+                source_field=self.document_count_source_field,
+                user=user,
+            )
+
+        # Fallback: simple Count on relation with permission filter.
         filter = self.get_document_count_filter()
-        return (
-            super()
-            .get_queryset()
-            .annotate(document_count=Count("documents", filter=filter))
-        )
+        return base_qs.annotate(
+            document_count=Count("documents", filter=filter),
+        )


 @extend_schema_view(**generate_object_with_permissions_schema(CorrespondentSerializer))
-class CorrespondentViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
+class CorrespondentViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
     model = Correspondent

     queryset = Correspondent.objects.select_related("owner").order_by(Lower("name"))
@@ -422,8 +438,10 @@
 @extend_schema_view(**generate_object_with_permissions_schema(TagSerializer))
-class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
+class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
     model = Tag

+    document_count_through = Document.tags.through
+    document_count_source_field = "tag_id"
+
     queryset = Tag.objects.select_related("owner").order_by(
         Lower("name"),
@@ -448,8 +466,51 @@
     def get_serializer_context(self):
         context = super().get_serializer_context()
         context["document_count_filter"] = self.get_document_count_filter()
+        if hasattr(self, "_children_map"):
+            context["children_map"] = self._children_map
         return context

+    def list(self, request, *args, **kwargs):
+        """
+        Build a children map once to avoid per-parent queries in the serializer.
+        """
+        queryset = self.filter_queryset(self.get_queryset())
+        ordering = OrderingFilter().get_ordering(request, queryset, self) or (
+            Lower("name"),
+        )
+        queryset = queryset.order_by(*ordering)
+
+        all_tags = list(queryset)
+        descendant_pks = {pk for tag in all_tags for pk in tag.get_descendants_pks()}
+        if descendant_pks:
+            user = getattr(getattr(self, "request", None), "user", None)
+            children_source = list(
+                annotate_document_count_for_related_queryset(
+                    Tag.objects.filter(pk__in=descendant_pks | {t.pk for t in all_tags})
+                    .select_related("owner")
+                    .order_by(*ordering),
+                    through_model=self.document_count_through,
+                    source_field=self.document_count_source_field,
+                    user=user,
+                ),
+            )
+        else:
+            children_source = all_tags
+
+        children_map = {}
+        for tag in children_source:
+            children_map.setdefault(tag.tn_parent_id, []).append(tag)
+        self._children_map = children_map
+
+        page = self.paginate_queryset(queryset)
+        serializer = self.get_serializer(page, many=True)
+        response = self.get_paginated_response(serializer.data)
+        if descendant_pks:
+            # Include children in the "all" field, if needed
+            response.data["all"] = [tag.pk for tag in children_source]
+        return response
+
     def perform_update(self, serializer):
         old_parent = self.get_object().get_parent()
         tag = serializer.save()
@@ -459,7 +520,7 @@
 @extend_schema_view(**generate_object_with_permissions_schema(DocumentTypeSerializer))
-class DocumentTypeViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
+class DocumentTypeViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
     model = DocumentType

     queryset = DocumentType.objects.select_related("owner").order_by(Lower("name"))
@@ -708,6 +769,7 @@ class DocumentViewSet(
         "title",
         "correspondent__name",
         "document_type__name",
+        "storage_path__name",
         "created",
         "modified",
         "added",
@@ -1059,7 +1121,7 @@ class DocumentViewSet(
             ):
                 return HttpResponseForbidden("Insufficient permissions to delete notes")

-            note = Note.objects.get(id=int(request.GET.get("id")))
+            note = Note.objects.get(id=int(request.GET.get("id")), document=doc)
             if settings.AUDIT_LOG_ENABLED:
                 LogEntry.objects.log_create(
                     instance=doc,
@@ -2302,7 +2364,7 @@ class BulkDownloadView(GenericAPIView):

 @extend_schema_view(**generate_object_with_permissions_schema(StoragePathSerializer))
-class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
+class StoragePathViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
     model = StoragePath

     queryset = StoragePath.objects.select_related("owner").order_by(
@@ -2819,7 +2881,7 @@ class WorkflowViewSet(ModelViewSet):
     )


-class CustomFieldViewSet(ModelViewSet):
+class CustomFieldViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
     permission_classes = (IsAuthenticated, PaperlessObjectPermissions)

     serializer_class = CustomFieldSerializer
@@ -2831,35 +2893,11 @@
     filterset_class = CustomFieldFilterSet

     model = CustomField

+    document_count_through = CustomFieldInstance
+    document_count_source_field = "field_id"
+
     queryset = CustomField.objects.all().order_by("-created")

-    def get_queryset(self):
-        filter = (
-            Q(fields__document__deleted_at__isnull=True)
-            if self.request.user is None or self.request.user.is_superuser
-            else (
-                Q(
-                    fields__document__deleted_at__isnull=True,
-                    fields__document__id__in=get_objects_for_user_owner_aware(
-                        self.request.user,
-                        "documents.view_document",
-                        Document,
-                    ).values_list("id", flat=True),
-                )
-            )
-        )
-        return (
-            super()
-            .get_queryset()
-            .annotate(
-                document_count=Count(
-                    "fields",
-                    filter=filter,
-                ),
-            )
-        )


 @extend_schema_view(
     get=extend_schema(
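
Note on the mixin change above: viewsets now opt into the subquery-based counting path by declaring a through table and its source FK (TagViewSet and CustomFieldViewSet do); everything else keeps the plain Count with a permission filter. A plain-Python analogue of that dispatch (strings stand in for querysets; names illustrative):

class CountMixin:
    document_count_through = None
    document_count_source_field = None

    def get_queryset(self):
        if self.document_count_through:
            return f"subquery count via {self.document_count_through}"
        return "Count('documents') with permission filter"


class TagLike(CountMixin):
    document_count_through = "documents_document_tags"
    document_count_source_field = "tag_id"


class CorrespondentLike(CountMixin):
    pass


assert TagLike().get_queryset().startswith("subquery count")
assert CorrespondentLike().get_queryset().startswith("Count(")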

View File

@@ -20,9 +20,6 @@ def get_workflows_for_trigger(
     wrap it in a list; otherwise fetch enabled workflows for the trigger with
     the prefetches used by the runner.
     """
-    if workflow_to_run is not None:
-        return [workflow_to_run]
-
     annotated_actions = (
         WorkflowAction.objects.select_related(
             "assign_correspondent",
@@ -105,10 +102,25 @@
         )
     )

+    action_prefetch = Prefetch(
+        "actions",
+        queryset=annotated_actions.order_by("order", "pk"),
+    )
+
+    if workflow_to_run is not None:
+        return (
+            Workflow.objects.filter(pk=workflow_to_run.pk)
+            .prefetch_related(
+                action_prefetch,
+                "triggers",
+            )
+            .distinct()
+        )
+
     return (
         Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
         .prefetch_related(
-            Prefetch("actions", queryset=annotated_actions),
+            action_prefetch,
             "triggers",
         )
         .order_by("order")

View File

@@ -1,6 +1,6 @@
 from typing import Final

-__version__: Final[tuple[int, int, int]] = (2, 20, 3)
+__version__: Final[tuple[int, int, int]] = (2, 20, 5)
 # Version string like X.Y.Z
 __full_version_str__: Final[str] = ".".join(map(str, __version__))
 # Version string like X.Y

uv.lock generated
View File

@@ -2115,7 +2115,7 @@ wheels = [
 [[package]]
 name = "paperless-ngx"
-version = "2.20.3"
+version = "2.20.5"
 source = { virtual = "." }
 dependencies = [
     { name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },