Compare commits


4 Commits

Author   SHA1        Message                                                                   Date
shamoon  5fe46cac55  Mas testing                                                               2026-01-24 20:05:25 -08:00
shamoon  c0c2202564  skip_asn_if_exists                                                        2026-01-24 20:05:25 -08:00
shamoon  d65d9a2b88  "Handoff" ASN when merging or editing PDFs                                2026-01-24 20:05:25 -08:00
shamoon  8e12f3e93c  First, release ASNs before document replacement (and restore if needed)  2026-01-24 20:05:25 -08:00
35 changed files with 603 additions and 726 deletions

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py management_command "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py convert_mariadb_uuid "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py convert_mariadb_uuid "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py convert_mariadb_uuid "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py createsuperuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py createsuperuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py createsuperuser "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_archiver "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_archiver "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_archiver "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_create_classifier "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_create_classifier "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_create_classifier "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_exporter "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_exporter "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_exporter "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_fuzzy_match "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_fuzzy_match "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_fuzzy_match "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_importer "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_importer "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_importer "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_index "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_index "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_index "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_renamer "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_renamer "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_renamer "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_retagger "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_retagger "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_retagger "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_sanity_checker "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_sanity_checker "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_sanity_checker "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_thumbnails "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_thumbnails "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_thumbnails "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py mail_fetcher "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py mail_fetcher "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py mail_fetcher "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py manage_superuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py manage_superuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py manage_superuser "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py prune_audit_logs "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py prune_audit_logs "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py prune_audit_logs "$@"
else
echo "Unknown user."
fi

View File

@@ -257,7 +257,7 @@ lint.isort.force-single-line = true
[tool.codespell]
write-changes = true
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober"
skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"
[tool.pytest.ini_options]

View File

@@ -5257,105 +5257,84 @@
<source>Has any of these tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">209</context>
<context context-type="linenumber">203</context>
</context-group>
</trans-unit>
<trans-unit id="4166903555074156852" datatype="html">
<source>Has all of these tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">216</context>
<context context-type="linenumber">210</context>
</context-group>
</trans-unit>
<trans-unit id="6624363795312783141" datatype="html">
<source>Does not have these tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">223</context>
</context-group>
</trans-unit>
<trans-unit id="7168528512669831184" datatype="html">
<source>Has any of these correspondents</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">230</context>
<context context-type="linenumber">217</context>
</context-group>
</trans-unit>
<trans-unit id="5281365940563983618" datatype="html">
<source>Has correspondent</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">238</context>
<context context-type="linenumber">224</context>
</context-group>
</trans-unit>
<trans-unit id="6884498632428600393" datatype="html">
<source>Does not have correspondents</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">246</context>
<context context-type="linenumber">232</context>
</context-group>
</trans-unit>
<trans-unit id="4806713133917046341" datatype="html">
<source>Has document type</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">254</context>
</context-group>
</trans-unit>
<trans-unit id="8801397520369995032" datatype="html">
<source>Has any of these document types</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">262</context>
<context context-type="linenumber">240</context>
</context-group>
</trans-unit>
<trans-unit id="1507843981661822403" datatype="html">
<source>Does not have document types</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">270</context>
<context context-type="linenumber">248</context>
</context-group>
</trans-unit>
<trans-unit id="4277260190522078330" datatype="html">
<source>Has storage path</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">278</context>
</context-group>
</trans-unit>
<trans-unit id="8858580062214623097" datatype="html">
<source>Has any of these storage paths</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">286</context>
<context context-type="linenumber">256</context>
</context-group>
</trans-unit>
<trans-unit id="6070943364927280151" datatype="html">
<source>Does not have storage paths</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">294</context>
<context context-type="linenumber">264</context>
</context-group>
</trans-unit>
<trans-unit id="6250799006816371860" datatype="html">
<source>Matches custom field query</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">302</context>
<context context-type="linenumber">272</context>
</context-group>
</trans-unit>
<trans-unit id="3138206142174978019" datatype="html">
<source>Create new workflow</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">531</context>
<context context-type="linenumber">474</context>
</context-group>
</trans-unit>
<trans-unit id="5996779210524133604" datatype="html">
<source>Edit workflow</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">535</context>
<context context-type="linenumber">478</context>
</context-group>
</trans-unit>
<trans-unit id="5457837313196342910" datatype="html">

View File

@@ -412,9 +412,6 @@ describe('WorkflowEditDialogComponent', () => {
return newFilter
}
const correspondentAny = addFilterOfType(TriggerFilterType.CorrespondentAny)
correspondentAny.get('values').setValue([11])
const correspondentIs = addFilterOfType(TriggerFilterType.CorrespondentIs)
correspondentIs.get('values').setValue(1)
@@ -424,18 +421,12 @@ describe('WorkflowEditDialogComponent', () => {
const documentTypeIs = addFilterOfType(TriggerFilterType.DocumentTypeIs)
documentTypeIs.get('values').setValue(1)
const documentTypeAny = addFilterOfType(TriggerFilterType.DocumentTypeAny)
documentTypeAny.get('values').setValue([12])
const documentTypeNot = addFilterOfType(TriggerFilterType.DocumentTypeNot)
documentTypeNot.get('values').setValue([1])
const storagePathIs = addFilterOfType(TriggerFilterType.StoragePathIs)
storagePathIs.get('values').setValue(1)
const storagePathAny = addFilterOfType(TriggerFilterType.StoragePathAny)
storagePathAny.get('values').setValue([13])
const storagePathNot = addFilterOfType(TriggerFilterType.StoragePathNot)
storagePathNot.get('values').setValue([1])
@@ -450,13 +441,10 @@ describe('WorkflowEditDialogComponent', () => {
expect(formValues.triggers[0].filter_has_tags).toEqual([1])
expect(formValues.triggers[0].filter_has_all_tags).toEqual([2, 3])
expect(formValues.triggers[0].filter_has_not_tags).toEqual([4])
expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([11])
expect(formValues.triggers[0].filter_has_correspondent).toEqual(1)
expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([1])
expect(formValues.triggers[0].filter_has_any_document_types).toEqual([12])
expect(formValues.triggers[0].filter_has_document_type).toEqual(1)
expect(formValues.triggers[0].filter_has_not_document_types).toEqual([1])
expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([13])
expect(formValues.triggers[0].filter_has_storage_path).toEqual(1)
expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([1])
expect(formValues.triggers[0].filter_custom_field_query).toEqual(
@@ -519,22 +507,16 @@ describe('WorkflowEditDialogComponent', () => {
setFilter(TriggerFilterType.TagsAll, 11)
setFilter(TriggerFilterType.TagsNone, 12)
setFilter(TriggerFilterType.CorrespondentAny, 16)
setFilter(TriggerFilterType.CorrespondentNot, 13)
setFilter(TriggerFilterType.DocumentTypeAny, 17)
setFilter(TriggerFilterType.DocumentTypeNot, 14)
setFilter(TriggerFilterType.StoragePathAny, 18)
setFilter(TriggerFilterType.StoragePathNot, 15)
const formValues = component['getFormValues']()
expect(formValues.triggers[0].filter_has_all_tags).toEqual([11])
expect(formValues.triggers[0].filter_has_not_tags).toEqual([12])
expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([16])
expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([13])
expect(formValues.triggers[0].filter_has_any_document_types).toEqual([17])
expect(formValues.triggers[0].filter_has_not_document_types).toEqual([14])
expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([18])
expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([15])
})
@@ -658,11 +640,8 @@ describe('WorkflowEditDialogComponent', () => {
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_any_correspondents: [],
filter_has_not_correspondents: [],
filter_has_any_document_types: [],
filter_has_not_document_types: [],
filter_has_any_storage_paths: [],
filter_has_not_storage_paths: [],
filter_has_correspondent: null,
filter_has_document_type: null,
@@ -720,14 +699,11 @@ describe('WorkflowEditDialogComponent', () => {
trigger.filter_has_tags = [1]
trigger.filter_has_all_tags = [2, 3]
trigger.filter_has_not_tags = [4]
trigger.filter_has_any_correspondents = [10] as any
trigger.filter_has_correspondent = 5 as any
trigger.filter_has_not_correspondents = [6] as any
trigger.filter_has_document_type = 7 as any
trigger.filter_has_any_document_types = [11] as any
trigger.filter_has_not_document_types = [8] as any
trigger.filter_has_storage_path = 9 as any
trigger.filter_has_any_storage_paths = [12] as any
trigger.filter_has_not_storage_paths = [10] as any
trigger.filter_custom_field_query = JSON.stringify([
'AND',
@@ -738,8 +714,8 @@ describe('WorkflowEditDialogComponent', () => {
component.ngOnInit()
const triggerGroup = component.triggerFields.at(0) as FormGroup
const filters = component.getFiltersFormArray(triggerGroup)
expect(filters.length).toBe(13)
const customFieldFilter = filters.at(12) as FormGroup
expect(filters.length).toBe(10)
const customFieldFilter = filters.at(9) as FormGroup
expect(customFieldFilter.get('type').value).toBe(
TriggerFilterType.CustomFieldQuery
)
@@ -748,27 +724,12 @@ describe('WorkflowEditDialogComponent', () => {
})
it('should expose select metadata helpers', () => {
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentAny)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentNot)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentIs)).toBe(
false
)
expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeAny)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeIs)).toBe(
false
)
expect(component.isSelectMultiple(TriggerFilterType.StoragePathAny)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.StoragePathIs)).toBe(
false
)
component.correspondents = [{ id: 1, name: 'C1' } as any]
component.documentTypes = [{ id: 2, name: 'DT' } as any]
@@ -780,15 +741,9 @@ describe('WorkflowEditDialogComponent', () => {
expect(
component.getFilterSelectItems(TriggerFilterType.DocumentTypeIs)
).toEqual(component.documentTypes)
expect(
component.getFilterSelectItems(TriggerFilterType.DocumentTypeAny)
).toEqual(component.documentTypes)
expect(
component.getFilterSelectItems(TriggerFilterType.StoragePathIs)
).toEqual(component.storagePaths)
expect(
component.getFilterSelectItems(TriggerFilterType.StoragePathAny)
).toEqual(component.storagePaths)
expect(component.getFilterSelectItems(TriggerFilterType.TagsAll)).toEqual(
[]
)

View File

@@ -145,13 +145,10 @@ export enum TriggerFilterType {
TagsAny = 'tags_any',
TagsAll = 'tags_all',
TagsNone = 'tags_none',
CorrespondentAny = 'correspondent_any',
CorrespondentIs = 'correspondent_is',
CorrespondentNot = 'correspondent_not',
DocumentTypeAny = 'document_type_any',
DocumentTypeIs = 'document_type_is',
DocumentTypeNot = 'document_type_not',
StoragePathAny = 'storage_path_any',
StoragePathIs = 'storage_path_is',
StoragePathNot = 'storage_path_not',
CustomFieldQuery = 'custom_field_query',
@@ -175,11 +172,8 @@ type TriggerFilterAggregate = {
filter_has_tags: number[]
filter_has_all_tags: number[]
filter_has_not_tags: number[]
filter_has_any_correspondents: number[]
filter_has_not_correspondents: number[]
filter_has_any_document_types: number[]
filter_has_not_document_types: number[]
filter_has_any_storage_paths: number[]
filter_has_not_storage_paths: number[]
filter_has_correspondent: number | null
filter_has_document_type: number | null
@@ -225,14 +219,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
allowMultipleEntries: false,
allowMultipleValues: true,
},
{
id: TriggerFilterType.CorrespondentAny,
name: $localize`Has any of these correspondents`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'correspondents',
},
{
id: TriggerFilterType.CorrespondentIs,
name: $localize`Has correspondent`,
@@ -257,14 +243,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
allowMultipleValues: false,
selectItems: 'documentTypes',
},
{
id: TriggerFilterType.DocumentTypeAny,
name: $localize`Has any of these document types`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'documentTypes',
},
{
id: TriggerFilterType.DocumentTypeNot,
name: $localize`Does not have document types`,
@@ -281,14 +259,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
allowMultipleValues: false,
selectItems: 'storagePaths',
},
{
id: TriggerFilterType.StoragePathAny,
name: $localize`Has any of these storage paths`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'storagePaths',
},
{
id: TriggerFilterType.StoragePathNot,
name: $localize`Does not have storage paths`,
@@ -336,15 +306,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
extract: (trigger) => trigger.filter_has_not_tags,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.CorrespondentAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_any_correspondents = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_any_correspondents,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.CorrespondentIs]: {
apply: (aggregate, values) => {
aggregate.filter_has_correspondent = Array.isArray(values)
@@ -372,15 +333,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
extract: (trigger) => trigger.filter_has_document_type,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.DocumentTypeAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_any_document_types = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_any_document_types,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.DocumentTypeNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_document_types = Array.isArray(values)
@@ -399,15 +351,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
extract: (trigger) => trigger.filter_has_storage_path,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.StoragePathAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_any_storage_paths = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_any_storage_paths,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.StoragePathNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_storage_paths = Array.isArray(values)
@@ -699,11 +642,8 @@ export class WorkflowEditDialogComponent
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_any_correspondents: [],
filter_has_not_correspondents: [],
filter_has_any_document_types: [],
filter_has_not_document_types: [],
filter_has_any_storage_paths: [],
filter_has_not_storage_paths: [],
filter_has_correspondent: null,
filter_has_document_type: null,
@@ -730,16 +670,10 @@ export class WorkflowEditDialogComponent
trigger.filter_has_tags = aggregate.filter_has_tags
trigger.filter_has_all_tags = aggregate.filter_has_all_tags
trigger.filter_has_not_tags = aggregate.filter_has_not_tags
trigger.filter_has_any_correspondents =
aggregate.filter_has_any_correspondents
trigger.filter_has_not_correspondents =
aggregate.filter_has_not_correspondents
trigger.filter_has_any_document_types =
aggregate.filter_has_any_document_types
trigger.filter_has_not_document_types =
aggregate.filter_has_not_document_types
trigger.filter_has_any_storage_paths =
aggregate.filter_has_any_storage_paths
trigger.filter_has_not_storage_paths =
aggregate.filter_has_not_storage_paths
trigger.filter_has_correspondent =
@@ -922,11 +856,8 @@ export class WorkflowEditDialogComponent
case TriggerFilterType.TagsAny:
case TriggerFilterType.TagsAll:
case TriggerFilterType.TagsNone:
case TriggerFilterType.CorrespondentAny:
case TriggerFilterType.CorrespondentNot:
case TriggerFilterType.DocumentTypeAny:
case TriggerFilterType.DocumentTypeNot:
case TriggerFilterType.StoragePathAny:
case TriggerFilterType.StoragePathNot:
return true
default:
@@ -1248,11 +1179,8 @@ export class WorkflowEditDialogComponent
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_any_correspondents: [],
filter_has_not_correspondents: [],
filter_has_any_document_types: [],
filter_has_not_document_types: [],
filter_has_any_storage_paths: [],
filter_has_not_storage_paths: [],
filter_custom_field_query: null,
filter_has_correspondent: null,

View File

@@ -44,16 +44,10 @@ export interface WorkflowTrigger extends ObjectWithId {
filter_has_not_tags?: number[] // Tag.id[]
filter_has_any_correspondents?: number[] // Correspondent.id[]
filter_has_not_correspondents?: number[] // Correspondent.id[]
filter_has_any_document_types?: number[] // DocumentType.id[]
filter_has_not_document_types?: number[] // DocumentType.id[]
filter_has_any_storage_paths?: number[] // StoragePath.id[]
filter_has_not_storage_paths?: number[] // StoragePath.id[]
filter_custom_field_query?: string

View File

@@ -16,6 +16,7 @@ from pikepdf import Pdf
from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.models import Document
from documents.models import Tag
from documents.plugins.base import ConsumeTaskPlugin
from documents.plugins.base import StopConsumeTaskError
@@ -115,6 +116,24 @@ class BarcodePlugin(ConsumeTaskPlugin):
self._tiff_conversion_done = False
self.barcodes: list[Barcode] = []
def _apply_detected_asn(self, detected_asn: int) -> None:
"""
Apply a detected ASN to metadata if allowed.
"""
if (
self.metadata.skip_asn_if_exists
and Document.global_objects.filter(
archive_serial_number=detected_asn,
).exists()
):
logger.info(
f"Found ASN in barcode {detected_asn} but skipping because it already exists.",
)
return
logger.info(f"Found ASN in barcode: {detected_asn}")
self.metadata.asn = detected_asn
def run(self) -> None:
# Some operations may use PIL, override pixel setting if needed
maybe_override_pixel_limit()
@@ -186,13 +205,8 @@ class BarcodePlugin(ConsumeTaskPlugin):
# Update/overwrite an ASN if possible
# After splitting, as otherwise each split document gets the same ASN
if (
self.settings.barcode_enable_asn
and not self.metadata.skip_asn
and (located_asn := self.asn) is not None
):
logger.info(f"Found ASN in barcode: {located_asn}")
self.metadata.asn = located_asn
if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
self._apply_detected_asn(located_asn)
def cleanup(self) -> None:
self.temp_dir.cleanup()
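
Note: the new _apply_detected_asn centralizes what used to be inline in run(): a detected ASN is dropped only when skip_asn_if_exists is set and the number is already taken. A runnable standalone mirror of that logic, with a plain set standing in for the Document.global_objects existence check (names here are illustrative, not from the diff):

from dataclasses import dataclass

@dataclass
class Overrides:  # stand-in for DocumentMetadataOverrides
    asn: int | None = None
    skip_asn_if_exists: bool = False

def apply_detected_asn(detected: int, meta: Overrides, taken: set[int]) -> None:
    if meta.skip_asn_if_exists and detected in taken:
        return  # ASN already in use elsewhere: skip instead of colliding
    meta.asn = detected

meta = Overrides(skip_asn_if_exists=True)
apply_detected_asn(123, meta, taken={123})  # 123 exists: skipped
assert meta.asn is None
apply_detected_asn(456, meta, taken={123})  # 456 is free: applied
assert meta.asn == 456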

View File

@@ -7,7 +7,6 @@ from pathlib import Path
from typing import TYPE_CHECKING
from typing import Literal
from celery import chain
from celery import chord
from celery import group
from celery import shared_task
@@ -38,6 +37,42 @@ if TYPE_CHECKING:
logger: logging.Logger = logging.getLogger("paperless.bulk_edit")
@shared_task(bind=True)
def restore_archive_serial_numbers_task(
self,
backup: dict[int, int],
*args,
**kwargs,
) -> None:
restore_archive_serial_numbers(backup)
def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
"""
Clears ASNs on documents that are about to be replaced so new documents
can be assigned ASNs without uniqueness collisions. Returns a backup map
of doc_id -> previous ASN for potential restoration.
"""
qs = Document.objects.filter(
id__in=doc_ids,
archive_serial_number__isnull=False,
).only("pk", "archive_serial_number")
backup = dict(qs.values_list("pk", "archive_serial_number"))
qs.update(archive_serial_number=None)
logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
return backup
def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
"""
Restores ASNs using the provided backup map, intended for
rollback when replacement consumption fails.
"""
for doc_id, asn in backup.items():
Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
def set_correspondent(
doc_ids: list[int],
correspondent: Correspondent,
@@ -386,6 +421,7 @@ def merge(
merged_pdf = pikepdf.new()
version: str = merged_pdf.pdf_version
handoff_asn: int | None = None
# use doc_ids to preserve order
for doc_id in doc_ids:
doc = qs.get(id=doc_id)
@@ -401,6 +437,8 @@ def merge(
version = max(version, pdf.pdf_version)
merged_pdf.pages.extend(pdf.pages)
affected_docs.append(doc.id)
if handoff_asn is None and doc.archive_serial_number is not None:
handoff_asn = doc.archive_serial_number
except Exception as e:
logger.exception(
f"Error merging document {doc.id}, it will not be included in the merge: {e}",
@@ -426,6 +464,8 @@ def merge(
DocumentMetadataOverrides.from_document(metadata_document)
)
overrides.title = metadata_document.title + " (merged)"
if metadata_document.archive_serial_number is not None:
handoff_asn = metadata_document.archive_serial_number
else:
overrides = DocumentMetadataOverrides()
else:
@@ -433,8 +473,11 @@ def merge(
if user is not None:
overrides.owner_id = user.id
# Avoid copying or detecting ASN from merged PDFs to prevent collision
overrides.skip_asn = True
if not delete_originals:
overrides.skip_asn_if_exists = True
if delete_originals and handoff_asn is not None:
overrides.asn = handoff_asn
logger.info("Adding merged document to the task queue.")
@@ -447,12 +490,20 @@ def merge(
)
if delete_originals:
backup = release_archive_serial_numbers(affected_docs)
logger.info(
"Queueing removal of original documents after consumption of merged document",
)
chain(consume_task, delete.si(affected_docs)).delay()
else:
consume_task.delay()
try:
consume_task.apply_async(
link=[delete.si(affected_docs)],
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else:
consume_task.delay()
return "OK"
@@ -494,6 +545,8 @@ def split(
overrides.title = f"{doc.title} (split {idx + 1})"
if user is not None:
overrides.owner_id = user.id
if not delete_originals:
overrides.skip_asn_if_exists = True
logger.info(
f"Adding split document with pages {split_doc} to the task queue.",
)
@@ -508,10 +561,20 @@ def split(
)
if delete_originals:
backup = release_archive_serial_numbers([doc.id])
logger.info(
"Queueing removal of original document after consumption of the split documents",
)
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
try:
chord(
header=consume_tasks,
body=delete.si([doc.id]),
).apply_async(
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else:
group(consume_tasks).delay()
@@ -614,7 +677,10 @@ def edit_pdf(
)
if user is not None:
overrides.owner_id = user.id
if not delete_original:
overrides.skip_asn_if_exists = True
if delete_original and len(pdf_docs) == 1:
overrides.asn = doc.archive_serial_number
for idx, pdf in enumerate(pdf_docs, start=1):
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
@@ -633,7 +699,17 @@ def edit_pdf(
)
if delete_original:
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
backup = release_archive_serial_numbers([doc.id])
try:
chord(
header=consume_tasks,
body=delete.si([doc.id]),
).apply_async(
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else:
group(consume_tasks).delay()
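
Note: merge, split, and edit_pdf now share one release/restore contract around delete_originals: clear the ASNs first, attach the backup map to link_error for asynchronous failures, and restore inline if even queuing raises. A condensed sketch of that shared pattern, using the helpers defined at the top of this file:

backup = release_archive_serial_numbers(doc_ids)  # {doc_id: old_asn}; ASNs are now free
try:
    consume_task.apply_async(
        link=[delete.si(doc_ids)],  # delete originals once consumption succeeds
        link_error=[restore_archive_serial_numbers_task.s(backup)],  # async failure: restore
    )
except Exception:
    restore_archive_serial_numbers(backup)  # queuing itself failed: restore synchronously
    raise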

View File

@@ -690,7 +690,7 @@ class ConsumerPlugin(
pk=self.metadata.storage_path_id,
)
if self.metadata.asn is not None and not self.metadata.skip_asn:
if self.metadata.asn is not None:
document.archive_serial_number = self.metadata.asn
if self.metadata.owner_id:
@@ -806,8 +806,8 @@ class ConsumerPreflightPlugin(
"""
Check that if override_asn is given, it is unique and within a valid range
"""
if self.metadata.skip_asn or self.metadata.asn is None:
# if skip is set or ASN is None
if self.metadata.asn is None:
# if ASN is None
return
# Validate the range is above zero and less than uint32_t max
# otherwise, Whoosh can't handle it in the index
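
Note: with skip_asn gone, the preflight only range-checks a present ASN. A minimal standalone sketch of the check the comment above describes (the helper name and exception type are illustrative, not from the diff):

def check_asn_range(asn: int | None) -> None:
    ASN_MAX = 2**32 - 1  # uint32 ceiling; larger values break the Whoosh index
    if asn is None:
        return  # nothing to validate
    if not (0 < asn <= ASN_MAX):
        raise ValueError(f"ASN {asn} outside valid range 1..{ASN_MAX}")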

View File

@@ -30,7 +30,7 @@ class DocumentMetadataOverrides:
change_users: list[int] | None = None
change_groups: list[int] | None = None
custom_fields: dict | None = None
skip_asn: bool = False
skip_asn_if_exists: bool = False
def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
"""
@@ -50,8 +50,8 @@ class DocumentMetadataOverrides:
self.storage_path_id = other.storage_path_id
if other.owner_id is not None:
self.owner_id = other.owner_id
if other.skip_asn:
self.skip_asn = True
if other.skip_asn_if_exists:
self.skip_asn_if_exists = True
# merge
if self.tag_ids is None:
@@ -118,7 +118,7 @@ class DocumentMetadataOverrides:
).values_list("id", flat=True),
)
overrides.custom_fields = {
custom_field.field.id: custom_field.value
custom_field.id: custom_field.value
for custom_field in doc.custom_fields.all()
}
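
Note: update() keeps the renamed flag sticky: once any overrides layer sets skip_asn_if_exists, merging further overrides never clears it. A minimal sketch adapted from the test this compare removes:

from documents.data_models import DocumentMetadataOverrides

base = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
base.update(incoming)
assert base.skip_asn_if_exists is True  # sticky across merges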

View File

@@ -403,18 +403,6 @@ def existing_document_matches_workflow(
f"Document tags {list(document.tags.all())} include excluded tags {list(trigger_has_not_tags_qs)}",
)
allowed_correspondent_ids = set(
trigger.filter_has_any_correspondents.values_list("id", flat=True),
)
if (
allowed_correspondent_ids
and document.correspondent_id not in allowed_correspondent_ids
):
return (
False,
f"Document correspondent {document.correspondent} is not one of {list(trigger.filter_has_any_correspondents.all())}",
)
# Document correspondent vs trigger has_correspondent
if (
trigger.filter_has_correspondent_id is not None
@@ -436,17 +424,6 @@ def existing_document_matches_workflow(
f"Document correspondent {document.correspondent} is excluded by {list(trigger.filter_has_not_correspondents.all())}",
)
allowed_document_type_ids = set(
trigger.filter_has_any_document_types.values_list("id", flat=True),
)
if allowed_document_type_ids and (
document.document_type_id not in allowed_document_type_ids
):
return (
False,
f"Document doc type {document.document_type} is not one of {list(trigger.filter_has_any_document_types.all())}",
)
# Document document_type vs trigger has_document_type
if (
trigger.filter_has_document_type_id is not None
@@ -468,17 +445,6 @@ def existing_document_matches_workflow(
f"Document doc type {document.document_type} is excluded by {list(trigger.filter_has_not_document_types.all())}",
)
allowed_storage_path_ids = set(
trigger.filter_has_any_storage_paths.values_list("id", flat=True),
)
if allowed_storage_path_ids and (
document.storage_path_id not in allowed_storage_path_ids
):
return (
False,
f"Document storage path {document.storage_path} is not one of {list(trigger.filter_has_any_storage_paths.all())}",
)
# Document storage_path vs trigger has_storage_path
if (
trigger.filter_has_storage_path_id is not None
@@ -566,10 +532,6 @@ def prefilter_documents_by_workflowtrigger(
# Correspondent, DocumentType, etc. filtering
if trigger.filter_has_any_correspondents.exists():
documents = documents.filter(
correspondent__in=trigger.filter_has_any_correspondents.all(),
)
if trigger.filter_has_correspondent is not None:
documents = documents.filter(
correspondent=trigger.filter_has_correspondent,
@@ -579,10 +541,6 @@ def prefilter_documents_by_workflowtrigger(
correspondent__in=trigger.filter_has_not_correspondents.all(),
)
if trigger.filter_has_any_document_types.exists():
documents = documents.filter(
document_type__in=trigger.filter_has_any_document_types.all(),
)
if trigger.filter_has_document_type is not None:
documents = documents.filter(
document_type=trigger.filter_has_document_type,
@@ -592,10 +550,6 @@ def prefilter_documents_by_workflowtrigger(
document_type__in=trigger.filter_has_not_document_types.all(),
)
if trigger.filter_has_any_storage_paths.exists():
documents = documents.filter(
storage_path__in=trigger.filter_has_any_storage_paths.all(),
)
if trigger.filter_has_storage_path is not None:
documents = documents.filter(
storage_path=trigger.filter_has_storage_path,
@@ -650,11 +604,8 @@ def document_matches_workflow(
"filter_has_tags",
"filter_has_all_tags",
"filter_has_not_tags",
"filter_has_any_document_types",
"filter_has_not_document_types",
"filter_has_any_correspondents",
"filter_has_not_correspondents",
"filter_has_any_storage_paths",
"filter_has_not_storage_paths",
)
)
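
Note: with the "any of" branches removed, prefiltering narrows only by the exact ("is") and exclusion ("not") filters. A sketch of the surviving correspondent logic; the .exclude(...) form for the "not" filter is an assumption inferred from the correspondent__in lines visible above:

documents = Document.objects.all()
if trigger.filter_has_correspondent is not None:
    documents = documents.filter(correspondent=trigger.filter_has_correspondent)
if trigger.filter_has_not_correspondents.exists():
    documents = documents.exclude(
        correspondent__in=trigger.filter_has_not_correspondents.all(),
    )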

View File

@@ -1,43 +0,0 @@
# Generated by Django 5.2.7 on 2025-12-17 22:25
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0004_remove_document_storage_type"),
]
operations = [
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_any_correspondents",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_any_correspondent",
to="documents.correspondent",
verbose_name="has one of these correspondents",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_any_document_types",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_any_document_type",
to="documents.documenttype",
verbose_name="has one of these document types",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_any_storage_paths",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_any_storage_path",
to="documents.storagepath",
verbose_name="has one of these storage paths",
),
),
]

View File

@@ -1066,13 +1066,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this document type"),
)
filter_has_any_document_types = models.ManyToManyField(
DocumentType,
blank=True,
related_name="workflowtriggers_has_any_document_type",
verbose_name=_("has one of these document types"),
)
filter_has_not_document_types = models.ManyToManyField(
DocumentType,
blank=True,
@@ -1095,13 +1088,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("does not have these correspondent(s)"),
)
filter_has_any_correspondents = models.ManyToManyField(
Correspondent,
blank=True,
related_name="workflowtriggers_has_any_correspondent",
verbose_name=_("has one of these correspondents"),
)
filter_has_storage_path = models.ForeignKey(
StoragePath,
null=True,
@@ -1110,13 +1096,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this storage path"),
)
filter_has_any_storage_paths = models.ManyToManyField(
StoragePath,
blank=True,
related_name="workflowtriggers_has_any_storage_path",
verbose_name=_("has one of these storage paths"),
)
filter_has_not_storage_paths = models.ManyToManyField(
StoragePath,
blank=True,

View File

@@ -2299,11 +2299,8 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
"filter_has_all_tags",
"filter_has_not_tags",
"filter_custom_field_query",
"filter_has_any_correspondents",
"filter_has_not_correspondents",
"filter_has_any_document_types",
"filter_has_not_document_types",
"filter_has_any_storage_paths",
"filter_has_not_storage_paths",
"filter_has_correspondent",
"filter_has_document_type",
@@ -2541,26 +2538,14 @@ class WorkflowSerializer(serializers.ModelSerializer):
filter_has_tags = trigger.pop("filter_has_tags", None)
filter_has_all_tags = trigger.pop("filter_has_all_tags", None)
filter_has_not_tags = trigger.pop("filter_has_not_tags", None)
filter_has_any_correspondents = trigger.pop(
"filter_has_any_correspondents",
None,
)
filter_has_not_correspondents = trigger.pop(
"filter_has_not_correspondents",
None,
)
filter_has_any_document_types = trigger.pop(
"filter_has_any_document_types",
None,
)
filter_has_not_document_types = trigger.pop(
"filter_has_not_document_types",
None,
)
filter_has_any_storage_paths = trigger.pop(
"filter_has_any_storage_paths",
None,
)
filter_has_not_storage_paths = trigger.pop(
"filter_has_not_storage_paths",
None,
@@ -2577,26 +2562,14 @@ class WorkflowSerializer(serializers.ModelSerializer):
trigger_instance.filter_has_all_tags.set(filter_has_all_tags)
if filter_has_not_tags is not None:
trigger_instance.filter_has_not_tags.set(filter_has_not_tags)
if filter_has_any_correspondents is not None:
trigger_instance.filter_has_any_correspondents.set(
filter_has_any_correspondents,
)
if filter_has_not_correspondents is not None:
trigger_instance.filter_has_not_correspondents.set(
filter_has_not_correspondents,
)
if filter_has_any_document_types is not None:
trigger_instance.filter_has_any_document_types.set(
filter_has_any_document_types,
)
if filter_has_not_document_types is not None:
trigger_instance.filter_has_not_document_types.set(
filter_has_not_document_types,
)
if filter_has_any_storage_paths is not None:
trigger_instance.filter_has_any_storage_paths.set(
filter_has_any_storage_paths,
)
if filter_has_not_storage_paths is not None:
trigger_instance.filter_has_not_storage_paths.set(
filter_has_not_storage_paths,
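
Note: M2M fields cannot be passed to the model constructor, which is why each remaining filter is popped from the trigger dict and applied via .set() after the trigger instance exists. A minimal sketch of that pattern (the create(**trigger) call is an assumption about how the dict is materialized):

filter_has_not_tags = trigger.pop("filter_has_not_tags", None)
trigger_instance = WorkflowTrigger.objects.create(**trigger)
if filter_has_not_tags is not None:
    trigger_instance.filter_has_not_tags.set(filter_has_not_tags)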

View File

@@ -19,7 +19,6 @@ from django.db import DatabaseError
from django.db import close_old_connections
from django.db import connections
from django.db import models
from django.db import transaction
from django.db.models import Q
from django.dispatch import receiver
from django.utils import timezone
@@ -454,94 +453,62 @@ def update_filename_and_move_files(
# This will in turn cause this logic to move the file where it belongs.
return
move_original = False
move_archive = False
old_filename = None
old_archive_filename = None
old_source_path = None
old_archive_path = None
with FileLock(settings.MEDIA_LOCK):
try:
# If this was waiting for the lock, the filename or archive_filename
# of this document may have been updated. This happens if multiple updates
# get queued from the UI for the same document
# So freshen up the data before doing anything
instance.refresh_from_db()
try:
with transaction.atomic():
Document.global_objects.select_for_update().get(pk=instance.pk)
with FileLock(settings.MEDIA_LOCK):
# If this was waiting for the lock, the filename or archive_filename
# of this document may have been updated. This happens if multiple updates
# get queued from the UI for the same document
# So freshen up the data before doing anything
instance.refresh_from_db()
old_filename = instance.filename
old_source_path = instance.source_path
old_filename = instance.filename
old_source_path = instance.source_path
candidate_filename = generate_filename(instance)
candidate_source_path = (
settings.ORIGINALS_DIR / candidate_filename
).resolve()
if candidate_filename == Path(old_filename):
new_filename = Path(old_filename)
elif (
candidate_source_path.exists()
and candidate_source_path != old_source_path
):
# Only fall back to unique search when there is an actual conflict
new_filename = generate_unique_filename(instance)
else:
new_filename = candidate_filename
candidate_filename = generate_filename(instance)
candidate_source_path = (
settings.ORIGINALS_DIR / candidate_filename
# Need to convert to string to be able to save it to the db
instance.filename = str(new_filename)
move_original = old_filename != instance.filename
old_archive_filename = instance.archive_filename
old_archive_path = instance.archive_path
if instance.has_archive_version:
archive_candidate = generate_filename(instance, archive_filename=True)
archive_candidate_path = (
settings.ARCHIVE_DIR / archive_candidate
).resolve()
if candidate_filename == Path(old_filename):
new_filename = Path(old_filename)
if archive_candidate == Path(old_archive_filename):
new_archive_filename = Path(old_archive_filename)
elif (
candidate_source_path.exists()
and candidate_source_path != old_source_path
archive_candidate_path.exists()
and archive_candidate_path != old_archive_path
):
# Only fall back to unique search when there is an actual conflict
new_filename = generate_unique_filename(instance)
else:
new_filename = candidate_filename
# Need to convert to string to be able to save it to the db
instance.filename = str(new_filename)
move_original = old_filename != instance.filename
old_archive_filename = instance.archive_filename
old_archive_path = instance.archive_path
if instance.has_archive_version:
archive_candidate = generate_filename(
new_archive_filename = generate_unique_filename(
instance,
archive_filename=True,
)
archive_candidate_path = (
settings.ARCHIVE_DIR / archive_candidate
).resolve()
if archive_candidate == Path(old_archive_filename):
new_archive_filename = Path(old_archive_filename)
elif (
archive_candidate_path.exists()
and archive_candidate_path != old_archive_path
):
new_archive_filename = generate_unique_filename(
instance,
archive_filename=True,
)
else:
new_archive_filename = archive_candidate
instance.archive_filename = str(new_archive_filename)
move_archive = old_archive_filename != instance.archive_filename
else:
move_archive = False
new_archive_filename = archive_candidate
if move_original:
validate_move(
instance,
old_source_path,
instance.source_path,
settings.ORIGINALS_DIR,
)
create_source_path_directory(instance.source_path)
shutil.move(old_source_path, instance.source_path)
instance.archive_filename = str(new_archive_filename)
if move_archive:
validate_move(
instance,
old_archive_path,
instance.archive_path,
settings.ARCHIVE_DIR,
)
create_source_path_directory(instance.archive_path)
shutil.move(old_archive_path, instance.archive_path)
move_archive = old_archive_filename != instance.archive_filename
else:
move_archive = False
if not move_original and not move_archive:
# Just update modified. Also, don't save() here to prevent infinite recursion.
@@ -550,6 +517,26 @@ def update_filename_and_move_files(
)
return
if move_original:
validate_move(
instance,
old_source_path,
instance.source_path,
settings.ORIGINALS_DIR,
)
create_source_path_directory(instance.source_path)
shutil.move(old_source_path, instance.source_path)
if move_archive:
validate_move(
instance,
old_archive_path,
instance.archive_path,
settings.ARCHIVE_DIR,
)
create_source_path_directory(instance.archive_path)
shutil.move(old_archive_path, instance.archive_path)
# Don't save() here to prevent infinite recursion.
Document.global_objects.filter(pk=instance.pk).update(
filename=instance.filename,
@@ -559,24 +546,22 @@ def update_filename_and_move_files(
# Clear any caching for this document. Slightly overkill, but not terrible
clear_document_caches(instance.pk)
except (OSError, DatabaseError, CannotMoveFilesException) as e:
logger.warning(f"Exception during file handling: {e}")
# This happens when either:
# - moving the files failed due to file system errors
# - saving to the database failed due to database errors
# In both cases, we need to revert to the original state.
except (OSError, DatabaseError, CannotMoveFilesException) as e:
logger.warning(f"Exception during file handling: {e}")
# This happens when either:
# - moving the files failed due to file system errors
# - saving to the database failed due to database errors
# In both cases, we need to revert to the original state.
if move_original or move_archive:
# Try to move files to their original location.
try:
with FileLock(settings.MEDIA_LOCK):
if move_original and instance.source_path.is_file():
logger.info("Restoring previous original path")
shutil.move(instance.source_path, old_source_path)
if move_original and instance.source_path.is_file():
logger.info("Restoring previous original path")
shutil.move(instance.source_path, old_source_path)
if move_archive and instance.archive_path.is_file():
logger.info("Restoring previous archive path")
shutil.move(instance.archive_path, old_archive_path)
if move_archive and instance.archive_path.is_file():
logger.info("Restoring previous archive path")
shutil.move(instance.archive_path, old_archive_path)
except Exception:
# This is fine, since:
@@ -589,29 +574,23 @@ def update_filename_and_move_files(
# anyway.
pass
# restore old values on the instance
if old_filename is not None:
# restore old values on the instance
instance.filename = old_filename
if old_archive_filename is not None:
instance.archive_filename = old_archive_filename
# finally, remove any empty sub folders. This will do nothing if
# something has failed above.
if old_source_path and not old_source_path.is_file():
delete_empty_directories(
Path(old_source_path).parent,
root=settings.ORIGINALS_DIR,
)
# finally, remove any empty sub folders. This will do nothing if
# something has failed above.
if not old_source_path.is_file():
delete_empty_directories(
Path(old_source_path).parent,
root=settings.ORIGINALS_DIR,
)
if (
instance.has_archive_version
and old_archive_path
and not old_archive_path.is_file()
):
delete_empty_directories(
Path(old_archive_path).parent,
root=settings.ARCHIVE_DIR,
)
if instance.has_archive_version and not old_archive_path.is_file():
delete_empty_directories(
Path(old_archive_path).parent,
root=settings.ARCHIVE_DIR,
)
@shared_task
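
Note: the signals refactor reorders the locks: a select_for_update() row lock is taken before the media lock, so concurrent renames of the same document serialize in the database instead of racing on files, and the move/rollback logic now runs once after the names are settled. A heavily condensed sketch of the new shape:

try:
    with transaction.atomic():
        # 1) row lock: concurrent updates of this document queue here
        Document.global_objects.select_for_update().get(pk=instance.pk)
        with FileLock(settings.MEDIA_LOCK):
            # 2) media lock held while names are computed and files moved
            instance.refresh_from_db()  # pick up changes made while waiting
            ...  # compute filenames, move files, bulk-update the row
except (OSError, DatabaseError, CannotMoveFilesException) as e:
    logger.warning(f"Exception during file handling: {e}")
    ...  # move files back, restore old names on the instance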

View File

@@ -186,11 +186,8 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"filter_has_tags": [self.t1.id],
"filter_has_all_tags": [self.t2.id],
"filter_has_not_tags": [self.t3.id],
"filter_has_any_correspondents": [self.c.id],
"filter_has_not_correspondents": [self.c2.id],
"filter_has_any_document_types": [self.dt.id],
"filter_has_not_document_types": [self.dt2.id],
"filter_has_any_storage_paths": [self.sp.id],
"filter_has_not_storage_paths": [self.sp2.id],
"filter_custom_field_query": json.dumps(
[
@@ -251,26 +248,14 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
set(trigger.filter_has_not_tags.values_list("id", flat=True)),
{self.t3.id},
)
self.assertSetEqual(
set(trigger.filter_has_any_correspondents.values_list("id", flat=True)),
{self.c.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_correspondents.values_list("id", flat=True)),
{self.c2.id},
)
self.assertSetEqual(
set(trigger.filter_has_any_document_types.values_list("id", flat=True)),
{self.dt.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_document_types.values_list("id", flat=True)),
{self.dt2.id},
)
self.assertSetEqual(
set(trigger.filter_has_any_storage_paths.values_list("id", flat=True)),
{self.sp.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_storage_paths.values_list("id", flat=True)),
{self.sp2.id},
@@ -434,11 +419,8 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"filter_has_tags": [self.t1.id],
"filter_has_all_tags": [self.t2.id],
"filter_has_not_tags": [self.t3.id],
"filter_has_any_correspondents": [self.c.id],
"filter_has_not_correspondents": [self.c2.id],
"filter_has_any_document_types": [self.dt.id],
"filter_has_not_document_types": [self.dt2.id],
"filter_has_any_storage_paths": [self.sp.id],
"filter_has_not_storage_paths": [self.sp2.id],
"filter_custom_field_query": json.dumps(
["AND", [[self.cf1.id, "exact", "value"]]],
@@ -468,26 +450,14 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
workflow.triggers.first().filter_has_not_tags.first(),
self.t3,
)
self.assertEqual(
workflow.triggers.first().filter_has_any_correspondents.first(),
self.c,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_correspondents.first(),
self.c2,
)
self.assertEqual(
workflow.triggers.first().filter_has_any_document_types.first(),
self.dt,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_document_types.first(),
self.dt2,
)
self.assertEqual(
workflow.triggers.first().filter_has_any_storage_paths.first(),
self.sp,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_storage_paths.first(),
self.sp2,

View File

@@ -603,23 +603,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename,
)
self.assertEqual(consume_file_args[1].title, None)
self.assertTrue(consume_file_args[1].skip_asn)
# No metadata_document_id, delete_originals False, so ASN should be None
self.assertIsNone(consume_file_args[1].asn)
# With metadata_document_id overrides
result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (merged)")
self.assertEqual(consume_file_args[1].created, self.doc2.created)
self.assertTrue(consume_file_args[1].skip_asn)
self.assertEqual(result, "OK")
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chain")
def test_merge_and_delete_originals(
self,
mock_chain,
mock_consume_file,
mock_delete_documents,
):
@@ -633,6 +631,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- Document deletion task should be called
"""
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 102
self.doc3.archive_serial_number = 103
self.doc1.save()
self.doc2.save()
self.doc3.save()
result = bulk_edit.merge(doc_ids, delete_originals=True)
self.assertEqual(result, "OK")
@@ -643,7 +647,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_file.assert_called()
mock_delete_documents.assert_called()
mock_chain.assert_called_once()
consume_sig = mock_consume_file.return_value
consume_sig.apply_async.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(
@@ -651,7 +656,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename,
)
self.assertEqual(consume_file_args[1].title, None)
self.assertTrue(consume_file_args[1].skip_asn)
self.assertEqual(consume_file_args[1].asn, 101)
delete_documents_args, _ = mock_delete_documents.call_args
self.assertEqual(
@@ -659,6 +664,92 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids,
)
self.doc1.refresh_from_db()
self.doc2.refresh_from_db()
self.doc3.refresh_from_db()
self.assertIsNone(self.doc1.archive_serial_number)
self.assertIsNone(self.doc2.archive_serial_number)
self.assertIsNone(self.doc3.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_restore_on_failure(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Merge action with deleting documents is called with 1 document
- Error occurs when queuing consume file task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc1.id]
self.doc1.archive_serial_number = 111
self.doc1.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_consume_file.return_value = sig
with self.assertRaises(Exception):
bulk_edit.merge(doc_ids, delete_originals=True)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 111)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_metadata_handoff(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents with ASNs
WHEN:
- Merge with delete_originals=True and metadata_document_id set
THEN:
- Handoff ASN uses metadata document ASN
"""
doc_ids = [self.doc1.id, self.doc2.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 202
self.doc1.save()
self.doc2.save()
result = bulk_edit.merge(
doc_ids,
metadata_document_id=self.doc2.id,
delete_originals=True,
)
self.assertEqual(result, "OK")
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 202)
def test_restore_archive_serial_numbers_task(self):
"""
GIVEN:
- Existing document with no archive serial number
WHEN:
- Restore archive serial number task is called with backup data
THEN:
- Document archive serial number is restored
"""
self.doc1.archive_serial_number = 444
self.doc1.save()
Document.objects.filter(pk=self.doc1.id).update(archive_serial_number=None)
backup = {self.doc1.id: 444}
bulk_edit.restore_archive_serial_numbers_task(backup)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 444)
@mock.patch("documents.tasks.consume_file.s")
def test_merge_with_archive_fallback(self, mock_consume_file):
"""
@@ -727,6 +818,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(mock_consume_file.call_count, 2)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (split 2)")
self.assertIsNone(consume_file_args[1].asn)
self.assertEqual(result, "OK")
@@ -751,6 +843,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
"""
doc_ids = [self.doc2.id]
pages = [[1, 2], [3]]
self.doc2.archive_serial_number = 200
self.doc2.save()
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK")
@@ -768,6 +862,42 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids,
)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_split_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Split action with deleting documents is called with 1 document and 2 page groups
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
pages = [[1, 2]]
self.doc2.archive_serial_number = 222
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
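# Make the chord raise when applied so the restore path runs.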
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK")
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 222)
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("pikepdf.Pdf.save")
def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
@@ -968,10 +1098,49 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_chord.return_value.delay.return_value = None
doc_ids = [self.doc2.id]
operations = [{"page": 1}, {"page": 2}]
self.doc2.archive_serial_number = 250
self.doc2.save()
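# The edited document should inherit the original's ASN (250) while the original's ASN is released.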
result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.assertEqual(result, "OK")
mock_chord.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 250)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_edit_pdf_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing document
WHEN:
- edit_pdf is called with delete_original=True
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
operations = [{"page": 1}]
self.doc2.archive_serial_number = 333
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
with self.assertRaises(Exception):
bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 333)
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
def test_edit_pdf_with_update_document(self, mock_update_document):

View File

@@ -14,6 +14,7 @@ from django.test import override_settings
from django.utils import timezone
from guardian.core import ObjectPermissionChecker
from documents.barcodes import BarcodePlugin
from documents.consumer import ConsumerError
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
@@ -412,14 +413,6 @@ class TestConsumer(
self.assertEqual(document.archive_serial_number, 123)
self._assert_first_last_send_progress()
def testMetadataOverridesSkipAsnPropagation(self):
overrides = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn=True)
overrides.update(incoming)
self.assertTrue(overrides.skip_asn)
def testOverrideTitlePlaceholders(self):
c = Correspondent.objects.create(name="Correspondent Name")
dt = DocumentType.objects.create(name="DocType Name")
@@ -1240,3 +1233,46 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.",
):
consumer.run_post_consume_script(doc)
class TestMetadataOverrides(TestCase):
def test_update_skip_asn_if_exists(self):
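# update() is assumed to copy truthy flags from the incoming overrides onto the base instance.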
base = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
base.update(incoming)
self.assertTrue(base.skip_asn_if_exists)
class TestBarcodeApplyDetectedASN(TestCase):
"""
GIVEN:
- Existing Documents with ASN 123
WHEN:
- A BarcodePlugin which detected an ASN
THEN:
- If skip_asn_if_exists is set, and ASN exists, do not set ASN
- If skip_asn_if_exists is set, and ASN does not exist, set ASN
"""
def test_apply_detected_asn_skips_existing_when_flag_set(self):
doc = Document.objects.create(
checksum="X1",
title="D1",
archive_serial_number=123,
)
metadata = DocumentMetadataOverrides(skip_asn_if_exists=True)
plugin = BarcodePlugin(
input_doc=mock.Mock(),
metadata=metadata,
status_mgr=mock.Mock(),
base_tmp_dir=tempfile.gettempdir(),
task_id="test-task",
)
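# input_doc and status_mgr are assumed irrelevant to _apply_detected_asn(), so plain mocks suffice.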
plugin._apply_detected_asn(123)
self.assertIsNone(plugin.metadata.asn)
doc.hard_delete()
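# With the conflicting document removed, the same detected ASN should now be applied.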
plugin._apply_detected_asn(123)
self.assertEqual(plugin.metadata.asn, 123)

View File

@@ -1276,76 +1276,6 @@ class TestWorkflows(
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_any_filters(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_any_correspondents.set([self.c])
trigger.filter_has_any_document_types.set([self.dt])
trigger.filter_has_any_storage_paths.set([self.sp])
matching_doc = Document.objects.create(
title="sample test",
correspondent=self.c,
document_type=self.dt,
storage_path=self.sp,
original_filename="sample.pdf",
checksum="checksum-any-match",
)
matched, reason = existing_document_matches_workflow(matching_doc, trigger)
self.assertTrue(matched)
self.assertIsNone(reason)
wrong_correspondent = Document.objects.create(
title="wrong correspondent",
correspondent=self.c2,
document_type=self.dt,
storage_path=self.sp,
original_filename="sample2.pdf",
)
matched, reason = existing_document_matches_workflow(
wrong_correspondent,
trigger,
)
self.assertFalse(matched)
self.assertIn("correspondent", reason)
other_document_type = DocumentType.objects.create(name="Other")
wrong_document_type = Document.objects.create(
title="wrong doc type",
correspondent=self.c,
document_type=other_document_type,
storage_path=self.sp,
original_filename="sample3.pdf",
checksum="checksum-wrong-doc-type",
)
matched, reason = existing_document_matches_workflow(
wrong_document_type,
trigger,
)
self.assertFalse(matched)
self.assertIn("doc type", reason)
other_storage_path = StoragePath.objects.create(
name="Other path",
path="/other/",
)
wrong_storage_path = Document.objects.create(
title="wrong storage",
correspondent=self.c,
document_type=self.dt,
storage_path=other_storage_path,
original_filename="sample4.pdf",
checksum="checksum-wrong-storage-path",
)
matched, reason = existing_document_matches_workflow(
wrong_storage_path,
trigger,
)
self.assertFalse(matched)
self.assertIn("storage path", reason)
def test_document_added_custom_field_query_no_match(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -1454,39 +1384,6 @@ class TestWorkflows(
self.assertIn(doc1, filtered)
self.assertNotIn(doc2, filtered)
def test_prefilter_documents_any_filters(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_any_correspondents.set([self.c])
trigger.filter_has_any_document_types.set([self.dt])
trigger.filter_has_any_storage_paths.set([self.sp])
allowed_document = Document.objects.create(
title="allowed",
correspondent=self.c,
document_type=self.dt,
storage_path=self.sp,
original_filename="doc-allowed.pdf",
checksum="checksum-any-allowed",
)
blocked_document = Document.objects.create(
title="blocked",
correspondent=self.c2,
document_type=self.dt,
storage_path=self.sp,
original_filename="doc-blocked.pdf",
checksum="checksum-any-blocked",
)
filtered = prefilter_documents_by_workflowtrigger(
Document.objects.all(),
trigger,
)
self.assertIn(allowed_document, filtered)
self.assertNotIn(blocked_document, filtered)
def test_consumption_trigger_requires_filter_configuration(self):
serializer = WorkflowTriggerSerializer(
data={

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-01-25 21:46+0000\n"
"POT-Creation-Date: 2026-01-25 03:30+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -89,7 +89,7 @@ msgstr ""
msgid "Automatic"
msgstr ""
#: documents/models.py:64 documents/models.py:434 documents/models.py:1528
#: documents/models.py:64 documents/models.py:434 documents/models.py:1507
#: paperless_mail/models.py:23 paperless_mail/models.py:143
msgid "name"
msgstr ""
@@ -252,7 +252,7 @@ msgid "The position of this document in your physical document archive."
msgstr ""
#: documents/models.py:303 documents/models.py:678 documents/models.py:732
#: documents/models.py:1571
#: documents/models.py:1550
msgid "document"
msgstr ""
@@ -869,358 +869,346 @@ msgid "has this document type"
msgstr ""
#: documents/models.py:1073
msgid "has one of these document types"
msgstr ""
#: documents/models.py:1080
msgid "does not have these document type(s)"
msgstr ""
#: documents/models.py:1088
#: documents/models.py:1081
msgid "has this correspondent"
msgstr ""
#: documents/models.py:1095
#: documents/models.py:1088
msgid "does not have these correspondent(s)"
msgstr ""
#: documents/models.py:1102
msgid "has one of these correspondents"
msgstr ""
#: documents/models.py:1110
#: documents/models.py:1096
msgid "has this storage path"
msgstr ""
#: documents/models.py:1117
msgid "has one of these storage paths"
msgstr ""
#: documents/models.py:1124
#: documents/models.py:1103
msgid "does not have these storage path(s)"
msgstr ""
#: documents/models.py:1128
#: documents/models.py:1107
msgid "filter custom field query"
msgstr ""
#: documents/models.py:1131
#: documents/models.py:1110
msgid "JSON-encoded custom field query expression."
msgstr ""
#: documents/models.py:1135
#: documents/models.py:1114
msgid "schedule offset days"
msgstr ""
#: documents/models.py:1138
#: documents/models.py:1117
msgid "The number of days to offset the schedule trigger by."
msgstr ""
#: documents/models.py:1143
#: documents/models.py:1122
msgid "schedule is recurring"
msgstr ""
#: documents/models.py:1146
#: documents/models.py:1125
msgid "If the schedule should be recurring."
msgstr ""
#: documents/models.py:1151
#: documents/models.py:1130
msgid "schedule recurring delay in days"
msgstr ""
#: documents/models.py:1155
#: documents/models.py:1134
msgid "The number of days between recurring schedule triggers."
msgstr ""
#: documents/models.py:1160
#: documents/models.py:1139
msgid "schedule date field"
msgstr ""
#: documents/models.py:1165
#: documents/models.py:1144
msgid "The field to check for a schedule trigger."
msgstr ""
#: documents/models.py:1174
#: documents/models.py:1153
msgid "schedule date custom field"
msgstr ""
#: documents/models.py:1178
#: documents/models.py:1157
msgid "workflow trigger"
msgstr ""
#: documents/models.py:1179
#: documents/models.py:1158
msgid "workflow triggers"
msgstr ""
#: documents/models.py:1187
#: documents/models.py:1166
msgid "email subject"
msgstr ""
#: documents/models.py:1191
#: documents/models.py:1170
msgid ""
"The subject of the email, can include some placeholders, see documentation."
msgstr ""
#: documents/models.py:1197
#: documents/models.py:1176
msgid "email body"
msgstr ""
#: documents/models.py:1200
#: documents/models.py:1179
msgid ""
"The body (message) of the email, can include some placeholders, see "
"documentation."
msgstr ""
#: documents/models.py:1206
#: documents/models.py:1185
msgid "emails to"
msgstr ""
#: documents/models.py:1209
#: documents/models.py:1188
msgid "The destination email addresses, comma separated."
msgstr ""
#: documents/models.py:1215
#: documents/models.py:1194
msgid "include document in email"
msgstr ""
#: documents/models.py:1226
#: documents/models.py:1205
msgid "webhook url"
msgstr ""
#: documents/models.py:1229
#: documents/models.py:1208
msgid "The destination URL for the notification."
msgstr ""
#: documents/models.py:1234
#: documents/models.py:1213
msgid "use parameters"
msgstr ""
#: documents/models.py:1239
#: documents/models.py:1218
msgid "send as JSON"
msgstr ""
#: documents/models.py:1243
#: documents/models.py:1222
msgid "webhook parameters"
msgstr ""
#: documents/models.py:1246
#: documents/models.py:1225
msgid "The parameters to send with the webhook URL if body not used."
msgstr ""
#: documents/models.py:1250
#: documents/models.py:1229
msgid "webhook body"
msgstr ""
#: documents/models.py:1253
#: documents/models.py:1232
msgid "The body to send with the webhook URL if parameters not used."
msgstr ""
#: documents/models.py:1257
#: documents/models.py:1236
msgid "webhook headers"
msgstr ""
#: documents/models.py:1260
#: documents/models.py:1239
msgid "The headers to send with the webhook URL."
msgstr ""
#: documents/models.py:1265
#: documents/models.py:1244
msgid "include document in webhook"
msgstr ""
#: documents/models.py:1276
#: documents/models.py:1255
msgid "Assignment"
msgstr ""
#: documents/models.py:1280
#: documents/models.py:1259
msgid "Removal"
msgstr ""
#: documents/models.py:1284 documents/templates/account/password_reset.html:15
#: documents/models.py:1263 documents/templates/account/password_reset.html:15
msgid "Email"
msgstr ""
#: documents/models.py:1288
#: documents/models.py:1267
msgid "Webhook"
msgstr ""
#: documents/models.py:1292
#: documents/models.py:1271
msgid "Workflow Action Type"
msgstr ""
#: documents/models.py:1297 documents/models.py:1530
#: documents/models.py:1276 documents/models.py:1509
#: paperless_mail/models.py:145
msgid "order"
msgstr ""
#: documents/models.py:1300
#: documents/models.py:1279
msgid "assign title"
msgstr ""
#: documents/models.py:1304
#: documents/models.py:1283
msgid "Assign a document title, must be a Jinja2 template, see documentation."
msgstr ""
#: documents/models.py:1312 paperless_mail/models.py:274
#: documents/models.py:1291 paperless_mail/models.py:274
msgid "assign this tag"
msgstr ""
#: documents/models.py:1321 paperless_mail/models.py:282
#: documents/models.py:1300 paperless_mail/models.py:282
msgid "assign this document type"
msgstr ""
#: documents/models.py:1330 paperless_mail/models.py:296
#: documents/models.py:1309 paperless_mail/models.py:296
msgid "assign this correspondent"
msgstr ""
#: documents/models.py:1339
#: documents/models.py:1318
msgid "assign this storage path"
msgstr ""
#: documents/models.py:1348
#: documents/models.py:1327
msgid "assign this owner"
msgstr ""
#: documents/models.py:1355
#: documents/models.py:1334
msgid "grant view permissions to these users"
msgstr ""
#: documents/models.py:1362
#: documents/models.py:1341
msgid "grant view permissions to these groups"
msgstr ""
#: documents/models.py:1369
#: documents/models.py:1348
msgid "grant change permissions to these users"
msgstr ""
#: documents/models.py:1376
#: documents/models.py:1355
msgid "grant change permissions to these groups"
msgstr ""
#: documents/models.py:1383
#: documents/models.py:1362
msgid "assign these custom fields"
msgstr ""
#: documents/models.py:1387
#: documents/models.py:1366
msgid "custom field values"
msgstr ""
#: documents/models.py:1391
#: documents/models.py:1370
msgid "Optional values to assign to the custom fields."
msgstr ""
#: documents/models.py:1400
#: documents/models.py:1379
msgid "remove these tag(s)"
msgstr ""
#: documents/models.py:1405
#: documents/models.py:1384
msgid "remove all tags"
msgstr ""
#: documents/models.py:1412
#: documents/models.py:1391
msgid "remove these document type(s)"
msgstr ""
#: documents/models.py:1417
#: documents/models.py:1396
msgid "remove all document types"
msgstr ""
#: documents/models.py:1424
#: documents/models.py:1403
msgid "remove these correspondent(s)"
msgstr ""
#: documents/models.py:1429
#: documents/models.py:1408
msgid "remove all correspondents"
msgstr ""
#: documents/models.py:1436
#: documents/models.py:1415
msgid "remove these storage path(s)"
msgstr ""
#: documents/models.py:1441
#: documents/models.py:1420
msgid "remove all storage paths"
msgstr ""
#: documents/models.py:1448
#: documents/models.py:1427
msgid "remove these owner(s)"
msgstr ""
#: documents/models.py:1453
#: documents/models.py:1432
msgid "remove all owners"
msgstr ""
#: documents/models.py:1460
#: documents/models.py:1439
msgid "remove view permissions for these users"
msgstr ""
#: documents/models.py:1467
#: documents/models.py:1446
msgid "remove view permissions for these groups"
msgstr ""
#: documents/models.py:1474
#: documents/models.py:1453
msgid "remove change permissions for these users"
msgstr ""
#: documents/models.py:1481
#: documents/models.py:1460
msgid "remove change permissions for these groups"
msgstr ""
#: documents/models.py:1486
#: documents/models.py:1465
msgid "remove all permissions"
msgstr ""
#: documents/models.py:1493
#: documents/models.py:1472
msgid "remove these custom fields"
msgstr ""
#: documents/models.py:1498
#: documents/models.py:1477
msgid "remove all custom fields"
msgstr ""
#: documents/models.py:1507
#: documents/models.py:1486
msgid "email"
msgstr ""
#: documents/models.py:1516
#: documents/models.py:1495
msgid "webhook"
msgstr ""
#: documents/models.py:1520
#: documents/models.py:1499
msgid "workflow action"
msgstr ""
#: documents/models.py:1521
#: documents/models.py:1500
msgid "workflow actions"
msgstr ""
#: documents/models.py:1536
#: documents/models.py:1515
msgid "triggers"
msgstr ""
#: documents/models.py:1543
#: documents/models.py:1522
msgid "actions"
msgstr ""
#: documents/models.py:1546 paperless_mail/models.py:154
#: documents/models.py:1525 paperless_mail/models.py:154
msgid "enabled"
msgstr ""
#: documents/models.py:1557
#: documents/models.py:1536
msgid "workflow"
msgstr ""
#: documents/models.py:1561
#: documents/models.py:1540
msgid "workflow trigger type"
msgstr ""
#: documents/models.py:1575
#: documents/models.py:1554
msgid "date run"
msgstr ""
#: documents/models.py:1581
#: documents/models.py:1560
msgid "workflow run"
msgstr ""
#: documents/models.py:1582
#: documents/models.py:1561
msgid "workflow runs"
msgstr ""