Compare commits

..

4 Commits

Author   SHA1        Message                                                                   Date
shamoon  5fe46cac55  Mas testing                                                               2026-01-24 20:05:25 -08:00
shamoon  c0c2202564  skip_asn_if_exists                                                        2026-01-24 20:05:25 -08:00
shamoon  d65d9a2b88  "Handoff" ASN when merging or editing PDFs                                2026-01-24 20:05:25 -08:00
shamoon  8e12f3e93c  First, release ASNs before document replacement (and restore if needed)  2026-01-24 20:05:25 -08:00
64 changed files with 754 additions and 1265 deletions

View File

@@ -23,7 +23,7 @@ env:
jobs:
build:
name: Build Documentation
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
@@ -58,7 +58,7 @@ jobs:
name: Deploy Documentation
needs: build
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6

View File

@@ -12,7 +12,7 @@ concurrency:
jobs:
pre-commit:
name: Pre-commit Checks
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6

View File

@@ -10,7 +10,7 @@ jobs:
synchronize-with-crowdin:
name: Crowdin Sync
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6

View File

@@ -8,7 +8,7 @@ permissions:
jobs:
pr-bot:
name: Automated PR Bot
runs-on: ubuntu-slim
runs-on: ubuntu-latest
steps:
- name: Label PR by file path or branch name
# see .github/labeler.yml for the labeler config

View File

@@ -12,7 +12,7 @@ permissions:
jobs:
pr_opened_or_reopened:
name: pr_opened_or_reopened
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
permissions:
# write permission is required for autolabeler
pull-requests: write

View File

@@ -13,7 +13,7 @@ jobs:
stale:
name: 'Stale'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/stale@v10
with:
@@ -35,7 +35,7 @@ jobs:
lock-threads:
name: 'Lock Old Threads'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: dessant/lock-threads@v6
with:
@@ -55,7 +55,7 @@ jobs:
close-answered-discussions:
name: 'Close Answered Discussions'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v8
with:
@@ -112,7 +112,7 @@ jobs:
close-outdated-discussions:
name: 'Close Outdated Discussions'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v8
with:
@@ -204,7 +204,7 @@ jobs:
close-unsupported-feature-requests:
name: 'Close Unsupported Feature Requests'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v8
with:

View File

@@ -6,7 +6,7 @@ on:
jobs:
generate-translate-strings:
name: Generate Translation Strings
runs-on: ubuntu-slim
runs-on: ubuntu-latest
permissions:
contents: write
steps:

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py management_command "$@"
else
echo "Unknown user."
fi
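Note: this hunk repeats verbatim (only the management command name changes) across the fifteen wrapper-script diffs that follow. For orientation, a minimal sketch of the combined dispatch these Bash wrappers appear to implement, written in Python for illustration only; `management_command` stands in for the per-script command, and the assumption is that the hunk's lines combine into the usual non-root / root / paperless-user three-way check:

```python
import os
import pwd


def run_management_command(args: list[str]) -> None:
    """Dispatch sketch mirroring the wrapper scripts above; assumes it
    runs inside the container with PAPERLESS_SRC_DIR set."""
    os.chdir(os.environ["PAPERLESS_SRC_DIR"])
    cmd = ["python3", "manage.py", "management_command", *args]
    if os.environ.get("USER_IS_NON_ROOT"):
        os.execvp(cmd[0], cmd)  # rootless image: run as the current user
    elif os.geteuid() == 0:
        # running as root: drop privileges to "paperless" via s6
        os.execvp("s6-setuidgid", ["s6-setuidgid", "paperless", *cmd])
    elif pwd.getpwuid(os.geteuid()).pw_name == "paperless":  # same as `id -un`
        os.execvp(cmd[0], cmd)  # already the paperless user
    else:
        print("Unknown user.")
```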

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py convert_mariadb_uuid "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py convert_mariadb_uuid "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py convert_mariadb_uuid "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py createsuperuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py createsuperuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py createsuperuser "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_archiver "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_archiver "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_archiver "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_create_classifier "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_create_classifier "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_create_classifier "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_exporter "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_exporter "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_exporter "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_fuzzy_match "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_fuzzy_match "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_fuzzy_match "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_importer "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_importer "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_importer "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_index "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_index "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_index "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_renamer "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_renamer "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_renamer "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_retagger "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_retagger "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_retagger "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_sanity_checker "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_sanity_checker "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_sanity_checker "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_thumbnails "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_thumbnails "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_thumbnails "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py mail_fetcher "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py mail_fetcher "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py mail_fetcher "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py manage_superuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py manage_superuser "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py manage_superuser "$@"
else
echo "Unknown user."
fi

View File

@@ -5,8 +5,10 @@ set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py prune_audit_logs "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py prune_audit_logs "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py prune_audit_logs "$@"
else
echo "Unknown user."
fi

View File

@@ -8,7 +8,7 @@ Further documentation is provided here for some endpoints and features.
## Authorization
The REST api provides five different forms of authentication.
The REST api provides four different forms of authentication.
1. Basic authentication
@@ -52,14 +52,6 @@ The REST api provides five different forms of authentication.
[configuration](configuration.md#PAPERLESS_ENABLE_HTTP_REMOTE_USER_API)),
you can authenticate against the API using Remote User auth.
5. Headless OIDC via [`django-allauth`](https://codeberg.org/allauth/django-allauth)
`django-allauth` exposes API endpoints under `api/auth/` which enable tools
like third-party apps to authenticate with social accounts that are
configured. See
[here](advanced_usage.md#openid-connect-and-social-authentication) for more
information on social accounts.
## Searching for documents
Full text searching is available on the `/api/documents/` endpoint. Two
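Note: this hunk trims the authentication list from five entries to four by dropping the headless-OIDC option. For the options that remain, a minimal sketch of option 1 (Basic authentication) against the `/api/documents/` endpoint mentioned above; the host and credentials are placeholders:

```python
import requests

# Basic authentication (option 1 in the list above) against a local
# instance; URL, username, and password are placeholder assumptions.
resp = requests.get(
    "http://localhost:8000/api/documents/",
    auth=("username", "password"),
)
resp.raise_for_status()
print(resp.json()["count"])  # total number of matching documents
```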

View File

@@ -659,7 +659,7 @@ system. See the corresponding
: Sync groups from the third party authentication system (e.g. OIDC) to Paperless-ngx. When enabled, users will be added or removed from groups based on their group membership in the third party authentication system. Groups must already exist in Paperless-ngx and have the same name as in the third party authentication system. Groups are updated upon logging in via the third party authentication system, see the corresponding [django-allauth documentation](https://docs.allauth.org/en/dev/socialaccount/signals.html).
: In order to pass groups from the authentication system you will need to update your [PAPERLESS_SOCIALACCOUNT_PROVIDERS](#PAPERLESS_SOCIALACCOUNT_PROVIDERS) setting by adding a top-level "SCOPES" setting which includes "groups", or the custom groups claim configured in [`PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM`](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM) e.g.:
: In order to pass groups from the authentication system you will need to update your [PAPERLESS_SOCIALACCOUNT_PROVIDERS](#PAPERLESS_SOCIALACCOUNT_PROVIDERS) setting by adding a top-level "SCOPES" setting which includes "groups", e.g.:
```json
{"openid_connect":{"SCOPE": ["openid","profile","email","groups"]...
@@ -667,12 +667,6 @@ system. See the corresponding
Defaults to False
#### [`PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM=<str>`](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM) {#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM}
: Allows you to define a custom groups claim. See [PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS) which is required for this setting to take effect.
Defaults to "groups"
#### [`PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS=<comma-separated-list>`](#PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS) {#PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS}
: A list of group names that users who signup via social accounts will be added to upon signup. Groups listed here must already exist.
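Note: the removed paragraph covered the custom groups claim; the surviving text still requires a top-level "SCOPE" entry containing "groups". Since the JSON above is truncated, here is a hedged sketch of composing a complete `PAPERLESS_SOCIALACCOUNT_PROVIDERS` value in Python; the app fields follow django-allauth's documented `openid_connect` layout, and every concrete value is a placeholder:

```python
import json

# Sketch of a full provider config including the "groups" scope.
providers = {
    "openid_connect": {
        "SCOPE": ["openid", "profile", "email", "groups"],
        "APPS": [
            {
                "provider_id": "oidc",          # placeholder
                "name": "Example IdP",          # placeholder
                "client_id": "<client-id>",
                "secret": "<client-secret>",
                "settings": {"server_url": "https://idp.example.com"},
            }
        ],
    }
}
print(json.dumps(providers))  # paste the output into the environment variable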

View File

@@ -257,7 +257,7 @@ lint.isort.force-single-line = true
[tool.codespell]
write-changes = true
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober"
skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"
[tool.pytest.ini_options]

File diff suppressed because it is too large

View File

@@ -412,9 +412,6 @@ describe('WorkflowEditDialogComponent', () => {
return newFilter
}
const correspondentAny = addFilterOfType(TriggerFilterType.CorrespondentAny)
correspondentAny.get('values').setValue([11])
const correspondentIs = addFilterOfType(TriggerFilterType.CorrespondentIs)
correspondentIs.get('values').setValue(1)
@@ -424,18 +421,12 @@ describe('WorkflowEditDialogComponent', () => {
const documentTypeIs = addFilterOfType(TriggerFilterType.DocumentTypeIs)
documentTypeIs.get('values').setValue(1)
const documentTypeAny = addFilterOfType(TriggerFilterType.DocumentTypeAny)
documentTypeAny.get('values').setValue([12])
const documentTypeNot = addFilterOfType(TriggerFilterType.DocumentTypeNot)
documentTypeNot.get('values').setValue([1])
const storagePathIs = addFilterOfType(TriggerFilterType.StoragePathIs)
storagePathIs.get('values').setValue(1)
const storagePathAny = addFilterOfType(TriggerFilterType.StoragePathAny)
storagePathAny.get('values').setValue([13])
const storagePathNot = addFilterOfType(TriggerFilterType.StoragePathNot)
storagePathNot.get('values').setValue([1])
@@ -450,13 +441,10 @@ describe('WorkflowEditDialogComponent', () => {
expect(formValues.triggers[0].filter_has_tags).toEqual([1])
expect(formValues.triggers[0].filter_has_all_tags).toEqual([2, 3])
expect(formValues.triggers[0].filter_has_not_tags).toEqual([4])
expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([11])
expect(formValues.triggers[0].filter_has_correspondent).toEqual(1)
expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([1])
expect(formValues.triggers[0].filter_has_any_document_types).toEqual([12])
expect(formValues.triggers[0].filter_has_document_type).toEqual(1)
expect(formValues.triggers[0].filter_has_not_document_types).toEqual([1])
expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([13])
expect(formValues.triggers[0].filter_has_storage_path).toEqual(1)
expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([1])
expect(formValues.triggers[0].filter_custom_field_query).toEqual(
@@ -519,22 +507,16 @@ describe('WorkflowEditDialogComponent', () => {
setFilter(TriggerFilterType.TagsAll, 11)
setFilter(TriggerFilterType.TagsNone, 12)
setFilter(TriggerFilterType.CorrespondentAny, 16)
setFilter(TriggerFilterType.CorrespondentNot, 13)
setFilter(TriggerFilterType.DocumentTypeAny, 17)
setFilter(TriggerFilterType.DocumentTypeNot, 14)
setFilter(TriggerFilterType.StoragePathAny, 18)
setFilter(TriggerFilterType.StoragePathNot, 15)
const formValues = component['getFormValues']()
expect(formValues.triggers[0].filter_has_all_tags).toEqual([11])
expect(formValues.triggers[0].filter_has_not_tags).toEqual([12])
expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([16])
expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([13])
expect(formValues.triggers[0].filter_has_any_document_types).toEqual([17])
expect(formValues.triggers[0].filter_has_not_document_types).toEqual([14])
expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([18])
expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([15])
})
@@ -658,11 +640,8 @@ describe('WorkflowEditDialogComponent', () => {
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_any_correspondents: [],
filter_has_not_correspondents: [],
filter_has_any_document_types: [],
filter_has_not_document_types: [],
filter_has_any_storage_paths: [],
filter_has_not_storage_paths: [],
filter_has_correspondent: null,
filter_has_document_type: null,
@@ -720,14 +699,11 @@ describe('WorkflowEditDialogComponent', () => {
trigger.filter_has_tags = [1]
trigger.filter_has_all_tags = [2, 3]
trigger.filter_has_not_tags = [4]
trigger.filter_has_any_correspondents = [10] as any
trigger.filter_has_correspondent = 5 as any
trigger.filter_has_not_correspondents = [6] as any
trigger.filter_has_document_type = 7 as any
trigger.filter_has_any_document_types = [11] as any
trigger.filter_has_not_document_types = [8] as any
trigger.filter_has_storage_path = 9 as any
trigger.filter_has_any_storage_paths = [12] as any
trigger.filter_has_not_storage_paths = [10] as any
trigger.filter_custom_field_query = JSON.stringify([
'AND',
@@ -738,8 +714,8 @@ describe('WorkflowEditDialogComponent', () => {
component.ngOnInit()
const triggerGroup = component.triggerFields.at(0) as FormGroup
const filters = component.getFiltersFormArray(triggerGroup)
expect(filters.length).toBe(13)
const customFieldFilter = filters.at(12) as FormGroup
expect(filters.length).toBe(10)
const customFieldFilter = filters.at(9) as FormGroup
expect(customFieldFilter.get('type').value).toBe(
TriggerFilterType.CustomFieldQuery
)
@@ -748,27 +724,12 @@ describe('WorkflowEditDialogComponent', () => {
})
it('should expose select metadata helpers', () => {
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentAny)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentNot)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentIs)).toBe(
false
)
expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeAny)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeIs)).toBe(
false
)
expect(component.isSelectMultiple(TriggerFilterType.StoragePathAny)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.StoragePathIs)).toBe(
false
)
component.correspondents = [{ id: 1, name: 'C1' } as any]
component.documentTypes = [{ id: 2, name: 'DT' } as any]
@@ -780,15 +741,9 @@ describe('WorkflowEditDialogComponent', () => {
expect(
component.getFilterSelectItems(TriggerFilterType.DocumentTypeIs)
).toEqual(component.documentTypes)
expect(
component.getFilterSelectItems(TriggerFilterType.DocumentTypeAny)
).toEqual(component.documentTypes)
expect(
component.getFilterSelectItems(TriggerFilterType.StoragePathIs)
).toEqual(component.storagePaths)
expect(
component.getFilterSelectItems(TriggerFilterType.StoragePathAny)
).toEqual(component.storagePaths)
expect(component.getFilterSelectItems(TriggerFilterType.TagsAll)).toEqual(
[]
)

View File

@@ -145,13 +145,10 @@ export enum TriggerFilterType {
TagsAny = 'tags_any',
TagsAll = 'tags_all',
TagsNone = 'tags_none',
CorrespondentAny = 'correspondent_any',
CorrespondentIs = 'correspondent_is',
CorrespondentNot = 'correspondent_not',
DocumentTypeAny = 'document_type_any',
DocumentTypeIs = 'document_type_is',
DocumentTypeNot = 'document_type_not',
StoragePathAny = 'storage_path_any',
StoragePathIs = 'storage_path_is',
StoragePathNot = 'storage_path_not',
CustomFieldQuery = 'custom_field_query',
@@ -175,11 +172,8 @@ type TriggerFilterAggregate = {
filter_has_tags: number[]
filter_has_all_tags: number[]
filter_has_not_tags: number[]
filter_has_any_correspondents: number[]
filter_has_not_correspondents: number[]
filter_has_any_document_types: number[]
filter_has_not_document_types: number[]
filter_has_any_storage_paths: number[]
filter_has_not_storage_paths: number[]
filter_has_correspondent: number | null
filter_has_document_type: number | null
@@ -225,14 +219,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
allowMultipleEntries: false,
allowMultipleValues: true,
},
{
id: TriggerFilterType.CorrespondentAny,
name: $localize`Has any of these correspondents`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'correspondents',
},
{
id: TriggerFilterType.CorrespondentIs,
name: $localize`Has correspondent`,
@@ -257,14 +243,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
allowMultipleValues: false,
selectItems: 'documentTypes',
},
{
id: TriggerFilterType.DocumentTypeAny,
name: $localize`Has any of these document types`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'documentTypes',
},
{
id: TriggerFilterType.DocumentTypeNot,
name: $localize`Does not have document types`,
@@ -281,14 +259,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
allowMultipleValues: false,
selectItems: 'storagePaths',
},
{
id: TriggerFilterType.StoragePathAny,
name: $localize`Has any of these storage paths`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'storagePaths',
},
{
id: TriggerFilterType.StoragePathNot,
name: $localize`Does not have storage paths`,
@@ -336,15 +306,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
extract: (trigger) => trigger.filter_has_not_tags,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.CorrespondentAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_any_correspondents = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_any_correspondents,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.CorrespondentIs]: {
apply: (aggregate, values) => {
aggregate.filter_has_correspondent = Array.isArray(values)
@@ -372,15 +333,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
extract: (trigger) => trigger.filter_has_document_type,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.DocumentTypeAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_any_document_types = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_any_document_types,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.DocumentTypeNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_document_types = Array.isArray(values)
@@ -399,15 +351,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
extract: (trigger) => trigger.filter_has_storage_path,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.StoragePathAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_any_storage_paths = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_any_storage_paths,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.StoragePathNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_storage_paths = Array.isArray(values)
@@ -699,11 +642,8 @@ export class WorkflowEditDialogComponent
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_any_correspondents: [],
filter_has_not_correspondents: [],
filter_has_any_document_types: [],
filter_has_not_document_types: [],
filter_has_any_storage_paths: [],
filter_has_not_storage_paths: [],
filter_has_correspondent: null,
filter_has_document_type: null,
@@ -730,16 +670,10 @@ export class WorkflowEditDialogComponent
trigger.filter_has_tags = aggregate.filter_has_tags
trigger.filter_has_all_tags = aggregate.filter_has_all_tags
trigger.filter_has_not_tags = aggregate.filter_has_not_tags
trigger.filter_has_any_correspondents =
aggregate.filter_has_any_correspondents
trigger.filter_has_not_correspondents =
aggregate.filter_has_not_correspondents
trigger.filter_has_any_document_types =
aggregate.filter_has_any_document_types
trigger.filter_has_not_document_types =
aggregate.filter_has_not_document_types
trigger.filter_has_any_storage_paths =
aggregate.filter_has_any_storage_paths
trigger.filter_has_not_storage_paths =
aggregate.filter_has_not_storage_paths
trigger.filter_has_correspondent =
@@ -922,11 +856,8 @@ export class WorkflowEditDialogComponent
case TriggerFilterType.TagsAny:
case TriggerFilterType.TagsAll:
case TriggerFilterType.TagsNone:
case TriggerFilterType.CorrespondentAny:
case TriggerFilterType.CorrespondentNot:
case TriggerFilterType.DocumentTypeAny:
case TriggerFilterType.DocumentTypeNot:
case TriggerFilterType.StoragePathAny:
case TriggerFilterType.StoragePathNot:
return true
default:
@@ -1248,11 +1179,8 @@ export class WorkflowEditDialogComponent
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_any_correspondents: [],
filter_has_not_correspondents: [],
filter_has_any_document_types: [],
filter_has_not_document_types: [],
filter_has_any_storage_paths: [],
filter_has_not_storage_paths: [],
filter_custom_field_query: null,
filter_has_correspondent: null,
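Note: throughout this file the diff deletes the `...Any` filter variants from the `TriggerFilterType` enum, the aggregate type, the definition list, and the `FILTER_HANDLERS` record. Each handler in that record is an apply/extract/hasValue triple. A small Python sketch of the same registry shape, illustrative only and using plain dicts plus one of the surviving filter types:

```python
from dataclasses import dataclass
from typing import Any, Callable


@dataclass
class FilterHandler:
    """The handler triple: write a form value into the aggregate, read
    it back off a trigger, and test whether it is set."""
    apply: Callable[[dict, Any], None]
    extract: Callable[[dict], Any]
    has_value: Callable[[Any], bool]


def _as_list(v: Any) -> list:
    return list(v) if isinstance(v, list) else [v]


handlers: dict[str, FilterHandler] = {
    "correspondent_not": FilterHandler(
        apply=lambda agg, v: agg.update(filter_has_not_correspondents=_as_list(v)),
        extract=lambda trig: trig.get("filter_has_not_correspondents"),
        has_value=lambda v: isinstance(v, list) and len(v) > 0,
    ),
}

trigger = {"filter_has_not_correspondents": [13]}
h = handlers["correspondent_not"]
assert h.has_value(h.extract(trigger))
```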

View File

@@ -1,18 +1,9 @@
<div class="row pt-3 pb-3 pb-md-2 align-items-center">
<div class="col-md text-truncate">
<h3 class="d-flex align-items-center mb-1" style="line-height: 1.4">
<span class="text-truncate">{{title}}</span>
@if (id) {
<span class="badge bg-primary text-primary-text-contrast ms-3 small fs-normal cursor-pointer" (click)="copyID()">
@if (copied) {
<i-bs width="1em" height="1em" name="clipboard-check"></i-bs>&nbsp;<ng-container i18n>Copied!</ng-container>
} @else {
ID: {{id}}
}
</span>
}
<h3 class="text-truncate" style="line-height: 1.4">
{{title}}
@if (subTitle) {
<span class="h6 mb-0 mt-1 d-block d-md-inline fw-normal ms-md-3 text-truncate" style="line-height: 1.4">{{subTitle}}</span>
<span class="h6 mb-0 d-block d-md-inline fw-normal ms-md-3 text-truncate" style="line-height: 1.4">{{subTitle}}</span>
}
@if (info) {
<button class="btn btn-sm btn-link text-muted me-auto p-0 p-md-2" title="What's this?" i18n-title type="button" [ngbPopover]="infoPopover" [autoClose]="true">

View File

@@ -1,10 +1,5 @@
h3 {
min-height: calc(1.325rem + 0.9vw);
.badge {
font-size: 0.65rem;
line-height: 1;
}
}
@media (min-width: 1200px) {

View File

@@ -1,4 +1,3 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { Title } from '@angular/platform-browser'
import { environment } from 'src/environments/environment'
@@ -8,7 +7,6 @@ describe('PageHeaderComponent', () => {
let component: PageHeaderComponent
let fixture: ComponentFixture<PageHeaderComponent>
let titleService: Title
let clipboard: Clipboard
beforeEach(async () => {
TestBed.configureTestingModule({
@@ -17,7 +15,6 @@ describe('PageHeaderComponent', () => {
}).compileComponents()
titleService = TestBed.inject(Title)
clipboard = TestBed.inject(Clipboard)
fixture = TestBed.createComponent(PageHeaderComponent)
component = fixture.componentInstance
fixture.detectChanges()
@@ -27,8 +24,7 @@ describe('PageHeaderComponent', () => {
component.title = 'Foo'
component.subTitle = 'Bar'
fixture.detectChanges()
expect(fixture.nativeElement.textContent).toContain('Foo')
expect(fixture.nativeElement.textContent).toContain('Bar')
expect(fixture.nativeElement.textContent).toContain('Foo Bar')
})
it('should set html title', () => {
@@ -36,16 +32,4 @@ describe('PageHeaderComponent', () => {
component.title = 'Foo Bar'
expect(titleSpy).toHaveBeenCalledWith(`Foo Bar - ${environment.appTitle}`)
})
it('should copy id to clipboard, reset after 3 seconds', () => {
jest.useFakeTimers()
component.id = 42 as any
jest.spyOn(clipboard, 'copy').mockReturnValue(true)
component.copyID()
expect(clipboard.copy).toHaveBeenCalledWith('42')
expect(component.copied).toBe(true)
jest.advanceTimersByTime(3000)
expect(component.copied).toBe(false)
})
})

View File

@@ -1,4 +1,3 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { Component, Input, inject } from '@angular/core'
import { Title } from '@angular/platform-browser'
import { NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
@@ -14,11 +13,8 @@ import { environment } from 'src/environments/environment'
})
export class PageHeaderComponent {
private titleService = inject(Title)
private clipboard = inject(Clipboard)
private _title = ''
public copied: boolean = false
private copyTimeout: any
_title = ''
@Input()
set title(title: string) {
@@ -30,9 +26,6 @@ export class PageHeaderComponent {
return this._title
}
@Input()
id: number
@Input()
subTitle: string = ''
@@ -41,12 +34,4 @@ export class PageHeaderComponent {
@Input()
infoLink: string
public copyID() {
this.copied = this.clipboard.copy(this.id.toString())
clearTimeout(this.copyTimeout)
this.copyTimeout = setTimeout(() => {
this.copied = false
}, 3000)
}
}

View File

@@ -1,4 +1,4 @@
<pngx-page-header [(title)]="title" [id]="documentId">
<pngx-page-header [(title)]="title">
@if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
@if (previewNumPages) {
<div class="input-group input-group-sm d-none d-md-flex">

View File

@@ -14,7 +14,6 @@ import { SortableDirective } from 'src/app/directives/sortable.directive'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { PermissionType } from 'src/app/services/permissions.service'
import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
import { CorrespondentEditDialogComponent } from '../../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { ManagementListComponent } from '../management-list/management-list.component'
@@ -37,7 +36,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
ClearableBadgeComponent,
],
})
export class CorrespondentListComponent extends ManagementListComponent<Correspondent> {

View File

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
import { SortableDirective } from 'src/app/directives/sortable.directive'
import { PermissionType } from 'src/app/services/permissions.service'
import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
import { DocumentTypeEditDialogComponent } from '../../common/edit-dialog/document-type-edit-dialog/document-type-edit-dialog.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
ClearableBadgeComponent,
],
})
export class DocumentTypeListComponent extends ManagementListComponent<DocumentType> {

View File

@@ -1,48 +1,17 @@
<pngx-page-header title="{{ typeNamePlural | titlecase }}" info="View, add, edit and delete {{ typeNamePlural }}." infoLink="usage/#terms-and-definitions">
<div ngbDropdown class="btn-group flex-fill d-sm-none">
<button class="btn btn-sm btn-outline-primary" id="dropdownSelectMobile" ngbDropdownToggle>
<i-bs name="text-indent-left"></i-bs>
<div class="d-none d-sm-inline">&nbsp;<ng-container i18n>Select</ng-container></div>
@if (selectedObjects.size > 0) {
<pngx-clearable-badge [selected]="selectedObjects.size > 0" [number]="selectedObjects.size" (cleared)="selectNone()"></pngx-clearable-badge><span class="visually-hidden">selected</span>
}
<button class="btn btn-sm btn-outline-secondary" (click)="clearSelection()" [hidden]="selectedObjects.size === 0">
<i-bs name="x"></i-bs>&nbsp;<ng-container i18n>Clear selection</ng-container>
</button>
<button type="button" class="btn btn-sm btn-outline-primary" (click)="setPermissions()" [disabled]="!userCanBulkEdit(PermissionAction.Change) || selectedObjects.size === 0">
<i-bs name="person-fill-lock"></i-bs>&nbsp;<ng-container i18n>Permissions</ng-container>
</button>
<button type="button" class="btn btn-sm btn-outline-danger" (click)="delete()" [disabled]="!userCanBulkEdit(PermissionAction.Delete) || selectedObjects.size === 0">
<i-bs name="trash"></i-bs>&nbsp;<ng-container i18n>Delete</ng-container>
</button>
<button type="button" class="btn btn-sm btn-outline-primary ms-md-5" (click)="openCreateDialog()" *pngxIfPermissions="{ action: PermissionAction.Add, type: permissionType }">
<i-bs name="plus-circle"></i-bs>&nbsp;<ng-container i18n>Create</ng-container>
</button>
<div ngbDropdownMenu aria-labelledby="dropdownSelectMobile" class="shadow">
<button ngbDropdownItem (click)="selectNone()" i18n>Select none</button>
<button ngbDropdownItem (click)="selectPage(true)" i18n>Select page</button>
<button ngbDropdownItem (click)="selectAll()" i18n>Select all</button>
</div>
</div>
<div class="d-none d-sm-flex flex-fill me-3">
<div class="input-group input-group-sm">
<span class="input-group-text border-0" i18n>Select:</span>
</div>
<div class="btn-group btn-group-sm flex-nowrap">
@if (selectedObjects.size > 0) {
<button class="btn btn-sm btn-outline-secondary" (click)="selectNone()">
<i-bs name="slash-circle"></i-bs>&nbsp;<ng-container i18n>None</ng-container>
</button>
}
<button class="btn btn-sm btn-outline-primary" (click)="selectPage(true)">
<i-bs name="file-earmark-check"></i-bs>&nbsp;<ng-container i18n>Page</ng-container>
</button>
<button class="btn btn-sm btn-outline-primary" (click)="selectAll()">
<i-bs name="check-all"></i-bs>&nbsp;<ng-container i18n>All</ng-container>
</button>
</div>
</div>
<button type="button" class="btn btn-sm btn-outline-primary" (click)="setPermissions()" [disabled]="!userCanBulkEdit(PermissionAction.Change) || selectedObjects.size === 0">
<i-bs name="person-fill-lock"></i-bs>&nbsp;<ng-container i18n>Permissions</ng-container>
</button>
<button type="button" class="btn btn-sm btn-outline-danger" (click)="delete()" [disabled]="!userCanBulkEdit(PermissionAction.Delete) || selectedObjects.size === 0">
<i-bs name="trash"></i-bs>&nbsp;<ng-container i18n>Delete</ng-container>
</button>
<button type="button" class="btn btn-sm btn-outline-primary ms-md-5" (click)="openCreateDialog()" *pngxIfPermissions="{ action: PermissionAction.Add, type: permissionType }">
<i-bs name="plus-circle"></i-bs>&nbsp;<ng-container i18n>Create</ng-container>
</button>
</pngx-page-header>
<div class="row mb-3">
@@ -62,7 +31,7 @@
<tr>
<th scope="col">
<div class="form-check m-0 ms-2 me-n2">
<input type="checkbox" class="form-check-input" id="all-objects" [(ngModel)]="togggleAll" [disabled]="data.length === 0" (change)="selectPage($event.target.checked); $event.stopPropagation();">
<input type="checkbox" class="form-check-input" id="all-objects" [(ngModel)]="togggleAll" [disabled]="data.length === 0" (click)="toggleAll($event); $event.stopPropagation();">
<label class="form-check-label" for="all-objects"></label>
</div>
</th>

View File

@@ -163,7 +163,8 @@ describe('ManagementListComponent', () => {
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
const reloadSpy = jest.spyOn(component, 'reloadData')
component.openCreateDialog()
const createButton = fixture.debugElement.queryAll(By.css('button'))[4]
createButton.triggerEventHandler('click')
expect(modal).not.toBeUndefined()
const editDialog = modal.componentInstance as EditDialogComponent<Tag>
@@ -186,7 +187,8 @@ describe('ManagementListComponent', () => {
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
const reloadSpy = jest.spyOn(component, 'reloadData')
component.openEditDialog(tags[0])
const editButton = fixture.debugElement.queryAll(By.css('button'))[7]
editButton.triggerEventHandler('click')
expect(modal).not.toBeUndefined()
const editDialog = modal.componentInstance as EditDialogComponent<Tag>
@@ -210,7 +212,8 @@ describe('ManagementListComponent', () => {
const deleteSpy = jest.spyOn(tagService, 'delete')
const reloadSpy = jest.spyOn(component, 'reloadData')
component.openDeleteDialog(tags[0])
const deleteButton = fixture.debugElement.queryAll(By.css('button'))[8]
deleteButton.triggerEventHandler('click')
expect(modal).not.toBeUndefined()
const editDialog = modal.componentInstance as ConfirmDialogComponent
@@ -227,21 +230,6 @@ describe('ManagementListComponent', () => {
expect(reloadSpy).toHaveBeenCalled()
})
it('should use the all list length for collection size when provided', fakeAsync(() => {
jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
of({
count: 1,
all: [1, 2, 3],
results: tags.slice(0, 1),
})
)
component.reloadData()
tick(100)
expect(component.collectionSize).toBe(3)
}))
it('should support quick filter for objects', () => {
const expectedUrl = documentListViewService.getQuickFilterUrl([
{ rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
@@ -276,84 +264,19 @@ describe('ManagementListComponent', () => {
expect(component.page).toEqual(1)
})
it('should support toggle select page in vew', () => {
it('should support toggle all items in view', () => {
expect(component.selectedObjects.size).toEqual(0)
const selectPageSpy = jest.spyOn(component, 'selectPage')
const toggleAllSpy = jest.spyOn(component, 'toggleAll')
const checkButton = fixture.debugElement.queryAll(
By.css('input.form-check-input')
)[0]
checkButton.nativeElement.dispatchEvent(new Event('change'))
checkButton.nativeElement.dispatchEvent(new Event('click'))
checkButton.nativeElement.checked = true
checkButton.nativeElement.dispatchEvent(new Event('change'))
expect(selectPageSpy).toHaveBeenCalled()
checkButton.nativeElement.dispatchEvent(new Event('click'))
expect(toggleAllSpy).toHaveBeenCalled()
expect(component.selectedObjects.size).toEqual(tags.length)
})
it('selectNone should clear selection and reset toggle flag', () => {
component.selectedObjects = new Set([tags[0].id, tags[1].id])
component.togggleAll = true
component.selectNone()
expect(component.selectedObjects.size).toBe(0)
expect(component.togggleAll).toBe(false)
})
it('selectPage should select current page items or clear selection', () => {
component.selectPage(true)
expect(component.selectedObjects).toEqual(new Set(tags.map((t) => t.id)))
expect(component.togggleAll).toBe(true)
component.togggleAll = true
component.selectPage(false)
expect(component.selectedObjects.size).toBe(0)
expect(component.togggleAll).toBe(false)
})
it('selectAll should use all IDs when collection size exists', () => {
;(component as any).allIDs = [1, 2, 3, 4]
component.collectionSize = 4
component.selectAll()
expect(component.selectedObjects).toEqual(new Set([1, 2, 3, 4]))
expect(component.togggleAll).toBe(true)
})
it('selectAll should clear selection when collection size is zero', () => {
component.selectedObjects = new Set([1])
component.collectionSize = 0
component.togggleAll = true
component.selectAll()
expect(component.selectedObjects.size).toBe(0)
expect(component.togggleAll).toBe(false)
})
it('toggleSelected should toggle object selection and update toggle state', () => {
component.toggleSelected(tags[0])
expect(component.selectedObjects.has(tags[0].id)).toBe(true)
expect(component.togggleAll).toBe(false)
component.toggleSelected(tags[1])
component.toggleSelected(tags[2])
expect(component.togggleAll).toBe(true)
component.toggleSelected(tags[1])
expect(component.selectedObjects.has(tags[1].id)).toBe(false)
expect(component.togggleAll).toBe(false)
})
it('areAllPageItemsSelected should return false when page has no selectable items', () => {
component.data = []
component.selectedObjects.clear()
expect((component as any).areAllPageItemsSelected()).toBe(false)
component.data = tags
})
it('should support bulk edit permissions', () => {
const bulkEditPermsSpy = jest.spyOn(tagService, 'bulk_edit_objects')
component.toggleSelected(tags[0])

View File

@@ -84,7 +84,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
public data: T[] = []
private unfilteredData: T[] = []
private allIDs: number[] = []
public page = 1
@@ -172,8 +171,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
tap((c) => {
this.unfilteredData = c.results
this.data = this.filterData(c.results)
this.collectionSize = c.all?.length ?? c.count
this.allIDs = c.all
this.collectionSize = c.count
}),
delay(100)
)
@@ -302,6 +300,16 @@ export abstract class ManagementListComponent<T extends MatchingModel>
return ownsAll
}
toggleAll(event: PointerEvent) {
const checked = (event.target as HTMLInputElement).checked
this.togggleAll = checked
if (checked) {
this.selectedObjects = new Set(this.getSelectableIDs(this.data))
} else {
this.clearSelection()
}
}
protected getSelectableIDs(objects: T[]): number[] {
return objects.map((o) => o.id)
}
@@ -311,38 +319,10 @@ export abstract class ManagementListComponent<T extends MatchingModel>
this.selectedObjects.clear()
}
selectNone() {
this.clearSelection()
}
selectPage(select: boolean) {
if (select) {
this.selectedObjects = new Set(this.getSelectableIDs(this.data))
this.togggleAll = this.areAllPageItemsSelected()
} else {
this.clearSelection()
}
}
selectAll() {
if (!this.collectionSize) {
this.clearSelection()
return
}
this.selectedObjects = new Set(this.allIDs)
this.togggleAll = this.areAllPageItemsSelected()
}
toggleSelected(object) {
this.selectedObjects.has(object.id)
? this.selectedObjects.delete(object.id)
: this.selectedObjects.add(object.id)
this.togggleAll = this.areAllPageItemsSelected()
}
protected areAllPageItemsSelected(): boolean {
const ids = this.getSelectableIDs(this.data)
return ids.length > 0 && ids.every((id) => this.selectedObjects.has(id))
}
setPermissions() {

View File

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
import { SortableDirective } from 'src/app/directives/sortable.directive'
import { PermissionType } from 'src/app/services/permissions.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
ClearableBadgeComponent,
],
})
export class StoragePathListComponent extends ManagementListComponent<StoragePath> {

View File

@@ -138,12 +138,16 @@ describe('TagListComponent', () => {
}
component.data = [parent as any]
component.selectPage(true)
const selectEvent = { target: { checked: true } } as unknown as PointerEvent
component.toggleAll(selectEvent)
expect(component.selectedObjects.has(10)).toBe(true)
expect(component.selectedObjects.has(11)).toBe(true)
component.selectPage(false)
const deselectEvent = {
target: { checked: false },
} as unknown as PointerEvent
component.toggleAll(deselectEvent)
expect(component.selectedObjects.size).toBe(0)
})
})

View File

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
import { SortableDirective } from 'src/app/directives/sortable.directive'
import { PermissionType } from 'src/app/services/permissions.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
ClearableBadgeComponent,
],
})
export class TagListComponent extends ManagementListComponent<Tag> {

View File

@@ -44,16 +44,10 @@ export interface WorkflowTrigger extends ObjectWithId {
filter_has_not_tags?: number[] // Tag.id[]
filter_has_any_correspondents?: number[] // Correspondent.id[]
filter_has_not_correspondents?: number[] // Correspondent.id[]
filter_has_any_document_types?: number[] // DocumentType.id[]
filter_has_not_document_types?: number[] // DocumentType.id[]
filter_has_any_storage_paths?: number[] // StoragePath.id[]
filter_has_not_storage_paths?: number[] // StoragePath.id[]
filter_custom_field_query?: string

View File

@@ -16,6 +16,7 @@ from pikepdf import Pdf
from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.models import Document
from documents.models import Tag
from documents.plugins.base import ConsumeTaskPlugin
from documents.plugins.base import StopConsumeTaskError
@@ -115,6 +116,24 @@ class BarcodePlugin(ConsumeTaskPlugin):
self._tiff_conversion_done = False
self.barcodes: list[Barcode] = []
def _apply_detected_asn(self, detected_asn: int) -> None:
"""
Apply a detected ASN to metadata if allowed.
"""
if (
self.metadata.skip_asn_if_exists
and Document.global_objects.filter(
archive_serial_number=detected_asn,
).exists()
):
logger.info(
f"Found ASN in barcode {detected_asn} but skipping because it already exists.",
)
return
logger.info(f"Found ASN in barcode: {detected_asn}")
self.metadata.asn = detected_asn
def run(self) -> None:
# Some operations may use PIL, override pixel setting if needed
maybe_override_pixel_limit()
@@ -186,13 +205,8 @@ class BarcodePlugin(ConsumeTaskPlugin):
# Update/overwrite an ASN if possible
# After splitting, as otherwise each split document gets the same ASN
if (
self.settings.barcode_enable_asn
and not self.metadata.skip_asn
and (located_asn := self.asn) is not None
):
logger.info(f"Found ASN in barcode: {located_asn}")
self.metadata.asn = located_asn
if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
self._apply_detected_asn(located_asn)
def cleanup(self) -> None:
self.temp_dir.cleanup()
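Note: a dependency-free restatement of the guard `_apply_detected_asn` adds, where a plain set stands in for the `Document.global_objects` uniqueness check and a dict stands in for the metadata overrides:

```python
existing_asns = {100, 200}  # stand-in for ASNs already in the database


def apply_detected_asn(metadata: dict, detected_asn: int) -> None:
    if metadata.get("skip_asn_if_exists") and detected_asn in existing_asns:
        return  # ASN already taken: leave metadata untouched
    metadata["asn"] = detected_asn


m = {"skip_asn_if_exists": True}
apply_detected_asn(m, 100)  # skipped, 100 exists
assert "asn" not in m
apply_detected_asn(m, 300)  # applied
assert m["asn"] == 300
```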

View File

@@ -7,7 +7,6 @@ from pathlib import Path
from typing import TYPE_CHECKING
from typing import Literal
from celery import chain
from celery import chord
from celery import group
from celery import shared_task
@@ -38,6 +37,42 @@ if TYPE_CHECKING:
logger: logging.Logger = logging.getLogger("paperless.bulk_edit")
@shared_task(bind=True)
def restore_archive_serial_numbers_task(
self,
backup: dict[int, int],
*args,
**kwargs,
) -> None:
restore_archive_serial_numbers(backup)
def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
"""
Clears ASNs on documents that are about to be replaced so new documents
can be assigned ASNs without uniqueness collisions. Returns a backup map
of doc_id -> previous ASN for potential restoration.
"""
qs = Document.objects.filter(
id__in=doc_ids,
archive_serial_number__isnull=False,
).only("pk", "archive_serial_number")
backup = dict(qs.values_list("pk", "archive_serial_number"))
qs.update(archive_serial_number=None)
logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
return backup
def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
"""
Restores ASNs using the provided backup map, intended for
rollback when replacement consumption fails.
"""
for doc_id, asn in backup.items():
Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
def set_correspondent(
doc_ids: list[int],
correspondent: Correspondent,
@@ -386,6 +421,7 @@ def merge(
merged_pdf = pikepdf.new()
version: str = merged_pdf.pdf_version
handoff_asn: int | None = None
# use doc_ids to preserve order
for doc_id in doc_ids:
doc = qs.get(id=doc_id)
@@ -401,6 +437,8 @@ def merge(
version = max(version, pdf.pdf_version)
merged_pdf.pages.extend(pdf.pages)
affected_docs.append(doc.id)
if handoff_asn is None and doc.archive_serial_number is not None:
handoff_asn = doc.archive_serial_number
except Exception as e:
logger.exception(
f"Error merging document {doc.id}, it will not be included in the merge: {e}",
@@ -426,6 +464,8 @@ def merge(
DocumentMetadataOverrides.from_document(metadata_document)
)
overrides.title = metadata_document.title + " (merged)"
if metadata_document.archive_serial_number is not None:
handoff_asn = metadata_document.archive_serial_number
else:
overrides = DocumentMetadataOverrides()
else:
@@ -433,8 +473,11 @@ def merge(
if user is not None:
overrides.owner_id = user.id
# Avoid copying or detecting ASN from merged PDFs to prevent collision
overrides.skip_asn = True
if not delete_originals:
overrides.skip_asn_if_exists = True
if delete_originals and handoff_asn is not None:
overrides.asn = handoff_asn
logger.info("Adding merged document to the task queue.")
@@ -447,12 +490,20 @@ def merge(
)
if delete_originals:
backup = release_archive_serial_numbers(affected_docs)
logger.info(
"Queueing removal of original documents after consumption of merged document",
)
chain(consume_task, delete.si(affected_docs)).delay()
else:
consume_task.delay()
try:
consume_task.apply_async(
link=[delete.si(affected_docs)],
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else:
consume_task.delay()
return "OK"
@@ -494,6 +545,8 @@ def split(
overrides.title = f"{doc.title} (split {idx + 1})"
if user is not None:
overrides.owner_id = user.id
if not delete_originals:
overrides.skip_asn_if_exists = True
logger.info(
f"Adding split document with pages {split_doc} to the task queue.",
)
@@ -508,10 +561,20 @@ def split(
)
if delete_originals:
backup = release_archive_serial_numbers([doc.id])
logger.info(
"Queueing removal of original document after consumption of the split documents",
)
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
try:
chord(
header=consume_tasks,
body=delete.si([doc.id]),
).apply_async(
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else:
group(consume_tasks).delay()
@@ -614,7 +677,10 @@ def edit_pdf(
)
if user is not None:
overrides.owner_id = user.id
if not delete_original:
overrides.skip_asn_if_exists = True
if delete_original and len(pdf_docs) == 1:
overrides.asn = doc.archive_serial_number
for idx, pdf in enumerate(pdf_docs, start=1):
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
@@ -633,7 +699,17 @@ def edit_pdf(
)
if delete_original:
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
backup = release_archive_serial_numbers([doc.id])
try:
chord(
header=consume_tasks,
body=delete.si([doc.id]),
).apply_async(
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else:
group(consume_tasks).delay()
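Note: taken together, these hunks establish a release-then-restore contract around ASNs: clear them before queueing the replacement consume tasks, wire the restore task in as a Celery `link_error` callback, and restore synchronously if queueing itself raises. A condensed, dependency-free sketch of that contract, with a dict standing in for the database:

```python
asns: dict[int, int | None] = {101: 7, 102: None, 103: 9}  # doc_id -> ASN


def release(doc_ids: list[int]) -> dict[int, int]:
    backup = {d: asns[d] for d in doc_ids if asns[d] is not None}
    for d in backup:
        asns[d] = None  # free the ASN so the replacement can take it
    return backup


def restore(backup: dict[int, int]) -> None:
    for d, asn in backup.items():
        asns[d] = asn  # rollback when replacement consumption fails


backup = release([101, 102, 103])  # {101: 7, 103: 9}
try:
    raise RuntimeError("queueing failed")  # stand-in for apply_async raising
except RuntimeError:
    restore(backup)  # ASNs are back where they were
assert asns == {101: 7, 102: None, 103: 9}
```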

View File

@@ -690,7 +690,7 @@ class ConsumerPlugin(
pk=self.metadata.storage_path_id,
)
if self.metadata.asn is not None and not self.metadata.skip_asn:
if self.metadata.asn is not None:
document.archive_serial_number = self.metadata.asn
if self.metadata.owner_id:
@@ -806,8 +806,8 @@ class ConsumerPreflightPlugin(
"""
Check that if override_asn is given, it is unique and within a valid range
"""
if self.metadata.skip_asn or self.metadata.asn is None:
# if skip is set or ASN is None
if self.metadata.asn is None:
# if ASN is None
return
# Validate the range is above zero and less than uint32_t max
# otherwise, Whoosh can't handle it in the index
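Note: with the skip flag gone, preflight only validates a present ASN. A tiny sketch of the range constraint the surviving comment describes; the exclusive upper bound is an assumption based on the uint32_t remark:

```python
ASN_MAX = 2**32 - 1  # uint32_t max, per the comment above


def asn_in_valid_range(asn: int) -> bool:
    # above zero and small enough for Whoosh's index
    return 0 < asn < ASN_MAX


assert asn_in_valid_range(1) and not asn_in_valid_range(0)
```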

View File

@@ -30,7 +30,7 @@ class DocumentMetadataOverrides:
change_users: list[int] | None = None
change_groups: list[int] | None = None
custom_fields: dict | None = None
skip_asn: bool = False
skip_asn_if_exists: bool = False
def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
"""
@@ -50,8 +50,8 @@ class DocumentMetadataOverrides:
self.storage_path_id = other.storage_path_id
if other.owner_id is not None:
self.owner_id = other.owner_id
if other.skip_asn:
self.skip_asn = True
if other.skip_asn_if_exists:
self.skip_asn_if_exists = True
# merge
if self.tag_ids is None:
@@ -118,7 +118,7 @@ class DocumentMetadataOverrides:
).values_list("id", flat=True),
)
overrides.custom_fields = {
custom_field.field.id: custom_field.value
custom_field.id: custom_field.value
for custom_field in doc.custom_fields.all()
}
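Note: the rename from `skip_asn` to `skip_asn_if_exists` keeps the sticky-True merge in `update()`: once either side of a merge sets the flag, the merged result keeps it. A reduced stand-in for `DocumentMetadataOverrides` showing just that rule:

```python
from dataclasses import dataclass


@dataclass
class Overrides:  # reduced stand-in for DocumentMetadataOverrides
    skip_asn_if_exists: bool = False

    def update(self, other: "Overrides") -> "Overrides":
        # sticky-True merge, as in the hunk above
        if other.skip_asn_if_exists:
            self.skip_asn_if_exists = True
        return self


a, b = Overrides(), Overrides(skip_asn_if_exists=True)
assert a.update(b).skip_asn_if_exists is True
```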

View File

@@ -602,7 +602,7 @@ def rewrite_natural_date_keywords(query_string: str) -> str:
case "this year":
start = datetime(local_now.year, 1, 1, 0, 0, 0, tzinfo=tz)
end = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)
end = datetime.combine(today, time.max, tzinfo=tz)
case "previous week":
days_since_monday = local_now.weekday()
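Note: the one-line change in the `"this year"` branch swaps the upper bound between the end of the calendar year and the end of the current day. Side by side, with placeholder names for the `today` and `tz` context elided from the hunk:

```python
from datetime import datetime, time
from zoneinfo import ZoneInfo

tz = ZoneInfo("UTC")  # placeholder zone
local_now = datetime.now(tz)
today = local_now.date()

start = datetime(local_now.year, 1, 1, 0, 0, 0, tzinfo=tz)
# the two upper bounds this hunk swaps between:
end_of_year = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)
end_of_today = datetime.combine(today, time.max, tzinfo=tz)
```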

View File

@@ -403,18 +403,6 @@ def existing_document_matches_workflow(
f"Document tags {list(document.tags.all())} include excluded tags {list(trigger_has_not_tags_qs)}",
)
allowed_correspondent_ids = set(
trigger.filter_has_any_correspondents.values_list("id", flat=True),
)
if (
allowed_correspondent_ids
and document.correspondent_id not in allowed_correspondent_ids
):
return (
False,
f"Document correspondent {document.correspondent} is not one of {list(trigger.filter_has_any_correspondents.all())}",
)
# Document correspondent vs trigger has_correspondent
if (
trigger.filter_has_correspondent_id is not None
@@ -436,17 +424,6 @@ def existing_document_matches_workflow(
f"Document correspondent {document.correspondent} is excluded by {list(trigger.filter_has_not_correspondents.all())}",
)
allowed_document_type_ids = set(
trigger.filter_has_any_document_types.values_list("id", flat=True),
)
if allowed_document_type_ids and (
document.document_type_id not in allowed_document_type_ids
):
return (
False,
f"Document doc type {document.document_type} is not one of {list(trigger.filter_has_any_document_types.all())}",
)
# Document document_type vs trigger has_document_type
if (
trigger.filter_has_document_type_id is not None
@@ -468,17 +445,6 @@ def existing_document_matches_workflow(
f"Document doc type {document.document_type} is excluded by {list(trigger.filter_has_not_document_types.all())}",
)
allowed_storage_path_ids = set(
trigger.filter_has_any_storage_paths.values_list("id", flat=True),
)
if allowed_storage_path_ids and (
document.storage_path_id not in allowed_storage_path_ids
):
return (
False,
f"Document storage path {document.storage_path} is not one of {list(trigger.filter_has_any_storage_paths.all())}",
)
# Document storage_path vs trigger has_storage_path
if (
trigger.filter_has_storage_path_id is not None
@@ -566,10 +532,6 @@ def prefilter_documents_by_workflowtrigger(
# Correspondent, DocumentType, etc. filtering
if trigger.filter_has_any_correspondents.exists():
documents = documents.filter(
correspondent__in=trigger.filter_has_any_correspondents.all(),
)
if trigger.filter_has_correspondent is not None:
documents = documents.filter(
correspondent=trigger.filter_has_correspondent,
@@ -579,10 +541,6 @@ def prefilter_documents_by_workflowtrigger(
correspondent__in=trigger.filter_has_not_correspondents.all(),
)
if trigger.filter_has_any_document_types.exists():
documents = documents.filter(
document_type__in=trigger.filter_has_any_document_types.all(),
)
if trigger.filter_has_document_type is not None:
documents = documents.filter(
document_type=trigger.filter_has_document_type,
@@ -592,10 +550,6 @@ def prefilter_documents_by_workflowtrigger(
document_type__in=trigger.filter_has_not_document_types.all(),
)
if trigger.filter_has_any_storage_paths.exists():
documents = documents.filter(
storage_path__in=trigger.filter_has_any_storage_paths.all(),
)
if trigger.filter_has_storage_path is not None:
documents = documents.filter(
storage_path=trigger.filter_has_storage_path,
@@ -650,11 +604,8 @@ def document_matches_workflow(
"filter_has_tags",
"filter_has_all_tags",
"filter_has_not_tags",
"filter_has_any_document_types",
"filter_has_not_document_types",
"filter_has_any_correspondents",
"filter_has_not_correspondents",
"filter_has_any_storage_paths",
"filter_has_not_storage_paths",
)
)

View File

@@ -1,43 +0,0 @@
# Generated by Django 5.2.7 on 2025-12-17 22:25
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0004_remove_document_storage_type"),
]
operations = [
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_any_correspondents",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_any_correspondent",
to="documents.correspondent",
verbose_name="has one of these correspondents",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_any_document_types",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_any_document_type",
to="documents.documenttype",
verbose_name="has one of these document types",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_any_storage_paths",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_any_storage_path",
to="documents.storagepath",
verbose_name="has one of these storage paths",
),
),
]

View File

@@ -1066,13 +1066,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this document type"),
)
filter_has_any_document_types = models.ManyToManyField(
DocumentType,
blank=True,
related_name="workflowtriggers_has_any_document_type",
verbose_name=_("has one of these document types"),
)
filter_has_not_document_types = models.ManyToManyField(
DocumentType,
blank=True,
@@ -1095,13 +1088,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("does not have these correspondent(s)"),
)
filter_has_any_correspondents = models.ManyToManyField(
Correspondent,
blank=True,
related_name="workflowtriggers_has_any_correspondent",
verbose_name=_("has one of these correspondents"),
)
filter_has_storage_path = models.ForeignKey(
StoragePath,
null=True,
@@ -1110,13 +1096,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this storage path"),
)
filter_has_any_storage_paths = models.ManyToManyField(
StoragePath,
blank=True,
related_name="workflowtriggers_has_any_storage_path",
verbose_name=_("has one of these storage paths"),
)
filter_has_not_storage_paths = models.ManyToManyField(
StoragePath,
blank=True,

View File

@@ -2299,11 +2299,8 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
"filter_has_all_tags",
"filter_has_not_tags",
"filter_custom_field_query",
"filter_has_any_correspondents",
"filter_has_not_correspondents",
"filter_has_any_document_types",
"filter_has_not_document_types",
"filter_has_any_storage_paths",
"filter_has_not_storage_paths",
"filter_has_correspondent",
"filter_has_document_type",
@@ -2541,26 +2538,14 @@ class WorkflowSerializer(serializers.ModelSerializer):
filter_has_tags = trigger.pop("filter_has_tags", None)
filter_has_all_tags = trigger.pop("filter_has_all_tags", None)
filter_has_not_tags = trigger.pop("filter_has_not_tags", None)
filter_has_any_correspondents = trigger.pop(
"filter_has_any_correspondents",
None,
)
filter_has_not_correspondents = trigger.pop(
"filter_has_not_correspondents",
None,
)
filter_has_any_document_types = trigger.pop(
"filter_has_any_document_types",
None,
)
filter_has_not_document_types = trigger.pop(
"filter_has_not_document_types",
None,
)
filter_has_any_storage_paths = trigger.pop(
"filter_has_any_storage_paths",
None,
)
filter_has_not_storage_paths = trigger.pop(
"filter_has_not_storage_paths",
None,
@@ -2577,26 +2562,14 @@ class WorkflowSerializer(serializers.ModelSerializer):
trigger_instance.filter_has_all_tags.set(filter_has_all_tags)
if filter_has_not_tags is not None:
trigger_instance.filter_has_not_tags.set(filter_has_not_tags)
if filter_has_any_correspondents is not None:
trigger_instance.filter_has_any_correspondents.set(
filter_has_any_correspondents,
)
if filter_has_not_correspondents is not None:
trigger_instance.filter_has_not_correspondents.set(
filter_has_not_correspondents,
)
if filter_has_any_document_types is not None:
trigger_instance.filter_has_any_document_types.set(
filter_has_any_document_types,
)
if filter_has_not_document_types is not None:
trigger_instance.filter_has_not_document_types.set(
filter_has_not_document_types,
)
if filter_has_any_storage_paths is not None:
trigger_instance.filter_has_any_storage_paths.set(
filter_has_any_storage_paths,
)
if filter_has_not_storage_paths is not None:
trigger_instance.filter_has_not_storage_paths.set(
filter_has_not_storage_paths,

View File

@@ -186,11 +186,8 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"filter_has_tags": [self.t1.id],
"filter_has_all_tags": [self.t2.id],
"filter_has_not_tags": [self.t3.id],
"filter_has_any_correspondents": [self.c.id],
"filter_has_not_correspondents": [self.c2.id],
"filter_has_any_document_types": [self.dt.id],
"filter_has_not_document_types": [self.dt2.id],
"filter_has_any_storage_paths": [self.sp.id],
"filter_has_not_storage_paths": [self.sp2.id],
"filter_custom_field_query": json.dumps(
[
@@ -251,26 +248,14 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
set(trigger.filter_has_not_tags.values_list("id", flat=True)),
{self.t3.id},
)
self.assertSetEqual(
set(trigger.filter_has_any_correspondents.values_list("id", flat=True)),
{self.c.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_correspondents.values_list("id", flat=True)),
{self.c2.id},
)
self.assertSetEqual(
set(trigger.filter_has_any_document_types.values_list("id", flat=True)),
{self.dt.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_document_types.values_list("id", flat=True)),
{self.dt2.id},
)
self.assertSetEqual(
set(trigger.filter_has_any_storage_paths.values_list("id", flat=True)),
{self.sp.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_storage_paths.values_list("id", flat=True)),
{self.sp2.id},
@@ -434,11 +419,8 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"filter_has_tags": [self.t1.id],
"filter_has_all_tags": [self.t2.id],
"filter_has_not_tags": [self.t3.id],
"filter_has_any_correspondents": [self.c.id],
"filter_has_not_correspondents": [self.c2.id],
"filter_has_any_document_types": [self.dt.id],
"filter_has_not_document_types": [self.dt2.id],
"filter_has_any_storage_paths": [self.sp.id],
"filter_has_not_storage_paths": [self.sp2.id],
"filter_custom_field_query": json.dumps(
["AND", [[self.cf1.id, "exact", "value"]]],
@@ -468,26 +450,14 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
workflow.triggers.first().filter_has_not_tags.first(),
self.t3,
)
self.assertEqual(
workflow.triggers.first().filter_has_any_correspondents.first(),
self.c,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_correspondents.first(),
self.c2,
)
self.assertEqual(
workflow.triggers.first().filter_has_any_document_types.first(),
self.dt,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_document_types.first(),
self.dt2,
)
self.assertEqual(
workflow.triggers.first().filter_has_any_storage_paths.first(),
self.sp,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_storage_paths.first(),
self.sp2,

View File

@@ -603,23 +603,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename,
)
self.assertEqual(consume_file_args[1].title, None)
self.assertTrue(consume_file_args[1].skip_asn)
# No metadata_document_id, delete_originals False, so ASN should be None
self.assertIsNone(consume_file_args[1].asn)
# With metadata_document_id overrides
result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (merged)")
self.assertEqual(consume_file_args[1].created, self.doc2.created)
self.assertTrue(consume_file_args[1].skip_asn)
self.assertEqual(result, "OK")
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chain")
def test_merge_and_delete_originals(
self,
mock_chain,
mock_consume_file,
mock_delete_documents,
):
@@ -633,6 +631,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- Document deletion task should be called
"""
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 102
self.doc3.archive_serial_number = 103
self.doc1.save()
self.doc2.save()
self.doc3.save()
result = bulk_edit.merge(doc_ids, delete_originals=True)
self.assertEqual(result, "OK")
@@ -643,7 +647,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_file.assert_called()
mock_delete_documents.assert_called()
mock_chain.assert_called_once()
consume_sig = mock_consume_file.return_value
consume_sig.apply_async.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(
@@ -651,7 +656,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename,
)
self.assertEqual(consume_file_args[1].title, None)
self.assertTrue(consume_file_args[1].skip_asn)
self.assertEqual(consume_file_args[1].asn, 101)
delete_documents_args, _ = mock_delete_documents.call_args
self.assertEqual(
@@ -659,6 +664,92 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids,
)
self.doc1.refresh_from_db()
self.doc2.refresh_from_db()
self.doc3.refresh_from_db()
self.assertIsNone(self.doc1.archive_serial_number)
self.assertIsNone(self.doc2.archive_serial_number)
self.assertIsNone(self.doc3.archive_serial_number)
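The assertions above pin down the ASN lifecycle for a destructive merge: the first source document's ASN travels with the consume request, and every source ASN is released before the originals are deleted. A minimal sketch of that bookkeeping, under assumed helper names (the real bulk_edit.merge wiring may differ):

from documents.models import Document

def release_and_handoff_asns(doc_ids: list[int]) -> tuple[dict[int, int], int | None]:
    # Back up and clear the source ASNs, returning the backup map and the
    # ASN to hand off to the merged result.
    docs = Document.objects.filter(pk__in=doc_ids).order_by("pk")
    backup = {
        doc.pk: doc.archive_serial_number
        for doc in docs
        if doc.archive_serial_number is not None
    }
    # Release the unique ASNs so the consumed merge result can claim one.
    Document.objects.filter(pk__in=backup).update(archive_serial_number=None)
    handoff = next(iter(backup.values()), None)  # first source document's ASN
    return backup, handoff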
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_restore_on_failure(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Merge action with deleting documents is called with 1 document
- Error occurs when queuing consume file task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc1.id]
self.doc1.archive_serial_number = 111
self.doc1.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_consume_file.return_value = sig
with self.assertRaises(Exception):
bulk_edit.merge(doc_ids, delete_originals=True)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 111)
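The restore path asserted here suggests a guard of roughly this shape (a sketch under assumed names, not the verbatim implementation):

try:
    consume_signature.apply_async()  # hypothetical: however merge queues the consume task
except Exception:
    # Queuing failed before deletion, so put the released ASNs back.
    restore_archive_serial_numbers_task(backup)
    raise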
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_metadata_handoff(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents with ASNs
WHEN:
- Merge with delete_originals=True and metadata_document_id set
THEN:
- Handoff ASN uses metadata document ASN
"""
doc_ids = [self.doc1.id, self.doc2.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 202
self.doc1.save()
self.doc2.save()
result = bulk_edit.merge(
doc_ids,
metadata_document_id=self.doc2.id,
delete_originals=True,
)
self.assertEqual(result, "OK")
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 202)
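The precedence this test implies (an inference from the assertions, not confirmed source): the metadata document's ASN wins over the first source document's when both are available.

handoff_asn = metadata_doc_asn if metadata_document_id is not None else first_source_asn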
def test_restore_archive_serial_numbers_task(self):
"""
GIVEN:
- An existing document whose archive serial number has been cleared
WHEN:
- The restore archive serial number task is called with backup data
THEN:
- The document's archive serial number is restored
"""
self.doc1.archive_serial_number = 444
self.doc1.save()
Document.objects.filter(pk=self.doc1.id).update(archive_serial_number=None)
backup = {self.doc1.id: 444}
bulk_edit.restore_archive_serial_numbers_task(backup)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 444)
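A minimal sketch of the restore task's assumed semantics: re-apply each backed-up ASN, but only where the document still has none, so an ASN already handed off to the new document is never clobbered.

from documents.models import Document

def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
    for doc_id, asn in backup.items():
        # Only restore if the slot is still empty (the guard is an assumption).
        Document.objects.filter(
            pk=doc_id,
            archive_serial_number__isnull=True,
        ).update(archive_serial_number=asn)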
@mock.patch("documents.tasks.consume_file.s")
def test_merge_with_archive_fallback(self, mock_consume_file):
"""
@@ -727,6 +818,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(mock_consume_file.call_count, 2)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (split 2)")
self.assertIsNone(consume_file_args[1].asn)
self.assertEqual(result, "OK")
@@ -751,6 +843,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
"""
doc_ids = [self.doc2.id]
pages = [[1, 2], [3]]
self.doc2.archive_serial_number = 200
self.doc2.save()
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK")
@@ -768,6 +862,42 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids,
)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_split_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Split action with deleting documents is called with 1 document and 2 page groups
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
pages = [[1, 2]]
self.doc2.archive_serial_number = 222
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK")
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 222)
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("pikepdf.Pdf.save")
def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
@@ -968,10 +1098,49 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_chord.return_value.delay.return_value = None
doc_ids = [self.doc2.id]
operations = [{"page": 1}, {"page": 2}]
self.doc2.archive_serial_number = 250
self.doc2.save()
result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.assertEqual(result, "OK")
mock_chord.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 250)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_edit_pdf_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing document
WHEN:
- edit_pdf is called with delete_original=True
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
operations = [{"page": 1}]
self.doc2.archive_serial_number = 333
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
with self.assertRaises(Exception):
bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 333)
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
def test_edit_pdf_with_update_document(self, mock_update_document):

View File

@@ -14,6 +14,7 @@ from django.test import override_settings
from django.utils import timezone
from guardian.core import ObjectPermissionChecker
from documents.barcodes import BarcodePlugin
from documents.consumer import ConsumerError
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
@@ -412,14 +413,6 @@ class TestConsumer(
self.assertEqual(document.archive_serial_number, 123)
self._assert_first_last_send_progress()
def testMetadataOverridesSkipAsnPropagation(self):
overrides = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn=True)
overrides.update(incoming)
self.assertTrue(overrides.skip_asn)
def testOverrideTitlePlaceholders(self):
c = Correspondent.objects.create(name="Correspondent Name")
dt = DocumentType.objects.create(name="DocType Name")
@@ -1240,3 +1233,46 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.",
):
consumer.run_post_consume_script(doc)
class TestMetadataOverrides(TestCase):
def test_update_skip_asn_if_exists(self):
base = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
base.update(incoming)
self.assertTrue(base.skip_asn_if_exists)
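A plausible shape for the merge this test covers (assumed, not the actual DocumentMetadataOverrides.update):

from dataclasses import dataclass

@dataclass
class Overrides:  # stand-in for DocumentMetadataOverrides
    skip_asn_if_exists: bool = False

    def update(self, other: "Overrides") -> "Overrides":
        # Only adopt flags the incoming overrides actually set.
        if other.skip_asn_if_exists:
            self.skip_asn_if_exists = True
        return self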
class TestBarcodeApplyDetectedASN(TestCase):
"""
GIVEN:
- Existing Documents with ASN 123
WHEN:
- A BarcodePlugin which detected an ASN
THEN:
- If skip_asn_if_exists is set, and ASN exists, do not set ASN
- If skip_asn_if_exists is set, and ASN does not exist, set ASN
"""
def test_apply_detected_asn_skips_existing_when_flag_set(self):
doc = Document.objects.create(
checksum="X1",
title="D1",
archive_serial_number=123,
)
metadata = DocumentMetadataOverrides(skip_asn_if_exists=True)
plugin = BarcodePlugin(
input_doc=mock.Mock(),
metadata=metadata,
status_mgr=mock.Mock(),
base_tmp_dir=tempfile.gettempdir(),
task_id="test-task",
)
plugin._apply_detected_asn(123)
self.assertIsNone(plugin.metadata.asn)
doc.hard_delete()
plugin._apply_detected_asn(123)
self.assertEqual(plugin.metadata.asn, 123)
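A sketch of the branch exercised above (assumed implementation, not the verbatim plugin method): with skip_asn_if_exists set, a detected ASN that already belongs to another document is dropped rather than applied.

from documents.models import Document

def apply_detected_asn(metadata, asn: int) -> None:
    if (
        metadata.skip_asn_if_exists
        and Document.objects.filter(archive_serial_number=asn).exists()
    ):
        return  # ASN already taken; leave metadata.asn unset
    metadata.asn = asn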

View File

@@ -180,7 +180,7 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
(
"added:this year",
datetime(2025, 7, 15, 12, 0, 0, tzinfo=timezone.utc),
("added:[20250101", "TO 20251231"),
("added:[20250101", "TO 20250715"),
),
(
"added:previous year",

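The updated expectation narrows "this year" to end at the reference date rather than December 31. A small sketch of the assumed rewrite:

from datetime import datetime, timezone

now = datetime(2025, 7, 15, 12, 0, 0, tzinfo=timezone.utc)
start = now.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
# "added:this year" -> added:[20250101 TO 20250715]
query = f"added:[{start:%Y%m%d} TO {now:%Y%m%d}]"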
View File

@@ -1276,76 +1276,6 @@ class TestWorkflows(
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_any_filters(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_any_correspondents.set([self.c])
trigger.filter_has_any_document_types.set([self.dt])
trigger.filter_has_any_storage_paths.set([self.sp])
matching_doc = Document.objects.create(
title="sample test",
correspondent=self.c,
document_type=self.dt,
storage_path=self.sp,
original_filename="sample.pdf",
checksum="checksum-any-match",
)
matched, reason = existing_document_matches_workflow(matching_doc, trigger)
self.assertTrue(matched)
self.assertIsNone(reason)
wrong_correspondent = Document.objects.create(
title="wrong correspondent",
correspondent=self.c2,
document_type=self.dt,
storage_path=self.sp,
original_filename="sample2.pdf",
)
matched, reason = existing_document_matches_workflow(
wrong_correspondent,
trigger,
)
self.assertFalse(matched)
self.assertIn("correspondent", reason)
other_document_type = DocumentType.objects.create(name="Other")
wrong_document_type = Document.objects.create(
title="wrong doc type",
correspondent=self.c,
document_type=other_document_type,
storage_path=self.sp,
original_filename="sample3.pdf",
checksum="checksum-wrong-doc-type",
)
matched, reason = existing_document_matches_workflow(
wrong_document_type,
trigger,
)
self.assertFalse(matched)
self.assertIn("doc type", reason)
other_storage_path = StoragePath.objects.create(
name="Other path",
path="/other/",
)
wrong_storage_path = Document.objects.create(
title="wrong storage",
correspondent=self.c,
document_type=self.dt,
storage_path=other_storage_path,
original_filename="sample4.pdf",
checksum="checksum-wrong-storage-path",
)
matched, reason = existing_document_matches_workflow(
wrong_storage_path,
trigger,
)
self.assertFalse(matched)
self.assertIn("storage path", reason)
def test_document_added_custom_field_query_no_match(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -1454,39 +1384,6 @@ class TestWorkflows(
self.assertIn(doc1, filtered)
self.assertNotIn(doc2, filtered)
def test_prefilter_documents_any_filters(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_any_correspondents.set([self.c])
trigger.filter_has_any_document_types.set([self.dt])
trigger.filter_has_any_storage_paths.set([self.sp])
allowed_document = Document.objects.create(
title="allowed",
correspondent=self.c,
document_type=self.dt,
storage_path=self.sp,
original_filename="doc-allowed.pdf",
checksum="checksum-any-allowed",
)
blocked_document = Document.objects.create(
title="blocked",
correspondent=self.c2,
document_type=self.dt,
storage_path=self.sp,
original_filename="doc-blocked.pdf",
checksum="checksum-any-blocked",
)
filtered = prefilter_documents_by_workflowtrigger(
Document.objects.all(),
trigger,
)
self.assertIn(allowed_document, filtered)
self.assertNotIn(blocked_document, filtered)
def test_consumption_trigger_requires_filter_configuration(self):
serializer = WorkflowTriggerSerializer(
data={

View File

@@ -479,11 +479,11 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
if descendant_pks:
filter_q = self.get_document_count_filter()
children_source = list(
children_source = (
Tag.objects.filter(pk__in=descendant_pks | {t.pk for t in all_tags})
.select_related("owner")
.annotate(document_count=Count("documents", filter=filter_q))
.order_by(*ordering),
.order_by(*ordering)
)
else:
children_source = all_tags
@@ -495,11 +495,7 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
page = self.paginate_queryset(queryset)
serializer = self.get_serializer(page, many=True)
response = self.get_paginated_response(serializer.data)
if descendant_pks:
# Include children in the "all" field, if needed
response.data["all"] = [tag.pk for tag in children_source]
return response
return self.get_paginated_response(serializer.data)
def perform_update(self, serializer):
old_parent = self.get_object().get_parent()

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-01-26 18:31+0000\n"
"POT-Creation-Date: 2026-01-25 03:30+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -89,7 +89,7 @@ msgstr ""
msgid "Automatic"
msgstr ""
#: documents/models.py:64 documents/models.py:434 documents/models.py:1528
#: documents/models.py:64 documents/models.py:434 documents/models.py:1507
#: paperless_mail/models.py:23 paperless_mail/models.py:143
msgid "name"
msgstr ""
@@ -252,7 +252,7 @@ msgid "The position of this document in your physical document archive."
msgstr ""
#: documents/models.py:303 documents/models.py:678 documents/models.py:732
#: documents/models.py:1571
#: documents/models.py:1550
msgid "document"
msgstr ""
@@ -869,358 +869,346 @@ msgid "has this document type"
msgstr ""
#: documents/models.py:1073
msgid "has one of these document types"
msgstr ""
#: documents/models.py:1080
msgid "does not have these document type(s)"
msgstr ""
#: documents/models.py:1088
#: documents/models.py:1081
msgid "has this correspondent"
msgstr ""
#: documents/models.py:1095
#: documents/models.py:1088
msgid "does not have these correspondent(s)"
msgstr ""
#: documents/models.py:1102
msgid "has one of these correspondents"
msgstr ""
#: documents/models.py:1110
#: documents/models.py:1096
msgid "has this storage path"
msgstr ""
#: documents/models.py:1117
msgid "has one of these storage paths"
msgstr ""
#: documents/models.py:1124
#: documents/models.py:1103
msgid "does not have these storage path(s)"
msgstr ""
#: documents/models.py:1128
#: documents/models.py:1107
msgid "filter custom field query"
msgstr ""
#: documents/models.py:1131
#: documents/models.py:1110
msgid "JSON-encoded custom field query expression."
msgstr ""
#: documents/models.py:1135
#: documents/models.py:1114
msgid "schedule offset days"
msgstr ""
#: documents/models.py:1138
#: documents/models.py:1117
msgid "The number of days to offset the schedule trigger by."
msgstr ""
#: documents/models.py:1143
#: documents/models.py:1122
msgid "schedule is recurring"
msgstr ""
#: documents/models.py:1146
#: documents/models.py:1125
msgid "If the schedule should be recurring."
msgstr ""
#: documents/models.py:1151
#: documents/models.py:1130
msgid "schedule recurring delay in days"
msgstr ""
#: documents/models.py:1155
#: documents/models.py:1134
msgid "The number of days between recurring schedule triggers."
msgstr ""
#: documents/models.py:1160
#: documents/models.py:1139
msgid "schedule date field"
msgstr ""
#: documents/models.py:1165
#: documents/models.py:1144
msgid "The field to check for a schedule trigger."
msgstr ""
#: documents/models.py:1174
#: documents/models.py:1153
msgid "schedule date custom field"
msgstr ""
#: documents/models.py:1178
#: documents/models.py:1157
msgid "workflow trigger"
msgstr ""
#: documents/models.py:1179
#: documents/models.py:1158
msgid "workflow triggers"
msgstr ""
#: documents/models.py:1187
#: documents/models.py:1166
msgid "email subject"
msgstr ""
#: documents/models.py:1191
#: documents/models.py:1170
msgid ""
"The subject of the email, can include some placeholders, see documentation."
msgstr ""
#: documents/models.py:1197
#: documents/models.py:1176
msgid "email body"
msgstr ""
#: documents/models.py:1200
#: documents/models.py:1179
msgid ""
"The body (message) of the email, can include some placeholders, see "
"documentation."
msgstr ""
#: documents/models.py:1206
#: documents/models.py:1185
msgid "emails to"
msgstr ""
#: documents/models.py:1209
#: documents/models.py:1188
msgid "The destination email addresses, comma separated."
msgstr ""
#: documents/models.py:1215
#: documents/models.py:1194
msgid "include document in email"
msgstr ""
#: documents/models.py:1226
#: documents/models.py:1205
msgid "webhook url"
msgstr ""
#: documents/models.py:1229
#: documents/models.py:1208
msgid "The destination URL for the notification."
msgstr ""
#: documents/models.py:1234
#: documents/models.py:1213
msgid "use parameters"
msgstr ""
#: documents/models.py:1239
#: documents/models.py:1218
msgid "send as JSON"
msgstr ""
#: documents/models.py:1243
#: documents/models.py:1222
msgid "webhook parameters"
msgstr ""
#: documents/models.py:1246
#: documents/models.py:1225
msgid "The parameters to send with the webhook URL if body not used."
msgstr ""
#: documents/models.py:1250
#: documents/models.py:1229
msgid "webhook body"
msgstr ""
#: documents/models.py:1253
#: documents/models.py:1232
msgid "The body to send with the webhook URL if parameters not used."
msgstr ""
#: documents/models.py:1257
#: documents/models.py:1236
msgid "webhook headers"
msgstr ""
#: documents/models.py:1260
#: documents/models.py:1239
msgid "The headers to send with the webhook URL."
msgstr ""
#: documents/models.py:1265
#: documents/models.py:1244
msgid "include document in webhook"
msgstr ""
#: documents/models.py:1276
#: documents/models.py:1255
msgid "Assignment"
msgstr ""
#: documents/models.py:1280
#: documents/models.py:1259
msgid "Removal"
msgstr ""
#: documents/models.py:1284 documents/templates/account/password_reset.html:15
#: documents/models.py:1263 documents/templates/account/password_reset.html:15
msgid "Email"
msgstr ""
#: documents/models.py:1288
#: documents/models.py:1267
msgid "Webhook"
msgstr ""
#: documents/models.py:1292
#: documents/models.py:1271
msgid "Workflow Action Type"
msgstr ""
#: documents/models.py:1297 documents/models.py:1530
#: documents/models.py:1276 documents/models.py:1509
#: paperless_mail/models.py:145
msgid "order"
msgstr ""
#: documents/models.py:1300
#: documents/models.py:1279
msgid "assign title"
msgstr ""
#: documents/models.py:1304
#: documents/models.py:1283
msgid "Assign a document title, must be a Jinja2 template, see documentation."
msgstr ""
#: documents/models.py:1312 paperless_mail/models.py:274
#: documents/models.py:1291 paperless_mail/models.py:274
msgid "assign this tag"
msgstr ""
#: documents/models.py:1321 paperless_mail/models.py:282
#: documents/models.py:1300 paperless_mail/models.py:282
msgid "assign this document type"
msgstr ""
#: documents/models.py:1330 paperless_mail/models.py:296
#: documents/models.py:1309 paperless_mail/models.py:296
msgid "assign this correspondent"
msgstr ""
#: documents/models.py:1339
#: documents/models.py:1318
msgid "assign this storage path"
msgstr ""
#: documents/models.py:1348
#: documents/models.py:1327
msgid "assign this owner"
msgstr ""
#: documents/models.py:1355
#: documents/models.py:1334
msgid "grant view permissions to these users"
msgstr ""
#: documents/models.py:1362
#: documents/models.py:1341
msgid "grant view permissions to these groups"
msgstr ""
#: documents/models.py:1369
#: documents/models.py:1348
msgid "grant change permissions to these users"
msgstr ""
#: documents/models.py:1376
#: documents/models.py:1355
msgid "grant change permissions to these groups"
msgstr ""
#: documents/models.py:1383
#: documents/models.py:1362
msgid "assign these custom fields"
msgstr ""
#: documents/models.py:1387
#: documents/models.py:1366
msgid "custom field values"
msgstr ""
#: documents/models.py:1391
#: documents/models.py:1370
msgid "Optional values to assign to the custom fields."
msgstr ""
#: documents/models.py:1400
#: documents/models.py:1379
msgid "remove these tag(s)"
msgstr ""
#: documents/models.py:1405
#: documents/models.py:1384
msgid "remove all tags"
msgstr ""
#: documents/models.py:1412
#: documents/models.py:1391
msgid "remove these document type(s)"
msgstr ""
#: documents/models.py:1417
#: documents/models.py:1396
msgid "remove all document types"
msgstr ""
#: documents/models.py:1424
#: documents/models.py:1403
msgid "remove these correspondent(s)"
msgstr ""
#: documents/models.py:1429
#: documents/models.py:1408
msgid "remove all correspondents"
msgstr ""
#: documents/models.py:1436
#: documents/models.py:1415
msgid "remove these storage path(s)"
msgstr ""
#: documents/models.py:1441
#: documents/models.py:1420
msgid "remove all storage paths"
msgstr ""
#: documents/models.py:1448
#: documents/models.py:1427
msgid "remove these owner(s)"
msgstr ""
#: documents/models.py:1453
#: documents/models.py:1432
msgid "remove all owners"
msgstr ""
#: documents/models.py:1460
#: documents/models.py:1439
msgid "remove view permissions for these users"
msgstr ""
#: documents/models.py:1467
#: documents/models.py:1446
msgid "remove view permissions for these groups"
msgstr ""
#: documents/models.py:1474
#: documents/models.py:1453
msgid "remove change permissions for these users"
msgstr ""
#: documents/models.py:1481
#: documents/models.py:1460
msgid "remove change permissions for these groups"
msgstr ""
#: documents/models.py:1486
#: documents/models.py:1465
msgid "remove all permissions"
msgstr ""
#: documents/models.py:1493
#: documents/models.py:1472
msgid "remove these custom fields"
msgstr ""
#: documents/models.py:1498
#: documents/models.py:1477
msgid "remove all custom fields"
msgstr ""
#: documents/models.py:1507
#: documents/models.py:1486
msgid "email"
msgstr ""
#: documents/models.py:1516
#: documents/models.py:1495
msgid "webhook"
msgstr ""
#: documents/models.py:1520
#: documents/models.py:1499
msgid "workflow action"
msgstr ""
#: documents/models.py:1521
#: documents/models.py:1500
msgid "workflow actions"
msgstr ""
#: documents/models.py:1536
#: documents/models.py:1515
msgid "triggers"
msgstr ""
#: documents/models.py:1543
#: documents/models.py:1522
msgid "actions"
msgstr ""
#: documents/models.py:1546 paperless_mail/models.py:154
#: documents/models.py:1525 paperless_mail/models.py:154
msgid "enabled"
msgstr ""
#: documents/models.py:1557
#: documents/models.py:1536
msgid "workflow"
msgstr ""
#: documents/models.py:1561
#: documents/models.py:1540
msgid "workflow trigger type"
msgstr ""
#: documents/models.py:1575
#: documents/models.py:1554
msgid "date run"
msgstr ""
#: documents/models.py:1581
#: documents/models.py:1560
msgid "workflow run"
msgstr ""
#: documents/models.py:1582
#: documents/models.py:1561
msgid "workflow runs"
msgstr ""
@@ -1747,155 +1735,155 @@ msgstr ""
msgid "paperless application settings"
msgstr ""
#: paperless/settings.py:807
#: paperless/settings.py:800
msgid "English (US)"
msgstr ""
#: paperless/settings.py:808
#: paperless/settings.py:801
msgid "Arabic"
msgstr ""
#: paperless/settings.py:809
#: paperless/settings.py:802
msgid "Afrikaans"
msgstr ""
#: paperless/settings.py:810
#: paperless/settings.py:803
msgid "Belarusian"
msgstr ""
#: paperless/settings.py:811
#: paperless/settings.py:804
msgid "Bulgarian"
msgstr ""
#: paperless/settings.py:812
#: paperless/settings.py:805
msgid "Catalan"
msgstr ""
#: paperless/settings.py:813
#: paperless/settings.py:806
msgid "Czech"
msgstr ""
#: paperless/settings.py:814
#: paperless/settings.py:807
msgid "Danish"
msgstr ""
#: paperless/settings.py:815
#: paperless/settings.py:808
msgid "German"
msgstr ""
#: paperless/settings.py:816
#: paperless/settings.py:809
msgid "Greek"
msgstr ""
#: paperless/settings.py:817
#: paperless/settings.py:810
msgid "English (GB)"
msgstr ""
#: paperless/settings.py:818
#: paperless/settings.py:811
msgid "Spanish"
msgstr ""
#: paperless/settings.py:819
#: paperless/settings.py:812
msgid "Persian"
msgstr ""
#: paperless/settings.py:820
#: paperless/settings.py:813
msgid "Finnish"
msgstr ""
#: paperless/settings.py:821
#: paperless/settings.py:814
msgid "French"
msgstr ""
#: paperless/settings.py:822
#: paperless/settings.py:815
msgid "Hungarian"
msgstr ""
#: paperless/settings.py:823
#: paperless/settings.py:816
msgid "Indonesian"
msgstr ""
#: paperless/settings.py:824
#: paperless/settings.py:817
msgid "Italian"
msgstr ""
#: paperless/settings.py:825
#: paperless/settings.py:818
msgid "Japanese"
msgstr ""
#: paperless/settings.py:826
#: paperless/settings.py:819
msgid "Korean"
msgstr ""
#: paperless/settings.py:827
#: paperless/settings.py:820
msgid "Luxembourgish"
msgstr ""
#: paperless/settings.py:828
#: paperless/settings.py:821
msgid "Norwegian"
msgstr ""
#: paperless/settings.py:829
#: paperless/settings.py:822
msgid "Dutch"
msgstr ""
#: paperless/settings.py:830
#: paperless/settings.py:823
msgid "Polish"
msgstr ""
#: paperless/settings.py:831
#: paperless/settings.py:824
msgid "Portuguese (Brazil)"
msgstr ""
#: paperless/settings.py:832
#: paperless/settings.py:825
msgid "Portuguese"
msgstr ""
#: paperless/settings.py:833
#: paperless/settings.py:826
msgid "Romanian"
msgstr ""
#: paperless/settings.py:834
#: paperless/settings.py:827
msgid "Russian"
msgstr ""
#: paperless/settings.py:835
#: paperless/settings.py:828
msgid "Slovak"
msgstr ""
#: paperless/settings.py:836
#: paperless/settings.py:829
msgid "Slovenian"
msgstr ""
#: paperless/settings.py:837
#: paperless/settings.py:830
msgid "Serbian"
msgstr ""
#: paperless/settings.py:838
#: paperless/settings.py:831
msgid "Swedish"
msgstr ""
#: paperless/settings.py:839
#: paperless/settings.py:832
msgid "Turkish"
msgstr ""
#: paperless/settings.py:840
#: paperless/settings.py:833
msgid "Ukrainian"
msgstr ""
#: paperless/settings.py:841
#: paperless/settings.py:834
msgid "Vietnamese"
msgstr ""
#: paperless/settings.py:842
#: paperless/settings.py:835
msgid "Chinese Simplified"
msgstr ""
#: paperless/settings.py:843
#: paperless/settings.py:836
msgid "Chinese Traditional"
msgstr ""
#: paperless/urls.py:377
#: paperless/urls.py:376
msgid "Paperless-ngx administration"
msgstr ""

View File

@@ -3,15 +3,12 @@ from urllib.parse import quote
from allauth.account.adapter import DefaultAccountAdapter
from allauth.core import context
from allauth.headless.tokens.sessions import SessionTokenStrategy
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.authtoken.models import Token
from documents.models import Document
from paperless.signals import handle_social_account_updated
@@ -162,11 +159,3 @@ class CustomSocialAccountAdapter(DefaultSocialAccountAdapter):
exception,
extra_context,
)
class DrfTokenStrategy(SessionTokenStrategy):
def create_access_token(self, request: HttpRequest) -> str | None:
if not request.user.is_authenticated:
return None
token, _ = Token.objects.get_or_create(user=request.user)
return token.key

View File

@@ -345,7 +345,6 @@ INSTALLED_APPS = [
"allauth.account",
"allauth.socialaccount",
"allauth.mfa",
"allauth.headless",
"drf_spectacular",
"drf_spectacular_sidecar",
"treenode",
@@ -540,12 +539,6 @@ SOCIALACCOUNT_PROVIDERS = json.loads(
)
SOCIAL_ACCOUNT_DEFAULT_GROUPS = __get_list("PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS")
SOCIAL_ACCOUNT_SYNC_GROUPS = __get_boolean("PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS")
SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM: Final[str] = os.getenv(
"PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM",
"groups",
)
HEADLESS_TOKEN_STRATEGY = "paperless.adapter.DrfTokenStrategy"
MFA_TOTP_ISSUER = "Paperless-ngx"

View File

@@ -40,19 +40,15 @@ def handle_social_account_updated(sender, request, sociallogin, **kwargs):
extra_data = sociallogin.account.extra_data or {}
social_account_groups = extra_data.get(
settings.SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM,
"groups",
[],
) # pre-allauth 65.11.0 structure
if not social_account_groups:
# allauth 65.11.0+ nests claims under `userinfo`/`id_token`
social_account_groups = (
extra_data.get("userinfo", {}).get(
settings.SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM,
)
or extra_data.get("id_token", {}).get(
settings.SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM,
)
extra_data.get("userinfo", {}).get("groups")
or extra_data.get("id_token", {}).get("groups")
or []
)
if settings.SOCIAL_ACCOUNT_SYNC_GROUPS and social_account_groups is not None:

View File

@@ -4,7 +4,6 @@ from allauth.account.adapter import get_adapter
from allauth.core import context
from allauth.socialaccount.adapter import get_adapter as get_social_adapter
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.forms import ValidationError
@@ -12,9 +11,6 @@ from django.http import HttpRequest
from django.test import TestCase
from django.test import override_settings
from django.urls import reverse
from rest_framework.authtoken.models import Token
from paperless.adapter import DrfTokenStrategy
class TestCustomAccountAdapter(TestCase):
@@ -185,74 +181,3 @@ class TestCustomSocialAccountAdapter(TestCase):
self.assertTrue(
any("Test authentication error" in message for message in log_cm.output),
)
class TestDrfTokenStrategy(TestCase):
def test_create_access_token_creates_new_token(self):
"""
GIVEN:
- A user with no existing DRF token
WHEN:
- create_access_token is called
THEN:
- A new token is created and its key is returned
"""
user = User.objects.create_user("testuser")
request = HttpRequest()
request.user = user
strategy = DrfTokenStrategy()
token_key = strategy.create_access_token(request)
# Verify a token was created
self.assertIsNotNone(token_key)
self.assertTrue(Token.objects.filter(user=user).exists())
# Verify the returned key matches the created token
token = Token.objects.get(user=user)
self.assertEqual(token_key, token.key)
def test_create_access_token_returns_existing_token(self):
"""
GIVEN:
- A user with an existing DRF token
WHEN:
- create_access_token is called again
THEN:
- The same token key is returned (no new token created)
"""
user = User.objects.create_user("testuser")
existing_token = Token.objects.create(user=user)
request = HttpRequest()
request.user = user
strategy = DrfTokenStrategy()
token_key = strategy.create_access_token(request)
# Verify the existing token key is returned
self.assertEqual(token_key, existing_token.key)
# Verify only one token exists (no duplicate created)
self.assertEqual(Token.objects.filter(user=user).count(), 1)
def test_create_access_token_returns_none_for_unauthenticated_user(self):
"""
GIVEN:
- An unauthenticated request
WHEN:
- create_access_token is called
THEN:
- None is returned and no token is created
"""
request = HttpRequest()
request.user = AnonymousUser()
strategy = DrfTokenStrategy()
token_key = strategy.create_access_token(request)
self.assertIsNone(token_key)
self.assertEqual(Token.objects.count(), 0)

View File

@@ -228,7 +228,6 @@ urlpatterns = [
],
),
),
re_path("^auth/headless/", include("allauth.headless.urls")),
re_path(
"^$", # Redirect to the API swagger view
RedirectView.as_view(url="schema/view/"),