Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2026-01-26 22:49:01 -06:00

Compare commits: 3 commits, chore/ubun...feature-bu (branch names truncated in this view)

| Author | SHA1 | Date |
|---|---|---|
|  | 0df0f3a21f |  |
|  | d81488d054 |  |
|  | 8dc4f34743 |  |
1  .github/release-drafter.yml (vendored)

@@ -44,7 +44,6 @@ include-labels:
-  - 'notable'
 exclude-labels:
   - 'skip-changelog'
 filter-by-commitish: true
 category-template: '### $TITLE'
 change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
 change-title-escapes: '\<*_&#@'
4  .github/workflows/ci-docs.yml (vendored)

@@ -23,7 +23,7 @@ env:
 jobs:
   build:
     name: Build Documentation
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout
         uses: actions/checkout@v6

@@ -58,7 +58,7 @@ jobs:
     name: Deploy Documentation
     needs: build
     if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout
         uses: actions/checkout@v6
2  .github/workflows/ci-lint.yml (vendored)

@@ -12,7 +12,7 @@ concurrency:
 jobs:
   pre-commit:
     name: Pre-commit Checks
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout
         uses: actions/checkout@v6
2  .github/workflows/crowdin.yml (vendored)

@@ -10,7 +10,7 @@ jobs:
   synchronize-with-crowdin:
     name: Crowdin Sync
     if: github.repository_owner == 'paperless-ngx'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout
         uses: actions/checkout@v6
2  .github/workflows/pr-bot.yml (vendored)

@@ -8,7 +8,7 @@ permissions:
 jobs:
   pr-bot:
     name: Automated PR Bot
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     steps:
       - name: Label PR by file path or branch name
         # see .github/labeler.yml for the labeler config
2  .github/workflows/project-actions.yml (vendored)

@@ -12,7 +12,7 @@ permissions:
 jobs:
   pr_opened_or_reopened:
     name: pr_opened_or_reopened
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     permissions:
       # write permission is required for autolabeler
       pull-requests: write
10  .github/workflows/repo-maintenance.yml (vendored)

@@ -13,7 +13,7 @@ jobs:
   stale:
     name: 'Stale'
     if: github.repository_owner == 'paperless-ngx'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - uses: actions/stale@v10
         with:

@@ -35,7 +35,7 @@ jobs:
   lock-threads:
     name: 'Lock Old Threads'
     if: github.repository_owner == 'paperless-ngx'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - uses: dessant/lock-threads@v6
         with:

@@ -55,7 +55,7 @@ jobs:
   close-answered-discussions:
     name: 'Close Answered Discussions'
     if: github.repository_owner == 'paperless-ngx'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - uses: actions/github-script@v8
         with:

@@ -112,7 +112,7 @@ jobs:
   close-outdated-discussions:
     name: 'Close Outdated Discussions'
     if: github.repository_owner == 'paperless-ngx'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
      - uses: actions/github-script@v8
        with:

@@ -204,7 +204,7 @@ jobs:
   close-unsupported-feature-requests:
     name: 'Close Unsupported Feature Requests'
     if: github.repository_owner == 'paperless-ngx'
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-24.04
     steps:
       - uses: actions/github-script@v8
         with:
2  .github/workflows/translate-strings.yml (vendored)

@@ -6,7 +6,7 @@ on:
 jobs:
   generate-translate-strings:
     name: Generate Translation Strings
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
.pre-commit-config.yaml

@@ -37,7 +37,7 @@ repos:
       - json
   # See https://github.com/prettier/prettier/issues/15742 for the fork reason
   - repo: https://github.com/rbubley/mirrors-prettier
-    rev: 'v3.6.2'
+    rev: 'v3.8.0'
    hooks:
      - id: prettier
        types_or:

@@ -49,7 +49,7 @@ repos:
       - 'prettier-plugin-organize-imports@4.1.0'
   # Python hooks
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.5
+    rev: v0.14.13
    hooks:
      - id: ruff-check
      - id: ruff-format

@@ -76,7 +76,7 @@ repos:
    hooks:
      - id: shellcheck
   - repo: https://github.com/google/yamlfmt
-    rev: v0.20.0
+    rev: v0.21.0
    hooks:
      - id: yamlfmt
        exclude: "^src-ui/pnpm-lock.yaml"
(shell script; file name not captured in this view)

@@ -4,7 +4,8 @@

 set -eu

-for command in document_archiver \
+for command in decrypt_documents \
+  document_archiver \
   document_exporter \
   document_importer \
   mail_fetcher \
docker/rootfs/usr/local/bin/management_command

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py management_command "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py management_command "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py management_command "$@"
 else
     echo "Unknown user."
 fi
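All of the wrapper scripts that follow repeat this same user-dispatch pattern around a different management command. As a minimal sketch, the shared logic could be factored like this (a hypothetical consolidation for illustration only — the actual scripts inline the pattern per command):

```bash
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
# Hypothetical consolidation of the dispatch pattern the wrappers share.
set -e

cd "${PAPERLESS_SRC_DIR}"

run_management_command() {
  local command="$1"
  shift
  if [[ $(id -u) == 0 ]]; then
    # Started as root: drop privileges to the paperless user first.
    s6-setuidgid paperless python3 manage.py "${command}" "$@"
  elif [[ $(id -un) == "paperless" ]]; then
    # Already running as the paperless user.
    python3 manage.py "${command}" "$@"
  else
    echo "Unknown user."
  fi
}

run_management_command document_exporter "$@"
```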
docker/rootfs/usr/local/bin/convert_mariadb_uuid

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py convert_mariadb_uuid "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py convert_mariadb_uuid "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py convert_mariadb_uuid "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/createsuperuser

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py createsuperuser "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py createsuperuser "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py createsuperuser "$@"
 else
     echo "Unknown user."
 fi
14  docker/rootfs/usr/local/bin/decrypt_documents (new executable file)

@@ -0,0 +1,14 @@
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash
+
+set -e
+
+cd "${PAPERLESS_SRC_DIR}"
+
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py decrypt_documents "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py decrypt_documents "$@"
+else
+    echo "Unknown user."
+fi
docker/rootfs/usr/local/bin/document_archiver

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_archiver "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_archiver "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_archiver "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_create_classifier

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_create_classifier "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_create_classifier "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_create_classifier "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_exporter

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_exporter "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_exporter "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_exporter "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_fuzzy_match

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_fuzzy_match "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_fuzzy_match "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_fuzzy_match "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_importer

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_importer "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_importer "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_importer "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_index

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_index "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_index "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_index "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_renamer

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_renamer "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_renamer "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_renamer "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_retagger

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_retagger "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_retagger "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_retagger "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_sanity_checker

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_sanity_checker "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_sanity_checker "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_sanity_checker "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/document_thumbnails

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py document_thumbnails "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py document_thumbnails "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py document_thumbnails "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/mail_fetcher

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py mail_fetcher "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py mail_fetcher "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py mail_fetcher "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/manage_superuser

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py manage_superuser "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py manage_superuser "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py manage_superuser "$@"
 else
     echo "Unknown user."
 fi
docker/rootfs/usr/local/bin/prune_audit_logs

@@ -5,8 +5,10 @@ set -e
 cd "${PAPERLESS_SRC_DIR}"

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py prune_audit_logs "$@"
-elif [[ $(id -un) == "paperless" ]]; then
+if [[ $(id -u) == 0 ]]; then
+    s6-setuidgid paperless python3 manage.py prune_audit_logs "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+    python3 manage.py prune_audit_logs "$@"
 else
     echo "Unknown user."
 fi
docs/administration.md

@@ -580,6 +580,36 @@ document.
 documents, such as encrypted PDF documents. The archiver will skip over
 these documents each time it sees them.

+### Managing encryption {#encryption}
+
+!!! warning
+
+    Encryption was removed in [paperless-ng 0.9](changelog.md#paperless-ng-090)
+    because it did not really provide any additional security, the passphrase
+    was stored in a configuration file on the same system as the documents.
+    Furthermore, the entire text content of the documents is stored plain in
+    the database, even if your documents are encrypted. Filenames are not
+    encrypted as well. Finally, the web server provides transparent access to
+    your encrypted documents.
+
+    Consider running paperless on an encrypted filesystem instead, which
+    will then at least provide security against physical hardware theft.
+
+#### Enabling encryption
+
+Enabling encryption is no longer supported.
+
+#### Disabling encryption
+
+Basic usage to disable encryption of your document store:
+
+(Note: If `PAPERLESS_PASSPHRASE` isn't set already, you need to specify
+it here)
+
+```
+decrypt_documents [--passphrase SECR3TP4SSPHRA$E]
+```
+
 ### Detecting duplicates {#fuzzy_duplicate}

 Paperless already catches and prevents upload of exactly matching documents,
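For the `decrypt_documents` command documented in the hunk above, a typical invocation in a Docker Compose deployment might look like this (a sketch — the `webserver` service name and the passphrase handling are assumptions about your setup):

```bash
# Run the decryption inside the running container; omit --passphrase
# if PAPERLESS_PASSPHRASE is already set in the container environment.
docker compose exec webserver decrypt_documents --passphrase "$PAPERLESS_PASSPHRASE"
```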
docs/advanced_usage.md

@@ -501,7 +501,7 @@ The `datetime` filter formats a datetime string or datetime object using Python'
 See the [strftime format code documentation](https://docs.python.org/3.13/library/datetime.html#strftime-and-strptime-format-codes)
 for the possible codes and their meanings.

-##### Date Localization {#date-localization}
+##### Date Localization

 The `localize_date` filter formats a date or datetime object into a localized string using Babel internationalization.
 This takes into account the provided locale for translation. Since this must be used on a date or datetime object,
@@ -851,8 +851,8 @@ followed by the even pages.

 It's important that the scan files get consumed in the correct order, and one at a time.
 You therefore need to make sure that Paperless is running while you upload the files into
-the directory; and if you're using polling, make sure that
-`CONSUMER_POLLING_INTERVAL` is set to a value lower than it takes for the second scan to appear,
+the directory; and if you're using [polling](configuration.md#polling), make sure that
+`CONSUMER_POLLING` is set to a value lower than it takes for the second scan to appear,
 like 5-10 or even lower.

 Another thing that might happen is that you start a double sided scan, but then forget
10  docs/api.md

@@ -8,7 +8,7 @@ Further documentation is provided here for some endpoints and features.

 ## Authorization

-The REST api provides five different forms of authentication.
+The REST api provides four different forms of authentication.

 1. Basic authentication

@@ -52,14 +52,6 @@ The REST api provides five different forms of authentication.
    [configuration](configuration.md#PAPERLESS_ENABLE_HTTP_REMOTE_USER_API)),
    you can authenticate against the API using Remote User auth.

-5. Headless OIDC via [`django-allauth`](https://codeberg.org/allauth/django-allauth)
-
-   `django-allauth` exposes API endpoints under `api/auth/` which enable tools
-   like third-party apps to authenticate with social accounts that are
-   configured. See
-   [here](advanced_usage.md#openid-connect-and-social-authentication) for more
-   information on social accounts.
-
 ## Searching for documents

 Full text searching is available on the `/api/documents/` endpoint. Two
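As a quick illustration of the first authentication form above, a basic-auth request against the documents endpoint could look like this (host, port, and credentials are placeholders):

```bash
# List documents using HTTP basic authentication.
curl -s -u "username:password" "http://localhost:8000/api/documents/"
```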
docs/changelog.md

@@ -1,21 +1,5 @@
 # Changelog

-## paperless-ngx 2.20.5
-
-### Bug Fixes
-
-- Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent [@shamoon](https://github.com/shamoon) ([#11811](https://github.com/paperless-ngx/paperless-ngx/pull/11811))
-- Fix: use explicit order field for workflow actions [@shamoon](https://github.com/shamoon) [@stumpylog](https://github.com/stumpylog) ([#11781](https://github.com/paperless-ngx/paperless-ngx/pull/11781))
-
-### All App Changes
-
-<details>
-<summary>2 changes</summary>
-
-- Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent [@shamoon](https://github.com/shamoon) ([#11811](https://github.com/paperless-ngx/paperless-ngx/pull/11811))
-- Fix: use explicit order field for workflow actions [@shamoon](https://github.com/shamoon) [@stumpylog](https://github.com/stumpylog) ([#11781](https://github.com/paperless-ngx/paperless-ngx/pull/11781))
-</details>
-
 ## paperless-ngx 2.20.4

 ### Security
docs/configuration.md

@@ -659,7 +659,7 @@ system. See the corresponding
 : Sync groups from the third party authentication system (e.g. OIDC) to Paperless-ngx. When enabled, users will be added or removed from groups based on their group membership in the third party authentication system. Groups must already exist in Paperless-ngx and have the same name as in the third party authentication system. Groups are updated upon logging in via the third party authentication system, see the corresponding [django-allauth documentation](https://docs.allauth.org/en/dev/socialaccount/signals.html).

-: In order to pass groups from the authentication system you will need to update your [PAPERLESS_SOCIALACCOUNT_PROVIDERS](#PAPERLESS_SOCIALACCOUNT_PROVIDERS) setting by adding a top-level "SCOPES" setting which includes "groups", or the custom groups claim configured in [`PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM`](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM) e.g.:
+: In order to pass groups from the authentication system you will need to update your [PAPERLESS_SOCIALACCOUNT_PROVIDERS](#PAPERLESS_SOCIALACCOUNT_PROVIDERS) setting by adding a top-level "SCOPES" setting which includes "groups", e.g.:

 ```json
 {"openid_connect":{"SCOPE": ["openid","profile","email","groups"]...

@@ -667,12 +667,6 @@ system. See the corresponding
 Defaults to False

-#### [`PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM=<str>`](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM) {#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM}
-
-: Allows you to define a custom groups claim. See [PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS](#PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS) which is required for this setting to take effect.
-
-  Defaults to "groups"
-
 #### [`PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS=<comma-separated-list>`](#PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS) {#PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS}

 : A list of group names that users who signup via social accounts will be added to upon signup. Groups listed here must already exist.
@@ -1181,45 +1175,21 @@ don't exist yet.

 #### [`PAPERLESS_CONSUMER_IGNORE_PATTERNS=<json>`](#PAPERLESS_CONSUMER_IGNORE_PATTERNS) {#PAPERLESS_CONSUMER_IGNORE_PATTERNS}

-: Additional regex patterns for files to ignore in the consumption directory. Patterns are matched against filenames only (not full paths)
-  using Python's `re.match()`, which anchors at the start of the filename.
+: By default, paperless ignores certain files and folders in the
+  consumption directory, such as system files created by the Mac OS
+  or hidden folders some tools use to store data.

-  See the [watchfiles documentation](https://watchfiles.helpmanual.io/api/filters/#watchfiles.BaseFilter.ignore_entity_patterns)
+  This can be adjusted by configuring a custom json array with
+  patterns to exclude.

-  This setting is for additional patterns beyond the built-in defaults. Common system files and directories are already ignored automatically.
+  The patterns will be compiled via Python's standard `re` module.
+  For example, `.DS_STORE/*` will ignore any files found in a folder
+  named `.DS_STORE`, including `.DS_STORE/bar.pdf` and `foo/.DS_STORE/bar.pdf`

-  Example custom patterns:
+  A pattern like `._*` will ignore anything starting with `._`, including:
+  `._foo.pdf` and `._bar/foo.pdf`

-  ```json
-  ["^temp_", "\\.bak$", "^~"]
-  ```
-
-  This would ignore:
-
-  - Files starting with `temp_` (e.g., `temp_scan.pdf`)
-  - Files ending with `.bak` (e.g., `document.pdf.bak`)
-  - Files starting with `~` (e.g., `~$document.docx`)
-
-  Defaults to `[]` (empty list, uses only built-in defaults).
-
-  The default ignores are `[.DS_Store, .DS_STORE, ._*, desktop.ini, Thumbs.db]` and cannot be overridden.
-
-#### [`PAPERLESS_CONSUMER_IGNORE_DIRS=<json>`](#PAPERLESS_CONSUMER_IGNORE_DIRS) {#PAPERLESS_CONSUMER_IGNORE_DIRS}
-
-: Additional directory names to ignore in the consumption directory. Directories matching these names (and all their contents) will be skipped.
-
-  This setting is for additional directories beyond the built-in defaults. Matching is done by directory name only, not full path.
-
-  Example:
-
-  ```json
-  ["temp", "incoming", ".hidden"]
-  ```
-
-  Defaults to `[]` (empty list, uses only built-in defaults).
-
-  The default ignores are `[.stfolder, .stversions, .localized, @eaDir, .Spotlight-V100, .Trashes, __MACOSX]` and cannot be overridden.
+  Defaults to
+  `[".DS_Store", ".DS_STORE", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini", "@eaDir/*", "Thumbs.db"]`.

 #### [`PAPERLESS_CONSUMER_BARCODE_SCANNER=<string>`](#PAPERLESS_CONSUMER_BARCODE_SCANNER) {#PAPERLESS_CONSUMER_BARCODE_SCANNER}
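To sanity-check a candidate pattern against the `re.match` semantics described in the removed text above, a quick local test can help (a sketch — the patterns and filenames here are made up):

```bash
python3 - <<'PY'
import re

# Start-anchored patterns, so re.match and re.search agree.
patterns = ["^temp_", "^~"]
for name in ("temp_scan.pdf", "~$document.docx", "invoice.pdf"):
    ignored = any(re.match(p, name) for p in patterns)
    print(f"{name}: {'ignored' if ignored else 'consumed'}")
PY
```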
@@ -1318,24 +1288,48 @@ within your documents.
 Defaults to false.

-#### [`PAPERLESS_CONSUMER_POLLING_INTERVAL=<num>`](#PAPERLESS_CONSUMER_POLLING_INTERVAL) {#PAPERLESS_CONSUMER_POLLING_INTERVAL}
+### Polling {#polling}

-: Configures how the consumer detects new files in the consumption directory.
+#### [`PAPERLESS_CONSUMER_POLLING=<num>`](#PAPERLESS_CONSUMER_POLLING) {#PAPERLESS_CONSUMER_POLLING}

-  When set to `0` (default), paperless uses native filesystem notifications for efficient, immediate detection of new files.
+: If paperless won't find documents added to your consume folder, it
+  might not be able to automatically detect filesystem changes. In
+  that case, specify a polling interval in seconds here, which will
+  then cause paperless to periodically check your consumption
+  directory for changes. This will also disable listening for file
+  system changes with `inotify`.

-  When set to a positive number, paperless polls the consumption directory at that interval in seconds. Use polling for network filesystems (NFS, SMB/CIFS) where native notifications may not work reliably.
+  Defaults to 0, which disables polling and uses filesystem
+  notifications.

-  Defaults to 0.
+#### [`PAPERLESS_CONSUMER_POLLING_RETRY_COUNT=<num>`](#PAPERLESS_CONSUMER_POLLING_RETRY_COUNT) {#PAPERLESS_CONSUMER_POLLING_RETRY_COUNT}

-#### [`PAPERLESS_CONSUMER_STABILITY_DELAY=<num>`](#PAPERLESS_CONSUMER_STABILITY_DELAY) {#PAPERLESS_CONSUMER_STABILITY_DELAY}
+: If consumer polling is enabled, sets the maximum number of times
+  paperless will check for a file to remain unmodified. If a file's
+  modification time and size are identical for two consecutive checks, it
+  will be consumed.

-: Sets the time in seconds that a file must remain unchanged (same size and modification time) before paperless will begin consuming it.
+  Defaults to 5.

-  Increase this value if you experience issues with files being consumed before they are fully written, particularly on slower network storage or
-  with certain scanner quirks
+#### [`PAPERLESS_CONSUMER_POLLING_DELAY=<num>`](#PAPERLESS_CONSUMER_POLLING_DELAY) {#PAPERLESS_CONSUMER_POLLING_DELAY}

-  Defaults to 5.0 seconds.
+: If consumer polling is enabled, sets the delay in seconds between
+  each check (above) paperless will do while waiting for a file to
+  remain unmodified.
+
+  Defaults to 5.
+
+### iNotify {#inotify}
+
+#### [`PAPERLESS_CONSUMER_INOTIFY_DELAY=<num>`](#PAPERLESS_CONSUMER_INOTIFY_DELAY) {#PAPERLESS_CONSUMER_INOTIFY_DELAY}
+
+: Sets the time in seconds the consumer will wait for additional
+  events from inotify before the consumer will consider a file ready
+  and begin consumption. Certain scanners or network setups may
+  generate multiple events for a single file, leading to multiple
+  consumers working on the same file. Configure this to prevent that.
+
+  Defaults to 0.5 seconds.

 ## Workflow webhooks
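Putting the older polling settings above together, a consume directory on a network filesystem might be configured like this (the setting names are from the docs above; the values are illustrative only):

```bash
# paperless.conf excerpt — poll instead of inotify on NFS/SMB mounts
PAPERLESS_CONSUMER_POLLING=10             # check the consume dir every 10 seconds
PAPERLESS_CONSUMER_POLLING_DELAY=5        # seconds between stability checks on a file
PAPERLESS_CONSUMER_POLLING_RETRY_COUNT=5  # stop checking a file after this many attempts
```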
docs/migration.md (deleted in this compare)

@@ -1,25 +0,0 @@
-# v3 Migration Guide
-
-## Consumer Settings Changes
-
-The v3 consumer command uses a [different library](https://watchfiles.helpmanual.io/) to unify
-the watching for new files in the consume directory. For the user, this removes several configuration options related to delays and retries
-and replaces with a single unified setting. It also adjusts how the consumer ignore filtering happens, replaced `fnmatch` with `regex` and
-separating the directory ignore from the file ignore.
-
-### Summary
-
-| Old Setting | New Setting | Notes |
-| --- | --- | --- |
-| `CONSUMER_POLLING` | [`CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL) | Renamed for clarity |
-| `CONSUMER_INOTIFY_DELAY` | [`CONSUMER_STABILITY_DELAY`](configuration.md#PAPERLESS_CONSUMER_STABILITY_DELAY) | Unified for all modes |
-| `CONSUMER_POLLING_DELAY` | _Removed_ | Use `CONSUMER_STABILITY_DELAY` |
-| `CONSUMER_POLLING_RETRY_COUNT` | _Removed_ | Automatic with stability tracking |
-| `CONSUMER_IGNORE_PATTERNS` | [`CONSUMER_IGNORE_PATTERNS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_PATTERNS) | **Now regex, not fnmatch**; user patterns are added to (not replacing) default ones |
-| _New_ | [`CONSUMER_IGNORE_DIRS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_DIRS) | Additional directories to ignore; user entries are added to (not replacing) defaults |
-
-## Encryption Support
-
-Document and thumbnail encryption is no longer supported. This was previously deprecated in [paperless-ng 0.9.3](https://github.com/paperless-ngx/paperless-ngx/blob/dev/docs/changelog.md#paperless-ng-093)
-
-Users must decrypt their document using the `decrypt_documents` command before upgrading.
docs/setup.md

@@ -124,7 +124,8 @@ account. The script essentially automatically performs the steps described in [D
    system notifications with `inotify`. When storing the consumption
    directory on such a file system, paperless will not pick up new
    files with the default configuration. You will need to use
-   [`PAPERLESS_CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL), which will disable inotify.
+   [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING), which will disable inotify. See
+   [here](configuration.md#polling).

 5. Run `docker compose pull`. This will pull the image from the GitHub container registry
    by default but you can change the image to pull from Docker Hub by changing the `image`
docs/troubleshooting.md

@@ -46,9 +46,9 @@ run:
 If you notice that the consumer will only pickup files in the
 consumption directory at startup, but won't find any other files added
 later, you will need to enable filesystem polling with the configuration
-option [`PAPERLESS_CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL).
+option [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING).

-This will disable automatic listening for filesystem changes and
+This will disable listening to filesystem changes with inotify and
 paperless will manually check the consumption directory for changes
 instead.
@@ -234,9 +234,47 @@ FileNotFoundError: [Errno 2] No such file or directory: '/tmp/ocrmypdf.io.yhk3zb
 This probably indicates paperless tried to consume the same file twice.
 This can happen for a number of reasons, depending on how documents are
-placed into the consume folder, such as how a scanner may modify a file multiple times as it scans.
-Try adjusting the
-[file stability delay](configuration.md#PAPERLESS_CONSUMER_STABILITY_DELAY) to a larger value.
+placed into the consume folder. If paperless is using inotify (the
+default) to check for documents, try adjusting the
+[inotify configuration](configuration.md#inotify). If polling is enabled, try adjusting the
+[polling configuration](configuration.md#polling).
+
+## Consumer fails waiting for file to remain unmodified.
+
+You might find messages like these in your log files:
+
+```
+[ERROR] [paperless.management.consumer] Timeout while waiting on file /usr/src/paperless/src/../consume/SCN_0001.pdf to remain unmodified.
+```
+
+This indicates paperless timed out while waiting for the file to be
+completely written to the consume folder. Adjusting
+[polling configuration](configuration.md#polling) values should resolve the issue.
+
+!!! note
+
+    The user will need to manually move the file out of the consume folder
+    and back in, for the initial failing file to be consumed.
+
+## Consumer fails reporting "OS reports file as busy still".
+
+You might find messages like these in your log files:
+
+```
+[WARNING] [paperless.management.consumer] Not consuming file /usr/src/paperless/src/../consume/SCN_0001.pdf: OS reports file as busy still
+```
+
+This indicates paperless was unable to open the file, as the OS reported
+the file as still being in use. To prevent a crash, paperless did not
+try to consume the file. If paperless is using inotify (the default) to
+check for documents, try adjusting the
+[inotify configuration](configuration.md#inotify). If polling is enabled, try adjusting the
+[polling configuration](configuration.md#polling).
+
+!!! note
+
+    The user will need to manually move the file out of the consume folder
+    and back in, for the initial failing file to be consumed.
+
 ## Log reports "Creating PaperlessTask failed".
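The manual recovery step mentioned in the notes above can be as simple as moving the file out of the consume folder and back in again (paths are illustrative):

```bash
mv /path/to/consume/SCN_0001.pdf /tmp/
sleep 1   # give the watcher a moment to register the removal
mv /tmp/SCN_0001.pdf /path/to/consume/
```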
docs/usage.md

@@ -565,7 +565,7 @@ This allows for complex logic to be used to generate the title, including [logic
 and [filters](https://jinja.palletsprojects.com/en/3.1.x/templates/#id11).
 The template is provided as a string.

-Using Jinja2 Templates is also useful for [Date localization](advanced_usage.md#date-localization) in the title.
+Using Jinja2 Templates is also useful for [Date localization](advanced_usage.md#Date-Localization) in the title.

 The available inputs differ depending on the type of workflow trigger.
 This is because at the time of consumption (when the text is to be set), no automatic tags etc. have been

@@ -597,7 +597,6 @@ The following placeholders are only available for "added" or "updated" triggers
 - `{{created_day}}`: created day
 - `{{created_time}}`: created time in HH:MM format
 - `{{doc_url}}`: URL to the document in the web UI. Requires the `PAPERLESS_URL` setting to be set.
-- `{{doc_id}}`: Document ID

 ##### Examples
mkdocs.yml

@@ -69,9 +69,8 @@ nav:
   - development.md
   - 'FAQs': faq.md
   - troubleshooting.md
-  - 'Migration to v3': migration.md
   - changelog.md
-copyright: Copyright © 2016 - 2026 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
+copyright: Copyright © 2016 - 2023 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
 extra:
   social:
     - icon: fontawesome/brands/github
paperless.conf.example

@@ -55,10 +55,10 @@
 #PAPERLESS_TASK_WORKERS=1
 #PAPERLESS_THREADS_PER_WORKER=1
 #PAPERLESS_TIME_ZONE=UTC
-#PAPERLESS_CONSUMER_POLLING_INTERVAL=10
+#PAPERLESS_CONSUMER_POLLING=10
 #PAPERLESS_CONSUMER_DELETE_DUPLICATES=false
 #PAPERLESS_CONSUMER_RECURSIVE=false
-#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[] # Defaults are built in; add filename regexes, e.g. ["^\\.DS_Store$", "^desktop\\.ini$"]
+#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini"]
 #PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
 #PAPERLESS_CONSUMER_ENABLE_BARCODES=false
 #PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "paperless-ngx"
-version = "2.20.5"
+version = "2.20.4"
 description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
 readme = "README.md"
 requires-python = ">=3.10"

@@ -19,15 +19,15 @@ dependencies = [
   "azure-ai-documentintelligence>=1.0.2",
   "babel>=2.17",
   "bleach~=6.3.0",
-  "celery[redis]~=5.5.1",
+  "celery[redis]~=5.6.2",
   "channels~=4.2",
   "channels-redis~=4.2",
   "concurrent-log-handler~=0.9.25",
   "dateparser~=1.2",
   # WARNING: django does not use semver.
   # Only patch versions are guaranteed to not introduce breaking changes.
-  "django~=5.2.5",
-  "django-allauth[mfa,socialaccount]~=65.13.1",
+  "django==5.2.10",
+  "django-allauth[mfa,socialaccount]~=65.13.0",
   "django-auditlog~=3.4.1",
   "django-cachalot~=2.8.0",
   "django-celery-results~=2.6.0",

@@ -50,6 +50,7 @@ dependencies = [
   "gotenberg-client~=0.13.1",
   "httpx-oauth~=0.16",
   "imap-tools~=1.11.0",
+  "inotifyrecursive~=0.3",
   "jinja2~=3.1.5",
   "langdetect~=1.0.9",
   "llama-index-core>=0.14.12",

@@ -78,8 +79,8 @@ dependencies = [
   "tika-client~=0.10.0",
   "torch~=2.9.1",
   "tqdm~=4.67.1",
-  "watchfiles>=1.1.1",
-  "whitenoise~=6.9",
+  "watchdog~=6.0",
+  "whitenoise~=6.11",
   "whoosh-reloaded>=2.7.5",
   "zxing-cpp~=2.3.0",
 ]

@@ -88,13 +89,13 @@ optional-dependencies.mariadb = [
   "mysqlclient~=2.2.7",
 ]
 optional-dependencies.postgres = [
-  "psycopg[c,pool]==3.2.12",
+  "psycopg[c,pool]==3.3",
   # Direct dependency for proper resolution of the pre-built wheels
-  "psycopg-c==3.2.12",
+  "psycopg-c==3.3",
   "psycopg-pool==3.3",
 ]
 optional-dependencies.webserver = [
-  "granian[uvloop]~=2.5.1",
+  "granian[uvloop]~=2.6.0",
 ]

 [dependency-groups]

@@ -151,7 +152,7 @@ typing = [
 ]

 [tool.uv]
-required-version = ">=0.5.14"
+required-version = ">=0.9.0"
 package = false
 environments = [
   "sys_platform == 'darwin'",

@@ -161,8 +162,8 @@ environments = [
 [tool.uv.sources]
 # Markers are chosen to select these almost exclusively when building the Docker image
 psycopg-c = [
-  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
-  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
+  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
+  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
 ]
 zxing-cpp = [
   { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },

@@ -257,7 +258,7 @@ lint.isort.force-single-line = true
 [tool.codespell]
 write-changes = true
-ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
+ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober"
 skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"

 [tool.pytest.ini_options]
(File diff suppressed because it is too large.)
src-ui/package.json

@@ -1,6 +1,6 @@
 {
   "name": "paperless-ngx-ui",
-  "version": "2.20.5",
+  "version": "2.20.4",
   "scripts": {
     "preinstall": "npx only-allow pnpm",
     "ng": "ng",
workflow-edit-dialog.component.spec.ts

@@ -412,9 +412,6 @@ describe('WorkflowEditDialogComponent', () => {
     return newFilter
   }

-  const correspondentAny = addFilterOfType(TriggerFilterType.CorrespondentAny)
-  correspondentAny.get('values').setValue([11])
-
   const correspondentIs = addFilterOfType(TriggerFilterType.CorrespondentIs)
   correspondentIs.get('values').setValue(1)

@@ -424,18 +421,12 @@ describe('WorkflowEditDialogComponent', () => {
   const documentTypeIs = addFilterOfType(TriggerFilterType.DocumentTypeIs)
   documentTypeIs.get('values').setValue(1)

-  const documentTypeAny = addFilterOfType(TriggerFilterType.DocumentTypeAny)
-  documentTypeAny.get('values').setValue([12])
-
   const documentTypeNot = addFilterOfType(TriggerFilterType.DocumentTypeNot)
   documentTypeNot.get('values').setValue([1])

   const storagePathIs = addFilterOfType(TriggerFilterType.StoragePathIs)
   storagePathIs.get('values').setValue(1)

-  const storagePathAny = addFilterOfType(TriggerFilterType.StoragePathAny)
-  storagePathAny.get('values').setValue([13])
-
   const storagePathNot = addFilterOfType(TriggerFilterType.StoragePathNot)
   storagePathNot.get('values').setValue([1])

@@ -450,13 +441,10 @@ describe('WorkflowEditDialogComponent', () => {
   expect(formValues.triggers[0].filter_has_tags).toEqual([1])
   expect(formValues.triggers[0].filter_has_all_tags).toEqual([2, 3])
   expect(formValues.triggers[0].filter_has_not_tags).toEqual([4])
-  expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([11])
   expect(formValues.triggers[0].filter_has_correspondent).toEqual(1)
   expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([1])
-  expect(formValues.triggers[0].filter_has_any_document_types).toEqual([12])
   expect(formValues.triggers[0].filter_has_document_type).toEqual(1)
   expect(formValues.triggers[0].filter_has_not_document_types).toEqual([1])
-  expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([13])
   expect(formValues.triggers[0].filter_has_storage_path).toEqual(1)
   expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([1])
   expect(formValues.triggers[0].filter_custom_field_query).toEqual(

@@ -519,22 +507,16 @@ describe('WorkflowEditDialogComponent', () => {
   setFilter(TriggerFilterType.TagsAll, 11)
   setFilter(TriggerFilterType.TagsNone, 12)
-  setFilter(TriggerFilterType.CorrespondentAny, 16)
   setFilter(TriggerFilterType.CorrespondentNot, 13)
-  setFilter(TriggerFilterType.DocumentTypeAny, 17)
   setFilter(TriggerFilterType.DocumentTypeNot, 14)
-  setFilter(TriggerFilterType.StoragePathAny, 18)
   setFilter(TriggerFilterType.StoragePathNot, 15)

   const formValues = component['getFormValues']()

   expect(formValues.triggers[0].filter_has_all_tags).toEqual([11])
   expect(formValues.triggers[0].filter_has_not_tags).toEqual([12])
-  expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([16])
   expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([13])
-  expect(formValues.triggers[0].filter_has_any_document_types).toEqual([17])
   expect(formValues.triggers[0].filter_has_not_document_types).toEqual([14])
-  expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([18])
   expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([15])
 })

@@ -658,11 +640,8 @@ describe('WorkflowEditDialogComponent', () => {
   filter_has_tags: [],
   filter_has_all_tags: [],
   filter_has_not_tags: [],
-  filter_has_any_correspondents: [],
   filter_has_not_correspondents: [],
-  filter_has_any_document_types: [],
   filter_has_not_document_types: [],
-  filter_has_any_storage_paths: [],
   filter_has_not_storage_paths: [],
   filter_has_correspondent: null,
   filter_has_document_type: null,

@@ -720,14 +699,11 @@ describe('WorkflowEditDialogComponent', () => {
   trigger.filter_has_tags = [1]
   trigger.filter_has_all_tags = [2, 3]
   trigger.filter_has_not_tags = [4]
-  trigger.filter_has_any_correspondents = [10] as any
   trigger.filter_has_correspondent = 5 as any
   trigger.filter_has_not_correspondents = [6] as any
   trigger.filter_has_document_type = 7 as any
-  trigger.filter_has_any_document_types = [11] as any
   trigger.filter_has_not_document_types = [8] as any
   trigger.filter_has_storage_path = 9 as any
-  trigger.filter_has_any_storage_paths = [12] as any
   trigger.filter_has_not_storage_paths = [10] as any
   trigger.filter_custom_field_query = JSON.stringify([
     'AND',

@@ -738,8 +714,8 @@ describe('WorkflowEditDialogComponent', () => {
   component.ngOnInit()
   const triggerGroup = component.triggerFields.at(0) as FormGroup
   const filters = component.getFiltersFormArray(triggerGroup)
-  expect(filters.length).toBe(13)
-  const customFieldFilter = filters.at(12) as FormGroup
+  expect(filters.length).toBe(10)
+  const customFieldFilter = filters.at(9) as FormGroup
   expect(customFieldFilter.get('type').value).toBe(
     TriggerFilterType.CustomFieldQuery
   )

@@ -748,27 +724,12 @@ describe('WorkflowEditDialogComponent', () => {
 it('should expose select metadata helpers', () => {
-  expect(component.isSelectMultiple(TriggerFilterType.CorrespondentAny)).toBe(
-    true
-  )
   expect(component.isSelectMultiple(TriggerFilterType.CorrespondentNot)).toBe(
     true
   )
   expect(component.isSelectMultiple(TriggerFilterType.CorrespondentIs)).toBe(
     false
   )
-  expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeAny)).toBe(
-    true
-  )
   expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeIs)).toBe(
     false
   )
-  expect(component.isSelectMultiple(TriggerFilterType.StoragePathAny)).toBe(
-    true
-  )
   expect(component.isSelectMultiple(TriggerFilterType.StoragePathIs)).toBe(
     false
   )

   component.correspondents = [{ id: 1, name: 'C1' } as any]
   component.documentTypes = [{ id: 2, name: 'DT' } as any]

@@ -780,15 +741,9 @@ describe('WorkflowEditDialogComponent', () => {
   expect(
     component.getFilterSelectItems(TriggerFilterType.DocumentTypeIs)
   ).toEqual(component.documentTypes)
-  expect(
-    component.getFilterSelectItems(TriggerFilterType.DocumentTypeAny)
-  ).toEqual(component.documentTypes)
   expect(
     component.getFilterSelectItems(TriggerFilterType.StoragePathIs)
   ).toEqual(component.storagePaths)
-  expect(
-    component.getFilterSelectItems(TriggerFilterType.StoragePathAny)
-  ).toEqual(component.storagePaths)
   expect(component.getFilterSelectItems(TriggerFilterType.TagsAll)).toEqual(
     []
   )
workflow-edit-dialog.component.ts

@@ -145,13 +145,10 @@ export enum TriggerFilterType {
   TagsAny = 'tags_any',
   TagsAll = 'tags_all',
   TagsNone = 'tags_none',
-  CorrespondentAny = 'correspondent_any',
   CorrespondentIs = 'correspondent_is',
   CorrespondentNot = 'correspondent_not',
-  DocumentTypeAny = 'document_type_any',
   DocumentTypeIs = 'document_type_is',
   DocumentTypeNot = 'document_type_not',
-  StoragePathAny = 'storage_path_any',
   StoragePathIs = 'storage_path_is',
   StoragePathNot = 'storage_path_not',
   CustomFieldQuery = 'custom_field_query',

@@ -175,11 +172,8 @@ type TriggerFilterAggregate = {
   filter_has_tags: number[]
   filter_has_all_tags: number[]
   filter_has_not_tags: number[]
-  filter_has_any_correspondents: number[]
   filter_has_not_correspondents: number[]
-  filter_has_any_document_types: number[]
   filter_has_not_document_types: number[]
-  filter_has_any_storage_paths: number[]
   filter_has_not_storage_paths: number[]
   filter_has_correspondent: number | null
   filter_has_document_type: number | null

@@ -225,14 +219,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
     allowMultipleEntries: false,
     allowMultipleValues: true,
   },
-  {
-    id: TriggerFilterType.CorrespondentAny,
-    name: $localize`Has any of these correspondents`,
-    inputType: 'select',
-    allowMultipleEntries: false,
-    allowMultipleValues: true,
-    selectItems: 'correspondents',
-  },
   {
     id: TriggerFilterType.CorrespondentIs,
     name: $localize`Has correspondent`,

@@ -257,14 +243,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
     allowMultipleValues: false,
     selectItems: 'documentTypes',
   },
-  {
-    id: TriggerFilterType.DocumentTypeAny,
-    name: $localize`Has any of these document types`,
-    inputType: 'select',
-    allowMultipleEntries: false,
-    allowMultipleValues: true,
-    selectItems: 'documentTypes',
-  },
   {
     id: TriggerFilterType.DocumentTypeNot,
     name: $localize`Does not have document types`,

@@ -281,14 +259,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
     allowMultipleValues: false,
     selectItems: 'storagePaths',
   },
-  {
-    id: TriggerFilterType.StoragePathAny,
-    name: $localize`Has any of these storage paths`,
-    inputType: 'select',
-    allowMultipleEntries: false,
-    allowMultipleValues: true,
-    selectItems: 'storagePaths',
-  },
   {
     id: TriggerFilterType.StoragePathNot,
     name: $localize`Does not have storage paths`,

@@ -336,15 +306,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
     extract: (trigger) => trigger.filter_has_not_tags,
     hasValue: (value) => Array.isArray(value) && value.length > 0,
   },
-  [TriggerFilterType.CorrespondentAny]: {
-    apply: (aggregate, values) => {
-      aggregate.filter_has_any_correspondents = Array.isArray(values)
-        ? [...values]
-        : [values]
-    },
-    extract: (trigger) => trigger.filter_has_any_correspondents,
-    hasValue: (value) => Array.isArray(value) && value.length > 0,
-  },
   [TriggerFilterType.CorrespondentIs]: {
     apply: (aggregate, values) => {
       aggregate.filter_has_correspondent = Array.isArray(values)

@@ -372,15 +333,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
     extract: (trigger) => trigger.filter_has_document_type,
     hasValue: (value) => value !== null && value !== undefined,
   },
-  [TriggerFilterType.DocumentTypeAny]: {
-    apply: (aggregate, values) => {
-      aggregate.filter_has_any_document_types = Array.isArray(values)
-        ? [...values]
-        : [values]
-    },
-    extract: (trigger) => trigger.filter_has_any_document_types,
-    hasValue: (value) => Array.isArray(value) && value.length > 0,
-  },
   [TriggerFilterType.DocumentTypeNot]: {
     apply: (aggregate, values) => {
       aggregate.filter_has_not_document_types = Array.isArray(values)

@@ -399,15 +351,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
     extract: (trigger) => trigger.filter_has_storage_path,
     hasValue: (value) => value !== null && value !== undefined,
   },
-  [TriggerFilterType.StoragePathAny]: {
-    apply: (aggregate, values) => {
-      aggregate.filter_has_any_storage_paths = Array.isArray(values)
-        ? [...values]
-        : [values]
-    },
-    extract: (trigger) => trigger.filter_has_any_storage_paths,
-    hasValue: (value) => Array.isArray(value) && value.length > 0,
-  },
   [TriggerFilterType.StoragePathNot]: {
     apply: (aggregate, values) => {
       aggregate.filter_has_not_storage_paths = Array.isArray(values)

@@ -699,11 +642,8 @@ export class WorkflowEditDialogComponent
   filter_has_tags: [],
   filter_has_all_tags: [],
   filter_has_not_tags: [],
-  filter_has_any_correspondents: [],
   filter_has_not_correspondents: [],
-  filter_has_any_document_types: [],
   filter_has_not_document_types: [],
-  filter_has_any_storage_paths: [],
   filter_has_not_storage_paths: [],
   filter_has_correspondent: null,
   filter_has_document_type: null,

@@ -730,16 +670,10 @@ export class WorkflowEditDialogComponent
   trigger.filter_has_tags = aggregate.filter_has_tags
   trigger.filter_has_all_tags = aggregate.filter_has_all_tags
   trigger.filter_has_not_tags = aggregate.filter_has_not_tags
-  trigger.filter_has_any_correspondents =
-    aggregate.filter_has_any_correspondents
   trigger.filter_has_not_correspondents =
     aggregate.filter_has_not_correspondents
-  trigger.filter_has_any_document_types =
-    aggregate.filter_has_any_document_types
   trigger.filter_has_not_document_types =
     aggregate.filter_has_not_document_types
-  trigger.filter_has_any_storage_paths =
-    aggregate.filter_has_any_storage_paths
   trigger.filter_has_not_storage_paths =
     aggregate.filter_has_not_storage_paths
   trigger.filter_has_correspondent =

@@ -922,11 +856,8 @@ export class WorkflowEditDialogComponent
   case TriggerFilterType.TagsAny:
   case TriggerFilterType.TagsAll:
   case TriggerFilterType.TagsNone:
-  case TriggerFilterType.CorrespondentAny:
   case TriggerFilterType.CorrespondentNot:
-  case TriggerFilterType.DocumentTypeAny:
   case TriggerFilterType.DocumentTypeNot:
-  case TriggerFilterType.StoragePathAny:
   case TriggerFilterType.StoragePathNot:
     return true
   default:

@@ -1248,11 +1179,8 @@ export class WorkflowEditDialogComponent
   filter_has_tags: [],
   filter_has_all_tags: [],
   filter_has_not_tags: [],
-  filter_has_any_correspondents: [],
   filter_has_not_correspondents: [],
-  filter_has_any_document_types: [],
   filter_has_not_document_types: [],
-  filter_has_any_storage_paths: [],
   filter_has_not_storage_paths: [],
   filter_custom_field_query: null,
   filter_has_correspondent: null,
tags.component.html

@@ -28,7 +28,7 @@
     </button>
   </ng-template>
   <ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
-    <div class="tag-option-row d-flex align-items-center" [class.w-auto]="!getTag(item.id)?.parent">
+    <div class="tag-option-row d-flex align-items-center">
       @if (item.id && tags) {
         @if (getTag(item.id)?.parent) {
           <i-bs name="list-nested" class="me-1"></i-bs>
tags.component.scss

@@ -22,8 +22,8 @@
 }

 // Dropdown hierarchy reveal for ng-select options
-:host ::ng-deep .ng-dropdown-panel .ng-option {
-  overflow-x: auto !important;
+::ng-deep .ng-dropdown-panel .ng-option {
+  overflow-x: scroll;

   .tag-option-row {
     font-size: 1rem;

@@ -41,12 +41,12 @@
   }
 }

-:host ::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
-:host ::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
+::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
+::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
   max-width: 1000px;
 }

 ::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-indicator,
-:host ::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
+::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
   background: transparent;
 }
page-header.component.html

@@ -1,18 +1,9 @@
 <div class="row pt-3 pb-3 pb-md-2 align-items-center">
   <div class="col-md text-truncate">
-    <h3 class="d-flex align-items-center mb-1" style="line-height: 1.4">
-      <span class="text-truncate">{{title}}</span>
-      @if (id) {
-        <span class="badge bg-primary text-primary-text-contrast ms-3 small fs-normal cursor-pointer" (click)="copyID()">
-          @if (copied) {
-            <i-bs width="1em" height="1em" name="clipboard-check"></i-bs> <ng-container i18n>Copied!</ng-container>
-          } @else {
-            ID: {{id}}
-          }
-        </span>
-      }
+    <h3 class="text-truncate" style="line-height: 1.4">
+      {{title}}
       @if (subTitle) {
-        <span class="h6 mb-0 mt-1 d-block d-md-inline fw-normal ms-md-3 text-truncate" style="line-height: 1.4">{{subTitle}}</span>
+        <span class="h6 mb-0 d-block d-md-inline fw-normal ms-md-3 text-truncate" style="line-height: 1.4">{{subTitle}}</span>
       }
       @if (info) {
         <button class="btn btn-sm btn-link text-muted me-auto p-0 p-md-2" title="What's this?" i18n-title type="button" [ngbPopover]="infoPopover" [autoClose]="true">
@@ -1,10 +1,5 @@
|
||||
h3 {
|
||||
min-height: calc(1.325rem + 0.9vw);
|
||||
|
||||
.badge {
|
||||
font-size: 0.65rem;
|
||||
line-height: 1;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 1200px) {
|
||||
|
||||
@@ -1,4 +1,3 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { Title } from '@angular/platform-browser'
import { environment } from 'src/environments/environment'
@@ -8,7 +7,6 @@ describe('PageHeaderComponent', () => {
let component: PageHeaderComponent
let fixture: ComponentFixture<PageHeaderComponent>
let titleService: Title
let clipboard: Clipboard

beforeEach(async () => {
TestBed.configureTestingModule({
@@ -17,7 +15,6 @@ describe('PageHeaderComponent', () => {
}).compileComponents()

titleService = TestBed.inject(Title)
clipboard = TestBed.inject(Clipboard)
fixture = TestBed.createComponent(PageHeaderComponent)
component = fixture.componentInstance
fixture.detectChanges()
@@ -27,8 +24,7 @@ describe('PageHeaderComponent', () => {
component.title = 'Foo'
component.subTitle = 'Bar'
fixture.detectChanges()
expect(fixture.nativeElement.textContent).toContain('Foo')
expect(fixture.nativeElement.textContent).toContain('Bar')
expect(fixture.nativeElement.textContent).toContain('Foo Bar')
})

it('should set html title', () => {
@@ -36,16 +32,4 @@ describe('PageHeaderComponent', () => {
component.title = 'Foo Bar'
expect(titleSpy).toHaveBeenCalledWith(`Foo Bar - ${environment.appTitle}`)
})

it('should copy id to clipboard, reset after 3 seconds', () => {
jest.useFakeTimers()
component.id = 42 as any
jest.spyOn(clipboard, 'copy').mockReturnValue(true)
component.copyID()
expect(clipboard.copy).toHaveBeenCalledWith('42')
expect(component.copied).toBe(true)

jest.advanceTimersByTime(3000)
expect(component.copied).toBe(false)
})
})

@@ -1,4 +1,3 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { Component, Input, inject } from '@angular/core'
import { Title } from '@angular/platform-browser'
import { NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
@@ -14,11 +13,8 @@ import { environment } from 'src/environments/environment'
})
export class PageHeaderComponent {
private titleService = inject(Title)
private clipboard = inject(Clipboard)

private _title = ''
public copied: boolean = false
private copyTimeout: any
_title = ''

@Input()
set title(title: string) {
@@ -30,9 +26,6 @@ export class PageHeaderComponent {
return this._title
}

@Input()
id: number

@Input()
subTitle: string = ''

@@ -41,12 +34,4 @@ export class PageHeaderComponent {

@Input()
infoLink: string

public copyID() {
this.copied = this.clipboard.copy(this.id.toString())
clearTimeout(this.copyTimeout)
this.copyTimeout = setTimeout(() => {
this.copied = false
}, 3000)
}
}

@@ -1,4 +1,4 @@
<pngx-page-header [(title)]="title" [id]="documentId">
<pngx-page-header [(title)]="title">
@if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
@if (previewNumPages) {
<div class="input-group input-group-sm d-none d-md-flex">

@@ -285,10 +285,10 @@ export class DocumentDetailComponent
if (
element &&
element.nativeElement.offsetParent !== null &&
this.nav?.activeId == DocumentDetailNavIDs.Preview
this.nav?.activeId == 4
) {
// it's visible
setTimeout(() => this.nav?.select(DocumentDetailNavIDs.Details))
setTimeout(() => this.nav?.select(1))
}
}

|
||||
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
|
||||
import { PermissionType } from 'src/app/services/permissions.service'
|
||||
import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
|
||||
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
|
||||
import { CorrespondentEditDialogComponent } from '../../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
|
||||
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
|
||||
import { ManagementListComponent } from '../management-list/management-list.component'
|
||||
@@ -37,7 +36,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
NgxBootstrapIconsModule,
|
||||
ClearableBadgeComponent,
|
||||
],
|
||||
})
|
||||
export class CorrespondentListComponent extends ManagementListComponent<Correspondent> {
|
||||
|
||||
@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
|
||||
import { SortableDirective } from 'src/app/directives/sortable.directive'
|
||||
import { PermissionType } from 'src/app/services/permissions.service'
|
||||
import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
|
||||
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
|
||||
import { DocumentTypeEditDialogComponent } from '../../common/edit-dialog/document-type-edit-dialog/document-type-edit-dialog.component'
|
||||
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
|
||||
import { ManagementListComponent } from '../management-list/management-list.component'
|
||||
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
NgxBootstrapIconsModule,
|
||||
ClearableBadgeComponent,
|
||||
],
|
||||
})
|
||||
export class DocumentTypeListComponent extends ManagementListComponent<DocumentType> {
|
||||
|
||||
@@ -1,48 +1,17 @@
|
||||
<pngx-page-header title="{{ typeNamePlural | titlecase }}" info="View, add, edit and delete {{ typeNamePlural }}." infoLink="usage/#terms-and-definitions">
|
||||
|
||||
<div ngbDropdown class="btn-group flex-fill d-sm-none">
|
||||
<button class="btn btn-sm btn-outline-primary" id="dropdownSelectMobile" ngbDropdownToggle>
|
||||
<i-bs name="text-indent-left"></i-bs>
|
||||
<div class="d-none d-sm-inline"> <ng-container i18n>Select</ng-container></div>
|
||||
@if (selectedObjects.size > 0) {
|
||||
<pngx-clearable-badge [selected]="selectedObjects.size > 0" [number]="selectedObjects.size" (cleared)="selectNone()"></pngx-clearable-badge><span class="visually-hidden">selected</span>
|
||||
}
|
||||
<button class="btn btn-sm btn-outline-secondary" (click)="clearSelection()" [hidden]="selectedObjects.size === 0">
|
||||
<i-bs name="x"></i-bs> <ng-container i18n>Clear selection</ng-container>
|
||||
</button>
|
||||
<button type="button" class="btn btn-sm btn-outline-primary" (click)="setPermissions()" [disabled]="!userCanBulkEdit(PermissionAction.Change) || selectedObjects.size === 0">
|
||||
<i-bs name="person-fill-lock"></i-bs> <ng-container i18n>Permissions</ng-container>
|
||||
</button>
|
||||
<button type="button" class="btn btn-sm btn-outline-danger" (click)="delete()" [disabled]="!userCanBulkEdit(PermissionAction.Delete) || selectedObjects.size === 0">
|
||||
<i-bs name="trash"></i-bs> <ng-container i18n>Delete</ng-container>
|
||||
</button>
|
||||
<button type="button" class="btn btn-sm btn-outline-primary ms-md-5" (click)="openCreateDialog()" *pngxIfPermissions="{ action: PermissionAction.Add, type: permissionType }">
|
||||
<i-bs name="plus-circle"></i-bs> <ng-container i18n>Create</ng-container>
|
||||
</button>
|
||||
<div ngbDropdownMenu aria-labelledby="dropdownSelectMobile" class="shadow">
|
||||
<button ngbDropdownItem (click)="selectNone()" i18n>Select none</button>
|
||||
<button ngbDropdownItem (click)="selectPage(true)" i18n>Select page</button>
|
||||
<button ngbDropdownItem (click)="selectAll()" i18n>Select all</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="d-none d-sm-flex flex-fill me-3">
|
||||
<div class="input-group input-group-sm">
|
||||
<span class="input-group-text border-0" i18n>Select:</span>
|
||||
</div>
|
||||
<div class="btn-group btn-group-sm flex-nowrap">
|
||||
@if (selectedObjects.size > 0) {
|
||||
<button class="btn btn-sm btn-outline-secondary" (click)="selectNone()">
|
||||
<i-bs name="slash-circle"></i-bs> <ng-container i18n>None</ng-container>
|
||||
</button>
|
||||
}
|
||||
<button class="btn btn-sm btn-outline-primary" (click)="selectPage(true)">
|
||||
<i-bs name="file-earmark-check"></i-bs> <ng-container i18n>Page</ng-container>
|
||||
</button>
|
||||
<button class="btn btn-sm btn-outline-primary" (click)="selectAll()">
|
||||
<i-bs name="check-all"></i-bs> <ng-container i18n>All</ng-container>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<button type="button" class="btn btn-sm btn-outline-primary" (click)="setPermissions()" [disabled]="!userCanBulkEdit(PermissionAction.Change) || selectedObjects.size === 0">
|
||||
<i-bs name="person-fill-lock"></i-bs> <ng-container i18n>Permissions</ng-container>
|
||||
</button>
|
||||
<button type="button" class="btn btn-sm btn-outline-danger" (click)="delete()" [disabled]="!userCanBulkEdit(PermissionAction.Delete) || selectedObjects.size === 0">
|
||||
<i-bs name="trash"></i-bs> <ng-container i18n>Delete</ng-container>
|
||||
</button>
|
||||
<button type="button" class="btn btn-sm btn-outline-primary ms-md-5" (click)="openCreateDialog()" *pngxIfPermissions="{ action: PermissionAction.Add, type: permissionType }">
|
||||
<i-bs name="plus-circle"></i-bs> <ng-container i18n>Create</ng-container>
|
||||
</button>
|
||||
</pngx-page-header>
|
||||
|
||||
<div class="row mb-3">
|
||||
@@ -62,7 +31,7 @@
|
||||
<tr>
|
||||
<th scope="col">
|
||||
<div class="form-check m-0 ms-2 me-n2">
|
||||
<input type="checkbox" class="form-check-input" id="all-objects" [(ngModel)]="togggleAll" [disabled]="data.length === 0" (change)="selectPage($event.target.checked); $event.stopPropagation();">
|
||||
<input type="checkbox" class="form-check-input" id="all-objects" [(ngModel)]="togggleAll" [disabled]="data.length === 0" (click)="toggleAll($event); $event.stopPropagation();">
|
||||
<label class="form-check-label" for="all-objects"></label>
|
||||
</div>
|
||||
</th>
|
||||
|
||||
@@ -163,7 +163,8 @@ describe('ManagementListComponent', () => {
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
const reloadSpy = jest.spyOn(component, 'reloadData')
|
||||
|
||||
component.openCreateDialog()
|
||||
const createButton = fixture.debugElement.queryAll(By.css('button'))[4]
|
||||
createButton.triggerEventHandler('click')
|
||||
|
||||
expect(modal).not.toBeUndefined()
|
||||
const editDialog = modal.componentInstance as EditDialogComponent<Tag>
|
||||
@@ -186,7 +187,8 @@ describe('ManagementListComponent', () => {
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
const reloadSpy = jest.spyOn(component, 'reloadData')
|
||||
|
||||
component.openEditDialog(tags[0])
|
||||
const editButton = fixture.debugElement.queryAll(By.css('button'))[7]
|
||||
editButton.triggerEventHandler('click')
|
||||
|
||||
expect(modal).not.toBeUndefined()
|
||||
const editDialog = modal.componentInstance as EditDialogComponent<Tag>
|
||||
@@ -210,7 +212,8 @@ describe('ManagementListComponent', () => {
|
||||
const deleteSpy = jest.spyOn(tagService, 'delete')
|
||||
const reloadSpy = jest.spyOn(component, 'reloadData')
|
||||
|
||||
component.openDeleteDialog(tags[0])
|
||||
const deleteButton = fixture.debugElement.queryAll(By.css('button'))[8]
|
||||
deleteButton.triggerEventHandler('click')
|
||||
|
||||
expect(modal).not.toBeUndefined()
|
||||
const editDialog = modal.componentInstance as ConfirmDialogComponent
|
||||
@@ -227,21 +230,6 @@ describe('ManagementListComponent', () => {
|
||||
expect(reloadSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should use the all list length for collection size when provided', fakeAsync(() => {
|
||||
jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
|
||||
of({
|
||||
count: 1,
|
||||
all: [1, 2, 3],
|
||||
results: tags.slice(0, 1),
|
||||
})
|
||||
)
|
||||
|
||||
component.reloadData()
|
||||
tick(100)
|
||||
|
||||
expect(component.collectionSize).toBe(3)
|
||||
}))
|
||||
|
||||
it('should support quick filter for objects', () => {
|
||||
const expectedUrl = documentListViewService.getQuickFilterUrl([
|
||||
{ rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
|
||||
@@ -276,84 +264,19 @@ describe('ManagementListComponent', () => {
|
||||
expect(component.page).toEqual(1)
|
||||
})
|
||||
|
||||
it('should support toggle select page in vew', () => {
|
||||
it('should support toggle all items in view', () => {
|
||||
expect(component.selectedObjects.size).toEqual(0)
|
||||
const selectPageSpy = jest.spyOn(component, 'selectPage')
|
||||
const toggleAllSpy = jest.spyOn(component, 'toggleAll')
|
||||
const checkButton = fixture.debugElement.queryAll(
|
||||
By.css('input.form-check-input')
|
||||
)[0]
|
||||
checkButton.nativeElement.dispatchEvent(new Event('change'))
|
||||
checkButton.nativeElement.dispatchEvent(new Event('click'))
|
||||
checkButton.nativeElement.checked = true
|
||||
checkButton.nativeElement.dispatchEvent(new Event('change'))
|
||||
expect(selectPageSpy).toHaveBeenCalled()
|
||||
checkButton.nativeElement.dispatchEvent(new Event('click'))
|
||||
expect(toggleAllSpy).toHaveBeenCalled()
|
||||
expect(component.selectedObjects.size).toEqual(tags.length)
|
||||
})
|
||||
|
||||
it('selectNone should clear selection and reset toggle flag', () => {
|
||||
component.selectedObjects = new Set([tags[0].id, tags[1].id])
|
||||
component.togggleAll = true
|
||||
|
||||
component.selectNone()
|
||||
|
||||
expect(component.selectedObjects.size).toBe(0)
|
||||
expect(component.togggleAll).toBe(false)
|
||||
})
|
||||
|
||||
it('selectPage should select current page items or clear selection', () => {
|
||||
component.selectPage(true)
|
||||
expect(component.selectedObjects).toEqual(new Set(tags.map((t) => t.id)))
|
||||
expect(component.togggleAll).toBe(true)
|
||||
|
||||
component.togggleAll = true
|
||||
component.selectPage(false)
|
||||
expect(component.selectedObjects.size).toBe(0)
|
||||
expect(component.togggleAll).toBe(false)
|
||||
})
|
||||
|
||||
it('selectAll should use all IDs when collection size exists', () => {
|
||||
;(component as any).allIDs = [1, 2, 3, 4]
|
||||
component.collectionSize = 4
|
||||
|
||||
component.selectAll()
|
||||
|
||||
expect(component.selectedObjects).toEqual(new Set([1, 2, 3, 4]))
|
||||
expect(component.togggleAll).toBe(true)
|
||||
})
|
||||
|
||||
it('selectAll should clear selection when collection size is zero', () => {
|
||||
component.selectedObjects = new Set([1])
|
||||
component.collectionSize = 0
|
||||
component.togggleAll = true
|
||||
|
||||
component.selectAll()
|
||||
|
||||
expect(component.selectedObjects.size).toBe(0)
|
||||
expect(component.togggleAll).toBe(false)
|
||||
})
|
||||
|
||||
it('toggleSelected should toggle object selection and update toggle state', () => {
|
||||
component.toggleSelected(tags[0])
|
||||
expect(component.selectedObjects.has(tags[0].id)).toBe(true)
|
||||
expect(component.togggleAll).toBe(false)
|
||||
|
||||
component.toggleSelected(tags[1])
|
||||
component.toggleSelected(tags[2])
|
||||
expect(component.togggleAll).toBe(true)
|
||||
|
||||
component.toggleSelected(tags[1])
|
||||
expect(component.selectedObjects.has(tags[1].id)).toBe(false)
|
||||
expect(component.togggleAll).toBe(false)
|
||||
})
|
||||
|
||||
it('areAllPageItemsSelected should return false when page has no selectable items', () => {
|
||||
component.data = []
|
||||
component.selectedObjects.clear()
|
||||
|
||||
expect((component as any).areAllPageItemsSelected()).toBe(false)
|
||||
|
||||
component.data = tags
|
||||
})
|
||||
|
||||
it('should support bulk edit permissions', () => {
|
||||
const bulkEditPermsSpy = jest.spyOn(tagService, 'bulk_edit_objects')
|
||||
component.toggleSelected(tags[0])
|
||||
|
||||
@@ -84,7 +84,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>

public data: T[] = []
private unfilteredData: T[] = []
private allIDs: number[] = []

public page = 1

@@ -172,8 +171,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
tap((c) => {
this.unfilteredData = c.results
this.data = this.filterData(c.results)
this.collectionSize = c.all?.length ?? c.count
this.allIDs = c.all
this.collectionSize = c.count
}),
delay(100)
)
@@ -302,6 +300,16 @@ export abstract class ManagementListComponent<T extends MatchingModel>
return ownsAll
}

toggleAll(event: PointerEvent) {
const checked = (event.target as HTMLInputElement).checked
this.togggleAll = checked
if (checked) {
this.selectedObjects = new Set(this.getSelectableIDs(this.data))
} else {
this.clearSelection()
}
}

protected getSelectableIDs(objects: T[]): number[] {
return objects.map((o) => o.id)
}
@@ -311,38 +319,10 @@ export abstract class ManagementListComponent<T extends MatchingModel>
this.selectedObjects.clear()
}

selectNone() {
this.clearSelection()
}

selectPage(select: boolean) {
if (select) {
this.selectedObjects = new Set(this.getSelectableIDs(this.data))
this.togggleAll = this.areAllPageItemsSelected()
} else {
this.clearSelection()
}
}

selectAll() {
if (!this.collectionSize) {
this.clearSelection()
return
}
this.selectedObjects = new Set(this.allIDs)
this.togggleAll = this.areAllPageItemsSelected()
}

toggleSelected(object) {
this.selectedObjects.has(object.id)
? this.selectedObjects.delete(object.id)
: this.selectedObjects.add(object.id)
this.togggleAll = this.areAllPageItemsSelected()
}

protected areAllPageItemsSelected(): boolean {
const ids = this.getSelectableIDs(this.data)
return ids.length > 0 && ids.every((id) => this.selectedObjects.has(id))
}

setPermissions() {

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
import { SortableDirective } from 'src/app/directives/sortable.directive'
import { PermissionType } from 'src/app/services/permissions.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
ClearableBadgeComponent,
],
})
export class StoragePathListComponent extends ManagementListComponent<StoragePath> {

@@ -138,12 +138,16 @@ describe('TagListComponent', () => {
}

component.data = [parent as any]
component.selectPage(true)
const selectEvent = { target: { checked: true } } as unknown as PointerEvent
component.toggleAll(selectEvent)

expect(component.selectedObjects.has(10)).toBe(true)
expect(component.selectedObjects.has(11)).toBe(true)

component.selectPage(false)
const deselectEvent = {
target: { checked: false },
} as unknown as PointerEvent
component.toggleAll(deselectEvent)
expect(component.selectedObjects.size).toBe(0)
})
})

@@ -13,7 +13,6 @@ import { IfPermissionsDirective } from 'src/app/directives/if-permissions.direct
import { SortableDirective } from 'src/app/directives/sortable.directive'
import { PermissionType } from 'src/app/services/permissions.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { ClearableBadgeComponent } from '../../common/clearable-badge/clearable-badge.component'
import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { ManagementListComponent } from '../management-list/management-list.component'
@@ -35,7 +34,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
ClearableBadgeComponent,
],
})
export class TagListComponent extends ManagementListComponent<Tag> {

@@ -44,16 +44,10 @@ export interface WorkflowTrigger extends ObjectWithId {

filter_has_not_tags?: number[] // Tag.id[]

filter_has_any_correspondents?: number[] // Correspondent.id[]

filter_has_not_correspondents?: number[] // Correspondent.id[]

filter_has_any_document_types?: number[] // DocumentType.id[]

filter_has_not_document_types?: number[] // DocumentType.id[]

filter_has_any_storage_paths?: number[] // StoragePath.id[]

filter_has_not_storage_paths?: number[] // StoragePath.id[]

filter_custom_field_query?: string

@@ -6,7 +6,7 @@ export const environment = {
apiVersion: '9', // match src/paperless/settings.py
appTitle: 'Paperless-ngx',
tag: 'prod',
version: '2.20.5',
version: '2.20.4',
webSocketHost: window.location.host,
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
webSocketBaseUrl: base_url.pathname + 'ws/',

@@ -73,7 +73,6 @@ $form-check-radio-checked-bg-image-dark: url("data:image/svg+xml,<svg xmlns='htt
}

@mixin dark-mode {
color-scheme: dark;
--pngx-body-color-accent: #{$text-color-dark-bg-accent};
--pngx-bg-alt: #242529;
--pngx-bg-alt2: #232323;

@@ -1,4 +1,5 @@
# this is here so that django finds the checks.
from documents.checks import changed_password_check
from documents.checks import parser_check

__all__ = ["parser_check"]
__all__ = ["changed_password_check", "parser_check"]

@@ -60,6 +60,7 @@ class DocumentAdmin(GuardedModelAdmin):
"added",
"modified",
"mime_type",
"storage_type",
"filename",
"checksum",
"archive_filename",

@@ -1,12 +1,60 @@
import textwrap

from django.conf import settings
from django.core.checks import Error
from django.core.checks import Warning
from django.core.checks import register
from django.core.exceptions import FieldError
from django.db.utils import OperationalError
from django.db.utils import ProgrammingError

from documents.signals import document_consumer_declaration
from documents.templating.utils import convert_format_str_to_template_format


@register()
def changed_password_check(app_configs, **kwargs):
from documents.models import Document
from paperless.db import GnuPG

try:
encrypted_doc = (
Document.objects.filter(
storage_type=Document.STORAGE_TYPE_GPG,
)
.only("pk", "storage_type")
.first()
)
except (OperationalError, ProgrammingError, FieldError):
return []  # No documents table yet

if encrypted_doc:
if not settings.PASSPHRASE:
return [
Error(
"The database contains encrypted documents but no password is set.",
),
]

if not GnuPG.decrypted(encrypted_doc.source_file):
return [
Error(
textwrap.dedent(
"""
The current password doesn't match the password of the
existing documents.

If you intend to change your password, you must first export
all of the old documents, start fresh with the new password
and then re-import them.
""",
),
),
]

return []


@register()
def parser_check(app_configs, **kwargs):
parsers = []
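For reference, the check added above follows Django's system-check framework: any callable decorated with @register() runs during startup and management commands, and signals problems by returning a list of Error or Warning objects. A minimal standalone sketch of the same pattern (the check name and environment variable below are illustrative, not from the diff):

import os

from django.core.checks import Error, register

@register()
def example_passphrase_check(app_configs, **kwargs):
    # An empty list means the check passed; Error objects make `manage.py check` fail.
    if not os.environ.get("EXAMPLE_PASSPHRASE"):  # hypothetical setting
        return [Error("EXAMPLE_PASSPHRASE is not set.", id="example.E001")]
    return []
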
@@ -128,7 +128,7 @@ def thumbnail_last_modified(request, pk: int) -> datetime | None:
Cache should be (slightly?) faster than filesystem
"""
try:
doc = Document.objects.only("pk").get(pk=pk)
doc = Document.objects.only("storage_type").get(pk=pk)
if not doc.thumbnail_path.exists():
return None
doc_key = get_thumbnail_modified_key(pk)

@@ -497,6 +497,7 @@ class ConsumerPlugin(
create_source_path_directory(document.source_path)

self._write(
document.storage_type,
self.unmodified_original
if self.unmodified_original is not None
else self.working_copy,
@@ -504,6 +505,7 @@ class ConsumerPlugin(
)

self._write(
document.storage_type,
thumbnail,
document.thumbnail_path,
)
@@ -515,6 +517,7 @@ class ConsumerPlugin(
)
create_source_path_directory(document.archive_path)
self._write(
document.storage_type,
archive_path,
document.archive_path,
)
@@ -634,6 +637,8 @@ class ConsumerPlugin(
)
self.log.debug(f"Creation date from st_mtime: {create_date}")

storage_type = Document.STORAGE_TYPE_UNENCRYPTED

if self.metadata.filename:
title = Path(self.metadata.filename).stem
else:
@@ -660,6 +665,7 @@ class ConsumerPlugin(
checksum=hashlib.md5(file_for_checksum.read_bytes()).hexdigest(),
created=create_date,
modified=create_date,
storage_type=storage_type,
page_count=page_count,
original_filename=self.filename,
)
@@ -730,7 +736,7 @@ class ConsumerPlugin(
}
CustomFieldInstance.objects.create(**args)  # adds to document

def _write(self, source, target):
def _write(self, storage_type, source, target):
with (
Path(source).open("rb") as read_file,
Path(target).open("wb") as write_file,

@@ -118,7 +118,7 @@ class DocumentMetadataOverrides:
).values_list("id", flat=True),
)
overrides.custom_fields = {
custom_field.field.id: custom_field.value
custom_field.id: custom_field.value
for custom_field in doc.custom_fields.all()
}

@@ -126,6 +126,7 @@ def generate_filename(
doc: Document,
*,
counter=0,
append_gpg=True,
archive_filename=False,
) -> Path:
base_path: Path | None = None
@@ -169,4 +170,8 @@ def generate_filename(
final_filename = f"{doc.pk:07}{counter_str}{filetype_str}"
full_path = Path(final_filename)

# Add GPG extension if needed
if append_gpg and doc.storage_type == doc.STORAGE_TYPE_GPG:
full_path = full_path.with_suffix(full_path.suffix + ".gpg")

return full_path
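One subtlety in the append_gpg branch above: Path.with_suffix() replaces the last suffix rather than appending one, which is why the code concatenates full_path.suffix + ".gpg". A quick demonstration:

from pathlib import Path

p = Path("0000001.pdf")
# Plain with_suffix() would drop the original extension:
print(p.with_suffix(".gpg"))             # 0000001.gpg
# Concatenating the old suffix keeps it, as the code above does:
print(p.with_suffix(p.suffix + ".gpg"))  # 0000001.pdf.gpg
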
@@ -602,7 +602,7 @@ def rewrite_natural_date_keywords(query_string: str) -> str:

case "this year":
start = datetime(local_now.year, 1, 1, 0, 0, 0, tzinfo=tz)
end = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)
end = datetime.combine(today, time.max, tzinfo=tz)

case "previous week":
days_since_monday = local_now.weekday()
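The "this year" hunk above narrows the upper bound from a fixed December 31 to the end of the current day, so the keyword no longer matches documents dated in the future. The two bounds side by side (a sketch; timezone handling simplified to UTC, whereas the real function uses the local zone):

from datetime import datetime, time, timezone

tz = timezone.utc
local_now = datetime.now(tz)
today = local_now.date()

start = datetime(local_now.year, 1, 1, 0, 0, 0, tzinfo=tz)
end_old = datetime(local_now.year, 12, 31, 23, 59, 59, tzinfo=tz)  # whole calendar year
end_new = datetime.combine(today, time.max, tzinfo=tz)             # only up to today
print(start, end_old, end_new, sep="\n")
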
src/documents/management/commands/decrypt_documents.py — new file (93 lines)
@@ -0,0 +1,93 @@
from pathlib import Path

from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError

from documents.models import Document
from paperless.db import GnuPG


class Command(BaseCommand):
help = (
"This is how you migrate your stored documents from an encrypted "
"state to an unencrypted one (or vice-versa)"
)

def add_arguments(self, parser) -> None:
parser.add_argument(
"--passphrase",
help=(
"If PAPERLESS_PASSPHRASE isn't set already, you need to specify it here"
),
)

def handle(self, *args, **options) -> None:
try:
self.stdout.write(
self.style.WARNING(
"\n\n"
"WARNING: This script is going to work directly on your "
"document originals, so\n"
"WARNING: you probably shouldn't run "
"this unless you've got a recent backup\n"
"WARNING: handy. It "
"*should* work without a hitch, but be safe and backup your\n"
"WARNING: stuff first.\n\n"
"Hit Ctrl+C to exit now, or Enter to "
"continue.\n\n",
),
)
_ = input()
except KeyboardInterrupt:
return

passphrase = options["passphrase"] or settings.PASSPHRASE
if not passphrase:
raise CommandError(
"Passphrase not defined. Please set it with --passphrase or "
"by declaring it in your environment or your config.",
)

self.__gpg_to_unencrypted(passphrase)

def __gpg_to_unencrypted(self, passphrase: str) -> None:
encrypted_files = Document.objects.filter(
storage_type=Document.STORAGE_TYPE_GPG,
)

for document in encrypted_files:
self.stdout.write(f"Decrypting {document}")

old_paths = [document.source_path, document.thumbnail_path]

with document.source_file as file_handle:
raw_document = GnuPG.decrypted(file_handle, passphrase)
with document.thumbnail_file as file_handle:
raw_thumb = GnuPG.decrypted(file_handle, passphrase)

document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

ext: str = Path(document.filename).suffix

if not ext == ".gpg":
raise CommandError(
f"Abort: encrypted file {document.source_path} does not "
f"end with .gpg",
)

document.filename = Path(document.filename).stem

with document.source_path.open("wb") as f:
f.write(raw_document)

with document.thumbnail_path.open("wb") as f:
f.write(raw_thumb)

Document.objects.filter(id=document.id).update(
storage_type=document.storage_type,
filename=document.filename,
)

for path in old_paths:
path.unlink()
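The GnuPG wrapper used by the command above lives in paperless.db and is not part of this diff. As a rough sketch of what such a decrypt call can look like using the third-party python-gnupg package (the helper function and home directory below are assumptions, not the project's actual wrapper):

# Assumes the third-party "python-gnupg" package; paperless's real wrapper may differ.
import gnupg

def decrypt_bytes(encrypted_path: str, passphrase: str) -> bytes:
    gpg = gnupg.GPG(gnupghome="/tmp/gnupg-home")  # hypothetical GPG home
    with open(encrypted_path, "rb") as handle:
        result = gpg.decrypt_file(handle, passphrase=passphrase)
    if not result.ok:
        raise RuntimeError(f"Decryption failed: {result.status}")
    return result.data
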
@@ -1,343 +1,135 @@
"""
Document consumer management command.

Watches a consumption directory for new documents and queues them for processing.
Uses watchfiles for efficient file system monitoring with support for both
native OS notifications and polling fallback.
"""

from __future__ import annotations

import logging
from dataclasses import dataclass
import os
from concurrent.futures import ThreadPoolExecutor
from fnmatch import filter
from pathlib import Path
from pathlib import PurePath
from threading import Event
from time import monotonic
from typing import TYPE_CHECKING
from time import sleep
from typing import Final

from django import db
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from watchfiles import Change
from watchfiles import DefaultFilter
from watchfiles import watch
from watchdog.events import FileSystemEventHandler
from watchdog.observers.polling import PollingObserver

from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.models import Tag
from documents.parsers import get_supported_file_extensions
from documents.parsers import is_file_ext_supported
from documents.tasks import consume_file

if TYPE_CHECKING:
from collections.abc import Iterator

try:
from inotifyrecursive import INotify
from inotifyrecursive import flags
except ImportError: # pragma: no cover
INotify = flags = None

logger = logging.getLogger("paperless.management.consumer")


@dataclass
class TrackedFile:
"""Represents a file being tracked for stability."""

path: Path
last_event_time: float
last_mtime: float | None = None
last_size: int | None = None

def update_stats(self) -> bool:
"""
Update file stats. Returns True if file exists and stats were updated.
"""
try:
stat = self.path.stat()
self.last_mtime = stat.st_mtime
self.last_size = stat.st_size
return True
except OSError:
return False

def is_unchanged(self) -> bool:
"""
Check if file stats match the previously recorded values.
Returns False if file doesn't exist or stats changed.
"""
try:
stat = self.path.stat()
return stat.st_mtime == self.last_mtime and stat.st_size == self.last_size
except OSError:
return False
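TrackedFile above amounts to comparing two stat snapshots taken some time apart; if neither size nor mtime moved, the writer is assumed to be done. The same idea in isolation (a minimal sketch, not the class itself):

import time
from pathlib import Path

def is_stable(path: Path, delay: float = 1.0) -> bool:
    """True if size and mtime are unchanged across `delay` seconds."""
    try:
        before = path.stat()
        time.sleep(delay)
        after = path.stat()
    except OSError:
        return False  # file vanished or became unreadable
    return (before.st_mtime, before.st_size) == (after.st_mtime, after.st_size)
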
class FileStabilityTracker:
def _tags_from_path(filepath: Path) -> list[int]:
"""
Tracks file events and determines when files are stable for consumption.

A file is considered stable when:
1. No new events have been received for it within the stability delay
2. Its size and modification time haven't changed
3. It still exists as a regular file

This handles various edge cases:
- Network copies that write in chunks
- Scanners that open/close files multiple times
- Temporary files that get renamed
- Files that are deleted before becoming stable
"""

def __init__(self, stability_delay: float = 1.0) -> None:
"""
Initialize the tracker.

Args:
stability_delay: Time in seconds a file must remain unchanged
before being considered stable.
"""
self.stability_delay = stability_delay
self._tracked: dict[Path, TrackedFile] = {}

def track(self, path: Path, change: Change) -> None:
"""
Register a file event.

Args:
path: The file path that changed.
change: The type of change (added, modified, deleted).
"""
path = path.resolve()

match change:
case Change.deleted:
self._tracked.pop(path, None)
logger.debug(f"Stopped tracking deleted file: {path}")
case Change.added | Change.modified:
current_time = monotonic()
if path in self._tracked:
tracked = self._tracked[path]
tracked.last_event_time = current_time
tracked.update_stats()
logger.debug(f"Updated tracking for: {path}")
else:
tracked = TrackedFile(path=path, last_event_time=current_time)
if tracked.update_stats():
self._tracked[path] = tracked
logger.debug(f"Started tracking: {path}")
else:
logger.debug(f"Could not stat file, not tracking: {path}")

def get_stable_files(self) -> Iterator[Path]:
"""
Yield files that have been stable for the configured delay.

Files are removed from tracking once yielded or determined to be invalid.
"""
current_time = monotonic()
to_remove: list[Path] = []
to_yield: list[Path] = []

for path, tracked in self._tracked.items():
time_since_event = current_time - tracked.last_event_time

if time_since_event < self.stability_delay:
continue

# File has waited long enough, verify it's unchanged
if not tracked.is_unchanged():
# Stats changed or file gone - update and wait again
if tracked.update_stats():
tracked.last_event_time = current_time
logger.debug(f"File changed during stability check: {path}")
else:
# File no longer exists, remove from tracking
to_remove.append(path)
logger.debug(f"File disappeared during stability check: {path}")
continue

# File is stable, we can return it
to_yield.append(path)
logger.info(f"File is stable: {path}")

# Remove files that are no longer valid
for path in to_remove:
self._tracked.pop(path, None)

# Remove and yield stable files
for path in to_yield:
self._tracked.pop(path, None)
yield path

def has_pending_files(self) -> bool:
"""Check if there are files waiting for stability check."""
return len(self._tracked) > 0

@property
def pending_count(self) -> int:
"""Number of files being tracked."""
return len(self._tracked)
|
||||
Filter for watchfiles that accepts only supported document types
|
||||
and ignores system files/directories.
|
||||
|
||||
Extends DefaultFilter leveraging its built-in filtering:
|
||||
- `ignore_dirs`: Directory names to ignore (and all their contents)
|
||||
- `ignore_entity_patterns`: Regex patterns matched against filename/dirname only
|
||||
|
||||
We add custom logic for file extension filtering (only accept supported
|
||||
document types), which the library doesn't provide.
|
||||
"""
|
||||
|
||||
# Regex patterns for files to always ignore (matched against filename only)
|
||||
# These are passed to DefaultFilter.ignore_entity_patterns
|
||||
DEFAULT_IGNORE_PATTERNS: Final[tuple[str, ...]] = (
|
||||
r"^\.DS_Store$",
|
||||
r"^\.DS_STORE$",
|
||||
r"^\._.*",
|
||||
r"^desktop\.ini$",
|
||||
r"^Thumbs\.db$",
|
||||
)
|
||||
|
||||
# Directories to always ignore (passed to DefaultFilter.ignore_dirs)
|
||||
# These are matched by directory name, not full path
|
||||
DEFAULT_IGNORE_DIRS: Final[tuple[str, ...]] = (
|
||||
".stfolder", # Syncthing
|
||||
".stversions", # Syncthing
|
||||
".localized", # macOS
|
||||
"@eaDir", # Synology NAS
|
||||
".Spotlight-V100", # macOS
|
||||
".Trashes", # macOS
|
||||
"__MACOSX", # macOS archive artifacts
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
supported_extensions: frozenset[str] | None = None,
|
||||
ignore_patterns: list[str] | None = None,
|
||||
ignore_dirs: list[str] | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Initialize the consumer filter.
|
||||
|
||||
Args:
|
||||
supported_extensions: Set of file extensions to accept (e.g., {".pdf", ".png"}).
|
||||
If None, uses get_supported_file_extensions().
|
||||
ignore_patterns: Additional regex patterns to ignore (matched against filename).
|
||||
ignore_dirs: Additional directory names to ignore (merged with defaults).
|
||||
"""
|
||||
# Get supported extensions
|
||||
if supported_extensions is None:
|
||||
supported_extensions = frozenset(get_supported_file_extensions())
|
||||
self._supported_extensions = supported_extensions
|
||||
|
||||
# Combine default and user patterns
|
||||
all_patterns: list[str] = list(self.DEFAULT_IGNORE_PATTERNS)
|
||||
if ignore_patterns:
|
||||
all_patterns.extend(ignore_patterns)
|
||||
|
||||
# Combine default and user ignore_dirs
|
||||
all_ignore_dirs: list[str] = list(self.DEFAULT_IGNORE_DIRS)
|
||||
if ignore_dirs:
|
||||
all_ignore_dirs.extend(ignore_dirs)
|
||||
|
||||
# Let DefaultFilter handle all the pattern and directory filtering
|
||||
super().__init__(
|
||||
ignore_dirs=tuple(all_ignore_dirs),
|
||||
ignore_entity_patterns=tuple(all_patterns),
|
||||
ignore_paths=(),
|
||||
)
|
||||
|
||||
def __call__(self, change: Change, path: str) -> bool:
|
||||
"""
|
||||
Filter function for watchfiles.
|
||||
|
||||
Returns True if the path should be watched, False to ignore.
|
||||
|
||||
The parent DefaultFilter handles:
|
||||
- Hidden files/directories (starting with .)
|
||||
- Directories in ignore_dirs
|
||||
- Files/directories matching ignore_entity_patterns
|
||||
|
||||
We additionally filter files by extension.
|
||||
"""
|
||||
# Let parent filter handle directory ignoring and pattern matching
|
||||
if not super().__call__(change, path):
|
||||
return False
|
||||
|
||||
path_obj = Path(path)
|
||||
|
||||
# For directories, parent filter already handled everything
|
||||
if path_obj.is_dir():
|
||||
return True
|
||||
|
||||
# For files, check extension
|
||||
return self._has_supported_extension(path_obj)
|
||||
|
||||
def _has_supported_extension(self, path: Path) -> bool:
|
||||
"""Check if the file has a supported extension."""
|
||||
suffix = path.suffix.lower()
|
||||
return suffix in self._supported_extensions
|
||||
|
||||
|
||||
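Because watchfiles filters are plain callables, a filter like the one above can be sanity-checked directly, without starting a watch (illustrative paths; assumes the ConsumerFilter class above is in scope):

from watchfiles import Change

f = ConsumerFilter(supported_extensions=frozenset({".pdf"}))
print(f(Change.added, "/consume/scan.pdf"))   # True: supported extension
print(f(Change.added, "/consume/notes.txt"))  # False: extension not accepted
print(f(Change.added, "/consume/.DS_Store"))  # False: matches an ignore pattern
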
def _tags_from_path(filepath: Path, consumption_dir: Path) -> list[int]:
"""
Walk up the directory tree from filepath to consumption_dir
Walk up the directory tree from filepath to CONSUMPTION_DIR
and get or create Tag IDs for every directory.

Returns list of Tag primary keys.
Returns set of Tag models
"""
db.close_old_connections()
tag_ids: set[int] = set()
path_parts = filepath.relative_to(consumption_dir).parent.parts

tag_ids = set()
path_parts = filepath.relative_to(settings.CONSUMPTION_DIR).parent.parts
for part in path_parts:
tag, _ = Tag.objects.get_or_create(
name__iexact=part,
defaults={"name": part},
tag_ids.add(
Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})[0].pk,
)
tag_ids.add(tag.pk)

return list(tag_ids)
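Stripped of the ORM, the directory-to-tag mapping above is just the parent parts of the path relative to the consume root; each directory level names one tag. A plain sketch:

from pathlib import Path

def tag_names_from_path(filepath: Path, consumption_dir: Path) -> list[str]:
    # Every directory between the consume root and the file becomes one tag name.
    return list(filepath.relative_to(consumption_dir).parent.parts)

print(tag_names_from_path(Path("/consume/taxes/2024/scan.pdf"), Path("/consume")))
# ['taxes', '2024']
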
def _consume_file(
filepath: Path,
consumption_dir: Path,
*,
subdirs_as_tags: bool,
) -> None:
def _is_ignored(filepath: Path) -> bool:
"""
Queue a file for consumption.
Checks if the given file should be ignored, based on configured
patterns.

Args:
filepath: Path to the file to consume.
consumption_dir: Base consumption directory.
subdirs_as_tags: Whether to create tags from subdirectory names.
Returns True if the file is ignored, False otherwise
"""
# Verify file still exists and is accessible
# Trim out the consume directory, leaving only the filename and its
# path relative to the consume directory
filepath_relative = PurePath(filepath).relative_to(settings.CONSUMPTION_DIR)

# March through the components of the path, including directories and the filename
# looking for anything matching
# foo/bar/baz/file.pdf -> (foo, bar, baz, file.pdf)
parts = []
for part in filepath_relative.parts:
# If the part is not the name (ie, it's a dir)
# Need to append the trailing slash or fnmatch doesn't match
# fnmatch("dir", "dir/*") == False
# fnmatch("dir/", "dir/*") == True
if part != filepath_relative.name:
part = part + "/"
parts.append(part)

for pattern in settings.CONSUMER_IGNORE_PATTERNS:
if len(filter(parts, pattern)):
return True

return False
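The trailing-slash comment above is the non-obvious part of _is_ignored: fnmatch matches the pattern's "/" literally, so a directory component only matches a "dir/*" pattern once the slash is appended. This is easy to verify:

from fnmatch import fnmatch

print(fnmatch("dir", "dir/*"))   # False: nothing after "dir" for "/*" to match
print(fnmatch("dir/", "dir/*"))  # True: "/" matches literally, "*" matches the empty string
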
def _consume(filepath: Path) -> None:
# Check permissions early
try:
if not filepath.is_file():
logger.debug(f"Not consuming {filepath}: not a file or doesn't exist")
return
except OSError as e:
logger.warning(f"Not consuming {filepath}: {e}")
filepath.stat()
except (PermissionError, OSError):
logger.warning(f"Not consuming file {filepath}: Permission denied.")
return

# Get tags from path if configured
tag_ids: list[int] | None = None
if subdirs_as_tags:
try:
tag_ids = _tags_from_path(filepath, consumption_dir)
except Exception:
logger.exception(f"Error creating tags from path for {filepath}")
if filepath.is_dir() or _is_ignored(filepath):
return

# Queue for consumption
if not filepath.is_file():
logger.debug(f"Not consuming file {filepath}: File has moved.")
return

if not is_file_ext_supported(filepath.suffix):
logger.warning(f"Not consuming file {filepath}: Unknown file extension.")
return

# Total wait time: up to 500ms
os_error_retry_count: Final[int] = 50
os_error_retry_wait: Final[float] = 0.01

read_try_count = 0
file_open_ok = False
os_error_str = None

while (read_try_count < os_error_retry_count) and not file_open_ok:
try:
with filepath.open("rb"):
file_open_ok = True
except OSError as e:
read_try_count += 1
os_error_str = str(e)
sleep(os_error_retry_wait)

if read_try_count >= os_error_retry_count:
logger.warning(f"Not consuming file {filepath}: OS reports {os_error_str}")
return

tag_ids = None
try:
logger.info(f"Adding {filepath} to the task queue")
if settings.CONSUMER_SUBDIRS_AS_TAGS:
tag_ids = _tags_from_path(filepath)
except Exception:
logger.exception("Error creating tags from path")

try:
logger.info(f"Adding {filepath} to the task queue.")
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
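The open-retry loop above (50 attempts, 10 ms apart, roughly half a second in total) exists because scanners and SMB clients often still hold a new file open for a moment. Condensed into a standalone helper:

import time
from pathlib import Path

def wait_until_openable(path: Path, attempts: int = 50, delay: float = 0.01) -> bool:
    """Try to open `path` for reading, giving the writer attempts*delay seconds."""
    for _ in range(attempts):
        try:
            with path.open("rb"):
                return True
        except OSError:
            time.sleep(delay)
    return False
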
@@ -346,209 +138,228 @@ def _consume_file(
DocumentMetadataOverrides(tag_ids=tag_ids),
)
except Exception:
logger.exception(f"Error while queuing document {filepath}")
# Catch all so that the consumer won't crash.
# This is also what the test case is listening for to check for
# errors.
logger.exception("Error while consuming document")


def _consume_wait_unmodified(file: Path) -> None:
"""
Waits for the given file to appear unmodified based on file size
and modification time. Will wait a configured number of seconds
and retry a configured number of times before either consuming or
giving up
"""
if _is_ignored(file):
return

logger.debug(f"Waiting for file {file} to remain unmodified")
mtime = -1
size = -1
current_try = 0
while current_try < settings.CONSUMER_POLLING_RETRY_COUNT:
try:
stat_data = file.stat()
new_mtime = stat_data.st_mtime
new_size = stat_data.st_size
except FileNotFoundError:
logger.debug(
f"File {file} moved while waiting for it to remain unmodified.",
)
return
if new_mtime == mtime and new_size == size:
_consume(file)
return
mtime = new_mtime
size = new_size
sleep(settings.CONSUMER_POLLING_DELAY)
current_try += 1

logger.error(f"Timeout while waiting on file {file} to remain unmodified.")


class Handler(FileSystemEventHandler):
def __init__(self, pool: ThreadPoolExecutor) -> None:
super().__init__()
self._pool = pool

def on_created(self, event):
self._pool.submit(_consume_wait_unmodified, Path(event.src_path))

def on_moved(self, event):
self._pool.submit(_consume_wait_unmodified, Path(event.dest_path))


class Command(BaseCommand):
"""
Watch a consumption directory and queue new documents for processing.

Uses watchfiles for efficient file system monitoring. Supports both
native OS notifications (inotify on Linux, FSEvents on macOS) and
polling for network filesystems.
On every iteration of an infinite loop, consume what we can from the
consumption directory.
"""

help = "Watch the consumption directory for new documents"

# For testing - allows tests to stop the consumer
stop_flag: Event = Event()

# Testing timeout in seconds
# This is here primarily for the tests and is irrelevant in production.
stop_flag = Event()
# Also only for testing, configures in one place the timeout used before checking
# the stop flag
testing_timeout_s: Final[float] = 0.5
testing_timeout_ms: Final[float] = testing_timeout_s * 1000.0

def add_arguments(self, parser) -> None:
def add_arguments(self, parser):
parser.add_argument(
"directory",
default=None,
default=settings.CONSUMPTION_DIR,
nargs="?",
help="The consumption directory (defaults to CONSUMPTION_DIR setting)",
)
parser.add_argument(
"--oneshot",
action="store_true",
help="Process existing files and exit without watching",
help="The consumption directory.",
)
parser.add_argument("--oneshot", action="store_true", help="Run only once.")

# Only use during unit testing, will configure a timeout
# Leaving it unset or false and the consumer will exit when it
# receives SIGINT
parser.add_argument(
"--testing",
action="store_true",
help="Enable testing mode with shorter timeouts",
help="Flag used only for unit testing",
default=False,
)

def handle(self, *args, **options) -> None:
# Resolve consumption directory
directory = options.get("directory")
def handle(self, *args, **options):
directory = options["directory"]
recursive = settings.CONSUMER_RECURSIVE

if not directory:
directory = getattr(settings, "CONSUMPTION_DIR", None)
if not directory:
raise CommandError("CONSUMPTION_DIR is not configured")
raise CommandError("CONSUMPTION_DIR does not appear to be set.")

directory = Path(directory).resolve()

if not directory.exists():
raise CommandError(f"Consumption directory does not exist: {directory}")

if not directory.is_dir():
raise CommandError(f"Consumption path is not a directory: {directory}")
raise CommandError(f"Consumption directory {directory} does not exist")

# Ensure scratch directory exists
# Consumer will need this
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)

# Get settings
recursive: bool = settings.CONSUMER_RECURSIVE
subdirs_as_tags: bool = settings.CONSUMER_SUBDIRS_AS_TAGS
polling_interval: float = settings.CONSUMER_POLLING_INTERVAL
stability_delay: float = settings.CONSUMER_STABILITY_DELAY
ignore_patterns: list[str] = settings.CONSUMER_IGNORE_PATTERNS
ignore_dirs: list[str] = settings.CONSUMER_IGNORE_DIRS
is_testing: bool = options.get("testing", False)
is_oneshot: bool = options.get("oneshot", False)
if recursive:
for dirpath, _, filenames in os.walk(directory):
for filename in filenames:
filepath = Path(dirpath) / filename
_consume(filepath)
else:
for filepath in directory.iterdir():
_consume(filepath)

# Create filter
consumer_filter = ConsumerFilter(
ignore_patterns=ignore_patterns,
ignore_dirs=ignore_dirs,
)

# Process existing files
self._process_existing_files(
directory=directory,
recursive=recursive,
subdirs_as_tags=subdirs_as_tags,
consumer_filter=consumer_filter,
)

if is_oneshot:
logger.info("Oneshot mode: processed existing files, exiting")
if options["oneshot"]:
return

# Start watching
self._watch_directory(
directory=directory,
recursive=recursive,
subdirs_as_tags=subdirs_as_tags,
consumer_filter=consumer_filter,
polling_interval=polling_interval,
stability_delay=stability_delay,
is_testing=is_testing,
)

logger.debug("Consumer exiting")

def _process_existing_files(
self,
*,
directory: Path,
recursive: bool,
subdirs_as_tags: bool,
consumer_filter: ConsumerFilter,
) -> None:
"""Process any existing files in the consumption directory."""
logger.info(f"Processing existing files in {directory}")

glob_pattern = "**/*" if recursive else "*"

for filepath in directory.glob(glob_pattern):
# Use filter to check if file should be processed
if not filepath.is_file():
continue

if not consumer_filter(Change.added, str(filepath)):
continue

_consume_file(
filepath=filepath,
consumption_dir=directory,
subdirs_as_tags=subdirs_as_tags,
)

def _watch_directory(
self,
*,
directory: Path,
recursive: bool,
subdirs_as_tags: bool,
consumer_filter: ConsumerFilter,
polling_interval: float,
stability_delay: float,
is_testing: bool,
) -> None:
"""Watch directory for changes and process stable files."""
use_polling = polling_interval > 0
poll_delay_ms = int(polling_interval * 1000) if use_polling else 0

if use_polling:
logger.info(
f"Watching {directory} using polling (interval: {polling_interval}s)",
)
if settings.CONSUMER_POLLING == 0 and INotify:
self.handle_inotify(directory, recursive, is_testing=options["testing"])
else:
logger.info(f"Watching {directory} using native file system events")
if INotify is None and settings.CONSUMER_POLLING == 0: # pragma: no cover
logger.warning("Using polling as INotify import failed")
self.handle_polling(directory, recursive, is_testing=options["testing"])

# Create stability tracker
tracker = FileStabilityTracker(stability_delay=stability_delay)
logger.debug("Consumer exiting.")

# Calculate timeouts
stability_timeout_ms = int(stability_delay * 1000)
testing_timeout_ms = int(self.testing_timeout_s * 1000)
def handle_polling(self, directory, recursive, *, is_testing: bool):
logger.info(f"Polling directory for changes: {directory}")

# Start with no timeout (wait indefinitely for first event)
# unless in testing mode
timeout_ms = testing_timeout_ms if is_testing else 0
timeout = None
if is_testing:
timeout = self.testing_timeout_s
logger.debug(f"Configuring timeout to {timeout}s")

self.stop_flag.clear()
polling_interval = settings.CONSUMER_POLLING
if polling_interval == 0: # pragma: no cover
# Only happens if INotify failed to import
logger.warning("Using polling of 10s, consider setting this")
polling_interval = 10

while not self.stop_flag.is_set():
with ThreadPoolExecutor(max_workers=4) as pool:
observer = PollingObserver(timeout=polling_interval)
observer.schedule(Handler(pool), directory, recursive=recursive)
observer.start()
try:
for changes in watch(
directory,
watch_filter=consumer_filter,
rust_timeout=timeout_ms,
yield_on_timeout=True,
force_polling=use_polling,
poll_delay_ms=poll_delay_ms,
recursive=recursive,
stop_event=self.stop_flag,
):
# Process each change
for change_type, path in changes:
path = Path(path).resolve()
if not path.is_file():
while observer.is_alive():
observer.join(timeout)
if self.stop_flag.is_set():
observer.stop()
except KeyboardInterrupt:
observer.stop()
observer.join()

def handle_inotify(self, directory, recursive, *, is_testing: bool):
logger.info(f"Using inotify to watch directory for changes: {directory}")

timeout_ms = None
if is_testing:
timeout_ms = self.testing_timeout_ms
logger.debug(f"Configuring timeout to {timeout_ms}ms")

inotify = INotify()
inotify_flags = flags.CLOSE_WRITE | flags.MOVED_TO | flags.MODIFY
if recursive:
inotify.add_watch_recursive(directory, inotify_flags)
else:
inotify.add_watch(directory, inotify_flags)

inotify_debounce_secs: Final[float] = settings.CONSUMER_INOTIFY_DELAY
inotify_debounce_ms: Final[int] = inotify_debounce_secs * 1000

finished = False

notified_files = {}

try:
while not finished:
try:
for event in inotify.read(timeout=timeout_ms):
path = inotify.get_path(event.wd) if recursive else directory
filepath = Path(path) / event.name
if flags.MODIFY in flags.from_mask(event.mask):
notified_files.pop(filepath, None)
else:
notified_files[filepath] = monotonic()

# Check the files against the timeout
still_waiting = {}
# last_event_time is time of the last inotify event for this file
for filepath, last_event_time in notified_files.items():
# Current time - last time over the configured timeout
waited_long_enough = (
monotonic() - last_event_time
) > inotify_debounce_secs

# Also make sure the file exists still, some scanners might write a
# temporary file first
try:
file_still_exists = filepath.exists() and filepath.is_file()
except (PermissionError, OSError): # pragma: no cover
# If we can't check, let it fail in the _consume function
file_still_exists = True
continue
logger.debug(f"Event: {change_type.name} for {path}")
tracker.track(path, change_type)

# Check for stable files
for stable_path in tracker.get_stable_files():
_consume_file(
filepath=stable_path,
consumption_dir=directory,
subdirs_as_tags=subdirs_as_tags,
)
if waited_long_enough and file_still_exists:
_consume(filepath)
elif file_still_exists:
still_waiting[filepath] = last_event_time

# Exit watch loop to reconfigure timeout
break
# These files are still waiting to hit the timeout
notified_files = still_waiting

# Determine next timeout
if tracker.has_pending_files():
# Check pending files at stability interval
timeout_ms = stability_timeout_ms
|
||||
elif is_testing:
|
||||
# In testing, use short timeout to check stop flag
|
||||
timeout_ms = testing_timeout_ms
|
||||
else: # pragma: nocover
|
||||
# No pending files, wait indefinitely
|
||||
timeout_ms = 0
|
||||
# If files are waiting, need to exit read() to check them
|
||||
# Otherwise, go back to infinite sleep time, but only if not testing
|
||||
if len(notified_files) > 0:
|
||||
timeout_ms = inotify_debounce_ms
|
||||
elif is_testing:
|
||||
timeout_ms = self.testing_timeout_ms
|
||||
else:
|
||||
timeout_ms = None
|
||||
|
||||
except KeyboardInterrupt: # pragma: nocover
|
||||
logger.info("Received interrupt, stopping consumer")
|
||||
self.stop_flag.set()
|
||||
if self.stop_flag.is_set():
|
||||
logger.debug("Finishing because event is set")
|
||||
finished = True
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Received SIGINT, stopping inotify")
|
||||
finished = True
|
||||
finally:
|
||||
inotify.close()
|
||||
|
||||
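Note: the FileStabilityTracker and _consume_file helpers referenced in the new watcher code above are added elsewhere in this changeset and are not part of this excerpt. As a rough sketch of the small interface the watcher relies on (a constructor taking stability_delay, plus track(), get_stable_files(), and has_pending_files()), the tracker could be approximated as follows. The internals shown here are assumptions, not the branch's actual implementation:

from pathlib import Path
from time import monotonic

from watchfiles import Change


class FileStabilityTracker:
    # Hypothetical sketch; the real class ships with this changeset.
    # It remembers when each file was last touched and reports files
    # that have been quiet for at least `stability_delay` seconds.

    def __init__(self, *, stability_delay: float) -> None:
        self.stability_delay = stability_delay
        self._pending: dict[Path, float] = {}  # path -> monotonic time of last event

    def track(self, path: Path, change_type: Change) -> None:
        if change_type == Change.deleted:
            self._pending.pop(path, None)  # a deleted file can never become stable
        else:
            self._pending[path] = monotonic()  # any other event resets the clock

    def has_pending_files(self) -> bool:
        return bool(self._pending)

    def get_stable_files(self) -> list[Path]:
        now = monotonic()
        stable = [p for p, t in self._pending.items() if now - t >= self.stability_delay]
        for path in stable:
            del self._pending[path]  # report each stable file exactly once
        return stable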
@@ -3,6 +3,7 @@ import json
 import os
 import shutil
 import tempfile
+import time
 from pathlib import Path
 from typing import TYPE_CHECKING

@@ -55,6 +56,7 @@ from documents.settings import EXPORTER_FILE_NAME
 from documents.settings import EXPORTER_THUMBNAIL_NAME
 from documents.utils import copy_file_with_basic_stats
 from paperless import version
+from paperless.db import GnuPG
 from paperless.models import ApplicationConfiguration
 from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule
@@ -314,17 +316,20 @@ class Command(CryptMixin, BaseCommand):
             total=len(document_manifest),
             disable=self.no_progress_bar,
         ):
+            # 3.1. store files unencrypted
+            document_dict["fields"]["storage_type"] = Document.STORAGE_TYPE_UNENCRYPTED
+
             document = document_map[document_dict["pk"]]

-            # 3.1. generate a unique filename
+            # 3.2. generate a unique filename
             base_name = self.generate_base_name(document)

-            # 3.2. write filenames into manifest
+            # 3.3. write filenames into manifest
             original_target, thumbnail_target, archive_target = (
                 self.generate_document_targets(document, base_name, document_dict)
             )

-            # 3.3. write files to target folder
+            # 3.4. write files to target folder
             if not self.data_only:
                 self.copy_document_files(
                     document,
@@ -418,6 +423,7 @@ class Command(CryptMixin, BaseCommand):
             base_name = generate_filename(
                 document,
                 counter=filename_counter,
+                append_gpg=False,
             )
         else:
             base_name = document.get_public_filename(counter=filename_counter)
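The copy_document_files hunk below calls self.check_and_copy(source, checksum, target), which is defined elsewhere in this command and not shown in this excerpt. Judging only from its call sites, a hypothetical sketch would copy the file only when the target is missing or its recorded checksum no longer matches; the standalone function name, signature, and MD5 choice below are assumptions, not the repository's actual implementation:

import hashlib
import shutil
from pathlib import Path


def check_and_copy(source: Path, source_checksum: str | None, target: Path) -> None:
    # Hypothetical sketch; the real method lives on the exporter Command class.
    if target.exists() and source_checksum is not None:
        if hashlib.md5(target.read_bytes()).hexdigest() == source_checksum:
            return  # target already matches the recorded checksum, skip the copy
    target.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(source, target)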
@@ -476,24 +482,46 @@ class Command(CryptMixin, BaseCommand):

         If the document is encrypted, the files are decrypted before copying them to the target location.
         """
-        self.check_and_copy(
-            document.source_path,
-            document.checksum,
-            original_target,
-        )
+        if document.storage_type == Document.STORAGE_TYPE_GPG:
+            t = int(time.mktime(document.created.timetuple()))

-        if thumbnail_target:
-            self.check_and_copy(document.thumbnail_path, None, thumbnail_target)
+            original_target.parent.mkdir(parents=True, exist_ok=True)
+            with document.source_file as out_file:
+                original_target.write_bytes(GnuPG.decrypted(out_file))
+            os.utime(original_target, times=(t, t))

-        if archive_target:
-            if TYPE_CHECKING:
-                assert isinstance(document.archive_path, Path)
+            if thumbnail_target:
+                thumbnail_target.parent.mkdir(parents=True, exist_ok=True)
+                with document.thumbnail_file as out_file:
+                    thumbnail_target.write_bytes(GnuPG.decrypted(out_file))
+                os.utime(thumbnail_target, times=(t, t))

+            if archive_target:
+                archive_target.parent.mkdir(parents=True, exist_ok=True)
+                if TYPE_CHECKING:
+                    assert isinstance(document.archive_path, Path)
+                with document.archive_path as out_file:
+                    archive_target.write_bytes(GnuPG.decrypted(out_file))
+                os.utime(archive_target, times=(t, t))
+        else:
             self.check_and_copy(
-                document.archive_path,
-                document.archive_checksum,
-                archive_target,
+                document.source_path,
+                document.checksum,
+                original_target,
             )

+            if thumbnail_target:
+                self.check_and_copy(document.thumbnail_path, None, thumbnail_target)

+            if archive_target:
+                if TYPE_CHECKING:
+                    assert isinstance(document.archive_path, Path)
+                self.check_and_copy(
+                    document.archive_path,
+                    document.archive_checksum,
+                    archive_target,
+                )

     def check_and_write_json(
         self,
         content: list[dict] | dict,
@@ -383,6 +383,8 @@ class Command(CryptMixin, BaseCommand):
         else:
             archive_path = None

+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
+
         with FileLock(settings.MEDIA_LOCK):
             if Path(document.source_path).is_file():
                 raise FileExistsError(document.source_path)
@@ -403,18 +403,6 @@ def existing_document_matches_workflow(
             f"Document tags {list(document.tags.all())} include excluded tags {list(trigger_has_not_tags_qs)}",
         )

-    allowed_correspondent_ids = set(
-        trigger.filter_has_any_correspondents.values_list("id", flat=True),
-    )
-    if (
-        allowed_correspondent_ids
-        and document.correspondent_id not in allowed_correspondent_ids
-    ):
-        return (
-            False,
-            f"Document correspondent {document.correspondent} is not one of {list(trigger.filter_has_any_correspondents.all())}",
-        )
-
     # Document correspondent vs trigger has_correspondent
     if (
         trigger.filter_has_correspondent_id is not None
@@ -436,17 +424,6 @@ def existing_document_matches_workflow(
             f"Document correspondent {document.correspondent} is excluded by {list(trigger.filter_has_not_correspondents.all())}",
         )

-    allowed_document_type_ids = set(
-        trigger.filter_has_any_document_types.values_list("id", flat=True),
-    )
-    if allowed_document_type_ids and (
-        document.document_type_id not in allowed_document_type_ids
-    ):
-        return (
-            False,
-            f"Document doc type {document.document_type} is not one of {list(trigger.filter_has_any_document_types.all())}",
-        )
-
     # Document document_type vs trigger has_document_type
     if (
         trigger.filter_has_document_type_id is not None
@@ -468,17 +445,6 @@ def existing_document_matches_workflow(
             f"Document doc type {document.document_type} is excluded by {list(trigger.filter_has_not_document_types.all())}",
         )

-    allowed_storage_path_ids = set(
-        trigger.filter_has_any_storage_paths.values_list("id", flat=True),
-    )
-    if allowed_storage_path_ids and (
-        document.storage_path_id not in allowed_storage_path_ids
-    ):
-        return (
-            False,
-            f"Document storage path {document.storage_path} is not one of {list(trigger.filter_has_any_storage_paths.all())}",
-        )
-
     # Document storage_path vs trigger has_storage_path
     if (
         trigger.filter_has_storage_path_id is not None
@@ -566,10 +532,6 @@ def prefilter_documents_by_workflowtrigger(

     # Correspondent, DocumentType, etc. filtering

-    if trigger.filter_has_any_correspondents.exists():
-        documents = documents.filter(
-            correspondent__in=trigger.filter_has_any_correspondents.all(),
-        )
     if trigger.filter_has_correspondent is not None:
         documents = documents.filter(
             correspondent=trigger.filter_has_correspondent,
@@ -579,10 +541,6 @@ def prefilter_documents_by_workflowtrigger(
             correspondent__in=trigger.filter_has_not_correspondents.all(),
         )

-    if trigger.filter_has_any_document_types.exists():
-        documents = documents.filter(
-            document_type__in=trigger.filter_has_any_document_types.all(),
-        )
     if trigger.filter_has_document_type is not None:
         documents = documents.filter(
             document_type=trigger.filter_has_document_type,
@@ -592,10 +550,6 @@ def prefilter_documents_by_workflowtrigger(
             document_type__in=trigger.filter_has_not_document_types.all(),
         )

-    if trigger.filter_has_any_storage_paths.exists():
-        documents = documents.filter(
-            storage_path__in=trigger.filter_has_any_storage_paths.all(),
-        )
     if trigger.filter_has_storage_path is not None:
         documents = documents.filter(
             storage_path=trigger.filter_has_storage_path,
@@ -650,11 +604,8 @@ def document_matches_workflow(
                 "filter_has_tags",
                 "filter_has_all_tags",
                 "filter_has_not_tags",
-                "filter_has_any_document_types",
                 "filter_has_not_document_types",
-                "filter_has_any_correspondents",
                 "filter_has_not_correspondents",
-                "filter_has_any_storage_paths",
                 "filter_has_not_storage_paths",
             )
         )
File diff suppressed because it is too large

src/documents/migrations/0002_auto_20151226_1316.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Generated by Django 1.9 on 2015-12-26 13:16

import django.utils.timezone
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0001_initial"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="document",
            options={"ordering": ("sender", "title")},
        ),
        migrations.AlterField(
            model_name="document",
            name="created",
            field=models.DateTimeField(
                default=django.utils.timezone.now,
                editable=False,
            ),
        ),
    ]
src/documents/migrations/0003_sender.py (new file, 70 lines)
@@ -0,0 +1,70 @@
# Generated by Django 1.9 on 2016-01-11 12:21

import django.db.models.deletion
from django.db import migrations
from django.db import models
from django.template.defaultfilters import slugify

DOCUMENT_SENDER_MAP = {}


def move_sender_strings_to_sender_model(apps, schema_editor):
    sender_model = apps.get_model("documents", "Sender")
    document_model = apps.get_model("documents", "Document")

    # Create the sender and log the relationship with the document
    for document in document_model.objects.all():
        if document.sender:
            (
                DOCUMENT_SENDER_MAP[document.pk],
                _,
            ) = sender_model.objects.get_or_create(
                name=document.sender,
                defaults={"slug": slugify(document.sender)},
            )


def realign_senders(apps, schema_editor):
    document_model = apps.get_model("documents", "Document")
    for pk, sender in DOCUMENT_SENDER_MAP.items():
        document_model.objects.filter(pk=pk).update(sender=sender)


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0002_auto_20151226_1316"),
    ]

    operations = [
        migrations.CreateModel(
            name="Sender",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=128, unique=True)),
                ("slug", models.SlugField()),
            ],
        ),
        migrations.RunPython(move_sender_strings_to_sender_model),
        migrations.RemoveField(
            model_name="document",
            name="sender",
        ),
        migrations.AddField(
            model_name="document",
            name="sender",
            field=models.ForeignKey(
                blank=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="documents.Sender",
            ),
        ),
        migrations.RunPython(realign_senders),
    ]
src/documents/migrations/0004_auto_20160114_1844.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 1.9 on 2016-01-14 18:44

import django.db.models.deletion
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0003_sender"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="sender",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="documents",
                to="documents.Sender",
            ),
        ),
    ]
@@ -0,0 +1,178 @@
# Generated by Django 4.2.13 on 2024-06-28 17:52

import django.db.models.deletion
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    replaces = [
        ("documents", "0004_auto_20160114_1844"),
        ("documents", "0005_auto_20160123_0313"),
        ("documents", "0006_auto_20160123_0430"),
        ("documents", "0007_auto_20160126_2114"),
        ("documents", "0008_document_file_type"),
        ("documents", "0009_auto_20160214_0040"),
        ("documents", "0010_log"),
        ("documents", "0011_auto_20160303_1929"),
    ]

    dependencies = [
        ("documents", "0003_sender"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="sender",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="documents",
                to="documents.sender",
            ),
        ),
        migrations.AlterModelOptions(
            name="sender",
            options={"ordering": ("name",)},
        ),
        migrations.CreateModel(
            name="Tag",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=128, unique=True)),
                ("slug", models.SlugField(blank=True)),
                (
                    "colour",
                    models.PositiveIntegerField(
                        choices=[
                            (1, "#a6cee3"),
                            (2, "#1f78b4"),
                            (3, "#b2df8a"),
                            (4, "#33a02c"),
                            (5, "#fb9a99"),
                            (6, "#e31a1c"),
                            (7, "#fdbf6f"),
                            (8, "#ff7f00"),
                            (9, "#cab2d6"),
                            (10, "#6a3d9a"),
                            (11, "#b15928"),
                            (12, "#000000"),
                            (13, "#cccccc"),
                        ],
                        default=1,
                    ),
                ),
                ("match", models.CharField(blank=True, max_length=256)),
                (
                    "matching_algorithm",
                    models.PositiveIntegerField(
                        choices=[
                            (1, "Any"),
                            (2, "All"),
                            (3, "Literal"),
                            (4, "Regular Expression"),
                        ],
                        default=1,
                        help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.AlterField(
            model_name="sender",
            name="slug",
            field=models.SlugField(blank=True),
        ),
        migrations.AddField(
            model_name="document",
            name="file_type",
            field=models.CharField(
                choices=[
                    ("pdf", "PDF"),
                    ("png", "PNG"),
                    ("jpg", "JPG"),
                    ("gif", "GIF"),
                    ("tiff", "TIFF"),
                ],
                default="pdf",
                editable=False,
                max_length=4,
            ),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="document",
            name="tags",
            field=models.ManyToManyField(
                blank=True,
                related_name="documents",
                to="documents.tag",
            ),
        ),
        migrations.CreateModel(
            name="Log",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("group", models.UUIDField(blank=True)),
                ("message", models.TextField()),
                (
                    "level",
                    models.PositiveIntegerField(
                        choices=[
                            (10, "Debugging"),
                            (20, "Informational"),
                            (30, "Warning"),
                            (40, "Error"),
                            (50, "Critical"),
                        ],
                        default=20,
                    ),
                ),
                (
                    "component",
                    models.PositiveIntegerField(
                        choices=[(1, "Consumer"), (2, "Mail Fetcher")],
                    ),
                ),
                ("created", models.DateTimeField(auto_now_add=True)),
                ("modified", models.DateTimeField(auto_now=True)),
            ],
            options={
                "ordering": ("-modified",),
            },
        ),
        migrations.RenameModel(
            old_name="Sender",
            new_name="Correspondent",
        ),
        migrations.AlterModelOptions(
            name="document",
            options={"ordering": ("correspondent", "title")},
        ),
        migrations.RenameField(
            model_name="document",
            old_name="sender",
            new_name="correspondent",
        ),
    ]
src/documents/migrations/0005_auto_20160123_0313.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# Generated by Django 1.9 on 2016-01-23 03:13

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0004_auto_20160114_1844"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="sender",
            options={"ordering": ("name",)},
        ),
    ]
@@ -1,43 +0,0 @@
# Generated by Django 5.2.7 on 2025-12-17 22:25

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0004_remove_document_storage_type"),
    ]

    operations = [
        migrations.AddField(
            model_name="workflowtrigger",
            name="filter_has_any_correspondents",
            field=models.ManyToManyField(
                blank=True,
                related_name="workflowtriggers_has_any_correspondent",
                to="documents.correspondent",
                verbose_name="has one of these correspondents",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="filter_has_any_document_types",
            field=models.ManyToManyField(
                blank=True,
                related_name="workflowtriggers_has_any_document_type",
                to="documents.documenttype",
                verbose_name="has one of these document types",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="filter_has_any_storage_paths",
            field=models.ManyToManyField(
                blank=True,
                related_name="workflowtriggers_has_any_storage_path",
                to="documents.storagepath",
                verbose_name="has one of these storage paths",
            ),
        ),
    ]
src/documents/migrations/0006_auto_20160123_0430.py (new file, 64 lines)
@@ -0,0 +1,64 @@
# Generated by Django 1.9 on 2016-01-23 04:30

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0005_auto_20160123_0313"),
    ]

    operations = [
        migrations.CreateModel(
            name="Tag",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=128, unique=True)),
                ("slug", models.SlugField(blank=True)),
                (
                    "colour",
                    models.PositiveIntegerField(
                        choices=[
                            (1, "#a6cee3"),
                            (2, "#1f78b4"),
                            (3, "#b2df8a"),
                            (4, "#33a02c"),
                            (5, "#fb9a99"),
                            (6, "#e31a1c"),
                            (7, "#fdbf6f"),
                            (8, "#ff7f00"),
                            (9, "#cab2d6"),
                            (10, "#6a3d9a"),
                            (11, "#ffff99"),
                            (12, "#b15928"),
                            (13, "#000000"),
                            (14, "#cccccc"),
                        ],
                        default=1,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.AlterField(
            model_name="sender",
            name="slug",
            field=models.SlugField(blank=True),
        ),
        migrations.AddField(
            model_name="document",
            name="tags",
            field=models.ManyToManyField(related_name="documents", to="documents.Tag"),
        ),
    ]
src/documents/migrations/0007_auto_20160126_2114.py (new file, 55 lines)
@@ -0,0 +1,55 @@
# Generated by Django 1.9 on 2016-01-26 21:14

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0006_auto_20160123_0430"),
    ]

    operations = [
        migrations.AddField(
            model_name="tag",
            name="match",
            field=models.CharField(blank=True, max_length=256),
        ),
        migrations.AddField(
            model_name="tag",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                blank=True,
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                ],
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="tag",
            name="colour",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "#a6cee3"),
                    (2, "#1f78b4"),
                    (3, "#b2df8a"),
                    (4, "#33a02c"),
                    (5, "#fb9a99"),
                    (6, "#e31a1c"),
                    (7, "#fdbf6f"),
                    (8, "#ff7f00"),
                    (9, "#cab2d6"),
                    (10, "#6a3d9a"),
                    (11, "#b15928"),
                    (12, "#000000"),
                    (13, "#cccccc"),
                ],
                default=1,
            ),
        ),
    ]
src/documents/migrations/0008_document_file_type.py
Normal file
39
src/documents/migrations/0008_document_file_type.py
Normal file
@@ -0,0 +1,39 @@
|
||||
# Generated by Django 1.9 on 2016-01-29 22:58
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0007_auto_20160126_2114"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="file_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("pdf", "PDF"),
|
||||
("png", "PNG"),
|
||||
("jpg", "JPG"),
|
||||
("gif", "GIF"),
|
||||
("tiff", "TIFF"),
|
||||
],
|
||||
default="pdf",
|
||||
editable=False,
|
||||
max_length=4,
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="documents",
|
||||
to="documents.Tag",
|
||||
),
|
||||
),
|
||||
]
|
||||
27
src/documents/migrations/0009_auto_20160214_0040.py
Normal file
27
src/documents/migrations/0009_auto_20160214_0040.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 1.9 on 2016-02-14 00:40
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0008_document_file_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0010_log.py (new file, 53 lines)
@@ -0,0 +1,53 @@
# Generated by Django 1.9 on 2016-02-27 17:54

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0009_auto_20160214_0040"),
    ]

    operations = [
        migrations.CreateModel(
            name="Log",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("group", models.UUIDField(blank=True)),
                ("message", models.TextField()),
                (
                    "level",
                    models.PositiveIntegerField(
                        choices=[
                            (10, "Debugging"),
                            (20, "Informational"),
                            (30, "Warning"),
                            (40, "Error"),
                            (50, "Critical"),
                        ],
                        default=20,
                    ),
                ),
                (
                    "component",
                    models.PositiveIntegerField(
                        choices=[(1, "Consumer"), (2, "Mail Fetcher")],
                    ),
                ),
                ("created", models.DateTimeField(auto_now_add=True)),
                ("modified", models.DateTimeField(auto_now=True)),
            ],
            options={
                "ordering": ("-modified",),
            },
        ),
    ]
src/documents/migrations/0011_auto_20160303_1929.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Generated by Django 1.9.2 on 2016-03-03 19:29

from django.db import migrations


class Migration(migrations.Migration):
    atomic = False
    dependencies = [
        ("documents", "0010_log"),
    ]

    operations = [
        migrations.RenameModel(
            old_name="Sender",
            new_name="Correspondent",
        ),
        migrations.AlterModelOptions(
            name="document",
            options={"ordering": ("correspondent", "title")},
        ),
        migrations.RenameField(
            model_name="document",
            old_name="sender",
            new_name="correspondent",
        ),
    ]
src/documents/migrations/0012_auto_20160305_0040.py (new file, 128 lines)
@@ -0,0 +1,128 @@
# Generated by Django 1.9.2 on 2016-03-05 00:40

import os
import re
import shutil
import subprocess
import tempfile
from pathlib import Path

import gnupg
from django.conf import settings
from django.db import migrations
from django.utils.termcolors import colorize as colourise  # Spelling hurts me


class GnuPG:
    """
    A handy singleton to use when handling encrypted files.
    """

    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)

    @classmethod
    def decrypted(cls, file_handle):
        return cls.gpg.decrypt_file(file_handle, passphrase=settings.PASSPHRASE).data

    @classmethod
    def encrypted(cls, file_handle):
        return cls.gpg.encrypt_file(
            file_handle,
            recipients=None,
            passphrase=settings.PASSPHRASE,
            symmetric=True,
        ).data


def move_documents_and_create_thumbnails(apps, schema_editor):
    (Path(settings.MEDIA_ROOT) / "documents" / "originals").mkdir(
        parents=True,
        exist_ok=True,
    )
    (Path(settings.MEDIA_ROOT) / "documents" / "thumbnails").mkdir(
        parents=True,
        exist_ok=True,
    )

    documents: list[str] = os.listdir(Path(settings.MEDIA_ROOT) / "documents")  # noqa: PTH208

    if set(documents) == {"originals", "thumbnails"}:
        return

    print(
        colourise(
            "\n\n"
            " This is a one-time only migration to generate thumbnails for all of your\n"
            " documents so that future UIs will have something to work with. If you have\n"
            " a lot of documents though, this may take a while, so a coffee break may be\n"
            " in order."
            "\n",
            opts=("bold",),
        ),
    )

    Path(settings.SCRATCH_DIR).mkdir(parents=True, exist_ok=True)

    for f in sorted(documents):
        if not f.endswith("gpg"):
            continue

        print(
            " {} {} {}".format(
                colourise("*", fg="green"),
                colourise("Generating a thumbnail for", fg="white"),
                colourise(f, fg="cyan"),
            ),
        )

        thumb_temp: str = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)
        orig_temp: str = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)

        orig_source: Path = Path(settings.MEDIA_ROOT) / "documents" / f
        orig_target: Path = Path(orig_temp) / f.replace(".gpg", "")

        with orig_source.open("rb") as encrypted, orig_target.open("wb") as unencrypted:
            unencrypted.write(GnuPG.decrypted(encrypted))

        subprocess.Popen(
            (
                settings.CONVERT_BINARY,
                "-scale",
                "500x5000",
                "-alpha",
                "remove",
                orig_target,
                Path(thumb_temp) / "convert-%04d.png",
            ),
        ).wait()

        thumb_source: Path = Path(thumb_temp) / "convert-0000.png"
        thumb_target: Path = (
            Path(settings.MEDIA_ROOT)
            / "documents"
            / "thumbnails"
            / re.sub(r"(\d+)\.\w+(\.gpg)", "\\1.png\\2", f)
        )
        with (
            thumb_source.open("rb") as unencrypted,
            thumb_target.open("wb") as encrypted,
        ):
            encrypted.write(GnuPG.encrypted(unencrypted))

        shutil.rmtree(thumb_temp)
        shutil.rmtree(orig_temp)

        shutil.move(
            Path(settings.MEDIA_ROOT) / "documents" / f,
            Path(settings.MEDIA_ROOT) / "documents" / "originals" / f,
        )


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0011_auto_20160303_1929"),
    ]

    operations = [
        migrations.RunPython(move_documents_and_create_thumbnails),
    ]
src/documents/migrations/0013_auto_20160325_2111.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# Generated by Django 1.9.4 on 2016-03-25 21:11

import django.utils.timezone
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0012_auto_20160305_0040"),
    ]

    operations = [
        migrations.AddField(
            model_name="correspondent",
            name="match",
            field=models.CharField(blank=True, max_length=256),
        ),
        migrations.AddField(
            model_name="correspondent",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                ],
                default=1,
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="created",
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.RemoveField(
            model_name="log",
            name="component",
        ),
    ]
src/documents/migrations/0014_document_checksum.py (new file, 182 lines)
@@ -0,0 +1,182 @@
# Generated by Django 1.9.4 on 2016-03-28 19:09

import hashlib
from pathlib import Path

import django.utils.timezone
import gnupg
from django.conf import settings
from django.db import migrations
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.termcolors import colorize as colourise  # Spelling hurts me


class GnuPG:
    """
    A handy singleton to use when handling encrypted files.
    """

    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)

    @classmethod
    def decrypted(cls, file_handle):
        return cls.gpg.decrypt_file(file_handle, passphrase=settings.PASSPHRASE).data

    @classmethod
    def encrypted(cls, file_handle):
        return cls.gpg.encrypt_file(
            file_handle,
            recipients=None,
            passphrase=settings.PASSPHRASE,
            symmetric=True,
        ).data


class Document:
    """
    Django's migrations restrict access to model methods, so this is a snapshot
    of the methods that existed at the time this migration was written, since
    we need to make use of a lot of these shortcuts here.
    """

    def __init__(self, doc):
        self.pk = doc.pk
        self.correspondent = doc.correspondent
        self.title = doc.title
        self.file_type = doc.file_type
        self.tags = doc.tags
        self.created = doc.created

    def __str__(self):
        created = self.created.strftime("%Y%m%d%H%M%S")
        if self.correspondent and self.title:
            return f"{created}: {self.correspondent} - {self.title}"
        if self.correspondent or self.title:
            return f"{created}: {self.correspondent or self.title}"
        return str(created)

    @property
    def source_path(self):
        return (
            Path(settings.MEDIA_ROOT)
            / "documents"
            / "originals"
            / f"{self.pk:07}.{self.file_type}.gpg"
        )

    @property
    def source_file(self):
        return self.source_path.open("rb")

    @property
    def file_name(self):
        return slugify(str(self)) + "." + self.file_type


def set_checksums(apps, schema_editor):
    document_model = apps.get_model("documents", "Document")

    if not document_model.objects.all().exists():
        return

    print(
        colourise(
            "\n\n"
            " This is a one-time only migration to generate checksums for all\n"
            " of your existing documents. If you have a lot of documents\n"
            " though, this may take a while, so a coffee break may be in\n"
            " order."
            "\n",
            opts=("bold",),
        ),
    )

    sums = {}
    for d in document_model.objects.all():
        document = Document(d)

        print(
            " {} {} {}".format(
                colourise("*", fg="green"),
                colourise("Generating a checksum for", fg="white"),
                colourise(document.file_name, fg="cyan"),
            ),
        )

        with document.source_file as encrypted:
            checksum = hashlib.md5(GnuPG.decrypted(encrypted)).hexdigest()

        if checksum in sums:
            error = "\n{line}{p1}\n\n{doc1}\n{doc2}\n\n{p2}\n\n{code}\n\n{p3}{line}".format(
                p1=colourise(
                    "It appears that you have two identical documents in your collection and \nPaperless no longer supports this (see issue #97). The documents in question\nare:",
                    fg="yellow",
                ),
                p2=colourise(
                    "To fix this problem, you'll have to remove one of them from the database, a task\nmost easily done by running the following command in the same\ndirectory as manage.py:",
                    fg="yellow",
                ),
                p3=colourise(
                    "When that's finished, re-run the migrate, and provided that there aren't any\nother duplicates, you should be good to go.",
                    fg="yellow",
                ),
                doc1=colourise(
                    f" * {sums[checksum][1]} (id: {sums[checksum][0]})",
                    fg="red",
                ),
                doc2=colourise(
                    f" * {document.file_name} (id: {document.pk})",
                    fg="red",
                ),
                code=colourise(
                    f" $ echo 'DELETE FROM documents_document WHERE id = {document.pk};' | ./manage.py dbshell",
                    fg="green",
                ),
                line=colourise("\n{}\n".format("=" * 80), fg="white", opts=("bold",)),
            )
            raise RuntimeError(error)
        sums[checksum] = (document.pk, document.file_name)

        document_model.objects.filter(pk=document.pk).update(checksum=checksum)


def do_nothing(apps, schema_editor):
    pass


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0013_auto_20160325_2111"),
    ]

    operations = [
        migrations.AddField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                default="-",
                db_index=True,
                editable=False,
                max_length=32,
                help_text="The checksum of the original document (before it "
                "was encrypted). We use this to prevent duplicate "
                "document imports.",
            ),
            preserve_default=False,
        ),
        migrations.RunPython(set_checksums, do_nothing),
        migrations.AlterField(
            model_name="document",
            name="created",
            field=models.DateTimeField(
                db_index=True,
                default=django.utils.timezone.now,
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="modified",
            field=models.DateTimeField(auto_now=True, db_index=True),
        ),
    ]
src/documents/migrations/0015_add_insensitive_to_match.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# Generated by Django 1.10.2 on 2016-10-05 21:38

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0014_document_checksum"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                editable=False,
                help_text="The checksum of the original document (before it was encrypted). We use this to prevent duplicate document imports.",
                max_length=32,
                unique=True,
            ),
        ),
        migrations.AddField(
            model_name="correspondent",
            name="is_insensitive",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="tag",
            name="is_insensitive",
            field=models.BooleanField(default=True),
        ),
    ]
@@ -0,0 +1,92 @@
# Generated by Django 4.2.13 on 2024-06-28 17:57

import django.db.models.deletion
from django.conf import settings
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    replaces = [
        ("documents", "0015_add_insensitive_to_match"),
        ("documents", "0016_auto_20170325_1558"),
        ("documents", "0017_auto_20170512_0507"),
        ("documents", "0018_auto_20170715_1712"),
    ]

    dependencies = [
        ("documents", "0014_document_checksum"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                editable=False,
                help_text="The checksum of the original document (before it was encrypted). We use this to prevent duplicate document imports.",
                max_length=32,
                unique=True,
            ),
        ),
        migrations.AddField(
            model_name="correspondent",
            name="is_insensitive",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="tag",
            name="is_insensitive",
            field=models.BooleanField(default=True),
        ),
        migrations.AlterField(
            model_name="document",
            name="content",
            field=models.TextField(
                blank=True,
                db_index=("mysql" not in settings.DATABASES["default"]["ENGINE"]),
                help_text="The raw, text-only data of the document. This field is primarily used for searching.",
            ),
        ),
        migrations.AlterField(
            model_name="correspondent",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                    (5, "Fuzzy Match"),
                ],
                default=1,
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
            ),
        ),
        migrations.AlterField(
            model_name="tag",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                    (5, "Fuzzy Match"),
                ],
                default=1,
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="correspondent",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="documents",
                to="documents.correspondent",
            ),
        ),
    ]
src/documents/migrations/0016_auto_20170325_1558.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 1.10.5 on 2017-03-25 15:58

from django.conf import settings
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0015_add_insensitive_to_match"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="content",
            field=models.TextField(
                blank=True,
                db_index=("mysql" not in settings.DATABASES["default"]["ENGINE"]),
                help_text="The raw, text-only data of the document. This field is primarily used for searching.",
            ),
        ),
    ]
src/documents/migrations/0017_auto_20170512_0507.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# Generated by Django 1.10.5 on 2017-05-12 05:07

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0016_auto_20170325_1558"),
    ]

    operations = [
        migrations.AlterField(
            model_name="correspondent",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                    (5, "Fuzzy Match"),
                ],
                default=1,
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
            ),
        ),
        migrations.AlterField(
            model_name="tag",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                    (5, "Fuzzy Match"),
                ],
                default=1,
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
            ),
        ),
    ]
src/documents/migrations/0018_auto_20170715_1712.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 1.10.5 on 2017-07-15 17:12

import django.db.models.deletion
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0017_auto_20170512_0507"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="correspondent",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="documents",
                to="documents.Correspondent",
            ),
        ),
    ]
src/documents/migrations/0019_add_consumer_user.py
Normal file
22
src/documents/migrations/0019_add_consumer_user.py
Normal file
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 1.10.5 on 2017-07-15 17:12
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def forwards_func(apps, schema_editor):
|
||||
User.objects.create(username="consumer")
|
||||
|
||||
|
||||
def reverse_func(apps, schema_editor):
|
||||
User.objects.get(username="consumer").delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0018_auto_20170715_1712"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forwards_func, reverse_func),
|
||||
]
|
||||
src/documents/migrations/0020_document_added.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import django.utils.timezone
from django.db import migrations
from django.db import models


def set_added_time_to_created_time(apps, schema_editor):
    Document = apps.get_model("documents", "Document")
    for doc in Document.objects.all():
        doc.added = doc.created
        doc.save()


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0019_add_consumer_user"),
    ]

    operations = [
        migrations.AddField(
            model_name="document",
            name="added",
            field=models.DateTimeField(
                db_index=True,
                default=django.utils.timezone.now,
                editable=False,
            ),
        ),
        migrations.RunPython(set_added_time_to_created_time),
    ]
src/documents/migrations/0021_document_storage_type.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# Generated by Django 1.11.10 on 2018-02-04 13:07

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0020_document_added"),
    ]

    operations = [
        # Add the field with the default GPG-encrypted value
        migrations.AddField(
            model_name="document",
            name="storage_type",
            field=models.CharField(
                choices=[
                    ("unencrypted", "Unencrypted"),
                    ("gpg", "Encrypted with GNU Privacy Guard"),
                ],
                default="gpg",
                editable=False,
                max_length=11,
            ),
        ),
        # Now that the field is added, change the default to unencrypted
        migrations.AlterField(
            model_name="document",
            name="storage_type",
            field=models.CharField(
                choices=[
                    ("unencrypted", "Unencrypted"),
                    ("gpg", "Encrypted with GNU Privacy Guard"),
                ],
                default="unencrypted",
                editable=False,
                max_length=11,
            ),
        ),
    ]
src/documents/migrations/0022_auto_20181007_1420.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# Generated by Django 2.0.8 on 2018-10-07 14:20

from django.db import migrations
from django.db import models
from django.utils.text import slugify


def re_slug_all_the_things(apps, schema_editor):
    """
    Rewrite all slug values to make sure they're actually slugs before we brand
    them as uneditable.
    """

    Tag = apps.get_model("documents", "Tag")
    Correspondent = apps.get_model("documents", "Correspondent")

    for klass in (Tag, Correspondent):
        for instance in klass.objects.all():
            klass.objects.filter(pk=instance.pk).update(slug=slugify(instance.slug))


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0021_document_storage_type"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="tag",
            options={"ordering": ("name",)},
        ),
        migrations.AlterField(
            model_name="correspondent",
            name="slug",
            field=models.SlugField(blank=True, editable=False),
        ),
        migrations.AlterField(
            model_name="document",
            name="file_type",
            field=models.CharField(
                choices=[
                    ("pdf", "PDF"),
                    ("png", "PNG"),
                    ("jpg", "JPG"),
                    ("gif", "GIF"),
                    ("tiff", "TIFF"),
                    ("txt", "TXT"),
                    ("csv", "CSV"),
                    ("md", "MD"),
                ],
                editable=False,
                max_length=4,
            ),
        ),
        migrations.AlterField(
            model_name="tag",
            name="slug",
            field=models.SlugField(blank=True, editable=False),
        ),
        migrations.RunPython(re_slug_all_the_things, migrations.RunPython.noop),
    ]
Some files were not shown because too many files have changed in this diff.