Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-04-02 13:45:10 -05:00)

Merge branch 'dev'

Commit 8eb1dc4f62
3
.codespellrc
Normal file
@ -0,0 +1,3 @@
[codespell]
write-changes = True
ignore-words-list = criterias,afterall,valeu,ureue,equest,ure
6
.github/workflows/ci.yml
vendored
@ -169,7 +169,7 @@ jobs:
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down

install-frontend-depedendencies:
name: "Install Frontend Dependendencies"
name: "Install Frontend Dependencies"
runs-on: ubuntu-22.04
needs:
- pre-commit
@ -182,7 +182,7 @@ jobs:
node-version: 20.x
cache: 'npm'
cache-dependency-path: 'src-ui/package-lock.json'
- name: Cache frontend depdendencies
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v3
with:
@ -219,7 +219,7 @@ jobs:
node-version: 20.x
cache: 'npm'
cache-dependency-path: 'src-ui/package-lock.json'
- name: Cache frontend depdendencies
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v3
with:
1
.github/workflows/crowdin.yml
vendored
@ -7,6 +7,7 @@ on:
push:
paths: [
'src/locale/**',
'src-ui/messages.xlf',
'src-ui/src/locale/**'
]
branches: [ dev ]
2
.github/workflows/repo-maintenance.yml
vendored
@ -81,7 +81,7 @@ jobs:
console.log(`Found ${result.repository.discussions.nodes.length} open answered discussions`)

for (const discussion of result.repository.discussions.nodes) {
console.log(`Closing dicussion #${discussion.number} (${discussion.id})`)
console.log(`Closing discussion #${discussion.number} (${discussion.id})`)

const addCommentMutation = `mutation($discussion:ID!, $body:String!) {
addDiscussionComment(input:{discussionId:$discussion, body:$body}) {
@ -28,6 +28,14 @@ repos:
- svg
- id: check-case-conflict
- id: detect-private-key
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
exclude: "(^src-ui/src/locale/)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
exclude_types:
- pofile
- json
- repo: https://github.com/pre-commit/mirrors-prettier
rev: 'v3.1.0'
hooks:
@ -39,11 +47,11 @@ repos:
exclude: "(^Pipfile\\.lock$)"
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.1.5'
rev: 'v0.1.11'
hooks:
- id: ruff
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0
rev: 23.12.1
hooks:
- id: black
# Dockerfile hooks
@ -189,7 +189,7 @@ RUN set -eux \
&& chmod 755 /usr/local/bin/paperless_cmd.sh \
&& mv flower-conditional.sh /usr/local/bin/flower-conditional.sh \
&& chmod 755 /usr/local/bin/flower-conditional.sh \
&& echo "Installing managment commands" \
&& echo "Installing management commands" \
&& chmod +x install_management_commands.sh \
&& ./install_management_commands.sh
4
Pipfile
@ -7,7 +7,7 @@ name = "pypi"
dateparser = "~=1.2"
# WARNING: django does not use semver.
# Only patch versions are guaranteed to not introduce breaking changes.
django = "~=4.2.8"
django = "~=4.2.9"
django-auditlog = "*"
django-celery-results = "*"
django-compression-middleware = "*"
@ -57,7 +57,7 @@ zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}

[dev-packages]
# Linting
black = "==23.11.0"
black = "*"
pre-commit = "*"
ruff = "*"
# Testing
1591
Pipfile.lock
generated
File diff suppressed because it is too large
@ -1,6 +1,6 @@
# Docker Compose file for running paperless testing with actual gotenberg
# and Tika containers for a more end to end test of the Tika related functionality
# Can be used locally or by the CI to start the nessecary containers with the
# Can be used locally or by the CI to start the necessary containers with the
# correct networking for the tests

version: "3.7"
@ -613,7 +613,7 @@ scan a completely new "odd numbered pages" one. The old staging file will get di

The collation feature can be used together with the [subdirs as tags](configuration.md#consume_config)
feature (but this is not a requirement). Just create a correctly named double-sided subdir
in the hierachy and upload your scans there. For example, both `double-sided/foo/bar` as
in the hierarchy and upload your scans there. For example, both `double-sided/foo/bar` as
well as `foo/bar/double-sided` will cause the collated document to be treated as if it
were uploaded into `foo/bar` and receive both `foo` and `bar` tags, but not `double-sided`.
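The documentation excerpt above describes how the consume subdirectory, minus the `double-sided` component, becomes the document's tags. A minimal Python sketch of that mapping, purely illustrative and not Paperless-ngx's actual implementation:

```python
from pathlib import PurePosixPath

# Illustrative only: mimic the documented behaviour where a scan uploaded to
# "double-sided/foo/bar" or "foo/bar/double-sided" ends up tagged "foo" and
# "bar" but never "double-sided".
def tags_from_consume_path(relative_path: str, collate_dirname: str = "double-sided") -> list[str]:
    parts = PurePosixPath(relative_path).parent.parts  # directories only, file name dropped
    return [part for part in parts if part != collate_dirname]

print(tags_from_consume_path("double-sided/foo/bar/scan.pdf"))  # ['foo', 'bar']
print(tags_from_consume_path("foo/bar/double-sided/scan.pdf"))  # ['foo', 'bar']
```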
@ -427,7 +427,7 @@ Exports generated in Paperless-ngx v2.0.0–2.0.1 will **not** contain consumpti
|
||||
- Enhancement: support default permissions for object creation via frontend [@shamoon](https://github.com/shamoon) ([#4233](https://github.com/paperless-ngx/paperless-ngx/pull/4233))
|
||||
- Fix: Set permissions before declaring volumes for rootless [@stumpylog](https://github.com/stumpylog) ([#4225](https://github.com/paperless-ngx/paperless-ngx/pull/4225))
|
||||
- Enhancement: bulk edit object permissions [@shamoon](https://github.com/shamoon) ([#4176](https://github.com/paperless-ngx/paperless-ngx/pull/4176))
|
||||
- Enhancement: Allow the user the specifiy the export zip file name [@stumpylog](https://github.com/stumpylog) ([#4189](https://github.com/paperless-ngx/paperless-ngx/pull/4189))
|
||||
- Enhancement: Allow the user the specify the export zip file name [@stumpylog](https://github.com/stumpylog) ([#4189](https://github.com/paperless-ngx/paperless-ngx/pull/4189))
|
||||
- Feature: Share links [@shamoon](https://github.com/shamoon) ([#3996](https://github.com/paperless-ngx/paperless-ngx/pull/3996))
|
||||
- Chore: update docker image and ci to node 20 [@shamoon](https://github.com/shamoon) ([#4184](https://github.com/paperless-ngx/paperless-ngx/pull/4184))
|
||||
- Fix: Trim unneeded libraries from Docker image [@stumpylog](https://github.com/stumpylog) ([#4183](https://github.com/paperless-ngx/paperless-ngx/pull/4183))
|
||||
@ -637,7 +637,7 @@ Exports generated in Paperless-ngx v2.0.0–2.0.1 will **not** contain consumpti
|
||||
- Enhancement: bulk edit object permissions [@shamoon](https://github.com/shamoon) ([#4176](https://github.com/paperless-ngx/paperless-ngx/pull/4176))
|
||||
- Fix: completely hide upload widget if user does not have permissions [@nawramm](https://github.com/nawramm) ([#4198](https://github.com/paperless-ngx/paperless-ngx/pull/4198))
|
||||
- Fix: application of theme color vars at root [@shamoon](https://github.com/shamoon) ([#4193](https://github.com/paperless-ngx/paperless-ngx/pull/4193))
|
||||
- Enhancement: Allow the user the specifiy the export zip file name [@stumpylog](https://github.com/stumpylog) ([#4189](https://github.com/paperless-ngx/paperless-ngx/pull/4189))
|
||||
- Enhancement: Allow the user the specify the export zip file name [@stumpylog](https://github.com/stumpylog) ([#4189](https://github.com/paperless-ngx/paperless-ngx/pull/4189))
|
||||
- Feature: Share links [@shamoon](https://github.com/shamoon) ([#3996](https://github.com/paperless-ngx/paperless-ngx/pull/3996))
|
||||
- Chore: change dark mode to use Bootstrap's color modes [@lkster](https://github.com/lkster) ([#4174](https://github.com/paperless-ngx/paperless-ngx/pull/4174))
|
||||
- Fix: support storage path placeholder via API [@shamoon](https://github.com/shamoon) ([#4179](https://github.com/paperless-ngx/paperless-ngx/pull/4179))
|
||||
@ -669,11 +669,11 @@ Exports generated in Paperless-ngx v2.0.0–2.0.1 will **not** contain consumpti
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Fix: ghostscript rendering error doesnt trigger frontend failure message [@shamoon](https://github.com/shamoon) ([#4092](https://github.com/paperless-ngx/paperless-ngx/pull/4092))
|
||||
- Fix: ghostscript rendering error doesn't trigger frontend failure message [@shamoon](https://github.com/shamoon) ([#4092](https://github.com/paperless-ngx/paperless-ngx/pull/4092))
|
||||
|
||||
### All App Changes
|
||||
|
||||
- Fix: ghostscript rendering error doesnt trigger frontend failure message [@shamoon](https://github.com/shamoon) ([#4092](https://github.com/paperless-ngx/paperless-ngx/pull/4092))
|
||||
- Fix: ghostscript rendering error doesn't trigger frontend failure message [@shamoon](https://github.com/shamoon) ([#4092](https://github.com/paperless-ngx/paperless-ngx/pull/4092))
|
||||
|
||||
## paperless-ngx 1.17.3
|
||||
|
||||
@ -1340,7 +1340,7 @@ Exports generated in Paperless-ngx v2.0.0–2.0.1 will **not** contain consumpti
|
||||
|
||||
### Documentation
|
||||
|
||||
- Whitespace changes, making sure the example is correcly aligned [@denilsonsa](https://github.com/denilsonsa) ([#3089](https://github.com/paperless-ngx/paperless-ngx/pull/3089))
|
||||
- Whitespace changes, making sure the example is correctly aligned [@denilsonsa](https://github.com/denilsonsa) ([#3089](https://github.com/paperless-ngx/paperless-ngx/pull/3089))
|
||||
- Docs: Include additional information about barcodes [@stumpylog](https://github.com/stumpylog) ([#2889](https://github.com/paperless-ngx/paperless-ngx/pull/2889))
|
||||
- Fix formatting in Setup documentation page [@igrybkov](https://github.com/igrybkov) ([#2880](https://github.com/paperless-ngx/paperless-ngx/pull/2880))
|
||||
- [Documentation] Update docker-compose steps to support podman [@white-gecko](https://github.com/white-gecko) ([#2855](https://github.com/paperless-ngx/paperless-ngx/pull/2855))
|
||||
@ -1395,7 +1395,7 @@ Exports generated in Paperless-ngx v2.0.0–2.0.1 will **not** contain consumpti
|
||||
- Fix: update PaperlessTask on hard failures [@shamoon](https://github.com/shamoon) ([#3062](https://github.com/paperless-ngx/paperless-ngx/pull/3062))
|
||||
- Bump typescript from 4.8.4 to 4.9.5 in /src-ui [@dependabot](https://github.com/dependabot) ([#3071](https://github.com/paperless-ngx/paperless-ngx/pull/3071))
|
||||
- Bulk Bump npm packages 04.23 [@dependabot](https://github.com/dependabot) ([#3068](https://github.com/paperless-ngx/paperless-ngx/pull/3068))
|
||||
- Fix: Hide UI tour steps if user doesnt have permissions [@shamoon](https://github.com/shamoon) ([#3060](https://github.com/paperless-ngx/paperless-ngx/pull/3060))
|
||||
- Fix: Hide UI tour steps if user doesn't have permissions [@shamoon](https://github.com/shamoon) ([#3060](https://github.com/paperless-ngx/paperless-ngx/pull/3060))
|
||||
- Fix: Hide Permissions tab if user cannot view users [@shamoon](https://github.com/shamoon) ([#3061](https://github.com/paperless-ngx/paperless-ngx/pull/3061))
|
||||
- v1.14.0 delete document fixes [@shamoon](https://github.com/shamoon) ([#3020](https://github.com/paperless-ngx/paperless-ngx/pull/3020))
|
||||
- Bump wait-on from 6.0.1 to 7.0.1 in /src-ui [@dependabot](https://github.com/dependabot) ([#2990](https://github.com/paperless-ngx/paperless-ngx/pull/2990))
|
||||
@ -1600,7 +1600,7 @@ older comments. The Docker image will automatically perform this reindex, bare m
|
||||
- [Docs] Add Paperless Mobile app to docs [@astubenbord](https://github.com/astubenbord) ([#2378](https://github.com/paperless-ngx/paperless-ngx/pull/2378))
|
||||
- Tiny spelling change [@veverkap](https://github.com/veverkap) ([#2369](https://github.com/paperless-ngx/paperless-ngx/pull/2369))
|
||||
- Documentation: update build instructions to remove deprecated [@shamoon](https://github.com/shamoon) ([#2334](https://github.com/paperless-ngx/paperless-ngx/pull/2334))
|
||||
- [Documentation] Add note that PAPERLESS_URL cant contain a path [@shamoon](https://github.com/shamoon) ([#2319](https://github.com/paperless-ngx/paperless-ngx/pull/2319))
|
||||
- [Documentation] Add note that PAPERLESS_URL can't contain a path [@shamoon](https://github.com/shamoon) ([#2319](https://github.com/paperless-ngx/paperless-ngx/pull/2319))
|
||||
- [Documentation] Add v1.11.3 changelog [@github-actions](https://github.com/github-actions) ([#2311](https://github.com/paperless-ngx/paperless-ngx/pull/2311))
|
||||
|
||||
### Maintenance
|
||||
@ -1931,7 +1931,7 @@ Versions 1.11.1 and 1.11.2 contain bug fixes from v1.11.0 that prevented use of
|
||||
|
||||
### All App Changes
|
||||
|
||||
- Add info that re-do OCR doesnt automatically refresh content [@shamoon](https://github.com/shamoon) ([#2025](https://github.com/paperless-ngx/paperless-ngx/pull/2025))
|
||||
- Add info that re-do OCR doesn't automatically refresh content [@shamoon](https://github.com/shamoon) ([#2025](https://github.com/paperless-ngx/paperless-ngx/pull/2025))
|
||||
- Bugfix: Fix created_date being a string [@stumpylog](https://github.com/stumpylog) ([#2023](https://github.com/paperless-ngx/paperless-ngx/pull/2023))
|
||||
- Bugfix: Fixes an issue with mixed text and images when redoing OCR [@stumpylog](https://github.com/stumpylog) ([#2017](https://github.com/paperless-ngx/paperless-ngx/pull/2017))
|
||||
- Bugfix: Don't allow exceptions during date parsing to fail consume [@stumpylog](https://github.com/stumpylog) ([#1998](https://github.com/paperless-ngx/paperless-ngx/pull/1998))
|
||||
@ -2342,7 +2342,7 @@ Versions 1.11.1 and 1.11.2 contain bug fixes from v1.11.0 that prevented use of
|
||||
- Fix local Docker image building [\@stumpylog](https://github.com/stumpylog) ([\#849](https://github.com/paperless-ngx/paperless-ngx/pull/849))
|
||||
- Fix: show errors on invalid date input [\@shamoon](https://github.com/shamoon) ([\#862](https://github.com/paperless-ngx/paperless-ngx/pull/862))
|
||||
- Fix: Older dates do not display on frontend [\@shamoon](https://github.com/shamoon) ([\#852](https://github.com/paperless-ngx/paperless-ngx/pull/852))
|
||||
- Fixes IMAP UTF8 Authenication [\@stumpylog](https://github.com/stumpylog) ([\#725](https://github.com/paperless-ngx/paperless-ngx/pull/725))
|
||||
- Fixes IMAP UTF8 Authentication [\@stumpylog](https://github.com/stumpylog) ([\#725](https://github.com/paperless-ngx/paperless-ngx/pull/725))
|
||||
- Fix password field remains visible [\@shamoon](https://github.com/shamoon) ([\#840](https://github.com/paperless-ngx/paperless-ngx/pull/840))
|
||||
- Fixes Pillow build for armv7 [\@stumpylog](https://github.com/stumpylog) ([\#815](https://github.com/paperless-ngx/paperless-ngx/pull/815))
|
||||
- Update frontend localization source file [\@shamoon](https://github.com/shamoon) ([\#814](https://github.com/paperless-ngx/paperless-ngx/pull/814))
|
||||
@ -2463,7 +2463,7 @@ Versions 1.11.1 and 1.11.2 contain bug fixes from v1.11.0 that prevented use of
|
||||
[\@shamoon](https://github.com/shamoon) ([\#313](https://github.com/paperless-ngx/paperless-ngx/pull/313))
|
||||
- Fix imap tools bug [\@stumpylog](https://github.com/stumpylog)
|
||||
([\#393](https://github.com/paperless-ngx/paperless-ngx/pull/393))
|
||||
- Fix filterable dropdown buttons arent translated
|
||||
- Fix filterable dropdown buttons aren't translated
|
||||
[\@shamoon](https://github.com/shamoon) ([\#366](https://github.com/paperless-ngx/paperless-ngx/pull/366))
|
||||
- Fix 224: "Auto-detected date is day before receipt date"
|
||||
[\@a17t](https://github.com/a17t) ([\#246](https://github.com/paperless-ngx/paperless-ngx/pull/246))
|
||||
@ -3299,7 +3299,7 @@ primarily.
|
||||
[OCRmyPDF](https://github.com/jbarlow83/OCRmyPDF) to perform OCR
|
||||
on documents. It still uses tesseract under the hood, but the
|
||||
PDF parser of Paperless has changed considerably and will behave
|
||||
different for some douments.
|
||||
different for some documents.
|
||||
- OCRmyPDF creates archived PDF/A documents with embedded text
|
||||
that can be selected in the front end.
|
||||
- Paperless stores archived versions of documents alongside with
|
||||
@ -3350,7 +3350,7 @@ primarily.
|
||||
crash.
|
||||
- Mail handling no longer exits entirely when encountering errors.
|
||||
It will skip the account/rule/message on which the error
|
||||
occured.
|
||||
occurred.
|
||||
- Assigning correspondents from mail sender names failed for very
|
||||
long names. Paperless no longer assigns correspondents in these
|
||||
cases.
|
||||
|
@ -96,7 +96,7 @@ steps described in [Docker setup](#docker_hub) automatically.
- /home/jonaswinkler/paperless-inbox:/usr/src/paperless/consume
```

Don't change the part after the colon or paperless wont find your
Don't change the part after the colon or paperless won't find your
documents.

You may also need to change the default port that the webserver will
@ -138,7 +138,7 @@ command:
You might encounter errors such as:

```shell-session
The following error occured while consuming document.pdf: [Errno 13] Permission denied: '/usr/src/paperless/src/../consume/document.pdf'
The following error occurred while consuming document.pdf: [Errno 13] Permission denied: '/usr/src/paperless/src/../consume/document.pdf'
```

This happens when paperless does not have permission to delete files
@ -149,7 +149,7 @@ different means. These are as follows:
- **Flag:** Sets the 'important' flag on mails with consumed
documents. Paperless will not consume flagged mails.
- **Move to folder:** Moves consumed mails out of the way so that
paperless wont consume them again.
paperless won't consume them again.
- **Add custom Tag:** Adds a custom tag to mails with consumed
documents (the IMAP standard calls these "keywords"). Paperless
will not consume mails already tagged. Not all mail servers support
@ -411,7 +411,7 @@ The following custom field types are supported:

## Share Links

Paperless-ngx added the abiltiy to create shareable links to files in version 2.0. You can find the button for this on the document detail screen.
Paperless-ngx added the ability to create shareable links to files in version 2.0. You can find the button for this on the document detail screen.

- Share links do not require a user to login and thus link directly to a file.
- Links are unique and are of the form `{paperless-url}/share/{randomly-generated-slug}`.
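As a rough illustration of the documented link shape `{paperless-url}/share/{randomly-generated-slug}`: the slug only needs to be an unguessable random string. The generation shown below is an assumption for illustration, not Paperless-ngx's real code.

```python
import secrets

# Hypothetical sketch: build a share link of the documented shape
# {paperless-url}/share/{randomly-generated-slug}. Using secrets.token_urlsafe
# for the slug is an assumption, not the project's actual implementation.
def build_share_link(paperless_url: str) -> str:
    slug = secrets.token_urlsafe(32)  # unguessable, URL-safe random slug
    return f"{paperless_url.rstrip('/')}/share/{slug}"

print(build_share_link("https://paperless.example.com"))
```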
@ -388,19 +388,18 @@
|
||||
<context context-type="linenumber">208</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3008420115644088420" datatype="html">
|
||||
<source>Configuration</source>
|
||||
<trans-unit id="9063918187161876141" datatype="html">
|
||||
<source>Application Configuration</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
||||
<context context-type="linenumber">1</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8833245444477474977" datatype="html">
|
||||
<source>Global Paperless-ngx configuration options</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
|
||||
<context context-type="linenumber">276</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
|
||||
<context context-type="linenumber">280</context>
|
||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
||||
<context context-type="linenumber">1</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="187187500641108332" datatype="html">
|
||||
@ -2286,6 +2285,17 @@
|
||||
<context context-type="linenumber">261</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3008420115644088420" datatype="html">
|
||||
<source>Configuration</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
|
||||
<context context-type="linenumber">276</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
|
||||
<context context-type="linenumber">280</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6626289114556551491" datatype="html">
|
||||
<source>File Tasks<x id="START_BLOCK_IF" equiv-text="@if (tasksService.failedFileTasks.length > 0) {"/><x id="START_TAG_SPAN_1" ctype="x-span_1" equiv-text="<span>"/><x id="START_TAG_SPAN" ctype="x-span" equiv-text="<span class="badge bg-danger ms-2">"/><x id="INTERPOLATION" equiv-text="{{tasksService.failedFileTasks.length}}"/><x id="CLOSE_TAG_SPAN" ctype="x-span" equiv-text="</span>"/><x id="CLOSE_TAG_SPAN" ctype="x-span" equiv-text="</span>"/><x id="CLOSE_BLOCK_IF" equiv-text="}"/></source>
|
||||
<context-group purpose="location">
|
||||
@ -3696,7 +3706,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/input/switch/switch.component.html</context>
|
||||
<context context-type="linenumber">10</context>
|
||||
<context context-type="linenumber">17</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/input/text/text.component.html</context>
|
||||
@ -3817,6 +3827,13 @@
|
||||
<context context-type="linenumber">92</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6541407358060244620" datatype="html">
|
||||
<source>Note: value has not yet been set and will not apply until explicitly changed</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/input/switch/switch.component.html</context>
|
||||
<context context-type="linenumber">45</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6560126119609945418" datatype="html">
|
||||
<source>Add tag</source>
|
||||
<context-group purpose="location">
|
||||
|
@ -1,4 +1,4 @@
|
||||
<pngx-page-header title="Configuration" i18n-title></pngx-page-header>
|
||||
<pngx-page-header title="Application Configuration" subTitle="Global Paperless-ngx configuration options" i18n-title i18n-subTitle></pngx-page-header>
|
||||
|
||||
<form [formGroup]="configForm" (ngSubmit)="saveConfig()" class="pb-4">
|
||||
|
||||
@ -27,7 +27,7 @@
|
||||
@switch (option.type) {
|
||||
@case (ConfigOptionType.Select) { <pngx-input-select [formControlName]="option.key" [error]="errors[option.key]" [items]="option.choices" [allowNull]="true"></pngx-input-select> }
|
||||
@case (ConfigOptionType.Number) { <pngx-input-number [formControlName]="option.key" [error]="errors[option.key]" [showAdd]="false"></pngx-input-number> }
|
||||
@case (ConfigOptionType.Boolean) { <pngx-input-switch [formControlName]="option.key" [error]="errors[option.key]" [horizontal]="true" title="Enable" i18n-title></pngx-input-switch> }
|
||||
@case (ConfigOptionType.Boolean) { <pngx-input-switch [formControlName]="option.key" [error]="errors[option.key]" [showUnsetNote]="true" [horizontal]="true" title="Enable" i18n-title></pngx-input-switch> }
|
||||
@case (ConfigOptionType.String) { <pngx-input-text [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-text> }
|
||||
@case (ConfigOptionType.JSON) { <pngx-input-text [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-text> }
|
||||
}
|
||||
|
@ -363,7 +363,7 @@ export class SettingsComponent
|
||||
}
|
||||
|
||||
ngOnDestroy() {
|
||||
if (this.isDirty) this.settings.updateAppearanceSettings() // in case user changed appearance but didnt save
|
||||
if (this.isDirty) this.settings.updateAppearanceSettings() // in case user changed appearance but didn't save
|
||||
this.storeSub && this.storeSub.unsubscribe()
|
||||
this.settings.organizingSidebarSavedViews = false
|
||||
}
|
||||
|
@ -248,7 +248,7 @@ describe('AppFrameComponent', () => {
|
||||
expect(toastSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should support collapsable menu', () => {
|
||||
it('should support collapsible menu', () => {
|
||||
const button: HTMLButtonElement = (
|
||||
fixture.nativeElement as HTMLDivElement
|
||||
).querySelector('button[data-toggle=collapse]')
|
||||
|
@ -97,7 +97,7 @@ export abstract class EditDialogComponent<
|
||||
})
|
||||
}
|
||||
|
||||
// wait to enable close button so it doesnt steal focus from input since its the first clickable element in the DOM
|
||||
// wait to enable close button so it doesn't steal focus from input since its the first clickable element in the DOM
|
||||
setTimeout(() => {
|
||||
this.closeEnabled = true
|
||||
})
|
||||
|
@ -99,7 +99,7 @@
|
||||
<input type="hidden" formControlName="id" />
|
||||
<div class="row">
|
||||
<div class="col">
|
||||
<pngx-input-text i18n-title title="Assign title" formControlName="assign_title" i18n-hint hint="Can include some placeholders, see <a target='_blank' href='https://docs.paperless-ngx.com/usage/#workflows'>documentation</a>." [error]="error?.assign_title"></pngx-input-text>
|
||||
<pngx-input-text i18n-title title="Assign title" formControlName="assign_title" i18n-hint hint="Can include some placeholders, see <a target='_blank' href='https://docs.paperless-ngx.com/usage/#workflows'>documentation</a>." [error]="error?.actions?.[i]?.assign_title"></pngx-input-text>
|
||||
<pngx-input-tags [allowCreate]="false" i18n-title title="Assign tags" formControlName="assign_tags"></pngx-input-tags>
|
||||
<pngx-input-select i18n-title title="Assign document type" [items]="documentTypes" [allowNull]="true" formControlName="assign_document_type"></pngx-input-select>
|
||||
<pngx-input-select i18n-title title="Assign correspondent" [items]="correspondents" [allowNull]="true" formControlName="assign_correspondent"></pngx-input-select>
|
||||
|
@ -56,7 +56,7 @@ describe('NumberComponent', () => {
|
||||
component.step = 0.1
|
||||
component.writeValue(12.3456)
|
||||
expect(component.value).toEqual(12.3456)
|
||||
// float (step = .1) doesnt force 2 decimals
|
||||
// float (step = .1) doesn't force 2 decimals
|
||||
component.writeValue(11.1)
|
||||
expect(component.value).toEqual(11.1)
|
||||
})
|
||||
|
@ -28,7 +28,7 @@ describe('PasswordComponent', () => {
|
||||
|
||||
it('should support use of input field', () => {
|
||||
expect(component.value).toBeUndefined()
|
||||
// TODO: why doesnt this work?
|
||||
// TODO: why doesn't this work?
|
||||
// input.value = 'foo'
|
||||
// input.dispatchEvent(new Event('change'))
|
||||
// fixture.detectChanges()
|
||||
|
@ -2,7 +2,14 @@
|
||||
<div class="row">
|
||||
@if (!horizontal) {
|
||||
<div class="d-flex align-items-center position-relative hidden-button-container col-md-3">
|
||||
<label class="form-label" [for]="inputId">{{title}}</label>
|
||||
<label class="form-label" [for]="inputId" [ngbTooltip]="showUnsetNote && isUnset ? tipContent: null" placement="end">
|
||||
{{title}}
|
||||
@if (showUnsetNote && isUnset) {
|
||||
<svg class="sidebaricon-sm ms-1" fill="currentColor">
|
||||
<use xlink:href="assets/bootstrap-icons.svg#exclamation-triangle"/>
|
||||
</svg>
|
||||
}
|
||||
</label>
|
||||
@if (removable) {
|
||||
<button type="button" class="btn btn-sm btn-danger position-absolute left-0" (click)="removed.emit(this)">
|
||||
<svg class="sidebaricon" fill="currentColor">
|
||||
@ -16,7 +23,14 @@
|
||||
<div class="form-check form-switch">
|
||||
<input #inputField type="checkbox" class="form-check-input" [id]="inputId" [(ngModel)]="value" (change)="onChange(value)" (blur)="onTouched()" [disabled]="disabled">
|
||||
@if (horizontal) {
|
||||
<label class="form-check-label" [for]="inputId">{{title}}</label>
|
||||
<label class="form-check-label" [class.text-muted]="showUnsetNote && isUnset" [for]="inputId" [ngbTooltip]="showUnsetNote && isUnset ? tipContent: null" placement="end">
|
||||
{{title}}
|
||||
@if (showUnsetNote && isUnset) {
|
||||
<svg class="sidebaricon-sm ms-1" fill="currentColor">
|
||||
<use xlink:href="assets/bootstrap-icons.svg#exclamation-triangle"/>
|
||||
</svg>
|
||||
}
|
||||
</label>
|
||||
}
|
||||
@if (hint) {
|
||||
<div class="form-text text-muted">{{hint}}</div>
|
||||
@ -25,3 +39,8 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<ng-template #tipContent>
|
||||
<span class="text-light fst-italic" i18n>Note: value has not yet been set and will not apply until explicitly changed</span>
|
||||
</ng-template>
|
||||
|
@ -5,6 +5,7 @@ import {
|
||||
NG_VALUE_ACCESSOR,
|
||||
ReactiveFormsModule,
|
||||
} from '@angular/forms'
|
||||
import { NgbTooltipModule } from '@ng-bootstrap/ng-bootstrap'
|
||||
|
||||
describe('SwitchComponent', () => {
|
||||
let component: SwitchComponent
|
||||
@ -15,7 +16,7 @@ describe('SwitchComponent', () => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [SwitchComponent],
|
||||
providers: [],
|
||||
imports: [FormsModule, ReactiveFormsModule],
|
||||
imports: [FormsModule, ReactiveFormsModule, NgbTooltipModule],
|
||||
}).compileComponents()
|
||||
|
||||
fixture = TestBed.createComponent(SwitchComponent)
|
||||
@ -36,4 +37,9 @@ describe('SwitchComponent', () => {
|
||||
fixture.detectChanges()
|
||||
expect(component.value).toBeFalsy()
|
||||
})
|
||||
|
||||
it('should show note if unset', () => {
|
||||
component.value = null
|
||||
expect(component.isUnset).toBeTruthy()
|
||||
})
|
||||
})
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Component, forwardRef } from '@angular/core'
|
||||
import { Component, Input, forwardRef } from '@angular/core'
|
||||
import { NG_VALUE_ACCESSOR } from '@angular/forms'
|
||||
import { AbstractInputComponent } from '../abstract-input'
|
||||
|
||||
@ -15,7 +15,14 @@ import { AbstractInputComponent } from '../abstract-input'
|
||||
styleUrls: ['./switch.component.scss'],
|
||||
})
|
||||
export class SwitchComponent extends AbstractInputComponent<boolean> {
|
||||
@Input()
|
||||
showUnsetNote: boolean = false
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
}
|
||||
|
||||
get isUnset(): boolean {
|
||||
return this.value === null || this.value === undefined
|
||||
}
|
||||
}
|
||||
|
@ -27,7 +27,7 @@ describe('TextComponent', () => {
|
||||
|
||||
it('should support use of input field', () => {
|
||||
expect(component.value).toBeUndefined()
|
||||
// TODO: why doesnt this work?
|
||||
// TODO: why doesn't this work?
|
||||
// input.value = 'foo'
|
||||
// input.dispatchEvent(new Event('change'))
|
||||
// fixture.detectChanges()
|
||||
|
@ -27,7 +27,7 @@ describe('TextComponent', () => {
|
||||
|
||||
it('should support use of input field', () => {
|
||||
expect(component.value).toBeUndefined()
|
||||
// TODO: why doesnt this work?
|
||||
// TODO: why doesn't this work?
|
||||
// input.value = 'foo'
|
||||
// input.dispatchEvent(new Event('change'))
|
||||
// fixture.detectChanges()
|
||||
|
@ -303,7 +303,7 @@ describe('DocumentDetailComponent', () => {
|
||||
discardPeriodicTasks()
|
||||
}))
|
||||
|
||||
it('should update title before doc change if wasnt updated via debounce', fakeAsync(() => {
|
||||
it('should update title before doc change if was not updated via debounce', fakeAsync(() => {
|
||||
initNormally()
|
||||
component.titleInput.value = 'Foo Bar'
|
||||
component.titleInput.inputField.nativeElement.dispatchEvent(
|
||||
|
@ -157,7 +157,7 @@ export class DocumentDetailComponent
|
||||
|
||||
@ViewChild('nav') nav: NgbNav
|
||||
@ViewChild('pdfPreview') set pdfPreview(element) {
|
||||
// this gets called when compontent added or removed from DOM
|
||||
// this gets called when component added or removed from DOM
|
||||
if (
|
||||
element &&
|
||||
element.nativeElement.offsetParent !== null &&
|
||||
@ -316,7 +316,7 @@ export class DocumentDetailComponent
|
||||
.subscribe({
|
||||
next: (titleValue) => {
|
||||
// In the rare case when the field changed just after debounced event was fired.
|
||||
// We dont want to overwrite whats actually in the text field, so just return
|
||||
// We dont want to overwrite what's actually in the text field, so just return
|
||||
if (titleValue !== this.titleInput.value) return
|
||||
|
||||
this.title = titleValue
|
||||
|
@ -82,7 +82,7 @@ export class DocumentCardLargeComponent extends ComponentWithPermissions {
|
||||
// only show notes with a match
|
||||
highlights = (this.document['__search_hit__'].note_highlights as string)
|
||||
.split(',')
|
||||
.filter((higlight) => higlight.includes('<span'))
|
||||
.filter((highlight) => highlight.includes('<span'))
|
||||
}
|
||||
return highlights
|
||||
}
|
||||
|
@ -355,7 +355,7 @@ describe('FilterEditorComponent', () => {
|
||||
expect(component.textFilterTarget).toEqual('fulltext-morelike') // TEXT_FILTER_TARGET_FULLTEXT_MORELIKE
|
||||
expect(moreLikeSpy).toHaveBeenCalledWith(1)
|
||||
expect(component.textFilter).toEqual('Foo Bar')
|
||||
// we have to do this here because it cant be done by user input
|
||||
// we have to do this here because it can't be done by user input
|
||||
expect(component.filterRules).toEqual([
|
||||
{
|
||||
rule_type: FILTER_FULLTEXT_MORELIKE,
|
||||
@ -1264,7 +1264,7 @@ describe('FilterEditorComponent', () => {
|
||||
|
||||
dateCreatedAfter.nativeElement.value = '05/14/2023'
|
||||
// dateCreatedAfter.triggerEventHandler('change')
|
||||
// TODO: why isnt ngModel triggering this on change?
|
||||
// TODO: why isn't ngModel triggering this on change?
|
||||
component.dateCreatedAfter = '2023-05-14'
|
||||
fixture.detectChanges()
|
||||
tick(400)
|
||||
@ -1284,7 +1284,7 @@ describe('FilterEditorComponent', () => {
|
||||
|
||||
dateCreatedBefore.nativeElement.value = '05/14/2023'
|
||||
// dateCreatedBefore.triggerEventHandler('change')
|
||||
// TODO: why isnt ngModel triggering this on change?
|
||||
// TODO: why isn't ngModel triggering this on change?
|
||||
component.dateCreatedBefore = '2023-05-14'
|
||||
fixture.detectChanges()
|
||||
tick(400)
|
||||
@ -1341,7 +1341,7 @@ describe('FilterEditorComponent', () => {
|
||||
|
||||
dateAddedAfter.nativeElement.value = '05/14/2023'
|
||||
// dateAddedAfter.triggerEventHandler('change')
|
||||
// TODO: why isnt ngModel triggering this on change?
|
||||
// TODO: why isn't ngModel triggering this on change?
|
||||
component.dateAddedAfter = '2023-05-14'
|
||||
fixture.detectChanges()
|
||||
tick(400)
|
||||
@ -1361,7 +1361,7 @@ describe('FilterEditorComponent', () => {
|
||||
|
||||
dateAddedBefore.nativeElement.value = '05/14/2023'
|
||||
// dateAddedBefore.triggerEventHandler('change')
|
||||
// TODO: why isnt ngModel triggering this on change?
|
||||
// TODO: why isn't ngModel triggering this on change?
|
||||
component.dateAddedBefore = '2023-05-14'
|
||||
fixture.detectChanges()
|
||||
tick(400)
|
||||
@ -1524,7 +1524,7 @@ describe('FilterEditorComponent', () => {
|
||||
)
|
||||
ownerToggle.nativeElement.checked = true
|
||||
// ownerToggle.triggerEventHandler('change')
|
||||
// TODO: ngModel isnt doing this here
|
||||
// TODO: ngModel isn't doing this here
|
||||
component.permissionsSelectionModel.hideUnowned = true
|
||||
fixture.detectChanges()
|
||||
expect(component.filterRules).toEqual([
|
||||
|
@ -40,7 +40,7 @@ export class SaveViewConfigDialogComponent implements OnInit {
|
||||
})
|
||||
|
||||
ngOnInit(): void {
|
||||
// wait to enable close button so it doesnt steal focus from input since its the first clickable element in the DOM
|
||||
// wait to enable close button so it doesn't steal focus from input since its the first clickable element in the DOM
|
||||
setTimeout(() => {
|
||||
this.closeEnabled = true
|
||||
})
|
||||
|
@ -34,7 +34,7 @@ describe('CorrespondentListComponent', () => {
|
||||
correspondentsService = TestBed.inject(CorrespondentService)
|
||||
})
|
||||
|
||||
// Tests are included in management-list.compontent.spec.ts
|
||||
// Tests are included in management-list.component.spec.ts
|
||||
|
||||
it('should use correct delete message', () => {
|
||||
jest.spyOn(correspondentsService, 'listFiltered').mockReturnValue(
|
||||
|
@ -58,7 +58,7 @@ describe('DocumentTypeListComponent', () => {
|
||||
fixture.detectChanges()
|
||||
})
|
||||
|
||||
// Tests are included in management-list.compontent.spec.ts
|
||||
// Tests are included in management-list.component.spec.ts
|
||||
|
||||
it('should use correct delete message', () => {
|
||||
expect(
|
||||
|
@ -58,7 +58,7 @@ describe('StoragePathListComponent', () => {
|
||||
fixture.detectChanges()
|
||||
})
|
||||
|
||||
// Tests are included in management-list.compontent.spec.ts
|
||||
// Tests are included in management-list.component.spec.ts
|
||||
|
||||
it('should use correct delete message', () => {
|
||||
expect(component.getDeleteMessage({ id: 1, name: 'StoragePath1' })).toEqual(
|
||||
|
@ -60,7 +60,7 @@ describe('TagListComponent', () => {
|
||||
fixture.detectChanges()
|
||||
})
|
||||
|
||||
// Tests are included in management-list.compontent.spec.ts
|
||||
// Tests are included in management-list.component.spec.ts
|
||||
|
||||
it('should use correct delete message', () => {
|
||||
expect(component.getDeleteMessage({ id: 1, name: 'Tag1' })).toEqual(
|
||||
|
@ -66,7 +66,7 @@ export class WorkflowsComponent
|
||||
? EditDialogMode.EDIT
|
||||
: EditDialogMode.CREATE
|
||||
if (workflow) {
|
||||
// quick "deep" clone so original doesnt get modified
|
||||
// quick "deep" clone so original doesn't get modified
|
||||
const clone = Object.assign({}, workflow)
|
||||
clone.actions = [...workflow.actions]
|
||||
clone.triggers = [...workflow.triggers]
|
||||
|
@ -146,7 +146,7 @@ export const PaperlessConfigOptions: ConfigOption[] = [
|
||||
key: 'max_image_pixels',
|
||||
title: $localize`Max Image Pixels`,
|
||||
type: ConfigOptionType.Number,
|
||||
config_key: 'PAPERLESS_OCR_IMAGE_DPI',
|
||||
config_key: 'PAPERLESS_OCR_MAX_IMAGE_PIXELS',
|
||||
category: ConfigCategory.OCR,
|
||||
},
|
||||
{
|
||||
|
@ -146,7 +146,7 @@ export class ConsumerStatusService {
|
||||
this.statusWebSocket.onmessage = (ev) => {
|
||||
let statusMessage: WebsocketConsumerStatusMessage = JSON.parse(ev['data'])
|
||||
|
||||
// fallback if backend didnt restrict message
|
||||
// fallback if backend didn't restrict message
|
||||
if (
|
||||
statusMessage.owner_id &&
|
||||
statusMessage.owner_id !== this.settingsService.currentUser?.id &&
|
||||
|
@ -208,7 +208,7 @@ export class DocumentListViewService {
|
||||
this.activeListViewState.sortField = newState.sortField
|
||||
this.activeListViewState.sortReverse = newState.sortReverse
|
||||
this.activeListViewState.currentPage = newState.currentPage
|
||||
this.reload(null, paramsEmpty) // update the params if there arent any
|
||||
this.reload(null, paramsEmpty) // update the params if there aren't any
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -59,7 +59,7 @@ export class OpenDocumentsService {
|
||||
openDocument(doc: Document): Observable<boolean> {
|
||||
if (this.openDocuments.find((d) => d.id == doc.id) == null) {
|
||||
if (this.openDocuments.length == this.MAX_OPEN_DOCUMENTS) {
|
||||
// at max, ensure changes arent lost
|
||||
// at max, ensure changes aren't lost
|
||||
const docToRemove = this.openDocuments[this.MAX_OPEN_DOCUMENTS - 1]
|
||||
const closeObservable = this.closeDocument(docToRemove)
|
||||
closeObservable.pipe(first()).subscribe((closed) => {
|
||||
|
@ -23,7 +23,7 @@ export class GroupService extends AbstractNameFilterService<Group> {
|
||||
const { typeKey, actionKey } =
|
||||
this.permissionService.getPermissionKeys(perm)
|
||||
if (!typeKey || !actionKey) {
|
||||
// dont lose permissions the UI doesnt use
|
||||
// dont lose permissions the UI doesn't use
|
||||
o.permissions.push(perm)
|
||||
}
|
||||
})
|
||||
|
@ -23,7 +23,7 @@ export class UserService extends AbstractNameFilterService<User> {
|
||||
const { typeKey, actionKey } =
|
||||
this.permissionService.getPermissionKeys(perm)
|
||||
if (!typeKey || !actionKey) {
|
||||
// dont lose permissions the UI doesnt use
|
||||
// dont lose permissions the UI doesn't use
|
||||
o.user_permissions.push(perm)
|
||||
}
|
||||
})
|
||||
|
@ -53,7 +53,7 @@ export class LocalizedDateParserFormatter extends NgbDateParserFormatter {
|
||||
if (this.separatorRegExp.test(value)) {
|
||||
let segments = value.split(this.separatorRegExp)
|
||||
|
||||
// always accept strict yyyy*mm*dd format even if thats not the input format since we can be certain its not yyyy*dd*mm
|
||||
// always accept strict yyyy*mm*dd format even if that's not the input format since we can be certain its not yyyy*dd*mm
|
||||
if (
|
||||
value.length == 10 &&
|
||||
segments.length == 3 &&
|
||||
|
@ -5,7 +5,7 @@ export const environment = {
|
||||
apiBaseUrl: document.baseURI + 'api/',
|
||||
apiVersion: '4',
|
||||
appTitle: 'Paperless-ngx',
|
||||
version: '2.3.2',
|
||||
version: '2.3.2-dev',
|
||||
webSocketHost: window.location.host,
|
||||
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
||||
webSocketBaseUrl: base_url.pathname + 'ws/',
|
||||
|
@ -204,10 +204,10 @@ class DocumentClassifier:
) and self.last_auto_type_hash == hasher.digest():
return False

# substract 1 since -1 (null) is also part of the classes.
# subtract 1 since -1 (null) is also part of the classes.

# union with {-1} accounts for cases where all documents have
# correspondents and types assigned, so -1 isnt part of labels_x, which
# correspondents and types assigned, so -1 isn't part of labels_x, which
# it usually is.
num_correspondents = len(set(labels_correspondent) | {-1}) - 1
num_document_types = len(set(labels_document_type) | {-1}) - 1
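The comment corrected above explains why the class count subtracts one: `-1` stands for "no correspondent/type", and the union with `{-1}` keeps the formula valid even when every document has a value. A small worked example with made-up label lists:

```python
# Worked example of the counting trick in the classifier hunk above
# (illustrative values only): -1 marks "no value", so it must not be
# counted as a real class.
def count_classes(labels: list[int]) -> int:
    return len(set(labels) | {-1}) - 1

print(count_classes([-1, 3, 3, 7]))  # 2 real classes (3 and 7); -1 excluded
print(count_classes([3, 7, 9]))      # 3 real classes; the union adds the missing -1
```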
@ -726,12 +726,17 @@ class Consumer(LoggingMixin):
|
||||
|
||||
storage_type = Document.STORAGE_TYPE_UNENCRYPTED
|
||||
|
||||
title = file_info.title[:127]
|
||||
if self.override_title is not None:
|
||||
try:
|
||||
title = self._parse_title_placeholders(self.override_title)
|
||||
except Exception as e:
|
||||
self.log.error(
|
||||
f"Error occurred parsing title override '{self.override_title}', falling back to original. Exception: {e}",
|
||||
)
|
||||
|
||||
document = Document.objects.create(
|
||||
title=(
|
||||
self._parse_title_placeholders(self.override_title)
|
||||
if self.override_title is not None
|
||||
else file_info.title
|
||||
)[:127],
|
||||
title=title,
|
||||
content=text,
|
||||
mime_type=mime_type,
|
||||
checksum=hashlib.md5(self.working_copy.read_bytes()).hexdigest(),
|
||||
|
@ -35,7 +35,7 @@ def collate(input_doc: ConsumableDocument) -> str:
|
||||
in reverse order, since the ADF will have scanned the pages from bottom
|
||||
to top.
|
||||
|
||||
Returns a status message on succcess, or raises a ConsumerError
|
||||
Returns a status message on success, or raises a ConsumerError
|
||||
in case of failure.
|
||||
"""
|
||||
|
||||
|
@ -224,7 +224,7 @@ def generate_filename(
if settings.FILENAME_FORMAT_REMOVE_NONE:
path = path.replace("/-none-/", "/") # remove empty directories
path = path.replace(" -none-", "") # remove when spaced, with space
path = path.replace("-none-", "") # remove rest of the occurences
path = path.replace("-none-", "") # remove rest of the occurrences

path = path.replace("-none-", "none") # backward compatibility
path = path.strip(os.sep)
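To make the string replacements above concrete, here is a tiny standalone walk-through using an invented path (the values are illustrative, not taken from the codebase):

```python
# Illustrative walk-through of the "-none-" cleanup shown in the hunk above,
# mirroring the FILENAME_FORMAT_REMOVE_NONE branch on a made-up path.
path = "2023/-none-/invoice -none-.pdf"
path = path.replace("/-none-/", "/")  # remove empty directories -> "2023/invoice -none-.pdf"
path = path.replace(" -none-", "")    # remove when preceded by a space -> "2023/invoice.pdf"
path = path.replace("-none-", "")     # remove any remaining occurrences
print(path)                           # 2023/invoice.pdf
```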
@ -264,7 +264,7 @@ class Command(BaseCommand):
|
||||
polling_interval = settings.CONSUMER_POLLING
|
||||
if polling_interval == 0: # pragma: no cover
|
||||
# Only happens if INotify failed to import
|
||||
logger.warn("Using polling of 10s, consider settng this")
|
||||
logger.warn("Using polling of 10s, consider setting this")
|
||||
polling_interval = 10
|
||||
|
||||
with ThreadPoolExecutor(max_workers=4) as pool:
|
||||
|
@ -345,7 +345,7 @@ def existing_document_matches_workflow(
|
||||
)
|
||||
trigger_matched = False
|
||||
|
||||
# Document correpondent vs trigger has_correspondent
|
||||
# Document correspondent vs trigger has_correspondent
|
||||
if (
|
||||
trigger.filter_has_correspondent is not None
|
||||
and document.correspondent != trigger.filter_has_correspondent
|
||||
|
@ -22,7 +22,7 @@ class Migration(migrations.Migration):
|
||||
(5, "Fuzzy Match"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containg imperfections that foil accurate OCR.',
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
@ -37,7 +37,7 @@ class Migration(migrations.Migration):
|
||||
(5, "Fuzzy Match"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containg imperfections that foil accurate OCR.',
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
]
|
||||
|
@ -66,7 +66,7 @@ class Migration(migrations.Migration):
|
||||
(6, "Automatic Classification"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containg imperfections that foil accurate OCR.',
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
("is_insensitive", models.BooleanField(default=True)),
|
||||
@ -100,7 +100,7 @@ class Migration(migrations.Migration):
|
||||
(6, "Automatic Classification"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containg imperfections that foil accurate OCR.',
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
@ -116,7 +116,7 @@ class Migration(migrations.Migration):
|
||||
(6, "Automatic Classification"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containg imperfections that foil accurate OCR.',
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
|
@ -207,7 +207,7 @@ def create_archive_version(doc, retry_count=3):
|
||||
return
|
||||
else:
|
||||
# This is mostly here for the tika parser in docker
|
||||
# environemnts. The servers for parsing need to come up first,
|
||||
# environments. The servers for parsing need to come up first,
|
||||
# and the docker setup doesn't ensure that tika is running
|
||||
# before attempting migrations.
|
||||
logger.error("Parse error, will try again in 5 seconds...")
|
||||
|
@ -394,11 +394,6 @@ class Log(models.Model):
|
||||
|
||||
|
||||
class SavedView(ModelWithOwner):
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
verbose_name = _("saved view")
|
||||
verbose_name_plural = _("saved views")
|
||||
|
||||
name = models.CharField(_("name"), max_length=128)
|
||||
|
||||
show_on_dashboard = models.BooleanField(
|
||||
@ -416,6 +411,14 @@ class SavedView(ModelWithOwner):
|
||||
)
|
||||
sort_reverse = models.BooleanField(_("sort reverse"), default=False)
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
verbose_name = _("saved view")
|
||||
verbose_name_plural = _("saved views")
|
||||
|
||||
def __str__(self):
|
||||
return f"SavedView {self.name}"
|
||||
|
||||
|
||||
class SavedViewFilterRule(models.Model):
|
||||
RULE_TYPES = [
|
||||
|
@ -33,7 +33,7 @@ from documents.utils import copy_file_with_basic_stats
|
||||
# - XX MON ZZZZ with XX being 1 or 2 and ZZZZ being 4 digits. MONTH is 3 letters
|
||||
# - XXPP MONTH ZZZZ with XX being 1 or 2 and PP being 2 letters and ZZZZ being 4 digits
|
||||
|
||||
# TODO: isnt there a date parsing library for this?
|
||||
# TODO: isn't there a date parsing library for this?
|
||||
|
||||
DATE_REGEX = re.compile(
|
||||
r"(\b|(?!=([_-])))([0-9]{1,2})[\.\/-]([0-9]{1,2})[\.\/-]([0-9]{4}|[0-9]{2})(\b|(?=([_-])))|"
|
||||
@ -113,8 +113,6 @@ def get_parser_class_for_mime_type(mime_type: str) -> Optional[type["DocumentPar
|
||||
|
||||
options = []
|
||||
|
||||
# Sein letzter Befehl war: KOMMT! Und sie kamen. Alle. Sogar die Parser.
|
||||
|
||||
for response in document_consumer_declaration.send(None):
|
||||
parser_declaration = response[1]
|
||||
supported_mime_types = parser_declaration["mime_types"]
|
||||
|
@ -546,7 +546,7 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
|
||||
if doc_id not in target_doc_ids:
|
||||
self.remove_doclink(document, field, doc_id)
|
||||
|
||||
# Create an instance if target doc doesnt have this field or append it to an existing one
|
||||
# Create an instance if target doc doesn't have this field or append it to an existing one
|
||||
existing_custom_field_instances = {
|
||||
custom_field.document_id: custom_field
|
||||
for custom_field in CustomFieldInstance.objects.filter(
|
||||
@ -1385,13 +1385,39 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
|
||||
]
|
||||
|
||||
def validate(self, attrs):
|
||||
# Empty strings treated as None to avoid unexpected behavior
|
||||
if (
|
||||
"assign_title" in attrs
|
||||
and attrs["assign_title"] is not None
|
||||
and len(attrs["assign_title"]) == 0
|
||||
):
|
||||
attrs["assign_title"] = None
|
||||
if "assign_title" in attrs and attrs["assign_title"] is not None:
|
||||
if len(attrs["assign_title"]) == 0:
|
||||
# Empty strings treated as None to avoid unexpected behavior
|
||||
attrs["assign_title"] = None
|
||||
else:
|
||||
try:
|
||||
# test against all placeholders, see consumer.py `parse_doc_title_w_placeholders`
|
||||
attrs["assign_title"].format(
|
||||
correspondent="",
|
||||
document_type="",
|
||||
added="",
|
||||
added_year="",
|
||||
added_year_short="",
|
||||
added_month="",
|
||||
added_month_name="",
|
||||
added_month_name_short="",
|
||||
added_day="",
|
||||
added_time="",
|
||||
owner_username="",
|
||||
original_filename="",
|
||||
created="",
|
||||
created_year="",
|
||||
created_year_short="",
|
||||
created_month="",
|
||||
created_month_name="",
|
||||
created_month_name_short="",
|
||||
created_day="",
|
||||
created_time="",
|
||||
)
|
||||
except (ValueError, KeyError) as e:
|
||||
raise serializers.ValidationError(
|
||||
{"assign_title": f'Invalid f-string detected: "{e.args[0]}"'},
|
||||
)
|
||||
|
||||
return attrs
|
||||
|
||||
|
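The check above works by test-rendering the template against every supported placeholder; a minimal standalone sketch of that behaviour (the placeholder names here are illustrative):
# --- illustrative sketch, not part of the diff ---
template = "{created_year} - {typo_placeholder}"
try:
    # An unknown or malformed placeholder raises KeyError or ValueError.
    template.format(created_year="", created="")
except (ValueError, KeyError) as e:
    print(f'Invalid f-string detected: "{e.args[0]}"')
    # -> Invalid f-string detected: "typo_placeholder"
# --- end sketch ---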
@ -570,19 +570,27 @@ def run_workflow(
|
||||
document.owner = action.assign_owner
|
||||
|
||||
if action.assign_title is not None:
|
||||
document.title = parse_doc_title_w_placeholders(
|
||||
action.assign_title,
|
||||
document.correspondent.name
|
||||
if document.correspondent is not None
|
||||
else "",
|
||||
document.document_type.name
|
||||
if document.document_type is not None
|
||||
else "",
|
||||
document.owner.username if document.owner is not None else "",
|
||||
timezone.localtime(document.added),
|
||||
document.original_filename,
|
||||
timezone.localtime(document.created),
|
||||
)
|
||||
try:
|
||||
document.title = parse_doc_title_w_placeholders(
|
||||
action.assign_title,
|
||||
document.correspondent.name
|
||||
if document.correspondent is not None
|
||||
else "",
|
||||
document.document_type.name
|
||||
if document.document_type is not None
|
||||
else "",
|
||||
document.owner.username
|
||||
if document.owner is not None
|
||||
else "",
|
||||
document.added,
|
||||
document.original_filename,
|
||||
document.created,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
f"Error occurred parsing title assignment '{action.assign_title}', falling back to original",
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
|
||||
if (
|
||||
action.assign_view_users is not None
|
||||
@ -617,7 +625,7 @@ def run_workflow(
|
||||
).count()
|
||||
== 0
|
||||
):
|
||||
# can be triggered on existing docs, so only add the field if it doesnt already exist
|
||||
# can be triggered on existing docs, so only add the field if it doesn't already exist
|
||||
CustomFieldInstance.objects.create(
|
||||
field=field,
|
||||
document=document,
|
||||
|
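The try/except above amounts to "render the template or keep the current title"; a hedged standalone sketch of that fallback (the helper name and arguments are illustrative):
# --- illustrative sketch, not part of the diff ---
def safe_title(template, current_title, **context):
    try:
        return template.format(**context)
    except Exception:
        # Mirrors the handler above: fall back rather than fail.
        return current_title

print(safe_title("Doc {created_year]", "sample test", created_year="2024"))
# -> sample test
# --- end sketch ---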
102
src/documents/tests/test_api_app_config.py
Normal file
@ -0,0 +1,102 @@
|
||||
import json
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
from paperless.models import ApplicationConfiguration
|
||||
from paperless.models import ColorConvertChoices
|
||||
|
||||
|
||||
class TestApiAppConfig(DirectoriesMixin, APITestCase):
|
||||
ENDPOINT = "/api/config/"
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
|
||||
user = User.objects.create_superuser(username="temp_admin")
|
||||
self.client.force_authenticate(user=user)
|
||||
|
||||
def test_api_get_config(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- API request to get app config
|
||||
WHEN:
|
||||
- API is called
|
||||
THEN:
|
||||
- Existing config
|
||||
"""
|
||||
response = self.client.get(self.ENDPOINT, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
self.assertEqual(
|
||||
json.dumps(response.data[0]),
|
||||
json.dumps(
|
||||
{
|
||||
"id": 1,
|
||||
"user_args": None,
|
||||
"output_type": None,
|
||||
"pages": None,
|
||||
"language": None,
|
||||
"mode": None,
|
||||
"skip_archive_file": None,
|
||||
"image_dpi": None,
|
||||
"unpaper_clean": None,
|
||||
"deskew": None,
|
||||
"rotate_pages": None,
|
||||
"rotate_pages_threshold": None,
|
||||
"max_image_pixels": None,
|
||||
"color_conversion_strategy": None,
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
def test_api_update_config(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- API request to update app config
|
||||
WHEN:
|
||||
- API is called
|
||||
THEN:
|
||||
- Correct HTTP response
|
||||
- Config is updated
|
||||
"""
|
||||
response = self.client.patch(
|
||||
f"{self.ENDPOINT}1/",
|
||||
json.dumps(
|
||||
{
|
||||
"color_conversion_strategy": ColorConvertChoices.RGB,
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
config = ApplicationConfiguration.objects.first()
|
||||
self.assertEqual(config.color_conversion_strategy, ColorConvertChoices.RGB)
|
||||
|
||||
def test_api_update_config_empty_fields(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- API request to update app config with empty string for user_args JSONField and language field
|
||||
WHEN:
|
||||
- API is called
|
||||
THEN:
|
||||
- Correct HTTP response
|
||||
- user_args is set to None
|
||||
"""
|
||||
response = self.client.patch(
|
||||
f"{self.ENDPOINT}1/",
|
||||
json.dumps(
|
||||
{
|
||||
"user_args": "",
|
||||
"language": "",
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
config = ApplicationConfiguration.objects.first()
|
||||
self.assertEqual(config.user_args, None)
|
||||
self.assertEqual(config.language, None)
|
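Outside the test client, the same endpoint can be exercised over HTTP; a hedged sketch with requests, where the base URL and token are placeholders and token authentication is assumed:
# --- illustrative sketch, not part of the diff ---
import requests

BASE = "http://localhost:8000"                     # placeholder host
headers = {"Authorization": "Token <api-token>"}   # placeholder token

# Read the current application configuration (a list with one entry).
print(requests.get(f"{BASE}/api/config/", headers=headers).json())

# Update a single field, mirroring test_api_update_config above.
requests.patch(
    f"{BASE}/api/config/1/",
    json={"color_conversion_strategy": "RGB"},
    headers=headers,
)
# --- end sketch ---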
@ -1276,7 +1276,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
GIVEN:
|
||||
- NUMBER_OF_SUGGESTED_DATES = 0 (disables feature)
|
||||
WHEN:
|
||||
- API reuqest for document suggestions
|
||||
- API request for document suggestions
|
||||
THEN:
|
||||
- Dont check for suggested dates at all
|
||||
"""
|
||||
@ -1526,7 +1526,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
GIVEN:
|
||||
- A document with a single note
|
||||
WHEN:
|
||||
- API reuqest for document notes is made
|
||||
- API request for document notes is made
|
||||
THEN:
|
||||
- The associated note is returned
|
||||
"""
|
||||
|
@ -55,7 +55,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
|
||||
with AsyncWriter(index.open_index()) as writer:
|
||||
# Note to future self: there is a reason we dont use a model signal handler to update the index: some operations edit many documents at once
|
||||
# (retagger, renamer) and we don't want to open a writer for each of these, but rather perform the entire operation with one writer.
|
||||
# That's why we cant open the writer in a model on_save handler or something.
|
||||
# That's why we can't open the writer in a model on_save handler or something.
|
||||
index.update_document(writer, d1)
|
||||
index.update_document(writer, d2)
|
||||
index.update_document(writer, d3)
|
||||
@ -903,8 +903,8 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
|
||||
GIVEN:
|
||||
- Documents with owners set & without
|
||||
WHEN:
|
||||
- API reuqest for advanced query (search) is made by non-superuser
|
||||
- API reuqest for advanced query (search) is made by superuser
|
||||
- API request for advanced query (search) is made by non-superuser
|
||||
- API request for advanced query (search) is made by superuser
|
||||
THEN:
|
||||
- Only owned docs are returned for regular users
|
||||
- All docs are returned for superuser
|
||||
@ -959,7 +959,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
|
||||
GIVEN:
|
||||
- Documents with granted view permissions to others
|
||||
WHEN:
|
||||
- API reuqest for advanced query (search) is made by user
|
||||
- API request for advanced query (search) is made by user
|
||||
THEN:
|
||||
- Only docs with granted view permissions are returned
|
||||
"""
|
||||
|
@ -248,6 +248,45 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
|
||||
|
||||
self.assertEqual(WorkflowTrigger.objects.count(), 1)
|
||||
|
||||
def test_api_create_invalid_assign_title(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- API request to create a workflow
|
||||
- Invalid f-string for assign_title
|
||||
WHEN:
|
||||
- API is called
|
||||
THEN:
|
||||
- Correct HTTP 400 response
|
||||
- No objects are created
|
||||
"""
|
||||
response = self.client.post(
|
||||
self.ENDPOINT,
|
||||
json.dumps(
|
||||
{
|
||||
"name": "Workflow 1",
|
||||
"order": 1,
|
||||
"triggers": [
|
||||
{
|
||||
"type": WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"assign_title": "{created_year]",
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn(
|
||||
"Invalid f-string detected",
|
||||
response.data["actions"][0]["assign_title"][0],
|
||||
)
|
||||
|
||||
self.assertEqual(Workflow.objects.count(), 1)
|
||||
|
||||
def test_api_create_workflow_trigger_action_empty_fields(self):
|
||||
"""
|
||||
GIVEN:
|
||||
|
@ -414,7 +414,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
|
||||
)
|
||||
doc2 = Document.objects.create(
|
||||
title="doc2",
|
||||
content="this is a document from noone",
|
||||
content="this is a document from no one",
|
||||
checksum="B",
|
||||
)
|
||||
|
||||
|
@ -423,6 +423,16 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
self.assertEqual(document.title, "Override Title")
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
def testOverrideTitleInvalidPlaceholders(self):
|
||||
with self.assertLogs("paperless.consumer", level="ERROR") as cm:
|
||||
document = self.consumer.try_consume_file(
|
||||
self.get_test_file(),
|
||||
override_title="Override {correspondent]",
|
||||
)
|
||||
self.assertEqual(document.title, "sample")
|
||||
expected_str = "Error occurred parsing title override 'Override {correspondent]', falling back to original"
|
||||
self.assertIn(expected_str, cm.output[0])
|
||||
|
||||
def testOverrideCorrespondent(self):
|
||||
c = Correspondent.objects.create(name="test")
|
||||
|
||||
@ -665,7 +675,7 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
|
||||
@mock.patch("documents.signals.handlers.generate_unique_filename")
|
||||
def testFilenameHandlingUnstableFormat(self, m):
|
||||
filenames = ["this", "that", "now this", "i cant decide"]
|
||||
filenames = ["this", "that", "now this", "i cannot decide"]
|
||||
|
||||
def get_filename():
|
||||
f = filenames.pop()
|
||||
|
@ -212,8 +212,8 @@ class TestDate(TestCase):
|
||||
|
||||
def test_multiple_dates(self):
|
||||
text = """This text has multiple dates.
|
||||
For example 02.02.2018, 22 July 2022 and Dezember 2021.
|
||||
But not 24-12-9999 because its in the future..."""
|
||||
For example 02.02.2018, 22 July 2022 and December 2021.
|
||||
But not 24-12-9999 because it's in the future..."""
|
||||
dates = list(parse_date_generator("", text))
|
||||
self.assertEqual(len(dates), 3)
|
||||
self.assertEqual(
|
||||
|
@ -43,7 +43,7 @@ class TestDelayedQuery(TestCase):
|
||||
)
|
||||
|
||||
def test_get_permission_criteria(self):
|
||||
# tests contains touples of user instances and the expected filter
|
||||
# tests contains tuples of user instances and the expected filter
|
||||
tests = (
|
||||
(None, [query.Term("has_owner", False)]),
|
||||
(User(42, username="foo", is_superuser=True), []),
|
||||
@ -113,7 +113,7 @@ class TestDelayedQuery(TestCase):
|
||||
)
|
||||
|
||||
def test_tags_query_filters(self):
|
||||
# tests contains touples of query_parameter dics and the expected whoosh query
|
||||
# tests contains tuples of query_parameter dics and the expected whoosh query
|
||||
param = "tags"
|
||||
field, _ = DelayedQuery.param_map[param]
|
||||
tests = (
|
||||
|
@ -165,7 +165,7 @@ class TestMigrateToEncrytpedWebPThumbnails(TestMigrations):
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Encrytped document exists with existing encrypted WebP thumbnail path
|
||||
- Encrypted document exists with existing encrypted WebP thumbnail path
|
||||
WHEN:
|
||||
- Migration is attempted
|
||||
THEN:
|
||||
|
@ -966,6 +966,50 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
|
||||
expected_str = f"Document correspondent {doc.correspondent} does not match {trigger.filter_has_correspondent}"
|
||||
self.assertIn(expected_str, cm.output[1])
|
||||
|
||||
def test_document_added_invalid_title_placeholders(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing workflow with added trigger type
|
||||
- Assign title field has an error
|
||||
WHEN:
|
||||
- File that matches is added
|
||||
THEN:
|
||||
- Title is not updated, error is output
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
|
||||
filter_filename="*sample*",
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
assign_title="Doc {created_year]",
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow 1",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(action)
|
||||
w.save()
|
||||
|
||||
now = timezone.localtime(timezone.now())
|
||||
created = now - timedelta(weeks=520)
|
||||
doc = Document.objects.create(
|
||||
original_filename="sample.pdf",
|
||||
title="sample test",
|
||||
content="Hello world bar",
|
||||
created=created,
|
||||
)
|
||||
|
||||
with self.assertLogs("paperless.handlers", level="ERROR") as cm:
|
||||
document_consumption_finished.send(
|
||||
sender=self.__class__,
|
||||
document=doc,
|
||||
)
|
||||
expected_str = f"Error occurred parsing title assignment '{action.assign_title}', falling back to original"
|
||||
self.assertIn(expected_str, cm.output[0])
|
||||
|
||||
self.assertEqual(doc.title, "sample test")
|
||||
|
||||
def test_document_updated_workflow(self):
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
|
@ -95,8 +95,8 @@ def debug_mode_check(app_configs, **kwargs):
|
||||
return [
|
||||
Warning(
|
||||
"DEBUG mode is enabled. Disable Debug mode. This is a serious "
|
||||
"security issue, since it puts security overides in place which "
|
||||
"are meant to be only used during development. This "
|
||||
"security issue, since it puts security overrides in place "
|
||||
"which are meant to be only used during development. This "
|
||||
"also means that paperless will tell anyone various "
|
||||
"debugging information when something goes wrong.",
|
||||
),
|
||||
|
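For reference, a hedged sketch of how a check like debug_mode_check is typically registered with Django's system-check framework; the exact wiring in the project may differ:
# --- illustrative sketch, not part of the diff ---
from django.conf import settings
from django.core.checks import Warning, register

@register()
def debug_mode_check(app_configs, **kwargs):
    if settings.DEBUG:
        return [Warning("DEBUG mode is enabled. Disable Debug mode.")]
    return []
# --- end sketch ---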
@ -122,7 +122,15 @@ class ProfileSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class ApplicationConfigurationSerializer(serializers.ModelSerializer):
|
||||
user_args = serializers.JSONField(binary=True)
|
||||
user_args = serializers.JSONField(binary=True, allow_null=True)
|
||||
|
||||
def run_validation(self, data):
|
||||
# Empty strings treated as None to avoid unexpected behavior
|
||||
if "user_args" in data and data["user_args"] == "":
|
||||
data["user_args"] = None
|
||||
if "language" in data and data["language"] == "":
|
||||
data["language"] = None
|
||||
return super().run_validation(data)
|
||||
|
||||
class Meta:
|
||||
model = ApplicationConfiguration
|
||||
|
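A minimal sketch of the normalization run_validation performs above, detached from DRF so it can run standalone (field names follow the serializer):
# --- illustrative sketch, not part of the diff ---
def normalize_empty(data, keys=("user_args", "language")):
    # Empty strings are treated as None before field validation.
    for key in keys:
        if key in data and data[key] == "":
            data[key] = None
    return data

print(normalize_empty({"user_args": "", "language": "eng"}))
# -> {'user_args': None, 'language': 'eng'}
# --- end sketch ---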
@ -499,8 +499,8 @@ AUTH_PASSWORD_VALIDATORS = [
|
||||
|
||||
# Disable Django's artificial limit on the number of form fields to submit at
|
||||
# once. This is a protection against overloading the server, but since this is
|
||||
# a self-hosted sort of gig, the benefits of being able to mass-delete a tonne
|
||||
# of log entries outweight the benefits of such a safeguard.
|
||||
# a self-hosted sort of gig, the benefits of being able to mass-delete a ton
|
||||
# of log entries outweigh the benefits of such a safeguard.
|
||||
|
||||
DATA_UPLOAD_MAX_NUMBER_FIELDS = None
|
||||
|
||||
|
@ -767,7 +767,7 @@ class MailAccountHandler(LoggingMixin):
|
||||
message=message,
|
||||
)
|
||||
else:
|
||||
# No files to consume, just mark as processed if it wasnt by .eml processing
|
||||
# No files to consume, just mark as processed if it wasn't by .eml processing
|
||||
if not ProcessedMail.objects.filter(
|
||||
rule=rule,
|
||||
uid=message.uid,
|
||||
|
@ -223,7 +223,7 @@ class TestMail(
|
||||
attachments: Union[int, list[_AttachmentDef]] = 1,
|
||||
body: str = "",
|
||||
subject: str = "the subject",
|
||||
from_: str = "noone@mail.com",
|
||||
from_: str = "no_one@mail.com",
|
||||
to: Optional[list[str]] = None,
|
||||
seen: bool = False,
|
||||
flagged: bool = False,
|
||||
|