Mirror of https://github.com/paperless-ngx/paperless-ngx.git
Synced 2026-01-28 22:59:03 -06:00

Compare commits: chore/ubun...feature-da (38 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 972f9a069c |  |
|  | bd99fb66cf |  |
|  | 7704bc5399 |  |
|  | a055de0ce4 |  |
|  | e0fdf1caa9 |  |
|  | f80ae51a7d |  |
|  | e101019924 |  |
|  | 7afc8ceb24 |  |
|  | dfe0012872 |  |
|  | 32771391ad |  |
|  | 9b7ae1c8ea |  |
|  | 66593ec660 |  |
|  | 5af0d1da26 |  |
|  | 3281ec2401 |  |
|  | dc9061eb97 |  |
|  | 6859e7e3c2 |  |
|  | 3e645bd9e2 |  |
|  | 09d39de200 |  |
|  | 94231dbb0f |  |
|  | 2f76350023 |  |
|  | 4cbe56e3af |  |
|  | 01b21377af |  |
|  | 56b5d838d7 |  |
|  | d294508982 |  |
|  | 02002620d2 |  |
|  | 6d93ae93b4 |  |
|  | c84f2f04b3 |  |
|  | d9d83e3045 |  |
|  | 1f074390e4 |  |
|  | 50d676c592 |  |
|  | 94b0f4e114 |  |
|  | 045994042b |  |
|  | 6997a2ab8b |  |
|  | f82f31f383 |  |
|  | ac76710296 |  |
|  | df07b8a03e |  |
|  | cac1b721b9 |  |
|  | 4428354150 |  |
@@ -89,6 +89,18 @@ Additional tasks are available for common maintenance operations:
- **Migrate Database**: To apply database migrations.
- **Create Superuser**: To create an admin user for the application.

## Committing from the Host Machine

The DevContainer automatically installs pre-commit hooks during setup. However, these hooks are configured for use inside the container.

If you want to commit changes from your host machine (outside the DevContainer), you need to set up pre-commit on your host. This installs it as a standalone tool:

```bash
uv tool install pre-commit && pre-commit install
```

After this, you can commit either from inside the DevContainer or from your host machine.
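
For a quick check that the host-side hooks are wired correctly, you can run them once by hand (a minimal sketch; this executes every configured hook against the whole tree):

```bash
# Run all configured pre-commit hooks once, against all files rather than only staged ones.
pre-commit run --all-files
```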

## Let's Get Started!

Follow the steps above to get your development environment up and running. Happy coding!

@@ -3,26 +3,30 @@
"dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
"service": "paperless-development",
"workspaceFolder": "/usr/src/paperless/paperless-ngx",
"postCreateCommand": "/bin/bash -c 'rm -rf .venv/.* && uv sync --group dev && uv run pre-commit install'",
"containerEnv": {
"UV_CACHE_DIR": "/usr/src/paperless/paperless-ngx/.uv-cache"
},
"postCreateCommand": "/bin/bash -c 'rm -rf .venv/.* && uv sync --group dev && uv run pre-commit install'",
"customizations": {
"vscode": {
"extensions": [
"mhutchie.git-graph",
"ms-python.python",
"ms-vscode.js-debug-nightly",
"eamodio.gitlens",
"yzhang.markdown-all-in-one"
],
"settings": {
"python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
"extensions": [
"mhutchie.git-graph",
"ms-python.python",
"ms-vscode.js-debug-nightly",
"eamodio.gitlens",
"yzhang.markdown-all-in-one",
"pnpm.pnpm"
],
"settings": {
"python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
}
},
"remoteUser": "paperless"
}
},
"remoteUser": "paperless"
}

@@ -174,12 +174,22 @@
{
"label": "Maintenance: Install Frontend Dependencies",
"description": "Install frontend (pnpm) dependencies",
"type": "pnpm",
"script": "install",
"path": "src-ui",
"type": "shell",
"command": "pnpm install",
"group": "clean",
"problemMatcher": [],
"detail": "install dependencies from package"
"options": {
"cwd": "${workspaceFolder}/src-ui"
},
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
}
},
{
"description": "Clean install frontend dependencies and build the frontend for production",

`.github/workflows/ci-backend.yml` (3 lines changed, vendored)

@@ -75,9 +75,6 @@ jobs:
env:
NLTK_DATA: ${{ env.NLTK_DATA }}
PAPERLESS_CI_TEST: 1
PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \

`.github/workflows/ci-docker.yml` (19 lines changed, vendored)

@@ -46,14 +46,13 @@ jobs:
id: ref
run: |
ref_name="${GITHUB_HEAD_REF:-$GITHUB_REF_NAME}"
# Sanitize by replacing / with - for cache keys
cache_ref="${ref_name//\//-}"
# Sanitize by replacing / with - for use in tags and cache keys
sanitized_ref="${ref_name//\//-}"

echo "ref_name=${ref_name}"
echo "cache_ref=${cache_ref}"
echo "sanitized_ref=${sanitized_ref}"

echo "name=${ref_name}" >> $GITHUB_OUTPUT
echo "cache-ref=${cache_ref}" >> $GITHUB_OUTPUT
echo "name=${sanitized_ref}" >> $GITHUB_OUTPUT
- name: Check push permissions
id: check-push
env:
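
The `${ref_name//\//-}` expansion used above is bash's replace-all form of parameter expansion; a standalone sketch with an illustrative branch name:

```bash
# ${var//pattern/replacement} replaces every match, so each "/" becomes "-",
# which keeps the ref usable in Docker tags and cache keys.
ref_name="feature/share-links"
sanitized_ref="${ref_name//\//-}"
echo "${sanitized_ref}"  # prints: feature-share-links
```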

@@ -62,12 +61,14 @@
# should-push: Should we push to GHCR?
# True for:
# 1. Pushes (tags/dev/beta) - filtered via the workflow triggers
# 2. Internal PRs where the branch name starts with 'feature-' - filtered here when a PR is synced
# 2. Manual dispatch - always push to GHCR
# 3. Internal PRs where the branch name starts with 'feature-' or 'fix-'

should_push="false"

if [[ "${{ github.event_name }}" == "push" ]]; then
should_push="true"
elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
should_push="true"
elif [[ "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.head.repo.full_name }}" == "${{ github.repository }}" ]]; then
if [[ "${REF_NAME}" == feature-* || "${REF_NAME}" == fix-* ]]; then
should_push="true"

@@ -139,9 +140,9 @@
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.should-push }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.name }}-${{ matrix.arch }}
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
cache-to: ${{ steps.check-push.outputs.should-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
cache-to: ${{ steps.check-push.outputs.should-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.name, matrix.arch) || '' }}
- name: Export digest
if: steps.check-push.outputs.should-push == 'true'
run: |
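
Conceptually, the `cache-from` chain above tries the branch-specific registry cache first and then falls back to the `dev` cache. A hand-run equivalent might look like this (OWNER/REPO, the branch tag, and the arch suffix are placeholders):

```bash
# Illustrative only: buildx consults each --cache-from source in order.
docker buildx build \
  --cache-from type=registry,ref=ghcr.io/OWNER/REPO/cache/app:my-branch-amd64 \
  --cache-from type=registry,ref=ghcr.io/OWNER/REPO/cache/app:dev-amd64 \
  .
```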

`.github/workflows/ci-docs.yml` (4 lines changed, vendored)

@@ -23,7 +23,7 @@ env:
jobs:
build:
name: Build Documentation
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
@@ -58,7 +58,7 @@ jobs:
name: Deploy Documentation
needs: build
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6

`.github/workflows/ci-lint.yml` (2 lines changed, vendored)

@@ -12,7 +12,7 @@ concurrency:
jobs:
pre-commit:
name: Pre-commit Checks
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6

`.github/workflows/crowdin.yml` (2 lines changed, vendored)

@@ -10,7 +10,7 @@ jobs:
synchronize-with-crowdin:
name: Crowdin Sync
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6

`.github/workflows/pr-bot.yml` (2 lines changed, vendored)

@@ -8,7 +8,7 @@ permissions:
jobs:
pr-bot:
name: Automated PR Bot
runs-on: ubuntu-slim
runs-on: ubuntu-latest
steps:
- name: Label PR by file path or branch name
# see .github/labeler.yml for the labeler config

`.github/workflows/project-actions.yml` (2 lines changed, vendored)

@@ -12,7 +12,7 @@ permissions:
jobs:
pr_opened_or_reopened:
name: pr_opened_or_reopened
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
permissions:
# write permission is required for autolabeler
pull-requests: write

`.github/workflows/repo-maintenance.yml` (10 lines changed, vendored)

@@ -13,7 +13,7 @@ jobs:
stale:
name: 'Stale'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/stale@v10
with:
@@ -35,7 +35,7 @@ jobs:
lock-threads:
name: 'Lock Old Threads'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: dessant/lock-threads@v6
with:
@@ -55,7 +55,7 @@ jobs:
close-answered-discussions:
name: 'Close Answered Discussions'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v8
with:
@@ -112,7 +112,7 @@ jobs:
close-outdated-discussions:
name: 'Close Outdated Discussions'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v8
with:
@@ -204,7 +204,7 @@ jobs:
close-unsupported-feature-requests:
name: 'Close Unsupported Feature Requests'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-slim
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v8
with:

`.github/workflows/translate-strings.yml` (2 lines changed, vendored)

@@ -6,7 +6,7 @@ on:
jobs:
generate-translate-strings:
name: Generate Translation Strings
runs-on: ubuntu-slim
runs-on: ubuntu-latest
permissions:
contents: write
steps:

`.gitignore` (1 line changed, vendored)

@@ -40,6 +40,7 @@ htmlcov/
.coverage
.coverage.*
.cache
.uv-cache
nosetests.xml
coverage.xml
*,cover

@@ -37,7 +37,7 @@ repos:
- json
# See https://github.com/prettier/prettier/issues/15742 for the fork reason
- repo: https://github.com/rbubley/mirrors-prettier
rev: 'v3.6.2'
rev: 'v3.8.1'
hooks:
- id: prettier
types_or:
@@ -49,7 +49,7 @@ repos:
- 'prettier-plugin-organize-imports@4.1.0'
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.5
rev: v0.14.14
hooks:
- id: ruff-check
- id: ruff-format
@@ -76,7 +76,7 @@ repos:
hooks:
- id: shellcheck
- repo: https://github.com/google/yamlfmt
rev: v0.20.0
rev: v0.21.0
hooks:
- id: yamlfmt
exclude: "^src-ui/pnpm-lock.yaml"

@@ -23,3 +23,24 @@ services:
container_name: tika
network_mode: host
restart: unless-stopped
greenmail:
image: greenmail/standalone:2.1.8
hostname: greenmail
container_name: greenmail
environment:
# Enable only IMAP for now (SMTP available via 3025 if needed later)
GREENMAIL_OPTS: >-
-Dgreenmail.setup.test.imap -Dgreenmail.users=test@localhost:test -Dgreenmail.users.login=test@localhost -Dgreenmail.verbose
ports:
- "3143:3143" # IMAP
restart: unless-stopped
nginx:
image: docker.io/nginx:1.29-alpine
hostname: nginx
container_name: nginx
ports:
- "8080:8080"
restart: unless-stopped
volumes:
- ../../docs/assets:/usr/share/nginx/html/assets:ro
- ./test-nginx.conf:/etc/nginx/conf.d/default.conf:ro
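
Once the stack is up, a quick smoke test against the test nginx (a sketch; assumes the default port mapping above):

```bash
# The CORS headers are added with "always", so they appear even on a 404 response.
curl -sI http://localhost:8080/ | grep -i 'access-control-allow'
```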

`docker/compose/test-nginx.conf` (14 lines, new file)

@@ -0,0 +1,14 @@
server {
listen 8080;
server_name localhost;

root /usr/share/nginx/html;

# Enable CORS for test requests
add_header 'Access-Control-Allow-Origin' '*' always;
add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS' always;

location / {
try_files $uri $uri/ =404;
}
}

@@ -582,7 +582,7 @@ document.

### Detecting duplicates {#fuzzy_duplicate}

Paperless already catches and prevents upload of exactly matching documents,
Paperless-ngx already catches and warns of exactly matching documents,
however a new scan of an existing document may not produce an exact bit for bit
duplicate. But the content should be exact or close, allowing detection.

@@ -1152,8 +1152,9 @@ via the consumption directory, you can disable the consumer to save resources.

#### [`PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>`](#PAPERLESS_CONSUMER_DELETE_DUPLICATES) {#PAPERLESS_CONSUMER_DELETE_DUPLICATES}

: When the consumer detects a duplicate document, it will not touch
the original document. This default behavior can be changed here.
: As of version 3.0 Paperless-ngx allows duplicate documents to be consumed by default, _except_ when
this setting is enabled. When enabled, Paperless will check if a document with the same hash already
exists in the system and delete the duplicate file from the consumption directory without consuming it.

Defaults to false.
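
For example, to discard duplicate files at the consumption directory instead of consuming them (an illustrative env-file snippet):

```bash
# .env / docker compose environment: delete duplicates rather than consuming them.
PAPERLESS_CONSUMER_DELETE_DUPLICATES=true
```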

@@ -1616,6 +1617,16 @@ processing. This only has an effect if

Defaults to `0 1 * * *`, once per day.

## Share links

#### [`PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON=<cron expression>`](#PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON) {#PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON}

: Controls how often Paperless-ngx removes expired share link bundles (and their generated ZIP archives).

: If set to the string "disable", expired bundles are not cleaned up automatically.

Defaults to `0 2 * * *`, once per day at 02:00.
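
Illustrative values, using standard five-field cron syntax:

```bash
# Clean up expired share link bundles hourly instead of once per day...
PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON="0 * * * *"
# ...or disable automatic cleanup entirely.
# PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON=disable
```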

## Binaries

There are a few external software packages that Paperless expects to

@@ -308,12 +308,14 @@ or using [email](#workflow-action-email) or [webhook](#workflow-action-webhook)

### Share Links

"Share links" are shareable public links to files and can be created and managed under the 'Send' button on the document detail screen.
"Share links" are public links to files (or an archive of files) and can be created and managed under the 'Send' button on the document detail screen or from the bulk editor.

- Share links do not require a user to login and thus link directly to a file.
- Share links do not require a user to login and thus link directly to a file or bundled download.
- Links are unique and are of the form `{paperless-url}/share/{randomly-generated-slug}`.
- Links can optionally have an expiration time set.
- After a link expires or is deleted users will be redirected to the regular paperless-ngx login.
- From the document detail screen you can create a share link for that single document.
- From the bulk editor you can create a **share link bundle** for any selection. Paperless-ngx prepares a ZIP archive in the background and exposes a single share link. You can revisit the "Manage share link bundles" dialog to monitor progress, retry failed bundles, or delete links.

!!! tip

@@ -19,14 +19,14 @@ dependencies = [
"azure-ai-documentintelligence>=1.0.2",
"babel>=2.17",
"bleach~=6.3.0",
"celery[redis]~=5.5.1",
"celery[redis]~=5.6.2",
"channels~=4.2",
"channels-redis~=4.2",
"concurrent-log-handler~=0.9.25",
"dateparser~=1.2",
# WARNING: django does not use semver.
# Only patch versions are guaranteed to not introduce breaking changes.
"django~=5.2.5",
"django~=5.2.10",
"django-allauth[mfa,socialaccount]~=65.13.1",
"django-auditlog~=3.4.1",
"django-cachalot~=2.8.0",
@@ -79,7 +79,7 @@ dependencies = [
"torch~=2.9.1",
"tqdm~=4.67.1",
"watchfiles>=1.1.1",
"whitenoise~=6.9",
"whitenoise~=6.11",
"whoosh-reloaded>=2.7.5",
"zxing-cpp~=2.3.0",
]
@@ -88,13 +88,13 @@ optional-dependencies.mariadb = [
"mysqlclient~=2.2.7",
]
optional-dependencies.postgres = [
"psycopg[c,pool]==3.2.12",
"psycopg[c,pool]==3.3",
# Direct dependency for proper resolution of the pre-built wheels
"psycopg-c==3.2.12",
"psycopg-c==3.3",
"psycopg-pool==3.3",
]
optional-dependencies.webserver = [
"granian[uvloop]~=2.5.1",
"granian[uvloop]~=2.6.0",
]

[dependency-groups]
@@ -114,15 +114,16 @@ testing = [
"daphne",
"factory-boy~=3.3.1",
"imagehash",
"pytest~=8.4.1",
"pytest~=9.0.0",
"pytest-cov~=7.0.0",
"pytest-django~=4.11.1",
"pytest-env",
"pytest-env~=1.2.0",
"pytest-httpx",
"pytest-mock",
"pytest-rerunfailures",
"pytest-mock~=3.15.1",
#"pytest-randomly~=4.0.1",
"pytest-rerunfailures~=16.1",
"pytest-sugar",
"pytest-xdist",
"pytest-xdist~=3.8.0",
]

lint = [
@@ -151,7 +152,7 @@ typing = [
]

[tool.uv]
required-version = ">=0.5.14"
required-version = ">=0.9.0"
package = false
environments = [
"sys_platform == 'darwin'",
@@ -161,8 +162,8 @@ environments = [
[tool.uv.sources]
# Markers are chosen to select these almost exclusively when building the Docker image
psycopg-c = [
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
]
zxing-cpp = [
{ url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
@@ -260,11 +261,15 @@ write-changes = true
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"

[tool.pytest.ini_options]
minversion = "8.0"
pythonpath = [
"src",
]
[tool.pytest]
minversion = "9.0"
pythonpath = [ "src" ]

strict_config = true
strict_markers = true
strict_parametrization_ids = true
strict_xfail = true

testpaths = [
"src/documents/tests/",
"src/paperless/tests/",
@@ -275,6 +280,7 @@ testpaths = [
"src/paperless_remote/tests/",
"src/paperless_ai/tests",
]

addopts = [
"--pythonwarnings=all",
"--cov",
@@ -282,15 +288,27 @@ addopts = [
"--cov-report=xml",
"--numprocesses=auto",
"--maxprocesses=16",
"--quiet",
"--dist=loadscope",
"--durations=50",
"--durations-min=0.5",
"--junitxml=junit.xml",
"-o junit_family=legacy",
"-o",
"junit_family=legacy",
]
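
Splitting `"-o junit_family=legacy"` into `"-o", "junit_family=legacy"` matters because each `addopts` list entry becomes a single argument token. The command-line equivalent (a sketch):

```bash
# -o and its key=value override are separate arguments on the command line as well.
uv run pytest -o junit_family=legacy --junitxml=junit.xml
```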

norecursedirs = [ "src/locale/", ".venv/", "src-ui/" ]

DJANGO_SETTINGS_MODULE = "paperless.settings"

markers = [
"live: Integration tests requiring external services (Gotenberg, Tika, nginx, etc)",
"nginx: Tests that make HTTP requests to the local nginx service",
"gotenberg: Tests requiring Gotenberg service",
"tika: Tests requiring Tika service",
"greenmail: Tests requiring Greenmail service",
"date_parsing: Tests which cover date parsing from content or filename",
]
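
With these markers registered (and `strict_markers` enabled), service-dependent tests can be deselected locally, e.g. (a sketch):

```bash
# Skip tests that require external services such as Gotenberg, Tika, or Greenmail.
uv run pytest -m "not live"
```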

[tool.pytest_env]
PAPERLESS_DISABLE_DBHANDLER = "true"
PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"

@@ -315,6 +333,10 @@ exclude_also = [

[tool.mypy]
mypy_path = "src"
files = [
"src/documents/plugins/date_parsing",
"src/documents/tests/date_parsing",
]
plugins = [
"mypy_django_plugin.main",
"mypy_drf_plugin.main",
@@ -326,5 +348,28 @@ disallow_untyped_defs = true
warn_redundant_casts = true
warn_unused_ignores = true

# This prevents errors from imports, but allows type-checking logic to work
follow_imports = "silent"

[[tool.mypy.overrides]]
module = [
"documents.*",
"paperless.*",
"paperless_ai.*",
"paperless_mail.*",
"paperless_tesseract.*",
"paperless_remote.*",
"paperless_text.*",
"paperless_tika.*",
]
ignore_errors = true

[[tool.mypy.overrides]]
module = [
"documents.plugins.date_parsing.*",
"documents.tests.date_parsing.*",
]
ignore_errors = false

[tool.django-stubs]
django_settings_module = "paperless.settings"

`src-ui/messages.xlf` (1342 lines changed; diff suppressed because it is too large)

@@ -103,22 +103,6 @@
</div>

<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Items per page</span>
</div>
<div class="col">

<select class="form-select" formControlName="documentListItemPerPage">
<option [ngValue]="10">10</option>
<option [ngValue]="25">25</option>
<option [ngValue]="50">50</option>
<option [ngValue]="100">100</option>
</select>

</div>
</div>

<div class="row">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Sidebar</span>
</div>
@@ -153,8 +137,28 @@
</button>
</div>
</div>
</div>
<div class="col-xl-6 ps-xl-5">
<h5 class="mt-3 mt-md-0" i18n>Global search</h5>
<div class="row">
<div class="col">
<pngx-input-check i18n-title title="Do not include advanced search results" formControlName="searchDbOnly"></pngx-input-check>
</div>
</div>

<h5 class="mt-3" id="update-checking" i18n>Update checking</h5>
<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Full search links to</span>
</div>
<div class="col mb-3">
<select class="form-select" formControlName="searchLink">
<option [ngValue]="GlobalSearchType.TITLE_CONTENT" i18n>Title and content search</option>
<option [ngValue]="GlobalSearchType.ADVANCED" i18n>Advanced search</option>
</select>
</div>
</div>

<h5 class="mt-3 mt-md-0" id="update-checking" i18n>Update checking</h5>
<div class="row mb-3">
<div class="col d-flex flex-row align-items-start">
<pngx-input-check i18n-title title="Enable update checking" formControlName="updateCheckingEnabled"></pngx-input-check>
@@ -179,11 +183,33 @@
<pngx-input-check i18n-title title="Show document counts in sidebar saved views" formControlName="sidebarViewsShowCount"></pngx-input-check>
</div>
</div>

</div>
<div class="col-xl-6 ps-xl-5">
<h5 class="mt-3 mt-md-0" i18n>Document editing</h5>
</div>

</ng-template>
</li>

<li [ngbNavItem]="SettingsNavIDs.Documents">
<a ngbNavLink i18n>Documents</a>
<ng-template ngbNavContent>
<div class="row">
<div class="col-xl-6 pe-xl-5">
<h5 i18n>Documents</h5>
<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Items per page</span>
</div>
<div class="col">
<select class="form-select" formControlName="documentListItemPerPage">
<option [ngValue]="10">10</option>
<option [ngValue]="25">25</option>
<option [ngValue]="50">50</option>
<option [ngValue]="100">100</option>
</select>
</div>
</div>

<h5 class="mt-3" i18n>Document editing</h5>
<div class="row">
<div class="col">
<pngx-input-check i18n-title title="Use PDF viewer provided by the browser" i18n-hint hint="This is usually faster for displaying large PDF documents, but it might not work on some browsers." formControlName="useNativePdfViewer"></pngx-input-check>
@@ -209,31 +235,32 @@
</div>
</div>

<div class="row mb-3">
<div class="row">
<div class="col">
<pngx-input-check i18n-title title="Show document thumbnail during loading" formControlName="documentEditingOverlayThumbnail"></pngx-input-check>
</div>
</div>

<h5 class="mt-3" i18n>Global search</h5>
<div class="row">
<div class="col">
<pngx-input-check i18n-title title="Do not include advanced search results" formControlName="searchDbOnly"></pngx-input-check>
</div>
</div>

<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Full search links to</span>
</div>
<div class="col mb-3">
<select class="form-select" formControlName="searchLink">
<option [ngValue]="GlobalSearchType.TITLE_CONTENT" i18n>Title and content search</option>
<option [ngValue]="GlobalSearchType.ADVANCED" i18n>Advanced search</option>
</select>
<div class="col">
<p class="mb-2" i18n>Built-in fields to show:</p>
@for (option of documentDetailFieldOptions; track option.id) {
<div class="form-check ms-3">
<input class="form-check-input" type="checkbox"
[id]="'documentDetailField-' + option.id"
[checked]="isDocumentDetailFieldShown(option.id)"
(change)="toggleDocumentDetailField(option.id, $event.target.checked)" />
<label class="form-check-label" [for]="'documentDetailField-' + option.id">
{{ option.label }}
</label>
</div>
}
<p class="small text-muted mt-1" i18n>Uncheck fields to hide them on the document details page.</p>
</div>
</div>
</div>

<div class="col-xl-6 ps-xl-5">
<h5 class="mt-3" i18n>Bulk editing</h5>
<div class="row mb-3">
<div class="col">
@@ -242,16 +269,27 @@
</div>
</div>

<h5 class="mt-3" i18n>PDF Editor</h5>
<div class="row">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Default editing mode</span>
</div>
<div class="col">
<select class="form-select" formControlName="pdfEditorDefaultEditMode">
<option [ngValue]="PdfEditorEditMode.Create" i18n>Create new document(s)</option>
<option [ngValue]="PdfEditorEditMode.Update" i18n>Update existing document</option>
</select>
</div>
</div>

<h5 class="mt-3" i18n>Notes</h5>
<div class="row mb-3">
<div class="col">
<pngx-input-check i18n-title title="Enable notes" formControlName="notesEnabled"></pngx-input-check>
</div>
</div>

</div>
</div>

</ng-template>
</li>

@@ -201,9 +201,9 @@ describe('SettingsComponent', () => {
const navigateSpy = jest.spyOn(router, 'navigate')
const tabButtons = fixture.debugElement.queryAll(By.directive(NgbNavLink))
tabButtons[1].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'permissions'])
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'documents'])
tabButtons[2].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'notifications'])
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'permissions'])

const initSpy = jest.spyOn(component, 'initialize')
component.isDirty = true // mock dirty
@@ -213,8 +213,8 @@ describe('SettingsComponent', () => {
expect(initSpy).not.toHaveBeenCalled()

navigateSpy.mockResolvedValueOnce(true) // nav accepted even though dirty
tabButtons[1].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'notifications'])
tabButtons[2].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'permissions'])
expect(initSpy).toHaveBeenCalled()
})

@@ -226,7 +226,7 @@ describe('SettingsComponent', () => {
activatedRoute.snapshot.fragment = '#notifications'
const scrollSpy = jest.spyOn(viewportScroller, 'scrollToAnchor')
component.ngOnInit()
expect(component.activeNavID).toEqual(3) // Notifications
expect(component.activeNavID).toEqual(4) // Notifications
component.ngAfterViewInit()
expect(scrollSpy).toHaveBeenCalledWith('#notifications')
})
@@ -251,7 +251,7 @@ describe('SettingsComponent', () => {
expect(toastErrorSpy).toHaveBeenCalled()
expect(storeSpy).toHaveBeenCalled()
expect(appearanceSettingsSpy).not.toHaveBeenCalled()
expect(setSpy).toHaveBeenCalledTimes(30)
expect(setSpy).toHaveBeenCalledTimes(32)

// succeed
storeSpy.mockReturnValueOnce(of(true))
@@ -366,4 +366,22 @@ describe('SettingsComponent', () => {
settingsService.settingsSaved.emit(true)
expect(maybeRefreshSpy).toHaveBeenCalled()
})

it('should support toggling document detail fields', () => {
completeSetup()
const field = 'storage_path'
expect(
component.settingsForm.get('documentDetailsHiddenFields').value.length
).toEqual(0)
component.toggleDocumentDetailField(field, false)
expect(
component.settingsForm.get('documentDetailsHiddenFields').value.length
).toEqual(1)
expect(component.isDocumentDetailFieldShown(field)).toBeFalsy()
component.toggleDocumentDetailField(field, true)
expect(
component.settingsForm.get('documentDetailsHiddenFields').value.length
).toEqual(0)
expect(component.isDocumentDetailFieldShown(field)).toBeTruthy()
})
})

@@ -64,15 +64,16 @@ import { PermissionsGroupComponent } from '../../common/input/permissions/permis
import { PermissionsUserComponent } from '../../common/input/permissions/permissions-user/permissions-user.component'
import { SelectComponent } from '../../common/input/select/select.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { PdfEditorEditMode } from '../../common/pdf-editor/pdf-editor-edit-mode'
import { SystemStatusDialogComponent } from '../../common/system-status-dialog/system-status-dialog.component'
import { ZoomSetting } from '../../document-detail/document-detail.component'
import { ZoomSetting } from '../../document-detail/zoom-setting'
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'

enum SettingsNavIDs {
General = 1,
Permissions = 2,
Notifications = 3,
SavedViews = 4,
Documents = 2,
Permissions = 3,
Notifications = 4,
}

const systemLanguage = { code: '', name: $localize`Use system language` }
@@ -81,6 +82,25 @@ const systemDateFormat = {
name: $localize`Use date format of display language`,
}

export enum DocumentDetailFieldID {
ArchiveSerialNumber = 'archive_serial_number',
Correspondent = 'correspondent',
DocumentType = 'document_type',
StoragePath = 'storage_path',
Tags = 'tags',
}

const documentDetailFieldOptions = [
{
id: DocumentDetailFieldID.ArchiveSerialNumber,
label: $localize`Archive serial number`,
},
{ id: DocumentDetailFieldID.Correspondent, label: $localize`Correspondent` },
{ id: DocumentDetailFieldID.DocumentType, label: $localize`Document type` },
{ id: DocumentDetailFieldID.StoragePath, label: $localize`Storage path` },
{ id: DocumentDetailFieldID.Tags, label: $localize`Tags` },
]

@Component({
selector: 'pngx-settings',
templateUrl: './settings.component.html',
@@ -144,8 +164,10 @@ export class SettingsComponent
defaultPermsEditGroups: new FormControl(null),
useNativePdfViewer: new FormControl(null),
pdfViewerDefaultZoom: new FormControl(null),
pdfEditorDefaultEditMode: new FormControl(null),
documentEditingRemoveInboxTags: new FormControl(null),
documentEditingOverlayThumbnail: new FormControl(null),
documentDetailsHiddenFields: new FormControl([]),
searchDbOnly: new FormControl(null),
searchLink: new FormControl(null),

@@ -176,6 +198,10 @@ export class SettingsComponent

public readonly ZoomSetting = ZoomSetting

public readonly PdfEditorEditMode = PdfEditorEditMode

public readonly documentDetailFieldOptions = documentDetailFieldOptions

get systemStatusHasErrors(): boolean {
return (
this.systemStatus.database.status === SystemStatusItemStatus.ERROR ||
@@ -292,6 +318,9 @@ export class SettingsComponent
pdfViewerDefaultZoom: this.settings.get(
SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING
),
pdfEditorDefaultEditMode: this.settings.get(
SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE
),
displayLanguage: this.settings.getLanguage(),
dateLocale: this.settings.get(SETTINGS_KEYS.DATE_LOCALE),
dateFormat: this.settings.get(SETTINGS_KEYS.DATE_FORMAT),
@@ -336,6 +365,9 @@ export class SettingsComponent
documentEditingOverlayThumbnail: this.settings.get(
SETTINGS_KEYS.DOCUMENT_EDITING_OVERLAY_THUMBNAIL
),
documentDetailsHiddenFields: this.settings.get(
SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS
),
searchDbOnly: this.settings.get(SETTINGS_KEYS.SEARCH_DB_ONLY),
searchLink: this.settings.get(SETTINGS_KEYS.SEARCH_FULL_TYPE),
}
@@ -458,6 +490,10 @@ export class SettingsComponent
SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING,
this.settingsForm.value.pdfViewerDefaultZoom
)
this.settings.set(
SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE,
this.settingsForm.value.pdfEditorDefaultEditMode
)
this.settings.set(
SETTINGS_KEYS.DATE_LOCALE,
this.settingsForm.value.dateLocale
@@ -526,6 +562,10 @@ export class SettingsComponent
SETTINGS_KEYS.DOCUMENT_EDITING_OVERLAY_THUMBNAIL,
this.settingsForm.value.documentEditingOverlayThumbnail
)
this.settings.set(
SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS,
this.settingsForm.value.documentDetailsHiddenFields
)
this.settings.set(
SETTINGS_KEYS.SEARCH_DB_ONLY,
this.settingsForm.value.searchDbOnly
@@ -587,6 +627,26 @@ export class SettingsComponent
this.settingsForm.get('themeColor').patchValue('')
}

isDocumentDetailFieldShown(fieldId: string): boolean {
const hiddenFields =
this.settingsForm.value.documentDetailsHiddenFields || []
return !hiddenFields.includes(fieldId)
}

toggleDocumentDetailField(fieldId: string, checked: boolean) {
const hiddenFields = new Set(
this.settingsForm.value.documentDetailsHiddenFields || []
)
if (checked) {
hiddenFields.delete(fieldId)
} else {
hiddenFields.add(fieldId)
}
this.settingsForm
.get('documentDetailsHiddenFields')
.setValue(Array.from(hiddenFields))
}

showSystemStatus() {
const modal: NgbModalRef = this.modalService.open(
SystemStatusDialogComponent,

@@ -97,6 +97,12 @@
<br/><em>(<ng-container i18n>click for full output</ng-container>)</em>
}
</ng-template>
@if (task.duplicate_documents?.length > 0) {
<div class="small text-warning-emphasis d-flex align-items-center gap-1">
<i-bs class="lh-1" width="1em" height="1em" name="exclamation-triangle"></i-bs>
<span i18n>Duplicate(s) detected</span>
</div>
}
</td>
}
<td class="d-lg-none">

@@ -248,7 +248,7 @@ main {
}
}

@media screen and (min-width: 366px) and (max-width: 768px) {
@media screen and (min-width: 376px) and (max-width: 768px) {
.navbar-toggler {
// compensate for 2 buttons on the right
margin-right: 45px;

@@ -164,9 +164,11 @@
{{ item.name }}
<span class="ms-auto text-muted small">
@if (item.dateEnd) {
{{ item.date | customDate:'MMM d' }} – {{ item.dateEnd | customDate:'mediumDate' }}
{{ item.date | customDate:'mediumDate' }} – {{ item.dateEnd | customDate:'mediumDate' }}
} @else if (item.dateTilNow) {
{{ item.dateTilNow | customDate:'mediumDate' }} – <ng-container i18n>now</ng-container>
} @else {
{{ item.date | customDate:'mediumDate' }} – <ng-container i18n>now</ng-container>
{{ item.date | customDate:'mediumDate' }}
}
</span>
</div>

@@ -79,32 +79,34 @@ export class DatesDropdownComponent implements OnInit, OnDestroy {
{
id: RelativeDate.WITHIN_1_WEEK,
name: $localize`Within 1 week`,
date: new Date().setDate(new Date().getDate() - 7),
dateTilNow: new Date().setDate(new Date().getDate() - 7),
},
{
id: RelativeDate.WITHIN_1_MONTH,
name: $localize`Within 1 month`,
date: new Date().setMonth(new Date().getMonth() - 1),
dateTilNow: new Date().setMonth(new Date().getMonth() - 1),
},
{
id: RelativeDate.WITHIN_3_MONTHS,
name: $localize`Within 3 months`,
date: new Date().setMonth(new Date().getMonth() - 3),
dateTilNow: new Date().setMonth(new Date().getMonth() - 3),
},
{
id: RelativeDate.WITHIN_1_YEAR,
name: $localize`Within 1 year`,
date: new Date().setFullYear(new Date().getFullYear() - 1),
dateTilNow: new Date().setFullYear(new Date().getFullYear() - 1),
},
{
id: RelativeDate.THIS_YEAR,
name: $localize`This year`,
date: new Date('1/1/' + new Date().getFullYear()),
dateEnd: new Date('12/31/' + new Date().getFullYear()),
},
{
id: RelativeDate.THIS_MONTH,
name: $localize`This month`,
date: new Date().setDate(1),
dateEnd: new Date(new Date().getFullYear(), new Date().getMonth() + 1, 0),
},
{
id: RelativeDate.TODAY,

@@ -0,0 +1,4 @@
export enum PdfEditorEditMode {
Update = 'update',
Create = 'create',
}

@@ -8,8 +8,11 @@ import { FormsModule } from '@angular/forms'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { PDFDocumentProxy, PdfViewerModule } from 'ng2-pdf-viewer'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { DocumentService } from 'src/app/services/rest/document.service'
import { SettingsService } from 'src/app/services/settings.service'
import { ConfirmDialogComponent } from '../confirm-dialog/confirm-dialog.component'
import { PdfEditorEditMode } from './pdf-editor-edit-mode'

interface PageOperation {
page: number
@@ -19,11 +22,6 @@ interface PageOperation {
loaded?: boolean
}

export enum PdfEditorEditMode {
Update = 'update',
Create = 'create',
}

@Component({
selector: 'pngx-pdf-editor',
templateUrl: './pdf-editor.component.html',
@@ -39,12 +37,15 @@ export class PDFEditorComponent extends ConfirmDialogComponent {
public PdfEditorEditMode = PdfEditorEditMode

private documentService = inject(DocumentService)
private readonly settingsService = inject(SettingsService)
activeModal: NgbActiveModal = inject(NgbActiveModal)

documentID: number
pages: PageOperation[] = []
totalPages = 0
editMode: PdfEditorEditMode = PdfEditorEditMode.Create
editMode: PdfEditorEditMode = this.settingsService.get(
SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE
)
deleteOriginal: boolean = false
includeMetadata: boolean = true

@@ -0,0 +1,129 @@
<div class="modal-header">
<h4 class="modal-title">{{ title }}</h4>
<button type="button" class="btn-close" aria-label="Close" (click)="cancel()"></button>
</div>
<div class="modal-body">
@if (!createdBundle) {
<form [formGroup]="form" class="d-flex flex-column gap-3">
<div>
<p class="mb-1">
<ng-container i18n>Selected documents:</ng-container>
{{ selectionCount }}
</p>
@if (documentPreview.length > 0) {
<ul class="list-unstyled small mb-0">
@for (doc of documentPreview; track doc.id) {
<li>
<strong>{{ doc.title | documentTitle }}</strong>
</li>
}
@if (selectionCount > documentPreview.length) {
<li>
<ng-container i18n>+ {{ selectionCount - documentPreview.length }} more…</ng-container>
</li>
}
</ul>
}
</div>

<div class="d-flex align-items-center justify-content-between">
<div class="input-group">
<label class="input-group-text" for="expirationDays"><ng-container i18n>Expires</ng-container>:</label>
<select class="form-select" id="expirationDays" formControlName="expirationDays">
@for (option of expirationOptions; track option.value) {
<option [ngValue]="option.value">{{ option.label }}</option>
}
</select>
</div>
<div class="form-check form-switch w-100 ms-3">
<input
class="form-check-input"
type="checkbox"
role="switch"
id="shareArchiveSwitch"
formControlName="shareArchiveVersion"
aria-checked="{{ shareArchiveVersion }}"
/>
<label class="form-check-label" for="shareArchiveSwitch" i18n>Share archive version (if available)</label>
</div>
</div>
</form>
} @else {
<div class="d-flex flex-column gap-3">
<div class="alert alert-success mb-0" role="status">
<h6 class="alert-heading mb-1" i18n>Share link bundle requested</h6>
<p class="mb-0 small" i18n>
You can copy the share link below or open the manager to monitor progress. The link will start working once the bundle is ready.
</p>
</div>
<dl class="row mb-0 small">
<dt class="col-sm-4" i18n>Status</dt>
<dd class="col-sm-8">
<span class="badge text-bg-secondary text-uppercase">{{ statusLabel(createdBundle.status) }}</span>
</dd>
<dt class="col-sm-4" i18n>Slug</dt>
<dd class="col-sm-8"><code>{{ createdBundle.slug }}</code></dd>
<dt class="col-sm-4" i18n>Link</dt>
<dd class="col-sm-8">
<div class="input-group input-group-sm">
<input class="form-control" type="text" [value]="getShareUrl(createdBundle)" readonly>
<button
class="btn btn-outline-primary"
type="button"
(click)="copy(createdBundle)"
>
@if (copied) {
<i-bs name="clipboard-check"></i-bs>
}
@if (!copied) {
<i-bs name="clipboard"></i-bs>
}
<span class="visually-hidden" i18n>Copy link</span>
</button>
</div>
</dd>
<dt class="col-sm-4" i18n>Documents</dt>
<dd class="col-sm-8">{{ createdBundle.document_count }}</dd>
<dt class="col-sm-4" i18n>Expires</dt>
<dd class="col-sm-8">
@if (createdBundle.expiration) {
{{ createdBundle.expiration | date: 'short' }}
}
@if (!createdBundle.expiration) {
<span i18n>Never</span>
}
</dd>
<dt class="col-sm-4" i18n>File version</dt>
<dd class="col-sm-8">{{ fileVersionLabel(createdBundle.file_version) }}</dd>
@if (createdBundle.size_bytes !== undefined && createdBundle.size_bytes !== null) {
<dt class="col-sm-4" i18n>Size</dt>
<dd class="col-sm-8">{{ createdBundle.size_bytes | fileSize }}</dd>
}
</dl>
</div>
}
</div>
<div class="modal-footer">
<div class="d-flex align-items-center gap-2 w-100">
<div class="text-light fst-italic small">
<ng-container i18n>A zip file containing the selected documents will be created for this share link bundle. This process happens in the background and may take some time, especially for large bundles.</ng-container>
</div>
<button type="button" class="btn btn-outline-secondary btn-sm ms-auto" (click)="cancel()">{{ cancelBtnCaption }}</button>
@if (createdBundle) {
<button type="button" class="btn btn-outline-secondary btn-sm text-nowrap" (click)="openManage()" i18n>Manage share link bundles</button>
}

@if (!createdBundle) {
<button
type="button"
class="btn btn-primary btn-sm d-inline-flex align-items-center gap-2 text-nowrap"
(click)="submit()"
[disabled]="loading || !buttonsEnabled">
@if (loading) {
<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true"></span>
}
{{ btnCaption }}
</button>
}
</div>
</div>

@@ -0,0 +1,161 @@
import { Clipboard } from '@angular/cdk/clipboard'
import {
ComponentFixture,
TestBed,
fakeAsync,
tick,
} from '@angular/core/testing'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { FileVersion } from 'src/app/data/share-link'
import {
ShareLinkBundleStatus,
ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ShareLinkBundleDialogComponent } from './share-link-bundle-dialog.component'

class MockToastService {
showInfo = jest.fn()
showError = jest.fn()
}

describe('ShareLinkBundleDialogComponent', () => {
let component: ShareLinkBundleDialogComponent
let fixture: ComponentFixture<ShareLinkBundleDialogComponent>
let clipboard: Clipboard
let toastService: MockToastService
let activeModal: NgbActiveModal
let originalApiBaseUrl: string

beforeEach(() => {
originalApiBaseUrl = environment.apiBaseUrl
toastService = new MockToastService()

TestBed.configureTestingModule({
imports: [
ShareLinkBundleDialogComponent,
NgxBootstrapIconsModule.pick(allIcons),
],
providers: [
NgbActiveModal,
{ provide: ToastService, useValue: toastService },
],
})

fixture = TestBed.createComponent(ShareLinkBundleDialogComponent)
component = fixture.componentInstance
clipboard = TestBed.inject(Clipboard)
activeModal = TestBed.inject(NgbActiveModal)
fixture.detectChanges()
})

afterEach(() => {
jest.clearAllTimers()
environment.apiBaseUrl = originalApiBaseUrl
})

it('builds payload and emits confirm on submit', () => {
const confirmSpy = jest.spyOn(component.confirmClicked, 'emit')
component.documents = [
{ id: 1, title: 'Doc 1' } as any,
{ id: 2, title: 'Doc 2' } as any,
]
component.form.setValue({
shareArchiveVersion: false,
expirationDays: 3,
})

component.submit()

expect(component.payload).toEqual({
document_ids: [1, 2],
file_version: FileVersion.Original,
expiration_days: 3,
})
expect(component.buttonsEnabled).toBe(false)
expect(confirmSpy).toHaveBeenCalled()

component.form.setValue({
shareArchiveVersion: true,
expirationDays: 7,
})
component.submit()

expect(component.payload).toEqual({
document_ids: [1, 2],
file_version: FileVersion.Archive,
expiration_days: 7,
})
})

it('ignores submit when bundle already created', () => {
component.createdBundle = { id: 1 } as ShareLinkBundleSummary
const confirmSpy = jest.spyOn(component, 'confirm')
component.submit()
expect(confirmSpy).not.toHaveBeenCalled()
})

it('limits preview to ten documents', () => {
const docs = Array.from({ length: 12 }).map((_, index) => ({
id: index + 1,
}))
component.documents = docs as any

expect(component.selectionCount).toBe(12)
expect(component.documentPreview).toHaveLength(10)
expect(component.documentPreview[0].id).toBe(1)
})

it('copies share link and resets state after timeout', fakeAsync(() => {
const copySpy = jest.spyOn(clipboard, 'copy').mockReturnValue(true)
const bundle = {
slug: 'bundle-slug',
status: ShareLinkBundleStatus.Ready,
} as ShareLinkBundleSummary

component.copy(bundle)

expect(copySpy).toHaveBeenCalledWith(component.getShareUrl(bundle))
expect(component.copied).toBe(true)
expect(toastService.showInfo).toHaveBeenCalled()

tick(3000)
expect(component.copied).toBe(false)
}))

it('generates share URLs based on API base URL', () => {
environment.apiBaseUrl = 'https://example.com/api/'
expect(
component.getShareUrl({ slug: 'abc' } as ShareLinkBundleSummary)
).toBe('https://example.com/share/abc')
})

it('opens manage dialog when callback provided', () => {
const manageSpy = jest.fn()
component.onOpenManage = manageSpy
component.openManage()
expect(manageSpy).toHaveBeenCalled()
})

it('falls back to cancel when manage callback missing', () => {
const cancelSpy = jest.spyOn(component, 'cancel')
component.onOpenManage = undefined
component.openManage()
expect(cancelSpy).toHaveBeenCalled()
})

it('maps status and file version labels', () => {
expect(component.statusLabel(ShareLinkBundleStatus.Processing)).toContain(
'Processing'
)
expect(component.fileVersionLabel(FileVersion.Archive)).toContain('Archive')
})

it('closes dialog when cancel invoked', () => {
const closeSpy = jest.spyOn(activeModal, 'close')
component.cancel()
expect(closeSpy).toHaveBeenCalled()
})
})
@@ -0,0 +1,118 @@
|
||||
import { Clipboard } from '@angular/cdk/clipboard'
|
||||
import { CommonModule } from '@angular/common'
|
||||
import { Component, Input, inject } from '@angular/core'
|
||||
import { FormBuilder, FormGroup, ReactiveFormsModule } from '@angular/forms'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { Document } from 'src/app/data/document'
|
||||
import {
|
||||
FileVersion,
|
||||
SHARE_LINK_EXPIRATION_OPTIONS,
|
||||
} from 'src/app/data/share-link'
|
||||
import {
|
||||
SHARE_LINK_BUNDLE_FILE_VERSION_LABELS,
|
||||
SHARE_LINK_BUNDLE_STATUS_LABELS,
|
||||
ShareLinkBundleCreatePayload,
|
||||
ShareLinkBundleStatus,
|
||||
ShareLinkBundleSummary,
|
||||
} from 'src/app/data/share-link-bundle'
|
||||
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
|
||||
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ConfirmDialogComponent } from '../confirm-dialog/confirm-dialog.component'
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-share-link-bundle-dialog',
|
||||
templateUrl: './share-link-bundle-dialog.component.html',
|
||||
imports: [
|
||||
CommonModule,
|
||||
ReactiveFormsModule,
|
||||
NgxBootstrapIconsModule,
|
||||
FileSizePipe,
|
||||
DocumentTitlePipe,
|
||||
],
|
||||
providers: [],
|
||||
})
|
||||
export class ShareLinkBundleDialogComponent extends ConfirmDialogComponent {
|
||||
private readonly formBuilder = inject(FormBuilder)
|
||||
private readonly clipboard = inject(Clipboard)
|
||||
private readonly toastService = inject(ToastService)
|
||||
|
||||
private _documents: Document[] = []
|
||||
|
||||
selectionCount = 0
|
||||
documentPreview: Document[] = []
|
||||
form: FormGroup = this.formBuilder.group({
|
||||
shareArchiveVersion: true,
|
||||
expirationDays: [7],
|
||||
})
|
||||
payload: ShareLinkBundleCreatePayload | null = null
|
||||
|
||||
readonly expirationOptions = SHARE_LINK_EXPIRATION_OPTIONS
|
||||
|
||||
createdBundle: ShareLinkBundleSummary | null = null
|
||||
copied = false
|
||||
onOpenManage?: () => void
|
||||
readonly statuses = ShareLinkBundleStatus
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.loading = false
|
||||
this.title = $localize`Create share link bundle`
|
||||
this.btnCaption = $localize`Create link`
|
||||
}
|
||||
|
||||
@Input()
|
||||
set documents(docs: Document[]) {
|
||||
this._documents = docs.concat()
|
||||
this.selectionCount = this._documents.length
|
||||
this.documentPreview = this._documents.slice(0, 10)
|
||||
}
|
||||
|
||||
submit() {
|
||||
if (this.createdBundle) return
|
||||
this.payload = {
|
||||
document_ids: this._documents.map((doc) => doc.id),
|
||||
file_version: this.form.value.shareArchiveVersion
|
||||
? FileVersion.Archive
|
||||
: FileVersion.Original,
|
||||
expiration_days: this.form.value.expirationDays,
|
||||
}
|
||||
this.buttonsEnabled = false
|
||||
super.confirm()
|
||||
}
|
||||
|
||||
getShareUrl(bundle: ShareLinkBundleSummary): string {
|
||||
const apiURL = new URL(environment.apiBaseUrl)
|
||||
return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${
|
||||
bundle.slug
|
||||
}`
|
||||
}
|
||||
|
||||
copy(bundle: ShareLinkBundleSummary): void {
|
||||
const success = this.clipboard.copy(this.getShareUrl(bundle))
|
||||
if (success) {
|
||||
this.copied = true
|
||||
this.toastService.showInfo($localize`Share link copied to clipboard.`)
|
||||
setTimeout(() => {
|
||||
this.copied = false
|
||||
}, 3000)
|
||||
}
|
||||
}
|
||||
|
||||
openManage(): void {
|
||||
if (this.onOpenManage) {
|
||||
this.onOpenManage()
|
||||
} else {
|
||||
this.cancel()
|
||||
}
|
||||
}
|
||||
|
||||
statusLabel(status: ShareLinkBundleSummary['status']): string {
|
||||
return SHARE_LINK_BUNDLE_STATUS_LABELS[status] ?? status
|
||||
}
|
||||
|
||||
fileVersionLabel(version: FileVersion): string {
|
||||
return SHARE_LINK_BUNDLE_FILE_VERSION_LABELS[version] ?? version
|
||||
}
|
||||
}
|
||||
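Editor's note: `getShareUrl` above rewrites the trailing `/api/` of `environment.apiBaseUrl` into `/share/`, which keeps the origin and any installation subpath intact. A minimal standalone sketch of that mapping; the helper name and the sample URLs are illustrative only, not part of the codebase:

```typescript
// Mirrors the component's getShareUrl() logic; inputs below are sample values.
function shareUrlFor(apiBaseUrl: string, slug: string): string {
  const apiURL = new URL(apiBaseUrl)
  // The trailing '/api/' path segment becomes '/share/'; the origin and any
  // subpath (e.g. a reverse-proxy prefix) are preserved.
  return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${slug}`
}

shareUrlFor('https://example.com/api/', 'abc')
// -> 'https://example.com/share/abc'
shareUrlFor('https://host.tld/paperless/api/', 'abc')
// -> 'https://host.tld/paperless/share/abc'
```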
@@ -0,0 +1,156 @@
<div class="modal-header">
  <h4 class="modal-title">{{ title }}</h4>
  <button type="button" class="btn-close" aria-label="Close" (click)="close()"></button>
</div>

<div class="modal-body">
  @if (loading) {
    <div class="d-flex align-items-center gap-2">
      <div class="spinner-border spinner-border-sm" role="status"></div>
      <span i18n>Loading share link bundles…</span>
    </div>
  }
  @if (!loading && error) {
    <div class="alert alert-danger mb-0" role="alert">
      {{ error }}
    </div>
  }
  @if (!loading && !error) {
    <div class="d-flex justify-content-between align-items-center mb-2">
      <p class="mb-0 text-muted small">
        <ng-container i18n>Status updates every few seconds while bundles are being prepared.</ng-container>
      </p>
    </div>
    @if (bundles.length === 0) {
      <p class="mb-0 text-muted fst-italic" i18n>No share link bundles currently exist.</p>
    }
    @if (bundles.length > 0) {
      <div class="table-responsive">
        <table class="table table-sm align-middle mb-0">
          <thead>
            <tr>
              <th scope="col" i18n>Created</th>
              <th scope="col" i18n>Status</th>
              <th scope="col" i18n>Size</th>
              <th scope="col" i18n>Expires</th>
              <th scope="col" i18n>Documents</th>
              <th scope="col" i18n>File version</th>
              <th scope="col" class="text-end" i18n>Actions</th>
            </tr>
          </thead>
          <tbody>
            @for (bundle of bundles; track bundle.id) {
              <tr>
                <td>
                  <div>{{ bundle.created | date: 'short' }}</div>
                  @if (bundle.built_at) {
                    <div class="small text-muted">
                      <ng-container i18n>Built:</ng-container> {{ bundle.built_at | date: 'short' }}
                    </div>
                  }
                </td>
                <td>
                  <div class="d-flex align-items-center gap-2">
                    @if (bundle.status === statuses.Failed && bundle.last_error) {
                      <button
                        type="button"
                        class="btn btn-link p-0 text-danger"
                        [ngbPopover]="errorDetail"
                        popoverClass="popover-sm"
                        triggers="mouseover:mouseleave"
                        placement="auto"
                        aria-label="View error details"
                        i18n-aria-label
                      >
                        <span class="badge text-bg-warning text-uppercase me-2">{{ statusLabel(bundle.status) }}</span>
                        <i-bs name="exclamation-triangle-fill" class="text-warning"></i-bs>
                      </button>
                      <ng-template #errorDetail>
                        @if (bundle.last_error.timestamp) {
                          <div class="text-muted small mb-1">
                            {{ bundle.last_error.timestamp | date: 'short' }}
                          </div>
                        }
                        <h6>{{ bundle.last_error.exception_type || ($localize`Unknown error`) }}</h6>
                        @if (bundle.last_error.message) {
                          <pre class="text-muted small"><code>{{ bundle.last_error.message }}</code></pre>
                        }
                      </ng-template>
                    }
                    @if (bundle.status === statuses.Processing || bundle.status === statuses.Pending) {
                      <span class="spinner-border spinner-border-sm" role="status"></span>
                    }
                    @if (bundle.status !== statuses.Failed) {
                      <span class="badge text-bg-secondary text-uppercase">{{ statusLabel(bundle.status) }}</span>
                    }
                  </div>
                </td>
                <td>
                  @if (bundle.size_bytes !== undefined && bundle.size_bytes !== null) {
                    {{ bundle.size_bytes | fileSize }}
                  }
                  @if (bundle.size_bytes === undefined || bundle.size_bytes === null) {
                    <span class="text-muted">—</span>
                  }
                </td>
                <td>
                  @if (bundle.expiration) {
                    {{ bundle.expiration | date: 'short' }}
                  }
                  @if (!bundle.expiration) {
                    <span i18n>Never</span>
                  }
                </td>
                <td>{{ bundle.document_count }}</td>
                <td>{{ fileVersionLabel(bundle.file_version) }}</td>
                <td class="text-end">
                  <div class="btn-group btn-group-sm">
                    <button
                      type="button"
                      class="btn btn-outline-primary"
                      [disabled]="bundle.status !== statuses.Ready"
                      (click)="copy(bundle)"
                      title="Copy share link"
                      i18n-title
                    >
                      @if (copiedSlug === bundle.slug) {
                        <i-bs name="clipboard-check"></i-bs>
                      }
                      @if (copiedSlug !== bundle.slug) {
                        <i-bs name="clipboard"></i-bs>
                      }
                      <span class="visually-hidden" i18n>Copy share link</span>
                    </button>
                    @if (bundle.status === statuses.Failed) {
                      <button
                        type="button"
                        class="btn btn-outline-warning"
                        [disabled]="loading"
                        (click)="retry(bundle)"
                      >
                        <i-bs name="arrow-clockwise"></i-bs>
                        <span class="visually-hidden" i18n>Retry</span>
                      </button>
                    }
                    <pngx-confirm-button
                      buttonClasses="btn btn-sm btn-outline-danger"
                      [disabled]="loading"
                      (confirm)="delete(bundle)"
                      iconName="trash"
                    >
                      <span class="visually-hidden" i18n>Delete share link bundle</span>
                    </pngx-confirm-button>
                  </div>
                </td>
              </tr>
            }
          </tbody>
        </table>
      </div>
    }
  }
</div>

<div class="modal-footer">
  <button type="button" class="btn btn-outline-secondary btn-sm" (click)="close()" i18n>Close</button>
</div>

@@ -0,0 +1,4 @@
:host ::ng-deep .popover {
  min-width: 300px;
  max-width: 400px;
}

@@ -0,0 +1,251 @@
import { Clipboard } from '@angular/cdk/clipboard'
import {
  ComponentFixture,
  TestBed,
  fakeAsync,
  tick,
} from '@angular/core/testing'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { of, throwError } from 'rxjs'
import { FileVersion } from 'src/app/data/share-link'
import {
  ShareLinkBundleStatus,
  ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ShareLinkBundleManageDialogComponent } from './share-link-bundle-manage-dialog.component'

class MockShareLinkBundleService {
  listAllBundles = jest.fn()
  delete = jest.fn()
  rebuildBundle = jest.fn()
}

class MockToastService {
  showInfo = jest.fn()
  showError = jest.fn()
}

describe('ShareLinkBundleManageDialogComponent', () => {
  let component: ShareLinkBundleManageDialogComponent
  let fixture: ComponentFixture<ShareLinkBundleManageDialogComponent>
  let service: MockShareLinkBundleService
  let toastService: MockToastService
  let clipboard: Clipboard
  let activeModal: NgbActiveModal
  let originalApiBaseUrl: string

  beforeEach(() => {
    service = new MockShareLinkBundleService()
    toastService = new MockToastService()
    originalApiBaseUrl = environment.apiBaseUrl

    service.listAllBundles.mockReturnValue(of([]))
    service.delete.mockReturnValue(of(true))
    service.rebuildBundle.mockReturnValue(of(sampleBundle()))

    TestBed.configureTestingModule({
      imports: [
        ShareLinkBundleManageDialogComponent,
        NgxBootstrapIconsModule.pick(allIcons),
      ],
      providers: [
        NgbActiveModal,
        { provide: ShareLinkBundleService, useValue: service },
        { provide: ToastService, useValue: toastService },
      ],
    })

    fixture = TestBed.createComponent(ShareLinkBundleManageDialogComponent)
    component = fixture.componentInstance
    clipboard = TestBed.inject(Clipboard)
    activeModal = TestBed.inject(NgbActiveModal)
  })

  afterEach(() => {
    component.ngOnDestroy()
    fixture.destroy()
    environment.apiBaseUrl = originalApiBaseUrl
    jest.clearAllMocks()
  })

  const sampleBundle = (overrides: Partial<ShareLinkBundleSummary> = {}) =>
    ({
      id: 1,
      slug: 'bundle-slug',
      created: new Date().toISOString(),
      document_count: 1,
      documents: [1],
      status: ShareLinkBundleStatus.Pending,
      file_version: FileVersion.Archive,
      last_error: undefined,
      ...overrides,
    }) as ShareLinkBundleSummary

  it('loads bundles on init and polls periodically', fakeAsync(() => {
    const bundles = [sampleBundle({ status: ShareLinkBundleStatus.Ready })]
    service.listAllBundles.mockReset()
    service.listAllBundles
      .mockReturnValueOnce(of(bundles))
      .mockReturnValue(of(bundles))

    fixture.detectChanges()
    tick()

    expect(service.listAllBundles).toHaveBeenCalledTimes(1)
    expect(component.bundles).toEqual(bundles)
    expect(component.loading).toBe(false)
    expect(component.error).toBeNull()

    tick(5000)
    expect(service.listAllBundles).toHaveBeenCalledTimes(2)
  }))

  it('handles errors when loading bundles', fakeAsync(() => {
    service.listAllBundles.mockReset()
    service.listAllBundles
      .mockReturnValueOnce(throwError(() => new Error('load fail')))
      .mockReturnValue(of([]))

    fixture.detectChanges()
    tick()

    expect(component.error).toContain('Failed to load share link bundles.')
    expect(toastService.showError).toHaveBeenCalled()
    expect(component.loading).toBe(false)

    tick(5000)
    expect(service.listAllBundles).toHaveBeenCalledTimes(2)
  }))

  it('copies bundle links when ready', fakeAsync(() => {
    jest.spyOn(clipboard, 'copy').mockReturnValue(true)
    fixture.detectChanges()
    tick()

    const readyBundle = sampleBundle({
      slug: 'ready-slug',
      status: ShareLinkBundleStatus.Ready,
    })
    component.copy(readyBundle)

    expect(clipboard.copy).toHaveBeenCalledWith(
      component.getShareUrl(readyBundle)
    )
    expect(component.copiedSlug).toBe('ready-slug')
    expect(toastService.showInfo).toHaveBeenCalled()

    tick(3000)
    expect(component.copiedSlug).toBeNull()
  }))

  it('ignores copy requests for non-ready bundles', fakeAsync(() => {
    const copySpy = jest.spyOn(clipboard, 'copy')
    fixture.detectChanges()
    tick()
    component.copy(sampleBundle({ status: ShareLinkBundleStatus.Pending }))
    expect(copySpy).not.toHaveBeenCalled()
  }))

  it('deletes bundles and refreshes list', fakeAsync(() => {
    service.listAllBundles.mockReturnValue(of([]))
    service.delete.mockReturnValue(of(true))

    fixture.detectChanges()
    tick()

    component.delete(sampleBundle())
    tick()

    expect(service.delete).toHaveBeenCalled()
    expect(toastService.showInfo).toHaveBeenCalledWith(
      expect.stringContaining('deleted.')
    )
    expect(service.listAllBundles).toHaveBeenCalledTimes(2)
    expect(component.loading).toBe(false)
  }))

  it('handles delete errors gracefully', fakeAsync(() => {
    service.listAllBundles.mockReturnValue(of([]))
    service.delete.mockReturnValue(throwError(() => new Error('delete fail')))

    fixture.detectChanges()
    tick()

    component.delete(sampleBundle())
    tick()

    expect(toastService.showError).toHaveBeenCalled()
    expect(component.loading).toBe(false)
  }))

  it('retries bundle build and replaces existing entry', fakeAsync(() => {
    service.listAllBundles.mockReturnValue(of([]))
    const updated = sampleBundle({ status: ShareLinkBundleStatus.Ready })
    service.rebuildBundle.mockReturnValue(of(updated))

    fixture.detectChanges()
    tick()

    component.bundles = [sampleBundle()]
    component.retry(component.bundles[0])
    tick()

    expect(service.rebuildBundle).toHaveBeenCalledWith(updated.id)
    expect(component.bundles[0].status).toBe(ShareLinkBundleStatus.Ready)
    expect(toastService.showInfo).toHaveBeenCalled()
  }))

  it('adds new bundle when retry returns unknown entry', fakeAsync(() => {
    service.listAllBundles.mockReturnValue(of([]))
    service.rebuildBundle.mockReturnValue(
      of(sampleBundle({ id: 99, slug: 'new-slug' }))
    )

    fixture.detectChanges()
    tick()

    component.bundles = [sampleBundle()]
    component.retry({ id: 99 } as ShareLinkBundleSummary)
    tick()

    expect(component.bundles.find((bundle) => bundle.id === 99)).toBeTruthy()
  }))

  it('handles retry errors', fakeAsync(() => {
    service.listAllBundles.mockReturnValue(of([]))
    service.rebuildBundle.mockReturnValue(throwError(() => new Error('fail')))

    fixture.detectChanges()
    tick()

    component.retry(sampleBundle())
    tick()

    expect(toastService.showError).toHaveBeenCalled()
  }))

  it('maps helpers and closes dialog', fakeAsync(() => {
    service.listAllBundles.mockReturnValue(of([]))
    fixture.detectChanges()
    tick()

    expect(component.statusLabel(ShareLinkBundleStatus.Processing)).toContain(
      'Processing'
    )
    expect(component.fileVersionLabel(FileVersion.Original)).toContain(
      'Original'
    )

    environment.apiBaseUrl = 'https://example.com/api/'
    const url = component.getShareUrl(sampleBundle({ slug: 'sluggy' }))
    expect(url).toBe('https://example.com/share/sluggy')

    const closeSpy = jest.spyOn(activeModal, 'close')
    component.close()
    expect(closeSpy).toHaveBeenCalled()
  }))
})

@@ -0,0 +1,177 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { CommonModule } from '@angular/common'
import { Component, OnDestroy, OnInit, inject } from '@angular/core'
import { NgbActiveModal, NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Subject, catchError, of, switchMap, takeUntil, timer } from 'rxjs'
import { FileVersion } from 'src/app/data/share-link'
import {
  SHARE_LINK_BUNDLE_FILE_VERSION_LABELS,
  SHARE_LINK_BUNDLE_STATUS_LABELS,
  ShareLinkBundleStatus,
  ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { LoadingComponentWithPermissions } from '../../loading-component/loading.component'
import { ConfirmButtonComponent } from '../confirm-button/confirm-button.component'

@Component({
  selector: 'pngx-share-link-bundle-manage-dialog',
  templateUrl: './share-link-bundle-manage-dialog.component.html',
  styleUrls: ['./share-link-bundle-manage-dialog.component.scss'],
  imports: [
    ConfirmButtonComponent,
    CommonModule,
    NgbPopoverModule,
    NgxBootstrapIconsModule,
    FileSizePipe,
  ],
})
export class ShareLinkBundleManageDialogComponent
  extends LoadingComponentWithPermissions
  implements OnInit, OnDestroy
{
  private readonly activeModal = inject(NgbActiveModal)
  private readonly shareLinkBundleService = inject(ShareLinkBundleService)
  private readonly toastService = inject(ToastService)
  private readonly clipboard = inject(Clipboard)

  title = $localize`Share link bundles`

  bundles: ShareLinkBundleSummary[] = []
  error: string | null = null
  copiedSlug: string | null = null

  readonly statuses = ShareLinkBundleStatus
  readonly fileVersions = FileVersion

  private readonly refresh$ = new Subject<boolean>()

  ngOnInit(): void {
    this.refresh$
      .pipe(
        switchMap((silent) => {
          if (!silent) {
            this.loading = true
          }
          this.error = null
          return this.shareLinkBundleService.listAllBundles().pipe(
            catchError((error) => {
              if (!silent) {
                this.loading = false
              }
              this.error = $localize`Failed to load share link bundles.`
              this.toastService.showError(
                $localize`Error retrieving share link bundles.`,
                error
              )
              return of(null)
            })
          )
        }),
        takeUntil(this.unsubscribeNotifier)
      )
      .subscribe((results) => {
        if (results) {
          this.bundles = results
          this.copiedSlug = null
        }
        this.loading = false
      })

    this.triggerRefresh(false)
    timer(5000, 5000)
      .pipe(takeUntil(this.unsubscribeNotifier))
      .subscribe(() => this.triggerRefresh(true))
  }

  ngOnDestroy(): void {
    super.ngOnDestroy()
  }

  getShareUrl(bundle: ShareLinkBundleSummary): string {
    const apiURL = new URL(environment.apiBaseUrl)
    return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${
      bundle.slug
    }`
  }

  copy(bundle: ShareLinkBundleSummary): void {
    if (bundle.status !== ShareLinkBundleStatus.Ready) {
      return
    }
    const success = this.clipboard.copy(this.getShareUrl(bundle))
    if (success) {
      this.copiedSlug = bundle.slug
      setTimeout(() => {
        this.copiedSlug = null
      }, 3000)
      this.toastService.showInfo($localize`Share link copied to clipboard.`)
    }
  }

  delete(bundle: ShareLinkBundleSummary): void {
    this.error = null
    this.loading = true
    this.shareLinkBundleService.delete(bundle).subscribe({
      next: () => {
        this.toastService.showInfo($localize`Share link bundle deleted.`)
        this.triggerRefresh(false)
      },
      error: (e) => {
        this.loading = false
        this.toastService.showError(
          $localize`Error deleting share link bundle.`,
          e
        )
      },
    })
  }

  retry(bundle: ShareLinkBundleSummary): void {
    this.error = null
    this.shareLinkBundleService.rebuildBundle(bundle.id).subscribe({
      next: (updated) => {
        this.toastService.showInfo(
          $localize`Share link bundle rebuild requested.`
        )
        this.replaceBundle(updated)
      },
      error: (e) => {
        this.toastService.showError($localize`Error requesting rebuild.`, e)
      },
    })
  }

  statusLabel(status: ShareLinkBundleStatus): string {
    return SHARE_LINK_BUNDLE_STATUS_LABELS[status] ?? status
  }

  fileVersionLabel(version: FileVersion): string {
    return SHARE_LINK_BUNDLE_FILE_VERSION_LABELS[version] ?? version
  }

  close(): void {
    this.activeModal.close()
  }

  private replaceBundle(updated: ShareLinkBundleSummary): void {
    const index = this.bundles.findIndex((bundle) => bundle.id === updated.id)
    if (index >= 0) {
      this.bundles = [
        ...this.bundles.slice(0, index),
        updated,
        ...this.bundles.slice(index + 1),
      ]
    } else {
      this.bundles = [updated, ...this.bundles]
    }
  }

  private triggerRefresh(silent: boolean): void {
    this.refresh$.next(silent)
  }
}
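Editor's note: the manage dialog funnels manual refreshes and the five-second timer through a single `refresh$` subject, with a boolean payload marking silent background polls, so `switchMap` drops any response still in flight when the next tick fires. A minimal sketch of that pattern under those assumptions; `fetchBundles` is a hypothetical stand-in for the real `listAllBundles()` call:

```typescript
import { Subject, of, switchMap, timer } from 'rxjs'

// Hypothetical stand-in for shareLinkBundleService.listAllBundles().
const fetchBundles = () => of([{ slug: 'bundle-a' }, { slug: 'bundle-b' }])

// One pipeline serves both manual and timed refreshes; the boolean payload
// says whether the refresh is "silent" (no loading spinner).
const refresh$ = new Subject<boolean>()

refresh$
  .pipe(
    switchMap((silent) => {
      if (!silent) {
        // a non-silent refresh would flip a loading flag here
      }
      return fetchBundles() // switchMap cancels any still-pending request
    })
  )
  .subscribe((bundles) => {
    console.log('current bundles:', bundles)
  })

refresh$.next(false) // initial, visible load
timer(5000, 5000).subscribe(() => refresh$.next(true)) // silent background polls
```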
@@ -51,7 +51,7 @@
<div class="input-group w-100 mt-2">
  <label class="input-group-text" for="addLink"><ng-container i18n>Expires</ng-container>:</label>
  <select class="form-select fs-6" [(ngModel)]="expirationDays">
    @for (option of EXPIRATION_OPTIONS; track option) {
    @for (option of expirationOptions; track option) {
      <option [ngValue]="option.value">{{ option.label }}</option>
    }
  </select>

@@ -4,7 +4,11 @@ import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { first } from 'rxjs'
import { FileVersion, ShareLink } from 'src/app/data/share-link'
import {
  FileVersion,
  SHARE_LINK_EXPIRATION_OPTIONS,
  ShareLink,
} from 'src/app/data/share-link'
import { ShareLinkService } from 'src/app/services/rest/share-link.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
@@ -21,12 +25,7 @@ export class ShareLinksDialogComponent implements OnInit {
  private toastService = inject(ToastService)
  private clipboard = inject(Clipboard)

  EXPIRATION_OPTIONS = [
    { label: $localize`1 day`, value: 1 },
    { label: $localize`7 days`, value: 7 },
    { label: $localize`30 days`, value: 30 },
    { label: $localize`Never`, value: null },
  ]
  readonly expirationOptions = SHARE_LINK_EXPIRATION_OPTIONS

  @Input()
  title = $localize`Share Links`

@@ -146,16 +146,26 @@
<ng-template ngbNavContent>
  <div>
    <pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" [suggestion]="suggestions?.title" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
    <pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
    @if (!isFieldHidden(DocumentDetailFieldID.ArchiveSerialNumber)) {
      <pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
    }
    <pngx-input-date i18n-title title="Date created" formControlName="created" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
      [error]="error?.created"></pngx-input-date>
    <pngx-input-select [items]="correspondents" i18n-title title="Correspondent" formControlName="correspondent" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Correspondent)"
      (createNew)="createCorrespondent($event)" [hideAddButton]="createDisabled(DataType.Correspondent)" [suggestions]="suggestions?.correspondents" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Correspondent }"></pngx-input-select>
    <pngx-input-select [items]="documentTypes" i18n-title title="Document type" formControlName="document_type" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.DocumentType)"
      (createNew)="createDocumentType($event)" [hideAddButton]="createDisabled(DataType.DocumentType)" [suggestions]="suggestions?.document_types" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.DocumentType }"></pngx-input-select>
    <pngx-input-select [items]="storagePaths" i18n-title title="Storage path" formControlName="storage_path" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.StoragePath)"
      (createNew)="createStoragePath($event)" [hideAddButton]="createDisabled(DataType.StoragePath)" [suggestions]="suggestions?.storage_paths" i18n-placeholder placeholder="Default" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.StoragePath }"></pngx-input-select>
    <pngx-input-tags #tagsInput formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
    @if (!isFieldHidden(DocumentDetailFieldID.Correspondent)) {
      <pngx-input-select [items]="correspondents" i18n-title title="Correspondent" formControlName="correspondent" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Correspondent)"
        (createNew)="createCorrespondent($event)" [hideAddButton]="createDisabled(DataType.Correspondent)" [suggestions]="suggestions?.correspondents" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Correspondent }"></pngx-input-select>
    }
    @if (!isFieldHidden(DocumentDetailFieldID.DocumentType)) {
      <pngx-input-select [items]="documentTypes" i18n-title title="Document type" formControlName="document_type" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.DocumentType)"
        (createNew)="createDocumentType($event)" [hideAddButton]="createDisabled(DataType.DocumentType)" [suggestions]="suggestions?.document_types" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.DocumentType }"></pngx-input-select>
    }
    @if (!isFieldHidden(DocumentDetailFieldID.StoragePath)) {
      <pngx-input-select [items]="storagePaths" i18n-title title="Storage path" formControlName="storage_path" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.StoragePath)"
        (createNew)="createStoragePath($event)" [hideAddButton]="createDisabled(DataType.StoragePath)" [suggestions]="suggestions?.storage_paths" i18n-placeholder placeholder="Default" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.StoragePath }"></pngx-input-select>
    }
    @if (!isFieldHidden(DocumentDetailFieldID.Tags)) {
      <pngx-input-tags #tagsInput formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
    }
    @for (fieldInstance of document?.custom_fields; track fieldInstance.field; let i = $index) {
      <div [formGroup]="customFieldFormFields.controls[i]">
        @switch (getCustomFieldFromInstance(fieldInstance)?.data_type) {
@@ -370,6 +380,37 @@
      </ng-template>
    </li>
  }

  @if (document?.duplicate_documents?.length) {
    <li [ngbNavItem]="DocumentDetailNavIDs.Duplicates">
      <a class="text-nowrap" ngbNavLink i18n>
        Duplicates
        <span class="badge text-bg-secondary ms-1">{{ document.duplicate_documents.length }}</span>
      </a>
      <ng-template ngbNavContent>
        <div class="d-flex flex-column gap-2">
          <div class="fst-italic" i18n>Duplicate documents detected:</div>
          <div class="list-group">
            @for (duplicate of document.duplicate_documents; track duplicate.id) {
              <a
                class="list-group-item list-group-item-action d-flex justify-content-between align-items-center"
                [routerLink]="['/documents', duplicate.id, 'details']"
                [class.disabled]="duplicate.deleted_at"
              >
                <span class="d-flex align-items-center gap-2">
                  <span>{{ duplicate.title || ('#' + duplicate.id) }}</span>
                  @if (duplicate.deleted_at) {
                    <span class="badge text-bg-secondary" i18n>In trash</span>
                  }
                </span>
                <span class="text-secondary">#{{ duplicate.id }}</span>
              </a>
            }
          </div>
        </div>
      </ng-template>
    </li>
  }
</ul>

<div [ngbNavOutlet]="nav" class="mt-3"></div>

@@ -48,6 +48,7 @@ import {
} from 'src/app/data/filter-rule-type'
import { StoragePath } from 'src/app/data/storage-path'
import { Tag } from 'src/app/data/tag'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { PermissionsGuard } from 'src/app/guards/permissions.guard'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
@@ -68,10 +69,8 @@ import { environment } from 'src/environments/environment'
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
import {
  DocumentDetailComponent,
  ZoomSetting,
} from './document-detail.component'
import { DocumentDetailComponent } from './document-detail.component'
import { ZoomSetting } from './zoom-setting'

const doc: Document = {
  id: 3,
@@ -301,16 +300,16 @@ describe('DocumentDetailComponent', () => {
      .spyOn(openDocumentsService, 'openDocument')
      .mockReturnValueOnce(of(true))
    fixture.detectChanges()
    expect(component.activeNavID).toEqual(5) // DocumentDetailNavIDs.Notes
    expect(component.activeNavID).toEqual(component.DocumentDetailNavIDs.Notes)
  })

  it('should change url on tab switch', () => {
    initNormally()
    const navigateSpy = jest.spyOn(router, 'navigate')
    component.nav.select(5)
    component.nav.select(component.DocumentDetailNavIDs.Notes)
    component.nav.navChange.next({
      activeId: 1,
      nextId: 5,
      nextId: component.DocumentDetailNavIDs.Notes,
      preventDefault: () => {},
    })
    fixture.detectChanges()
@@ -352,6 +351,18 @@ describe('DocumentDetailComponent', () => {
    expect(component.document).toEqual(doc)
  })

  it('should fall back to details tab when duplicates tab is active but no duplicates', () => {
    initNormally()
    component.activeNavID = component.DocumentDetailNavIDs.Duplicates
    const noDupDoc = { ...doc, duplicate_documents: [] }

    component.updateComponent(noDupDoc)

    expect(component.activeNavID).toEqual(
      component.DocumentDetailNavIDs.Details
    )
  })

  it('should load already-opened document via param', () => {
    initNormally()
    jest.spyOn(documentService, 'get').mockReturnValueOnce(of(doc))
@@ -367,6 +378,38 @@ describe('DocumentDetailComponent', () => {
    expect(component.document).toEqual(doc)
  })

  it('should update cached open document duplicates when reloading an open doc', () => {
    const openDoc = { ...doc, duplicate_documents: [{ id: 1, title: 'Old' }] }
    const updatedDuplicates = [
      { id: 2, title: 'Newer duplicate', deleted_at: null },
    ]
    jest
      .spyOn(activatedRoute, 'paramMap', 'get')
      .mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
    jest.spyOn(documentService, 'get').mockReturnValue(
      of({
        ...doc,
        modified: new Date('2024-01-02T00:00:00Z'),
        duplicate_documents: updatedDuplicates,
      })
    )
    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
    const saveSpy = jest.spyOn(openDocumentsService, 'save')
    jest.spyOn(openDocumentsService, 'openDocument').mockReturnValue(of(true))
    jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
      of({
        count: customFields.length,
        all: customFields.map((f) => f.id),
        results: customFields,
      })
    )

    fixture.detectChanges()

    expect(openDoc.duplicate_documents).toEqual(updatedDuplicates)
    expect(saveSpy).toHaveBeenCalled()
  })

  it('should disable form if user cannot edit', () => {
    currentUserHasObjectPermissions = false
    initNormally()
@@ -971,7 +1014,7 @@ describe('DocumentDetailComponent', () => {
  it('should display built-in pdf viewer if not disabled', () => {
    initNormally()
    component.document.archived_file_name = 'file.pdf'
    jest.spyOn(settingsService, 'get').mockReturnValue(false)
    settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, false)
    expect(component.useNativePdfViewer).toBeFalsy()
    fixture.detectChanges()
    expect(fixture.debugElement.query(By.css('pdf-viewer'))).not.toBeNull()
@@ -980,7 +1023,7 @@ describe('DocumentDetailComponent', () => {
  it('should display native pdf viewer if enabled', () => {
    initNormally()
    component.document.archived_file_name = 'file.pdf'
    jest.spyOn(settingsService, 'get').mockReturnValue(true)
    settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, true)
    expect(component.useNativePdfViewer).toBeTruthy()
    fixture.detectChanges()
    expect(fixture.debugElement.query(By.css('object'))).not.toBeNull()

@@ -8,7 +8,7 @@ import {
  FormsModule,
  ReactiveFormsModule,
} from '@angular/forms'
import { ActivatedRoute, Router } from '@angular/router'
import { ActivatedRoute, Router, RouterModule } from '@angular/router'
import {
  NgbDateStruct,
  NgbDropdownModule,
@@ -84,6 +84,7 @@ import { ToastService } from 'src/app/services/toast.service'
import { getFilenameFromContentDisposition } from 'src/app/utils/http'
import { ISODateAdapter } from 'src/app/utils/ngb-iso-date-adapter'
import * as UTIF from 'utif'
import { DocumentDetailFieldID } from '../admin/settings/settings.component'
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
@@ -105,16 +106,15 @@ import { TextComponent } from '../common/input/text/text.component'
import { TextAreaComponent } from '../common/input/textarea/textarea.component'
import { UrlComponent } from '../common/input/url/url.component'
import { PageHeaderComponent } from '../common/page-header/page-header.component'
import {
  PDFEditorComponent,
  PdfEditorEditMode,
} from '../common/pdf-editor/pdf-editor.component'
import { PdfEditorEditMode } from '../common/pdf-editor/pdf-editor-edit-mode'
import { PDFEditorComponent } from '../common/pdf-editor/pdf-editor.component'
import { ShareLinksDialogComponent } from '../common/share-links-dialog/share-links-dialog.component'
import { SuggestionsDropdownComponent } from '../common/suggestions-dropdown/suggestions-dropdown.component'
import { DocumentHistoryComponent } from '../document-history/document-history.component'
import { DocumentNotesComponent } from '../document-notes/document-notes.component'
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
import { MetadataCollapseComponent } from './metadata-collapse/metadata-collapse.component'
import { ZoomSetting } from './zoom-setting'

enum DocumentDetailNavIDs {
  Details = 1,
@@ -124,6 +124,7 @@ enum DocumentDetailNavIDs {
  Notes = 5,
  Permissions = 6,
  History = 7,
  Duplicates = 8,
}

enum ContentRenderType {
@@ -135,18 +136,6 @@ enum ContentRenderType {
  TIFF = 'tiff',
}

export enum ZoomSetting {
  PageFit = 'page-fit',
  PageWidth = 'page-width',
  Quarter = '.25',
  Half = '.5',
  ThreeQuarters = '.75',
  One = '1',
  OneAndHalf = '1.5',
  Two = '2',
  Three = '3',
}

@Component({
  selector: 'pngx-document-detail',
  templateUrl: './document-detail.component.html',
@@ -181,6 +170,7 @@ export enum ZoomSetting {
    NgxBootstrapIconsModule,
    PdfViewerModule,
    TextAreaComponent,
    RouterModule,
  ],
})
export class DocumentDetailComponent
@@ -279,6 +269,8 @@ export class DocumentDetailComponent

  public readonly DataType = DataType

  public readonly DocumentDetailFieldID = DocumentDetailFieldID

  @ViewChild('nav') nav: NgbNav
  @ViewChild('pdfPreview') set pdfPreview(element) {
    // this gets called when component added or removed from DOM
@@ -325,6 +317,12 @@ export class DocumentDetailComponent
    return this.settings.get(SETTINGS_KEYS.DOCUMENT_EDITING_OVERLAY_THUMBNAIL)
  }

  isFieldHidden(fieldId: DocumentDetailFieldID): boolean {
    return this.settings
      .get(SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS)
      .includes(fieldId)
  }

  private getRenderType(mimeType: string): ContentRenderType {
    if (!mimeType) return ContentRenderType.Unknown
    if (mimeType === 'application/pdf') {
@@ -454,6 +452,11 @@ export class DocumentDetailComponent
    const openDocument = this.openDocumentService.getOpenDocument(
      this.documentId
    )
    // update duplicate documents if present
    if (openDocument && doc?.duplicate_documents) {
      openDocument.duplicate_documents = doc.duplicate_documents
      this.openDocumentService.save()
    }
    const useDoc = openDocument || doc
    if (openDocument) {
      if (
@@ -704,6 +707,13 @@ export class DocumentDetailComponent
    }
    this.title = this.documentTitlePipe.transform(doc.title)
    this.prepareForm(doc)

    if (
      this.activeNavID === DocumentDetailNavIDs.Duplicates &&
      !doc?.duplicate_documents?.length
    ) {
      this.activeNavID = DocumentDetailNavIDs.Details
    }
  }

  get customFieldFormFields(): FormArray {
src-ui/src/app/components/document-detail/zoom-setting.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
export enum ZoomSetting {
  PageFit = 'page-fit',
  PageWidth = 'page-width',
  Quarter = '.25',
  Half = '.5',
  ThreeQuarters = '.75',
  One = '1',
  OneAndHalf = '1.5',
  Two = '2',
  Three = '3',
}
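Editor's note: `ZoomSetting` is a string enum because the chosen zoom is persisted as a plain string in UI settings, so reading it back needs a runtime narrowing step. A hedged sketch of that narrowing; the `toZoomSetting` helper is hypothetical and the enum is an abridged copy of the one above:

```typescript
enum ZoomSetting {
  PageFit = 'page-fit',
  PageWidth = 'page-width',
  One = '1',
  Two = '2',
}

// Hypothetical helper: persisted settings come back as plain strings, so a
// runtime membership check is needed to narrow them to the enum again.
function toZoomSetting(stored: string): ZoomSetting | undefined {
  return (Object.values(ZoomSetting) as string[]).includes(stored)
    ? (stored as ZoomSetting)
    : undefined
}

toZoomSetting('page-width') // -> ZoomSetting.PageWidth
toZoomSetting('bogus') // -> undefined
```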
@@ -96,14 +96,36 @@
<button ngbDropdownItem (click)="mergeSelected()" [disabled]="!userCanAdd || list.selected.size < 2">
  <i-bs name="journals"></i-bs> <ng-container i18n>Merge</ng-container>
</button>
@if (emailEnabled) {
  <button ngbDropdownItem (click)="emailSelected()">
    <i-bs name="envelope"></i-bs> <ng-container i18n>Email</ng-container>
  </button>
}
</div>
</div>
</div>
<div class="btn-toolbar" ngbDropdown>
  <button
    class="btn btn-sm btn-outline-primary"
    id="dropdownSend"
    ngbDropdownToggle
    [disabled]="disabled || list.selected.size === 0"
  >
    <i-bs name="send"></i-bs>
    <div class="d-none d-sm-inline">
      <ng-container i18n>Send</ng-container>
    </div>
  </button>
  <div ngbDropdownMenu aria-labelledby="dropdownSend" class="shadow">
    <button ngbDropdownItem (click)="createShareLinkBundle()">
      <i-bs name="link"></i-bs> <ng-container i18n>Create a share link bundle</ng-container>
    </button>
    <button ngbDropdownItem (click)="manageShareLinkBundles()">
      <i-bs name="list-ul"></i-bs> <ng-container i18n>Manage share link bundles</ng-container>
    </button>
    <div class="dropdown-divider"></div>
    @if (emailEnabled) {
      <button ngbDropdownItem (click)="emailSelected()">
        <i-bs name="envelope"></i-bs> <ng-container i18n>Email</ng-container>
      </button>
    }
  </div>
</div>
<div class="btn-group btn-group-sm">
  <button class="btn btn-sm btn-outline-primary" [disabled]="awaitingDownload" (click)="downloadSelected()">
    @if (!awaitingDownload) {

@@ -3,6 +3,7 @@ import {
  HttpTestingController,
  provideHttpClientTesting,
} from '@angular/common/http/testing'
import { EventEmitter } from '@angular/core'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { By } from '@angular/platform-browser'
import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap'
@@ -25,6 +26,7 @@ import {
  SelectionData,
} from 'src/app/services/rest/document.service'
import { GroupService } from 'src/app/services/rest/group.service'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { UserService } from 'src/app/services/rest/user.service'
@@ -38,6 +40,8 @@ import { EditDialogMode } from '../../common/edit-dialog/edit-dialog.component'
import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
import { FilterableDropdownComponent } from '../../common/filterable-dropdown/filterable-dropdown.component'
import { ShareLinkBundleDialogComponent } from '../../common/share-link-bundle-dialog/share-link-bundle-dialog.component'
import { ShareLinkBundleManageDialogComponent } from '../../common/share-link-bundle-manage-dialog/share-link-bundle-manage-dialog.component'
import { BulkEditorComponent } from './bulk-editor.component'

const selectionData: SelectionData = {
@@ -72,6 +76,7 @@ describe('BulkEditorComponent', () => {
  let storagePathService: StoragePathService
  let customFieldsService: CustomFieldsService
  let httpTestingController: HttpTestingController
  let shareLinkBundleService: ShareLinkBundleService

  beforeEach(async () => {
    TestBed.configureTestingModule({
@@ -152,6 +157,15 @@ describe('BulkEditorComponent', () => {
          }),
        },
      },
      {
        provide: ShareLinkBundleService,
        useValue: {
          createBundle: jest.fn(),
          listAllBundles: jest.fn(),
          rebuildBundle: jest.fn(),
          delete: jest.fn(),
        },
      },
      provideHttpClient(withInterceptorsFromDi()),
      provideHttpClientTesting(),
    ],
@@ -168,6 +182,7 @@ describe('BulkEditorComponent', () => {
    storagePathService = TestBed.inject(StoragePathService)
    customFieldsService = TestBed.inject(CustomFieldsService)
    httpTestingController = TestBed.inject(HttpTestingController)
    shareLinkBundleService = TestBed.inject(ShareLinkBundleService)

    fixture = TestBed.createComponent(BulkEditorComponent)
    component = fixture.componentInstance
@@ -1454,4 +1469,130 @@ describe('BulkEditorComponent', () => {
      `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
    ) // listAllFilteredIds
  })

  it('should create share link bundle and enable manage callback', () => {
    jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
    jest
      .spyOn(documentListViewService, 'documents', 'get')
      .mockReturnValue([{ id: 5 }, { id: 7 }] as any)
    jest
      .spyOn(documentListViewService, 'selected', 'get')
      .mockReturnValue(new Set([5, 7]))

    const confirmClicked = new EventEmitter<void>()
    const modalRef: Partial<NgbModalRef> = {
      close: jest.fn(),
      componentInstance: {
        documents: [],
        confirmClicked,
        payload: {
          document_ids: [5, 7],
          file_version: 'archive',
          expiration_days: 7,
        },
        loading: false,
        buttonsEnabled: true,
        copied: false,
      },
    }

    const openSpy = jest.spyOn(modalService, 'open')
    openSpy.mockReturnValueOnce(modalRef as NgbModalRef)
    openSpy.mockReturnValueOnce({} as NgbModalRef)
    ;(shareLinkBundleService.createBundle as jest.Mock).mockReturnValueOnce(
      of({ id: 42 })
    )
    const toastInfoSpy = jest.spyOn(toastService, 'showInfo')

    component.createShareLinkBundle()

    expect(openSpy).toHaveBeenNthCalledWith(
      1,
      ShareLinkBundleDialogComponent,
      expect.objectContaining({ backdrop: 'static', size: 'lg' })
    )

    const dialogInstance = modalRef.componentInstance as any
    expect(dialogInstance.documents).toEqual([{ id: 5 }, { id: 7 }])

    confirmClicked.emit()

    expect(shareLinkBundleService.createBundle).toHaveBeenCalledWith({
      document_ids: [5, 7],
      file_version: 'archive',
      expiration_days: 7,
    })
    expect(dialogInstance.loading).toBe(false)
    expect(dialogInstance.buttonsEnabled).toBe(false)
    expect(dialogInstance.createdBundle).toEqual({ id: 42 })
    expect(typeof dialogInstance.onOpenManage).toBe('function')
    expect(toastInfoSpy).toHaveBeenCalledWith(
      $localize`Share link bundle creation requested.`
    )

    dialogInstance.onOpenManage()
    expect(modalRef.close).toHaveBeenCalled()
    expect(openSpy).toHaveBeenNthCalledWith(
      2,
      ShareLinkBundleManageDialogComponent,
      expect.objectContaining({ backdrop: 'static', size: 'lg' })
    )
    openSpy.mockRestore()
  })

  it('should handle share link bundle creation errors', () => {
    jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
    jest
      .spyOn(documentListViewService, 'documents', 'get')
      .mockReturnValue([{ id: 9 }] as any)
    jest
      .spyOn(documentListViewService, 'selected', 'get')
      .mockReturnValue(new Set([9]))

    const confirmClicked = new EventEmitter<void>()
    const modalRef: Partial<NgbModalRef> = {
      componentInstance: {
        documents: [],
        confirmClicked,
        payload: {
          document_ids: [9],
          file_version: 'original',
          expiration_days: null,
        },
        loading: false,
        buttonsEnabled: true,
      },
    }

    const openSpy = jest
      .spyOn(modalService, 'open')
      .mockReturnValue(modalRef as NgbModalRef)
    ;(shareLinkBundleService.createBundle as jest.Mock).mockReturnValueOnce(
      throwError(() => new Error('bundle failure'))
    )
    const toastErrorSpy = jest.spyOn(toastService, 'showError')

    component.createShareLinkBundle()

    const dialogInstance = modalRef.componentInstance as any
    confirmClicked.emit()

    expect(toastErrorSpy).toHaveBeenCalledWith(
      $localize`Share link bundle creation is not available yet.`,
      expect.any(Error)
    )
    expect(dialogInstance.loading).toBe(false)
    expect(dialogInstance.buttonsEnabled).toBe(true)
    openSpy.mockRestore()
  })

  it('should open share link bundle management dialog', () => {
    const openSpy = jest.spyOn(modalService, 'open')
    component.manageShareLinkBundles()
    expect(openSpy).toHaveBeenCalledWith(
      ShareLinkBundleManageDialogComponent,
      expect.objectContaining({ backdrop: 'static', size: 'lg' })
    )
    openSpy.mockRestore()
  })
})

@@ -33,6 +33,7 @@ import {
  SelectionDataItem,
} from 'src/app/services/rest/document.service'
import { SavedViewService } from 'src/app/services/rest/saved-view.service'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { SettingsService } from 'src/app/services/settings.service'
@@ -54,6 +55,8 @@ import {
} from '../../common/filterable-dropdown/filterable-dropdown.component'
import { ToggleableItemState } from '../../common/filterable-dropdown/toggleable-dropdown-button/toggleable-dropdown-button.component'
import { PermissionsDialogComponent } from '../../common/permissions-dialog/permissions-dialog.component'
import { ShareLinkBundleDialogComponent } from '../../common/share-link-bundle-dialog/share-link-bundle-dialog.component'
import { ShareLinkBundleManageDialogComponent } from '../../common/share-link-bundle-manage-dialog/share-link-bundle-manage-dialog.component'
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
import { CustomFieldsBulkEditDialogComponent } from './custom-fields-bulk-edit-dialog/custom-fields-bulk-edit-dialog.component'
@@ -87,6 +90,7 @@ export class BulkEditorComponent
  private customFieldService = inject(CustomFieldsService)
  private permissionService = inject(PermissionsService)
  private savedViewService = inject(SavedViewService)
  private readonly shareLinkBundleService = inject(ShareLinkBundleService)

  tagSelectionModel = new FilterableDropdownSelectionModel(true)
  correspondentSelectionModel = new FilterableDropdownSelectionModel()
@@ -908,6 +912,58 @@ export class BulkEditorComponent
    return this.settings.get(SETTINGS_KEYS.EMAIL_ENABLED)
  }

  createShareLinkBundle() {
    const modal = this.modalService.open(ShareLinkBundleDialogComponent, {
      backdrop: 'static',
      size: 'lg',
    })
    const dialog = modal.componentInstance as ShareLinkBundleDialogComponent
    const selectedDocuments = this.list.documents.filter((d) =>
      this.list.selected.has(d.id)
    )
    dialog.documents = selectedDocuments
    dialog.confirmClicked
      .pipe(takeUntil(this.unsubscribeNotifier))
      .subscribe(() => {
        dialog.loading = true
        dialog.buttonsEnabled = false
        this.shareLinkBundleService
          .createBundle(dialog.payload)
          .pipe(first())
          .subscribe({
            next: (result) => {
              dialog.loading = false
              dialog.buttonsEnabled = false
              dialog.createdBundle = result
              dialog.copied = false
              dialog.payload = null
              dialog.onOpenManage = () => {
                modal.close()
                this.manageShareLinkBundles()
              }
              this.toastService.showInfo(
                $localize`Share link bundle creation requested.`
              )
            },
            error: (error) => {
              dialog.loading = false
              dialog.buttonsEnabled = true
              this.toastService.showError(
                $localize`Share link bundle creation is not available yet.`,
                error
              )
            },
          })
      })
  }

  manageShareLinkBundles() {
    this.modalService.open(ShareLinkBundleManageDialogComponent, {
      backdrop: 'static',
      size: 'lg',
    })
  }

  emailSelected() {
    const allHaveArchiveVersion = this.list.documents
      .filter((d) => this.list.selected.has(d.id))

@@ -159,6 +159,8 @@ export interface Document extends ObjectWithPermissions {

  page_count?: number

  duplicate_documents?: Document[]

  // Frontend only
  __changedFields?: string[]
}

@@ -1,3 +1,4 @@
import { Document } from './document'
import { ObjectWithId } from './object-with-id'

export enum PaperlessTaskType {
@@ -42,5 +43,7 @@ export interface PaperlessTask extends ObjectWithId {

  related_document?: number

  duplicate_documents?: Document[]

  owner?: number
}

src-ui/src/app/data/share-link-bundle.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { FileVersion } from './share-link'

export enum ShareLinkBundleStatus {
  Pending = 'pending',
  Processing = 'processing',
  Ready = 'ready',
  Failed = 'failed',
}

export type ShareLinkBundleError = {
  bundle_id: number
  message?: string
  exception_type?: string
  timestamp?: string
}

export interface ShareLinkBundleSummary {
  id: number
  slug: string
  created: string // Date
  expiration?: string // Date
  documents: number[]
  document_count: number
  file_version: FileVersion
  status: ShareLinkBundleStatus
  built_at?: string
  size_bytes?: number
  last_error?: ShareLinkBundleError
}

export interface ShareLinkBundleCreatePayload {
  document_ids: number[]
  file_version: FileVersion
  expiration_days: number | null
}

export const SHARE_LINK_BUNDLE_STATUS_LABELS: Record<
  ShareLinkBundleStatus,
  string
> = {
  [ShareLinkBundleStatus.Pending]: $localize`Pending`,
  [ShareLinkBundleStatus.Processing]: $localize`Processing`,
  [ShareLinkBundleStatus.Ready]: $localize`Ready`,
  [ShareLinkBundleStatus.Failed]: $localize`Failed`,
}

export const SHARE_LINK_BUNDLE_FILE_VERSION_LABELS: Record<
  FileVersion,
  string
> = {
  [FileVersion.Archive]: $localize`Archive`,
  [FileVersion.Original]: $localize`Original`,
}
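Editor's note: the two `Record` maps above give type-checked label lookup, and the `?? status` fallback used in the components keeps an unrecognized status from a newer backend renderable instead of breaking the table. A small usage sketch, with plain strings standing in for the `$localize` tagged templates:

```typescript
enum ShareLinkBundleStatus {
  Pending = 'pending',
  Processing = 'processing',
  Ready = 'ready',
  Failed = 'failed',
}

// Plain-string stand-ins for the $localize-tagged labels in the real file.
const STATUS_LABELS: Record<ShareLinkBundleStatus, string> = {
  [ShareLinkBundleStatus.Pending]: 'Pending',
  [ShareLinkBundleStatus.Processing]: 'Processing',
  [ShareLinkBundleStatus.Ready]: 'Ready',
  [ShareLinkBundleStatus.Failed]: 'Failed',
}

// `?? status` keeps the UI usable if the backend ever sends a status this
// frontend build does not know about.
function statusLabel(status: ShareLinkBundleStatus): string {
  return STATUS_LABELS[status] ?? status
}

console.log(statusLabel(ShareLinkBundleStatus.Ready)) // "Ready"
```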
@@ -5,6 +5,18 @@ export enum FileVersion {
  Original = 'original',
}

export interface ShareLinkExpirationOption {
  label: string
  value: number | null
}

export const SHARE_LINK_EXPIRATION_OPTIONS: ShareLinkExpirationOption[] = [
  { label: $localize`1 day`, value: 1 },
  { label: $localize`7 days`, value: 7 },
  { label: $localize`30 days`, value: 30 },
  { label: $localize`Never`, value: null },
]

export interface ShareLink extends ObjectWithPermissions {
  created: string // Date
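Editor's note: in `ShareLinkExpirationOption`, `value` counts days and `null` encodes "Never". A hypothetical helper (not part of the codebase) showing how a selected option could translate into a concrete expiry date:

```typescript
interface ShareLinkExpirationOption {
  label: string
  value: number | null // days; null means the link never expires
}

// Hypothetical helper: turn a selected option into the expiration timestamp
// a create payload could carry.
function expirationDateFor(option: ShareLinkExpirationOption): Date | null {
  if (option.value === null) return null // "Never"
  const expires = new Date()
  expires.setDate(expires.getDate() + option.value)
  return expires
}

console.log(expirationDateFor({ label: '7 days', value: 7 })) // one week from now
console.log(expirationDateFor({ label: 'Never', value: null })) // null
```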
@@ -1,3 +1,5 @@
import { PdfEditorEditMode } from '../components/common/pdf-editor/pdf-editor-edit-mode'
import { ZoomSetting } from '../components/document-detail/zoom-setting'
import { User } from './user'

export interface UiSettings {
@@ -70,8 +72,12 @@ export const SETTINGS_KEYS = {
    'general-settings:document-editing:remove-inbox-tags',
  DOCUMENT_EDITING_OVERLAY_THUMBNAIL:
    'general-settings:document-editing:overlay-thumbnail',
  DOCUMENT_DETAILS_HIDDEN_FIELDS:
    'general-settings:document-details:hidden-fields',
  SEARCH_DB_ONLY: 'general-settings:search:db-only',
  SEARCH_FULL_TYPE: 'general-settings:search:more-link',
  PDF_EDITOR_DEFAULT_EDIT_MODE:
    'general-settings:document-editing:default-edit-mode',
  EMPTY_TRASH_DELAY: 'trash_delay',
  GMAIL_OAUTH_URL: 'gmail_oauth_url',
  OUTLOOK_OAUTH_URL: 'outlook_oauth_url',
@@ -255,6 +261,11 @@ export const SETTINGS: UiSetting[] = [
    type: 'boolean',
    default: true,
  },
  {
    key: SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS,
    type: 'array',
    default: [],
  },
  {
    key: SETTINGS_KEYS.SEARCH_DB_ONLY,
    type: 'boolean',
@@ -288,11 +299,16 @@ export const SETTINGS: UiSetting[] = [
  {
    key: SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING,
    type: 'string',
    default: 'page-width', // ZoomSetting from 'document-detail.component'
    default: ZoomSetting.PageWidth,
  },
  {
    key: SETTINGS_KEYS.AI_ENABLED,
    type: 'boolean',
    default: false,
  },
  {
    key: SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE,
    type: 'string',
    default: PdfEditorEditMode.Create,
  },
]
@@ -1,30 +1,41 @@
import { HttpEvent, HttpRequest } from '@angular/common/http'
import {
  HttpClient,
  provideHttpClient,
  withInterceptors,
} from '@angular/common/http'
import {
  HttpTestingController,
  provideHttpClientTesting,
} from '@angular/common/http/testing'
import { TestBed } from '@angular/core/testing'
import { of } from 'rxjs'
import { environment } from 'src/environments/environment'
import { ApiVersionInterceptor } from './api-version.interceptor'
import { withApiVersionInterceptor } from './api-version.interceptor'

describe('ApiVersionInterceptor', () => {
  let interceptor: ApiVersionInterceptor
  let httpClient: HttpClient
  let httpMock: HttpTestingController

  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [ApiVersionInterceptor],
      providers: [
        provideHttpClient(withInterceptors([withApiVersionInterceptor])),
        provideHttpClientTesting(),
      ],
    })

    interceptor = TestBed.inject(ApiVersionInterceptor)
    httpClient = TestBed.inject(HttpClient)
    httpMock = TestBed.inject(HttpTestingController)
  })

  it('should add api version to headers', () => {
    interceptor.intercept(new HttpRequest('GET', 'https://example.com'), {
      handle: (request) => {
        const header = request.headers['lazyUpdate'][0]
        expect(header.name).toEqual('Accept')
        expect(header.value).toEqual(
          `application/json; version=${environment.apiVersion}`
        )
        return of({} as HttpEvent<any>)
      },
    })
    httpClient.get('https://example.com').subscribe()
    const request = httpMock.expectOne('https://example.com')
    const header = request.request.headers['lazyUpdate'][0]

    expect(header.name).toEqual('Accept')
    expect(header.value).toEqual(
      `application/json; version=${environment.apiVersion}`
    )
    request.flush({})
  })
})
@@ -1,27 +1,20 @@
import {
  HttpEvent,
  HttpHandler,
  HttpInterceptor,
  HttpHandlerFn,
  HttpInterceptorFn,
  HttpRequest,
} from '@angular/common/http'
import { Injectable } from '@angular/core'
import { Observable } from 'rxjs'
import { environment } from 'src/environments/environment'

@Injectable()
export class ApiVersionInterceptor implements HttpInterceptor {
  constructor() {}

  intercept(
    request: HttpRequest<unknown>,
    next: HttpHandler
  ): Observable<HttpEvent<unknown>> {
    request = request.clone({
      setHeaders: {
        Accept: `application/json; version=${environment.apiVersion}`,
      },
    })

    return next.handle(request)
  }
export const withApiVersionInterceptor: HttpInterceptorFn = (
  request: HttpRequest<unknown>,
  next: HttpHandlerFn
): Observable<HttpEvent<unknown>> => {
  request = request.clone({
    setHeaders: {
      Accept: `application/json; version=${environment.apiVersion}`,
    },
  })
  return next(request)
}
@@ -1,35 +1,52 @@
import { HttpEvent, HttpRequest } from '@angular/common/http'
import {
  HttpClient,
  provideHttpClient,
  withInterceptors,
} from '@angular/common/http'
import {
  HttpTestingController,
  provideHttpClientTesting,
} from '@angular/common/http/testing'
import { TestBed } from '@angular/core/testing'
import { Meta } from '@angular/platform-browser'
import { CookieService } from 'ngx-cookie-service'
import { of } from 'rxjs'
import { CsrfInterceptor } from './csrf.interceptor'
import { withCsrfInterceptor } from './csrf.interceptor'

describe('CsrfInterceptor', () => {
  let interceptor: CsrfInterceptor
  let meta: Meta
  let cookieService: CookieService
  let httpClient: HttpClient
  let httpMock: HttpTestingController

  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [CsrfInterceptor, Meta, CookieService],
      providers: [
        Meta,
        CookieService,
        provideHttpClient(withInterceptors([withCsrfInterceptor])),
        provideHttpClientTesting(),
      ],
    })

    meta = TestBed.inject(Meta)
    cookieService = TestBed.inject(CookieService)
    interceptor = TestBed.inject(CsrfInterceptor)
    httpClient = TestBed.inject(HttpClient)
    httpMock = TestBed.inject(HttpTestingController)
  })

  it('should get csrf token', () => {
    meta.addTag({ name: 'cookie_prefix', content: 'ngx-' }, true)

    const cookieServiceSpy = jest.spyOn(cookieService, 'get')
    cookieServiceSpy.mockReturnValue('csrftoken')
    interceptor.intercept(new HttpRequest('GET', 'https://example.com'), {
      handle: (request) => {
        expect(request.headers['lazyUpdate'][0]['name']).toEqual('X-CSRFToken')
        return of({} as HttpEvent<any>)
      },
    })

    httpClient.get('https://example.com').subscribe()
    const request = httpMock.expectOne('https://example.com')

    expect(request.request.headers['lazyUpdate'][0]['name']).toEqual(
      'X-CSRFToken'
    )
    expect(cookieServiceSpy).toHaveBeenCalled()
    request.flush({})
  })
})
@@ -1,36 +1,32 @@
import {
  HttpEvent,
  HttpHandler,
  HttpInterceptor,
  HttpHandlerFn,
  HttpInterceptorFn,
  HttpRequest,
} from '@angular/common/http'
import { inject, Injectable } from '@angular/core'
import { inject } from '@angular/core'
import { Meta } from '@angular/platform-browser'
import { CookieService } from 'ngx-cookie-service'
import { Observable } from 'rxjs'

@Injectable()
export class CsrfInterceptor implements HttpInterceptor {
  private cookieService: CookieService = inject(CookieService)
  private meta: Meta = inject(Meta)
export const withCsrfInterceptor: HttpInterceptorFn = (
  request: HttpRequest<unknown>,
  next: HttpHandlerFn
): Observable<HttpEvent<unknown>> => {
  const cookieService: CookieService = inject(CookieService)
  const meta: Meta = inject(Meta)

  intercept(
    request: HttpRequest<unknown>,
    next: HttpHandler
  ): Observable<HttpEvent<unknown>> {
    let prefix = ''
    if (this.meta.getTag('name=cookie_prefix')) {
      prefix = this.meta.getTag('name=cookie_prefix').content
    }
    let csrfToken = this.cookieService.get(`${prefix}csrftoken`)
    if (csrfToken) {
      request = request.clone({
        setHeaders: {
          'X-CSRFToken': csrfToken,
        },
      })
    }

    return next.handle(request)
  let prefix = ''
  if (meta.getTag('name=cookie_prefix')) {
    prefix = meta.getTag('name=cookie_prefix').content
  }
  let csrfToken = cookieService.get(`${prefix}csrftoken`)
  if (csrfToken) {
    request = request.clone({
      setHeaders: {
        'X-CSRFToken': csrfToken,
      },
    })
  }
  return next(request)
}
@@ -0,0 +1,60 @@
import { HttpTestingController } from '@angular/common/http/testing'
import { TestBed } from '@angular/core/testing'
import { Subscription } from 'rxjs'
import { environment } from 'src/environments/environment'
import { commonAbstractPaperlessServiceTests } from './abstract-paperless-service.spec'
import { ShareLinkBundleService } from './share-link-bundle.service'

const endpoint = 'share_link_bundles'

commonAbstractPaperlessServiceTests(endpoint, ShareLinkBundleService)

describe('ShareLinkBundleService', () => {
  let httpTestingController: HttpTestingController
  let service: ShareLinkBundleService
  let subscription: Subscription | undefined

  beforeEach(() => {
    httpTestingController = TestBed.inject(HttpTestingController)
    service = TestBed.inject(ShareLinkBundleService)
  })

  afterEach(() => {
    subscription?.unsubscribe()
    httpTestingController.verify()
  })

  it('creates bundled share links', () => {
    const payload = {
      document_ids: [1, 2],
      file_version: 'archive',
      expiration_days: 7,
    }
    subscription = service.createBundle(payload as any).subscribe()
    const req = httpTestingController.expectOne(
      `${environment.apiBaseUrl}${endpoint}/`
    )
    expect(req.request.method).toBe('POST')
    expect(req.request.body).toEqual(payload)
    req.flush({})
  })

  it('rebuilds bundles', () => {
    subscription = service.rebuildBundle(12).subscribe()
    const req = httpTestingController.expectOne(
      `${environment.apiBaseUrl}${endpoint}/12/rebuild/`
    )
    expect(req.request.method).toBe('POST')
    expect(req.request.body).toEqual({})
    req.flush({})
  })

  it('lists bundles with expected parameters', () => {
    subscription = service.listAllBundles().subscribe()
    const req = httpTestingController.expectOne(
      `${environment.apiBaseUrl}${endpoint}/?page=1&page_size=1000&ordering=-created`
    )
    expect(req.request.method).toBe('GET')
    req.flush({ results: [] })
  })
})
src-ui/src/app/services/rest/share-link-bundle.service.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { Injectable } from '@angular/core'
import { Observable } from 'rxjs'
import { map } from 'rxjs/operators'
import {
  ShareLinkBundleCreatePayload,
  ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { AbstractNameFilterService } from './abstract-name-filter-service'

@Injectable({
  providedIn: 'root',
})
export class ShareLinkBundleService extends AbstractNameFilterService<ShareLinkBundleSummary> {
  constructor() {
    super()
    this.resourceName = 'share_link_bundles'
  }

  createBundle(
    payload: ShareLinkBundleCreatePayload
  ): Observable<ShareLinkBundleSummary> {
    this.clearCache()
    return this.http.post<ShareLinkBundleSummary>(
      this.getResourceUrl(),
      payload
    )
  }

  rebuildBundle(bundleId: number): Observable<ShareLinkBundleSummary> {
    this.clearCache()
    return this.http.post<ShareLinkBundleSummary>(
      this.getResourceUrl(bundleId, 'rebuild'),
      {}
    )
  }

  listAllBundles(): Observable<ShareLinkBundleSummary[]> {
    return this.list(1, 1000, 'created', true).pipe(
      map((response) => response.results)
    )
  }
}
@@ -1,16 +1,16 @@
import {
  APP_INITIALIZER,
  enableProdMode,
  importProvidersFrom,
  inject,
  provideAppInitializer,
  provideZoneChangeDetection,
} from '@angular/core'

import { DragDropModule } from '@angular/cdk/drag-drop'
import { DatePipe, registerLocaleData } from '@angular/common'
import {
  HTTP_INTERCEPTORS,
  provideHttpClient,
  withFetch,
  withInterceptors,
  withInterceptorsFromDi,
} from '@angular/common/http'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
@@ -151,15 +151,14 @@ import { AppComponent } from './app/app.component'
import { DirtyDocGuard } from './app/guards/dirty-doc.guard'
import { DirtySavedViewGuard } from './app/guards/dirty-saved-view.guard'
import { PermissionsGuard } from './app/guards/permissions.guard'
import { ApiVersionInterceptor } from './app/interceptors/api-version.interceptor'
import { CsrfInterceptor } from './app/interceptors/csrf.interceptor'
import { withApiVersionInterceptor } from './app/interceptors/api-version.interceptor'
import { withCsrfInterceptor } from './app/interceptors/csrf.interceptor'
import { DocumentTitlePipe } from './app/pipes/document-title.pipe'
import { FilterPipe } from './app/pipes/filter.pipe'
import { UsernamePipe } from './app/pipes/username.pipe'
import { SettingsService } from './app/services/settings.service'
import { LocalizedDateParserFormatter } from './app/utils/ngb-date-parser-formatter'
import { ISODateAdapter } from './app/utils/ngb-iso-date-adapter'
import { environment } from './environments/environment'

import localeAf from '@angular/common/locales/af'
import localeAr from '@angular/common/locales/ar'
@@ -237,11 +236,11 @@ registerLocaleData(localeUk)
registerLocaleData(localeZh)
registerLocaleData(localeZhHant)

function initializeApp(settings: SettingsService) {
  return () => {
    return settings.initializeSettings()
  }
function initializeApp() {
  const settings = inject(SettingsService)
  return settings.initializeSettings()
}

const icons = {
  airplane,
  archive,
@@ -363,10 +362,6 @@ const icons = {
  xLg,
}

if (environment.production) {
  enableProdMode()
}

bootstrapApplication(AppComponent, {
  providers: [
    provideZoneChangeDetection(),
@@ -383,24 +378,9 @@ bootstrapApplication(AppComponent, {
      DragDropModule,
      NgxBootstrapIconsModule.pick(icons)
    ),
    {
      provide: APP_INITIALIZER,
      useFactory: initializeApp,
      deps: [SettingsService],
      multi: true,
    },
    provideAppInitializer(initializeApp),
    DatePipe,
    CookieService,
    {
      provide: HTTP_INTERCEPTORS,
      useClass: CsrfInterceptor,
      multi: true,
    },
    {
      provide: HTTP_INTERCEPTORS,
      useClass: ApiVersionInterceptor,
      multi: true,
    },
    FilterPipe,
    DocumentTitlePipe,
    { provide: NgbDateAdapter, useClass: ISODateAdapter },
@@ -412,6 +392,10 @@ bootstrapApplication(AppComponent, {
    CorrespondentNamePipe,
    DocumentTypeNamePipe,
    StoragePathNamePipe,
    provideHttpClient(withInterceptorsFromDi(), withFetch()),
    provideHttpClient(
      withInterceptorsFromDi(),
      withInterceptors([withCsrfInterceptor, withApiVersionInterceptor]),
      withFetch()
    ),
  ],
}).catch((err) => console.error(err))
@@ -13,6 +13,7 @@ from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import SavedViewFilterRule
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.tasks import update_document_parent_tags
@@ -184,6 +185,22 @@ class ShareLinksAdmin(GuardedModelAdmin):
        return super().get_queryset(request).select_related("document__correspondent")


class ShareLinkBundleAdmin(GuardedModelAdmin):
    list_display = ("created", "status", "expiration", "owner", "slug")
    list_filter = ("status", "created", "expiration", "owner")
    search_fields = ("slug",)

    def get_queryset(self, request):  # pragma: no cover
        return (
            super()
            .get_queryset(request)
            .select_related("owner")
            .prefetch_related(
                "documents",
            )
        )


class CustomFieldsAdmin(GuardedModelAdmin):
    fields = ("name", "created", "data_type")
    readonly_fields = ("created", "data_type")
@@ -215,6 +232,7 @@ admin.site.register(StoragePath, StoragePathAdmin)
admin.site.register(PaperlessTask, TaskAdmin)
admin.site.register(Note, NotesAdmin)
admin.site.register(ShareLink, ShareLinksAdmin)
admin.site.register(ShareLinkBundle, ShareLinkBundleAdmin)
admin.site.register(CustomField, CustomFieldsAdmin)
admin.site.register(CustomFieldInstance, CustomFieldInstancesAdmin)
@@ -32,12 +32,12 @@ from documents.models import WorkflowTrigger
from documents.parsers import DocumentParser
from documents.parsers import ParseError
from documents.parsers import get_parser_class_for_mime_type
from documents.parsers import parse_date
from documents.permissions import set_permissions_for_object
from documents.plugins.base import AlwaysRunPluginMixin
from documents.plugins.base import ConsumeTaskPlugin
from documents.plugins.base import NoCleanupPluginMixin
from documents.plugins.base import NoSetupPluginMixin
from documents.plugins.date_parsing import get_date_parser
from documents.plugins.helpers import ProgressManager
from documents.plugins.helpers import ProgressStatusOptions
from documents.signals import document_consumption_finished
@@ -426,7 +426,8 @@ class ConsumerPlugin(
            ProgressStatusOptions.WORKING,
            ConsumerStatusShortMessage.PARSE_DATE,
        )
        date = parse_date(self.filename, text)
        with get_date_parser() as date_parser:
            date = next(date_parser.parse(self.filename, text), None)
        archive_path = document_parser.get_archive_path()
        page_count = document_parser.get_page_count(self.working_copy, mime_type)

@@ -779,19 +780,45 @@ class ConsumerPreflightPlugin(
            Q(checksum=checksum) | Q(archive_checksum=checksum),
        )
        if existing_doc.exists():
            msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
            log_msg = f"Not consuming {self.filename}: It is a duplicate of {existing_doc.get().title} (#{existing_doc.get().pk})."
            existing_doc = existing_doc.order_by("-created")
            duplicates_in_trash = existing_doc.filter(deleted_at__isnull=False)
            log_msg = (
                f"Consuming duplicate {self.filename}: "
                f"{existing_doc.count()} existing document(s) share the same content."
            )

            if existing_doc.first().deleted_at is not None:
                msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
                log_msg += " Note: existing document is in the trash."
            if duplicates_in_trash.exists():
                log_msg += " Note: at least one existing document is in the trash."

            self.log.warning(log_msg)

            if settings.CONSUMER_DELETE_DUPLICATES:
                duplicate = existing_doc.first()
                duplicate_label = (
                    duplicate.title
                    or duplicate.original_filename
                    or (Path(duplicate.filename).name if duplicate.filename else None)
                    or str(duplicate.pk)
                )

                Path(self.input_doc.original_file).unlink()
                self._fail(
                    msg,
                    log_msg,
                )

                failure_msg = (
                    f"Not consuming {self.filename}: "
                    f"It is a duplicate of {duplicate_label} (#{duplicate.pk})"
                )
                status_msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS

                if duplicates_in_trash.exists():
                    status_msg = (
                        ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
                    )
                    failure_msg += " Note: existing document is in the trash."

                self._fail(
                    status_msg,
                    failure_msg,
                )

    def pre_check_directories(self):
        """
@@ -39,6 +39,7 @@ from documents.models import Document
from documents.models import DocumentType
from documents.models import PaperlessTask
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag

@@ -796,6 +797,29 @@ class ShareLinkFilterSet(FilterSet):
        }


class ShareLinkBundleFilterSet(FilterSet):
    documents = Filter(method="filter_documents")

    class Meta:
        model = ShareLinkBundle
        fields = {
            "created": DATETIME_KWARGS,
            "expiration": DATETIME_KWARGS,
            "status": ["exact"],
        }

    def filter_documents(self, queryset, name, value):
        ids = []
        if value:
            try:
                ids = [int(item) for item in value.split(",") if item]
            except ValueError:
                return queryset.none()
        if not ids:
            return queryset
        return queryset.filter(documents__in=ids).distinct()


class PaperlessTaskFilterSet(FilterSet):
    acknowledged = BooleanFilter(
        label="Acknowledged",
@@ -501,9 +501,22 @@ class Command(BaseCommand):
        stability_timeout_ms = int(stability_delay * 1000)
        testing_timeout_ms = int(self.testing_timeout_s * 1000)

        # Start with no timeout (wait indefinitely for first event)
        # unless in testing mode
        timeout_ms = testing_timeout_ms if is_testing else 0
        # Calculate appropriate timeout for watch loop
        # In polling mode, rust_timeout must be significantly longer than poll_delay_ms
        # to ensure poll cycles can complete before timing out
        if is_testing:
            if use_polling:
                # For polling: timeout must be at least 3x the poll interval to allow
                # multiple poll cycles. This prevents timeouts from interfering with
                # the polling mechanism.
                min_polling_timeout_ms = poll_delay_ms * 3
                timeout_ms = max(min_polling_timeout_ms, testing_timeout_ms)
            else:
                # For native watching, use short timeout to check stop flag
                timeout_ms = testing_timeout_ms
        else:
            # Not testing, wait indefinitely for first event
            timeout_ms = 0

        self.stop_flag.clear()

@@ -543,8 +556,14 @@ class Command(BaseCommand):
                    # Check pending files at stability interval
                    timeout_ms = stability_timeout_ms
                elif is_testing:
                    # In testing, use short timeout to check stop flag
                    timeout_ms = testing_timeout_ms
                    # In testing, use appropriate timeout based on watch mode
                    if use_polling:
                        # For polling: ensure timeout allows polls to complete
                        min_polling_timeout_ms = poll_delay_ms * 3
                        timeout_ms = max(min_polling_timeout_ms, testing_timeout_ms)
                    else:
                        # For native watching, use short timeout to check stop flag
                        timeout_ms = testing_timeout_ms
                else:  # pragma: nocover
                    # No pending files, wait indefinitely
                    timeout_ms = 0
@@ -0,0 +1,23 @@
# Generated by Django 5.2.7 on 2026-01-14 17:45

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0005_workflowtrigger_filter_has_any_correspondents_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                editable=False,
                max_length=32,
                verbose_name="checksum",
                help_text="The checksum of the original document.",
            ),
        ),
    ]
src/documents/migrations/0007_document_content_length.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 5.2.6 on 2026-01-24 07:33

import django.db.models.functions.text
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0006_alter_document_checksum_unique"),
    ]

    operations = [
        migrations.AddField(
            model_name="document",
            name="content_length",
            field=models.GeneratedField(
                db_persist=True,
                expression=django.db.models.functions.text.Length("content"),
                null=False,
                help_text="Length of the content field in characters. Automatically maintained by the database for faster statistics computation.",
                output_field=models.PositiveIntegerField(default=0),
            ),
        ),
    ]
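One practical consequence of using a persisted `GeneratedField` here is that statistics over document content length become plain column aggregates, with no Python-side bookkeeping. A minimal sketch, assuming the `content_length` field added by this migration:

```python
# Hypothetical sketch: the database maintains content_length itself
# (db_persist=True), so it can be aggregated directly and cheaply.
from django.db.models import Avg, Sum

from documents.models import Document

stats = Document.objects.aggregate(
    total_chars=Sum("content_length"),
    average_chars=Avg("content_length"),
)
# The column is read-only from the ORM's point of view; the database
# recomputes it on every INSERT/UPDATE of `content`.
```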
src/documents/migrations/0008_sharelinkbundle.py (new file, 177 lines)
@@ -0,0 +1,177 @@
# Generated by Django 5.2.9 on 2026-01-27 01:09

import django.db.models.deletion
import django.db.models.functions.text
import django.utils.timezone
from django.conf import settings
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.db import migrations
from django.db import models


def grant_share_link_bundle_permissions(apps, schema_editor):
    # Ensure newly introduced permissions are created for all apps
    for app_config in apps.get_app_configs():
        app_config.models_module = True
        create_permissions(app_config, apps=apps, verbosity=0)
        app_config.models_module = None

    add_document_perm = Permission.objects.filter(codename="add_document").first()
    share_bundle_permissions = Permission.objects.filter(
        codename__contains="sharelinkbundle",
    )

    users = User.objects.filter(user_permissions=add_document_perm).distinct()
    for user in users:
        user.user_permissions.add(*share_bundle_permissions)

    groups = Group.objects.filter(permissions=add_document_perm).distinct()
    for group in groups:
        group.permissions.add(*share_bundle_permissions)


def revoke_share_link_bundle_permissions(apps, schema_editor):
    share_bundle_permissions = Permission.objects.filter(
        codename__contains="sharelinkbundle",
    )
    for user in User.objects.all():
        user.user_permissions.remove(*share_bundle_permissions)
    for group in Group.objects.all():
        group.permissions.remove(*share_bundle_permissions)


class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("documents", "0007_document_content_length"),
    ]

    operations = [
        migrations.CreateModel(
            name="ShareLinkBundle",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "created",
                    models.DateTimeField(
                        blank=True,
                        db_index=True,
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="created",
                    ),
                ),
                (
                    "expiration",
                    models.DateTimeField(
                        blank=True,
                        db_index=True,
                        null=True,
                        verbose_name="expiration",
                    ),
                ),
                (
                    "slug",
                    models.SlugField(
                        blank=True,
                        editable=False,
                        unique=True,
                        verbose_name="slug",
                    ),
                ),
                (
                    "file_version",
                    models.CharField(
                        choices=[("archive", "Archive"), ("original", "Original")],
                        default="archive",
                        max_length=50,
                    ),
                ),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("processing", "Processing"),
                            ("ready", "Ready"),
                            ("failed", "Failed"),
                        ],
                        default="pending",
                        max_length=50,
                    ),
                ),
                (
                    "size_bytes",
                    models.PositiveIntegerField(
                        blank=True,
                        null=True,
                        verbose_name="size (bytes)",
                    ),
                ),
                (
                    "last_error",
                    models.JSONField(
                        blank=True,
                        null=True,
                        default=None,
                        verbose_name="last error",
                    ),
                ),
                (
                    "file_path",
                    models.CharField(
                        blank=True,
                        max_length=512,
                        verbose_name="file path",
                    ),
                ),
                (
                    "built_at",
                    models.DateTimeField(
                        blank=True,
                        null=True,
                        verbose_name="built at",
                    ),
                ),
                (
                    "documents",
                    models.ManyToManyField(
                        related_name="share_link_bundles",
                        to="documents.document",
                        verbose_name="documents",
                    ),
                ),
                (
                    "owner",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="share_link_bundles",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="owner",
                    ),
                ),
            ],
            options={
                "ordering": ("-created",),
                "verbose_name": "share link bundle",
                "verbose_name_plural": "share link bundles",
            },
        ),
        migrations.RunPython(
            grant_share_link_bundle_permissions,
            reverse_code=revoke_share_link_bundle_permissions,
        ),
    ]
@@ -20,7 +20,9 @@ if settings.AUDIT_LOG_ENABLED:
    from auditlog.registry import auditlog

from django.db.models import Case
from django.db.models import PositiveIntegerField
from django.db.models.functions import Cast
from django.db.models.functions import Length
from django.db.models.functions import Substr
from django_softdelete.models import SoftDeleteModel

@@ -192,6 +194,15 @@ class Document(SoftDeleteModel, ModelWithOwner):
        ),
    )

    content_length = models.GeneratedField(
        expression=Length("content"),
        output_field=PositiveIntegerField(default=0),
        db_persist=True,
        null=False,
        serialize=False,
        help_text="Length of the content field in characters. Automatically maintained by the database for faster statistics computation.",
    )

    mime_type = models.CharField(_("mime type"), max_length=256, editable=False)

    tags = models.ManyToManyField(
@@ -205,7 +216,6 @@ class Document(SoftDeleteModel, ModelWithOwner):
        _("checksum"),
        max_length=32,
        editable=False,
        unique=True,
        help_text=_("The checksum of the original document."),
    )

@@ -756,6 +766,114 @@ class ShareLink(SoftDeleteModel):
        return f"Share Link for {self.document.title}"


class ShareLinkBundle(models.Model):
    class Status(models.TextChoices):
        PENDING = ("pending", _("Pending"))
        PROCESSING = ("processing", _("Processing"))
        READY = ("ready", _("Ready"))
        FAILED = ("failed", _("Failed"))

    created = models.DateTimeField(
        _("created"),
        default=timezone.now,
        db_index=True,
        blank=True,
        editable=False,
    )

    expiration = models.DateTimeField(
        _("expiration"),
        blank=True,
        null=True,
        db_index=True,
    )

    slug = models.SlugField(
        _("slug"),
        db_index=True,
        unique=True,
        blank=True,
        editable=False,
    )

    owner = models.ForeignKey(
        User,
        blank=True,
        null=True,
        related_name="share_link_bundles",
        on_delete=models.SET_NULL,
        verbose_name=_("owner"),
    )

    file_version = models.CharField(
        max_length=50,
        choices=ShareLink.FileVersion.choices,
        default=ShareLink.FileVersion.ARCHIVE,
    )

    status = models.CharField(
        max_length=50,
        choices=Status.choices,
        default=Status.PENDING,
    )

    size_bytes = models.PositiveIntegerField(
        _("size (bytes)"),
        blank=True,
        null=True,
    )

    last_error = models.JSONField(
        _("last error"),
        blank=True,
        null=True,
        default=None,
    )

    file_path = models.CharField(
        _("file path"),
        max_length=512,
        blank=True,
    )

    built_at = models.DateTimeField(
        _("built at"),
        null=True,
        blank=True,
    )

    documents = models.ManyToManyField(
        "documents.Document",
        related_name="share_link_bundles",
        verbose_name=_("documents"),
    )

    class Meta:
        ordering = ("-created",)
        verbose_name = _("share link bundle")
        verbose_name_plural = _("share link bundles")

    def __str__(self):
        return _("Share link bundle %(slug)s") % {"slug": self.slug}

    @property
    def absolute_file_path(self) -> Path | None:
        if not self.file_path:
            return None
        return (settings.SHARE_LINK_BUNDLE_DIR / Path(self.file_path)).resolve()

    def remove_file(self):
        if self.absolute_file_path is not None and self.absolute_file_path.exists():
            try:
                self.absolute_file_path.unlink()
            except OSError:
                pass

    def delete(self, using=None, *, keep_parents=False):
        self.remove_file()
        return super().delete(using=using, keep_parents=keep_parents)

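A never-expiring bundle stores `expiration=None`, so consumers of this model have to cover both cases when deciding whether a bundle is still downloadable. A hedged sketch of such a query, assuming only the model shown above:

```python
# Hypothetical sketch: selecting bundles that are built and not yet expired.
from django.db.models import Q
from django.utils import timezone

from documents.models import ShareLinkBundle

downloadable = ShareLinkBundle.objects.filter(
    status=ShareLinkBundle.Status.READY,
).filter(
    # expiration is NULL for "never expires", otherwise must be in the future
    Q(expiration__isnull=True) | Q(expiration__gt=timezone.now()),
)
```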
class CustomField(models.Model):
    """
    Defines the name and type of a custom field
@@ -946,7 +1064,7 @@ if settings.AUDIT_LOG_ENABLED:
    auditlog.register(
        Document,
        m2m_fields={"tags"},
        exclude_fields=["modified"],
        exclude_fields=["content_length", "modified"],
    )
    auditlog.register(Correspondent)
    auditlog.register(Tag)
@@ -9,22 +9,17 @@ import subprocess
import tempfile
from functools import lru_cache
from pathlib import Path
from re import Match
from typing import TYPE_CHECKING

from django.conf import settings
from django.utils import timezone

from documents.loggers import LoggingMixin
from documents.signals import document_consumer_declaration
from documents.utils import copy_file_with_basic_stats
from documents.utils import run_subprocess
from paperless.config import OcrConfig
from paperless.utils import ocr_to_dateparser_languages

if TYPE_CHECKING:
    import datetime
    from collections.abc import Iterator

# This regular expression will try to find dates in the document at
# hand and will match the following formats:
@@ -259,75 +254,6 @@ def make_thumbnail_from_pdf(in_path: Path, temp_dir: Path, logging_group=None) -
    return out_path


def parse_date(filename, text) -> datetime.datetime | None:
    return next(parse_date_generator(filename, text), None)


def parse_date_generator(filename, text) -> Iterator[datetime.datetime]:
    """
    Returns the date of the document.
    """

    def __parser(ds: str, date_order: str) -> datetime.datetime:
        """
        Call dateparser.parse with a particular date ordering
        """
        import dateparser

        ocr_config = OcrConfig()
        languages = settings.DATE_PARSER_LANGUAGES or ocr_to_dateparser_languages(
            ocr_config.language,
        )

        return dateparser.parse(
            ds,
            settings={
                "DATE_ORDER": date_order,
                "PREFER_DAY_OF_MONTH": "first",
                "RETURN_AS_TIMEZONE_AWARE": True,
                "TIMEZONE": settings.TIME_ZONE,
            },
            locales=languages,
        )

    def __filter(date: datetime.datetime) -> datetime.datetime | None:
        if (
            date is not None
            and date.year > 1900
            and date <= timezone.now()
            and date.date() not in settings.IGNORE_DATES
        ):
            return date
        return None

    def __process_match(
        match: Match[str],
        date_order: str,
    ) -> datetime.datetime | None:
        date_string = match.group(0)

        try:
            date = __parser(date_string, date_order)
        except Exception:
            # Skip all matches that do not parse to a proper date
            date = None

        return __filter(date)

    def __process_content(content: str, date_order: str) -> Iterator[datetime.datetime]:
        for m in re.finditer(DATE_REGEX, content):
            date = __process_match(m, date_order)
            if date is not None:
                yield date

    # if filename date parsing is enabled, search there first:
    if settings.FILENAME_DATE_ORDER:
        yield from __process_content(filename, settings.FILENAME_DATE_ORDER)

    # Iterate through all regex matches in text and try to parse the date
    yield from __process_content(text, settings.DATE_ORDER)


class ParseError(Exception):
    pass
@@ -148,13 +148,29 @@ def get_document_count_filter_for_user(user):
    )


def get_objects_for_user_owner_aware(user, perms, Model) -> QuerySet:
    objects_owned = Model.objects.filter(owner=user)
    objects_unowned = Model.objects.filter(owner__isnull=True)
def get_objects_for_user_owner_aware(
    user,
    perms,
    Model,
    *,
    include_deleted=False,
) -> QuerySet:
    """
    Returns objects the user owns, are unowned, or has explicit perms.
    When include_deleted is True, soft-deleted items are also included.
    """
    manager = (
        Model.global_objects
        if include_deleted and hasattr(Model, "global_objects")
        else Model.objects
    )

    objects_owned = manager.filter(owner=user)
    objects_unowned = manager.filter(owner__isnull=True)
    objects_with_perms = get_objects_for_user(
        user=user,
        perms=perms,
        klass=Model,
        klass=manager.all(),
        accept_global_perms=False,
    )
    return objects_owned | objects_unowned | objects_with_perms
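A hedged sketch of what the new keyword-only flag enables; the serializer changes further down use exactly this pattern to surface duplicates that are sitting in the trash:

```python
# Hypothetical sketch: include soft-deleted documents when computing what a
# user may view. With include_deleted=True the function switches to the
# soft-delete manager (global_objects) when the model provides one.
from django.contrib.auth.models import User

from documents.models import Document
from documents.permissions import get_objects_for_user_owner_aware

user = User.objects.first()  # any user, for illustration only

viewable = get_objects_for_user_owner_aware(
    user,
    "documents.view_document",
    Document,
    include_deleted=True,
)
# Soft-deleted documents carry a deleted_at timestamp:
trashed = viewable.filter(deleted_at__isnull=False)
```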
src/documents/plugins/date_parsing/__init__.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import logging
from functools import lru_cache
from importlib.metadata import EntryPoint
from importlib.metadata import entry_points
from typing import Final

from django.conf import settings
from django.utils import timezone

from documents.plugins.date_parsing.base import DateParserConfig
from documents.plugins.date_parsing.base import DateParserPluginBase
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin
from paperless.utils import ocr_to_dateparser_languages

logger = logging.getLogger(__name__)

DATE_PARSER_ENTRY_POINT_GROUP: Final = "paperless_ngx.date_parsers"


@lru_cache(maxsize=1)
def _discover_parser_class() -> type[DateParserPluginBase]:
    """
    Discovers the date parser plugin class to use.

    - If one or more plugins are found, sorts them by name and returns the first.
    - If no plugins are found, returns the default RegexDateParser.
    """

    eps: tuple[EntryPoint, ...]
    try:
        eps = entry_points(group=DATE_PARSER_ENTRY_POINT_GROUP)
    except Exception as e:
        # Log a warning
        logger.warning(f"Could not query entry points for date parsers: {e}")
        eps = ()

    valid_plugins: list[EntryPoint] = []
    for ep in eps:
        try:
            plugin_class = ep.load()
            if plugin_class and issubclass(plugin_class, DateParserPluginBase):
                valid_plugins.append(ep)
            else:
                logger.warning(f"Plugin {ep.name} does not subclass DateParser.")
        except Exception as e:
            logger.error(f"Unable to load date parser plugin {ep.name}: {e}")

    if not valid_plugins:
        return RegexDateParserPlugin

    valid_plugins.sort(key=lambda ep: ep.name)

    if len(valid_plugins) > 1:
        logger.warning(
            f"Multiple date parsers found: "
            f"{[ep.name for ep in valid_plugins]}. "
            f"Using the first one by name: '{valid_plugins[0].name}'.",
        )

    return valid_plugins[0].load()


def get_date_parser() -> DateParserPluginBase:
    """
    Factory function to get an initialized date parser instance.

    This function is responsible for:
    1. Discovering the correct parser class (plugin or default).
    2. Loading configuration from Django settings.
    3. Instantiating the parser with the configuration.
    """
    # 1. Discover the class (this is cached)
    parser_class = _discover_parser_class()

    # 2. Load configuration from settings
    # TODO: Get the language from the settings and/or configuration object, depending
    languages = (
        settings.DATE_PARSER_LANGUAGES
        or ocr_to_dateparser_languages(settings.OCR_LANGUAGE)
    )

    config = DateParserConfig(
        languages=languages,
        timezone_str=settings.TIME_ZONE,
        ignore_dates=settings.IGNORE_DATES,
        reference_time=timezone.now(),
        filename_date_order=settings.FILENAME_DATE_ORDER,
        content_date_order=settings.DATE_ORDER,
    )

    # 3. Instantiate the discovered class with the config
    return parser_class(config=config)
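A hedged sketch of how a third-party parser would plug into this discovery mechanism. Only the `paperless_ngx.date_parsers` group name comes from the code above; the package, module, and entry-point names are illustrative, and running this snippet requires a configured Django settings module:

```python
# Hypothetical sketch of plugin discovery. A plugin package would register
# itself in its own pyproject.toml (names below are illustrative):
#
#   [project.entry-points."paperless_ngx.date_parsers"]
#   my_parser = "my_paperless_plugin.parser:MyDateParserPlugin"
#
# Without any installed plugin, discovery falls back to the built-in parser:
from documents.plugins.date_parsing import _discover_parser_class, get_date_parser
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin

assert _discover_parser_class() is RegexDateParserPlugin

# The factory wires settings into a DateParserConfig and returns an instance;
# the context manager mirrors the consumer.py call site shown earlier.
with get_date_parser() as parser:
    first_date = next(parser.parse("scan_2023-05-04.pdf", "Invoice dated 04.05.2023"), None)
```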
src/documents/plugins/date_parsing/base.py (new file, 124 lines)
@@ -0,0 +1,124 @@
import datetime
import logging
from abc import ABC
from abc import abstractmethod
from collections.abc import Iterator
from dataclasses import dataclass
from types import TracebackType

try:
    from typing import Self
except ImportError:
    from typing_extensions import Self

import dateparser

logger = logging.getLogger(__name__)


@dataclass(frozen=True, slots=True)
class DateParserConfig:
    """
    Configuration for a DateParser instance.

    This object is created by the factory and passed to the
    parser's constructor, decoupling the parser from settings.
    """

    languages: list[str]
    timezone_str: str
    ignore_dates: set[datetime.date]

    # A "now" timestamp for filtering future dates.
    # Passed in by the factory.
    reference_time: datetime.datetime

    # Settings for the default RegexDateParser
    # Other plugins should use or consider these, but it is not required
    filename_date_order: str | None
    content_date_order: str


class DateParserPluginBase(ABC):
    """
    Abstract base class for date parsing strategies.

    Instances are configured via a DateParserConfig object.
    """

    def __init__(self, config: DateParserConfig):
        """
        Initializes the parser with its configuration.
        """
        self.config = config

    def __enter__(self) -> Self:
        """
        Enter the runtime context related to this object.

        Subclasses can override this to acquire resources (connections, handles).
        """
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        """
        Exit the runtime context related to this object.

        Subclasses can override this to release resources.
        """
        # Default implementation does nothing.
        # Returning None implies exceptions are propagated.

    def _parse_string(
        self,
        date_string: str,
        date_order: str,
    ) -> datetime.datetime | None:
        """
        Helper method to parse a single date string using dateparser.

        Uses configuration from `self.config`.
        """
        try:
            return dateparser.parse(
                date_string,
                settings={
                    "DATE_ORDER": date_order,
                    "PREFER_DAY_OF_MONTH": "first",
                    "RETURN_AS_TIMEZONE_AWARE": True,
                    "TIMEZONE": self.config.timezone_str,
                },
                locales=self.config.languages,
            )
        except Exception as e:
            logger.error(f"Error while parsing date string '{date_string}': {e}")
            return None

    def _filter_date(
        self,
        date: datetime.datetime | None,
    ) -> datetime.datetime | None:
        """
        Helper method to validate a parsed datetime object.

        Uses configuration from `self.config`.
        """
        if (
            date is not None
            and date.year > 1900
            and date <= self.config.reference_time
            and date.date() not in self.config.ignore_dates
        ):
            return date
        return None

    @abstractmethod
    def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
        """
        Parses a document's filename and content, yielding valid datetime objects.
        """
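A minimal hypothetical subclass, assuming only the `DateParserPluginBase` API shown above. It skips the regex scan entirely and feeds whole strings to the base helpers, which handle the `dateparser` call and the sanity filtering (year > 1900, not in the future, not in `ignore_dates`):

```python
# Illustrative plugin sketch; class and module names are made up.
import datetime
from collections.abc import Iterator

from documents.plugins.date_parsing.base import DateParserPluginBase


class WholeTextDateParserPlugin(DateParserPluginBase):
    def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
        for text, order in (
            (filename, self.config.filename_date_order),
            (content, self.config.content_date_order),
        ):
            # filename_date_order may be None when filename parsing is disabled
            if not text or not order:
                continue
            date = self._filter_date(self._parse_string(text, order))
            if date is not None:
                yield date
```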
src/documents/plugins/date_parsing/regex_parser.py (new file, 65 lines)
@@ -0,0 +1,65 @@
import datetime
import re
from collections.abc import Iterator
from re import Match

from documents.plugins.date_parsing.base import DateParserPluginBase


class RegexDateParserPlugin(DateParserPluginBase):
    """
    The default date parser, using a series of regular expressions.

    It is configured entirely by the DateParserConfig object
    passed to its constructor.
    """

    DATE_REGEX = re.compile(
        r"(\b|(?!=([_-])))(\d{1,2})[\.\/-](\d{1,2})[\.\/-](\d{4}|\d{2})(\b|(?=([_-])))|"
        r"(\b|(?!=([_-])))(\d{4}|\d{2})[\.\/-](\d{1,2})[\.\/-](\d{1,2})(\b|(?=([_-])))|"
        r"(\b|(?!=([_-])))(\d{1,2}[\. ]+[a-zéûäëčžúřěáíóńźçŞğü]{3,9} \d{4}|[a-zéûäëčžúřěáíóńźçŞğü]{3,9} \d{1,2}, \d{4})(\b|(?=([_-])))|"
        r"(\b|(?!=([_-])))([^\W\d_]{3,9} \d{1,2}, (\d{4}))(\b|(?=([_-])))|"
        r"(\b|(?!=([_-])))([^\W\d_]{3,9} \d{4})(\b|(?=([_-])))|"
        r"(\b|(?!=([_-])))(\d{1,2}[^ 0-9]{2}[\. ]+[^ ]{3,9}[ \.\/-]\d{4})(\b|(?=([_-])))|"
        r"(\b|(?!=([_-])))(\b\d{1,2}[ \.\/-][a-zéûäëčžúřěáíóńźçŞğü]{3}[ \.\/-]\d{4})(\b|(?=([_-])))",
        re.IGNORECASE,
    )

    def _process_match(
        self,
        match: Match[str],
        date_order: str,
    ) -> datetime.datetime | None:
        """
        Processes a single regex match using the base class helpers.
        """
        date_string = match.group(0)
        date = self._parse_string(date_string, date_order)
        return self._filter_date(date)

    def _process_content(
        self,
        content: str,
        date_order: str,
    ) -> Iterator[datetime.datetime]:
        """
        Finds all regex matches in content and yields valid dates.
        """
        for m in re.finditer(self.DATE_REGEX, content):
            date = self._process_match(m, date_order)
            if date is not None:
                yield date

    def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
        """
        Implementation of the abstract parse method.

        Reads its configuration from `self.config`.
        """
        if self.config.filename_date_order:
            yield from self._process_content(
                filename,
                self.config.filename_date_order,
            )

        yield from self._process_content(content, self.config.content_date_order)
@@ -4,6 +4,7 @@ import logging
import math
import re
from datetime import datetime
from datetime import timedelta
from decimal import Decimal
from typing import TYPE_CHECKING
from typing import Literal
@@ -23,7 +24,9 @@ from django.core.validators import MinValueValidator
from django.core.validators import RegexValidator
from django.core.validators import integer_validator
from django.db.models import Count
from django.db.models import Q
from django.db.models.functions import Lower
from django.utils import timezone
from django.utils.crypto import get_random_string
from django.utils.dateparse import parse_datetime
from django.utils.text import slugify
@@ -61,6 +64,7 @@ from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import SavedViewFilterRule
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import UiSettings
@@ -72,6 +76,7 @@ from documents.models import WorkflowTrigger
from documents.parsers import is_mime_type_supported
from documents.permissions import get_document_count_filter_for_user
from documents.permissions import get_groups_with_only_permission
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import set_permissions_for_object
from documents.regex import validate_regex_pattern
from documents.templating.filepath import validate_filepath_template_and_render
@@ -82,6 +87,9 @@ from documents.validators import url_validator
if TYPE_CHECKING:
    from collections.abc import Iterable

    from django.db.models.query import QuerySet


logger = logging.getLogger("paperless.serializers")


@@ -1014,6 +1022,32 @@ class NotesSerializer(serializers.ModelSerializer):
        return ret


def _get_viewable_duplicates(
    document: Document,
    user: User | None,
) -> QuerySet[Document]:
    checksums = {document.checksum}
    if document.archive_checksum:
        checksums.add(document.archive_checksum)
    duplicates = Document.global_objects.filter(
        Q(checksum__in=checksums) | Q(archive_checksum__in=checksums),
    ).exclude(pk=document.pk)
    duplicates = duplicates.order_by("-created")
    allowed = get_objects_for_user_owner_aware(
        user,
        "documents.view_document",
        Document,
        include_deleted=True,
    )
    return duplicates.filter(id__in=allowed)


class DuplicateDocumentSummarySerializer(serializers.Serializer):
    id = serializers.IntegerField()
    title = serializers.CharField()
    deleted_at = serializers.DateTimeField(allow_null=True)


@extend_schema_serializer(
    deprecate_fields=["created_date"],
)
@@ -1031,6 +1065,7 @@ class DocumentSerializer(
    archived_file_name = SerializerMethodField()
    created_date = serializers.DateField(required=False)
    page_count = SerializerMethodField()
    duplicate_documents = SerializerMethodField()

    notes = NotesSerializer(many=True, required=False, read_only=True)

@@ -1056,6 +1091,16 @@ class DocumentSerializer(
    def get_page_count(self, obj) -> int | None:
        return obj.page_count

    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
    def get_duplicate_documents(self, obj):
        view = self.context.get("view")
        if view and getattr(view, "action", None) != "retrieve":
            return []
        request = self.context.get("request")
        user = request.user if request else None
        duplicates = _get_viewable_duplicates(obj, user)
        return list(duplicates.values("id", "title", "deleted_at"))

    def get_original_file_name(self, obj) -> str | None:
        return obj.original_filename

@@ -1233,6 +1278,7 @@ class DocumentSerializer(
            "archive_serial_number",
            "original_file_name",
            "archived_file_name",
            "duplicate_documents",
            "owner",
            "permissions",
            "user_can_change",
@@ -2094,10 +2140,12 @@ class TasksViewSerializer(OwnedObjectSerializer):
            "result",
            "acknowledged",
            "related_document",
            "duplicate_documents",
            "owner",
        )

    related_document = serializers.SerializerMethodField()
    duplicate_documents = serializers.SerializerMethodField()
    created_doc_re = re.compile(r"New document id (\d+) created")
    duplicate_doc_re = re.compile(r"It is a duplicate of .* \(#(\d+)\)")

@@ -2122,6 +2170,17 @@ class TasksViewSerializer(OwnedObjectSerializer):

        return result

    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
    def get_duplicate_documents(self, obj):
        related_document = self.get_related_document(obj)
        request = self.context.get("request")
        user = request.user if request else None
        document = Document.global_objects.filter(pk=related_document).first()
        if not related_document or not user or not document:
            return []
        duplicates = _get_viewable_duplicates(document, user)
        return list(duplicates.values("id", "title", "deleted_at"))


class RunTaskViewSerializer(serializers.Serializer):
    task_name = serializers.ChoiceField(
@@ -2172,6 +2231,104 @@ class ShareLinkSerializer(OwnedObjectSerializer):
        return super().create(validated_data)


class ShareLinkBundleSerializer(OwnedObjectSerializer):
    document_ids = serializers.ListField(
        child=serializers.IntegerField(min_value=1),
        allow_empty=False,
        write_only=True,
    )
    expiration_days = serializers.IntegerField(
        required=False,
        allow_null=True,
        min_value=1,
        write_only=True,
    )
    documents = serializers.PrimaryKeyRelatedField(
        many=True,
        read_only=True,
    )
    document_count = SerializerMethodField()

    class Meta:
        model = ShareLinkBundle
        fields = (
            "id",
            "created",
            "expiration",
            "expiration_days",
            "slug",
            "file_version",
            "status",
            "size_bytes",
            "last_error",
            "built_at",
            "documents",
            "document_ids",
            "document_count",
        )
        read_only_fields = (
            "id",
            "created",
            "expiration",
            "slug",
            "status",
            "size_bytes",
            "last_error",
            "built_at",
            "documents",
            "document_count",
        )

    def validate_document_ids(self, value):
        unique_ids = set(value)
        if len(unique_ids) != len(value):
            raise serializers.ValidationError(
                _("Duplicate document identifiers are not allowed."),
            )
        return value

    def create(self, validated_data):
        document_ids = validated_data.pop("document_ids")
        expiration_days = validated_data.pop("expiration_days", None)
        validated_data["slug"] = get_random_string(50)
        if expiration_days:
            validated_data["expiration"] = timezone.now() + timedelta(
                days=expiration_days,
            )
        else:
            validated_data["expiration"] = None

        share_link_bundle = super().create(validated_data)

        documents = list(
            Document.objects.filter(pk__in=document_ids).only(
                "pk",
            ),
        )
        documents_by_id = {doc.pk: doc for doc in documents}
        missing = [
            str(doc_id) for doc_id in document_ids if doc_id not in documents_by_id
        ]
        if missing:
            raise serializers.ValidationError(
                {
                    "document_ids": _(
                        "Documents not found: %(ids)s",
                    )
                    % {"ids": ", ".join(missing)},
                },
            )

        ordered_documents = [documents_by_id[doc_id] for doc_id in document_ids]
        share_link_bundle.documents.set(ordered_documents)
        share_link_bundle.document_total = len(ordered_documents)

        return share_link_bundle

    def get_document_count(self, obj: ShareLinkBundle) -> int:
        return getattr(obj, "document_total") or obj.documents.count()

class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
|
||||
objects = serializers.ListField(
|
||||
required=True,
|
||||
|
||||
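
For orientation, here is roughly how a client of this serializer creates a bundle; the view wiring and endpoint are not part of this diff, so everything beyond the field names should be read as an assumption:

```python
# Sketch: exercising ShareLinkBundleSerializer the way a DRF view might.
payload = {
    "document_ids": [3, 1, 2],  # validated for duplicates; order is preserved
    "expiration_days": 7,       # omit or send null for a non-expiring bundle
    "file_version": "archive",  # assumed choice from ShareLink.FileVersion
}
serializer = ShareLinkBundleSerializer(data=payload, context={"request": request})
serializer.is_valid(raise_exception=True)
bundle = serializer.save(owner=request.user)
# The bundle starts without a built file; a background task zips it up later
# and flips status to READY (see build_share_link_bundle below).
```
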
@@ -3,8 +3,10 @@ import hashlib
import logging
import shutil
import uuid
import zipfile
from pathlib import Path
from tempfile import TemporaryDirectory
from tempfile import mkstemp

import tqdm
from celery import Task
@@ -22,6 +24,8 @@ from whoosh.writing import AsyncWriter
from documents import index
from documents import sanity_checker
from documents.barcodes import BarcodePlugin
from documents.bulk_download import ArchiveOnlyStrategy
from documents.bulk_download import OriginalsOnlyStrategy
from documents.caching import clear_document_caches
from documents.classifier import DocumentClassifier
from documents.classifier import load_classifier
@@ -39,6 +43,8 @@ from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import DocumentType
from documents.models import PaperlessTask
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import WorkflowRun
@@ -625,3 +631,117 @@ def update_document_in_llm_index(document):
@shared_task
def remove_document_from_llm_index(document):
    llm_index_remove_document(document)
@shared_task
def build_share_link_bundle(bundle_id: int):
    try:
        bundle = (
            ShareLinkBundle.objects.filter(pk=bundle_id)
            .prefetch_related("documents")
            .get()
        )
    except ShareLinkBundle.DoesNotExist:
        logger.warning("Share link bundle %s no longer exists.", bundle_id)
        return

    bundle.remove_file()
    bundle.status = ShareLinkBundle.Status.PROCESSING
    bundle.last_error = None
    bundle.size_bytes = None
    bundle.built_at = None
    bundle.file_path = ""
    bundle.save(
        update_fields=[
            "status",
            "last_error",
            "size_bytes",
            "built_at",
            "file_path",
        ],
    )

    documents = list(bundle.documents.all().order_by("pk"))

    _, temp_zip_path_str = mkstemp(suffix=".zip", dir=settings.SCRATCH_DIR)
    temp_zip_path = Path(temp_zip_path_str)

    try:
        strategy_class = (
            ArchiveOnlyStrategy
            if bundle.file_version == ShareLink.FileVersion.ARCHIVE
            else OriginalsOnlyStrategy
        )
        with zipfile.ZipFile(temp_zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
            strategy = strategy_class(zipf)
            for document in documents:
                strategy.add_document(document)

        output_dir = settings.SHARE_LINK_BUNDLE_DIR
        output_dir.mkdir(parents=True, exist_ok=True)
        final_path = (output_dir / f"{bundle.slug}.zip").resolve()
        if final_path.exists():
            final_path.unlink()
        shutil.move(temp_zip_path, final_path)

        bundle.file_path = f"{bundle.slug}.zip"
        bundle.size_bytes = final_path.stat().st_size
        bundle.status = ShareLinkBundle.Status.READY
        bundle.built_at = timezone.now()
        bundle.last_error = None
        bundle.save(
            update_fields=[
                "file_path",
                "size_bytes",
                "status",
                "built_at",
                "last_error",
            ],
        )
        logger.info("Built share link bundle %s", bundle.pk)
    except Exception as exc:
        logger.exception(
            "Failed to build share link bundle %s: %s",
            bundle_id,
            exc,
        )
        bundle.status = ShareLinkBundle.Status.FAILED
        bundle.last_error = {
            "bundle_id": bundle_id,
            "exception_type": exc.__class__.__name__,
            "message": str(exc),
            "timestamp": timezone.now().isoformat(),
        }
        bundle.save(update_fields=["status", "last_error"])
        try:
            temp_zip_path.unlink()
        except OSError:
            pass
        raise
    finally:
        try:
            temp_zip_path.unlink(missing_ok=True)
        except OSError:
            pass


@shared_task
def cleanup_expired_share_link_bundles():
    now = timezone.now()
    expired_qs = ShareLinkBundle.objects.filter(
        expiration__isnull=False,
        expiration__lt=now,
    )
    count = 0
    for bundle in expired_qs.iterator():
        count += 1
        try:
            bundle.delete()
        except Exception as exc:
            logger.warning(
                "Failed to delete expired share link bundle %s: %s",
                bundle.pk,
                exc,
            )
    if count:
        logger.info("Deleted %s expired share link bundle(s)", count)
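
How these two tasks get triggered is not shown in this diff. A plausible wiring, hedged as an assumption, would be:

```python
# Sketch only: neither call site below is part of this diff.
# After the bundle row is created (e.g. in the viewset's perform_create):
build_share_link_bundle.delay(bundle.pk)

# And a periodic Celery beat entry to reap expired bundles, e.g. in settings:
# CELERY_BEAT_SCHEDULE["cleanup_expired_share_link_bundles"] = {
#     "task": "documents.tasks.cleanup_expired_share_link_bundles",
#     "schedule": crontab(minute=0),  # hourly; the cadence is an assumption
# }
```
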
src/documents/tests/date_parsing/__init__.py (new file, 0 lines)
src/documents/tests/date_parsing/conftest.py (new file, 82 lines)
@@ -0,0 +1,82 @@
import datetime
from collections.abc import Generator
from typing import Any

import pytest
import pytest_django

from documents.plugins.date_parsing import _discover_parser_class
from documents.plugins.date_parsing.base import DateParserConfig
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin


@pytest.fixture
def base_config() -> DateParserConfig:
    """Basic configuration for date parser testing."""
    return DateParserConfig(
        languages=["en"],
        timezone_str="UTC",
        ignore_dates=set(),
        reference_time=datetime.datetime(
            2024,
            1,
            15,
            12,
            0,
            0,
            tzinfo=datetime.timezone.utc,
        ),
        filename_date_order="YMD",
        content_date_order="DMY",
    )


@pytest.fixture
def config_with_ignore_dates() -> DateParserConfig:
    """Configuration with dates to ignore."""
    return DateParserConfig(
        languages=["en", "de"],
        timezone_str="America/New_York",
        ignore_dates={datetime.date(2024, 1, 1), datetime.date(2024, 12, 25)},
        reference_time=datetime.datetime(
            2024,
            1,
            15,
            12,
            0,
            0,
            tzinfo=datetime.timezone.utc,
        ),
        filename_date_order="DMY",
        content_date_order="MDY",
    )


@pytest.fixture
def regex_parser(base_config: DateParserConfig) -> RegexDateParserPlugin:
    """Instance of RegexDateParser with base config."""
    return RegexDateParserPlugin(base_config)


@pytest.fixture
def clear_lru_cache() -> Generator[None, None, None]:
    """
    Ensure the LRU cache for _discover_parser_class is cleared
    before and after any test that depends on it.
    """
    _discover_parser_class.cache_clear()
    yield
    _discover_parser_class.cache_clear()


@pytest.fixture
def mock_date_parser_settings(settings: pytest_django.fixtures.SettingsWrapper) -> Any:
    """
    Override Django settings for the duration of date parser tests.
    """
    settings.DATE_PARSER_LANGUAGES = ["en", "de"]
    settings.TIME_ZONE = "UTC"
    settings.IGNORE_DATES = [datetime.date(1900, 1, 1)]
    settings.FILENAME_DATE_ORDER = "YMD"
    settings.DATE_ORDER = "DMY"
    return settings
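
These fixtures pair with a custom `date_parsing` pytest marker used throughout the new tests; pytest warns about (and, with `--strict-markers`, rejects) unregistered markers, so the suite presumably registers it along these lines (the exact location in the repo's config is an assumption):

```python
# conftest.py sketch: registering the custom marker so pytest accepts
# @pytest.mark.date_parsing without warnings.
def pytest_configure(config):
    config.addinivalue_line(
        "markers",
        "date_parsing: tests for the pluggable date parsing subsystem",
    )
```
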
@@ -0,0 +1,228 @@
import datetime
import logging
from collections.abc import Iterator
from importlib.metadata import EntryPoint

import pytest
import pytest_mock
from django.utils import timezone

from documents.plugins.date_parsing import DATE_PARSER_ENTRY_POINT_GROUP
from documents.plugins.date_parsing import _discover_parser_class
from documents.plugins.date_parsing import get_date_parser
from documents.plugins.date_parsing.base import DateParserConfig
from documents.plugins.date_parsing.base import DateParserPluginBase
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin


class AlphaParser(DateParserPluginBase):
    def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
        yield timezone.now()


class BetaParser(DateParserPluginBase):
    def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
        yield timezone.now()


@pytest.mark.date_parsing
@pytest.mark.usefixtures("clear_lru_cache")
class TestDiscoverParserClass:
    """Tests for the _discover_parser_class() function."""

    def test_returns_default_when_no_plugins_found(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(),
        )
        result = _discover_parser_class()
        assert result is RegexDateParserPlugin

    def test_returns_default_when_entrypoint_query_fails(
        self,
        mocker: pytest_mock.MockerFixture,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            side_effect=RuntimeError("boom"),
        )
        result = _discover_parser_class()
        assert result is RegexDateParserPlugin
        assert "Could not query entry points" in caplog.text

    def test_filters_out_invalid_plugins(
        self,
        mocker: pytest_mock.MockerFixture,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        fake_ep = mocker.MagicMock(spec=EntryPoint)
        fake_ep.name = "bad_plugin"
        fake_ep.load.return_value = object  # not subclass of DateParser

        mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(fake_ep,),
        )

        result = _discover_parser_class()
        assert result is RegexDateParserPlugin
        assert "does not subclass DateParser" in caplog.text

    def test_skips_plugins_that_fail_to_load(
        self,
        mocker: pytest_mock.MockerFixture,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        fake_ep = mocker.MagicMock(spec=EntryPoint)
        fake_ep.name = "failing_plugin"
        fake_ep.load.side_effect = ImportError("cannot import")

        mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(fake_ep,),
        )

        result = _discover_parser_class()
        assert result is RegexDateParserPlugin
        assert "Unable to load date parser plugin failing_plugin" in caplog.text

    def test_returns_single_valid_plugin_without_warning(
        self,
        mocker: pytest_mock.MockerFixture,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        """If exactly one valid plugin is discovered, it should be returned without logging a warning."""

        ep = mocker.MagicMock(spec=EntryPoint)
        ep.name = "alpha"
        ep.load.return_value = AlphaParser

        mock_entry_points = mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(ep,),
        )

        with caplog.at_level(
            logging.WARNING,
            logger="documents.plugins.date_parsing",
        ):
            result = _discover_parser_class()

        # It should have called entry_points with the correct group
        mock_entry_points.assert_called_once_with(group=DATE_PARSER_ENTRY_POINT_GROUP)

        # The discovered class should be exactly our AlphaParser
        assert result is AlphaParser

        # No warnings should have been logged
        assert not any(
            "Multiple date parsers found" in record.message for record in caplog.records
        ), "Unexpected warning logged when only one plugin was found"

    def test_returns_first_valid_plugin_by_name(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        ep_a = mocker.MagicMock(spec=EntryPoint)
        ep_a.name = "alpha"
        ep_a.load.return_value = AlphaParser

        ep_b = mocker.MagicMock(spec=EntryPoint)
        ep_b.name = "beta"
        ep_b.load.return_value = BetaParser

        mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(ep_b, ep_a),
        )

        result = _discover_parser_class()
        assert result is AlphaParser

    def test_logs_warning_if_multiple_plugins_found(
        self,
        mocker: pytest_mock.MockerFixture,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        ep1 = mocker.MagicMock(spec=EntryPoint)
        ep1.name = "a"
        ep1.load.return_value = AlphaParser

        ep2 = mocker.MagicMock(spec=EntryPoint)
        ep2.name = "b"
        ep2.load.return_value = BetaParser

        mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(ep1, ep2),
        )

        with caplog.at_level(
            logging.WARNING,
            logger="documents.plugins.date_parsing",
        ):
            result = _discover_parser_class()

        # Should select alphabetically first plugin ("a")
        assert result is AlphaParser

        # Should log a warning mentioning multiple parsers
        assert any(
            "Multiple date parsers found" in record.message for record in caplog.records
        ), "Expected a warning about multiple date parsers"

    def test_cache_behavior_only_runs_once(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        mock_entry_points = mocker.patch(
            "documents.plugins.date_parsing.entry_points",
            return_value=(),
        )

        # First call populates cache
        _discover_parser_class()
        # Second call should not re-invoke entry_points
        _discover_parser_class()
        mock_entry_points.assert_called_once()


@pytest.mark.date_parsing
@pytest.mark.usefixtures("mock_date_parser_settings")
class TestGetDateParser:
    """Tests for the get_date_parser() factory function."""

    def test_returns_instance_of_discovered_class(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        mocker.patch(
            "documents.plugins.date_parsing._discover_parser_class",
            return_value=AlphaParser,
        )
        parser = get_date_parser()
        assert isinstance(parser, AlphaParser)
        assert isinstance(parser.config, DateParserConfig)
        assert parser.config.languages == ["en", "de"]
        assert parser.config.timezone_str == "UTC"
        assert parser.config.ignore_dates == [datetime.date(1900, 1, 1)]
        assert parser.config.filename_date_order == "YMD"
        assert parser.config.content_date_order == "DMY"
        # Check reference_time near now
        delta = abs((parser.config.reference_time - timezone.now()).total_seconds())
        assert delta < 2

    def test_uses_default_regex_parser_when_no_plugins(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        mocker.patch(
            "documents.plugins.date_parsing._discover_parser_class",
            return_value=RegexDateParserPlugin,
        )
        parser = get_date_parser()
        assert isinstance(parser, RegexDateParserPlugin)
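
The discovery mechanism exercised above implies that third-party parsers register themselves as Python entry points under `DATE_PARSER_ENTRY_POINT_GROUP`. A hypothetical packaging sketch — the group string and module paths are placeholders, not confirmed by this diff:

```python
# In the plugin package, a parser subclasses the base class used above...
from documents.plugins.date_parsing.base import DateParserPluginBase


class MyDateParser(DateParserPluginBase):
    def parse(self, filename, content):
        yield from ()  # real parsing logic goes here

# ...and its pyproject.toml advertises it under the entry point group
# (group name assumed; DATE_PARSER_ENTRY_POINT_GROUP is the authoritative value):
#
#   [project.entry-points."paperless.date_parsers"]
#   my_parser = "my_package.parsers:MyDateParser"
#
# Once installed, get_date_parser() will discover and instantiate it.
```
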
src/documents/tests/date_parsing/test_date_parsing.py (new file, 433 lines)
@@ -0,0 +1,433 @@
import datetime
import logging
from typing import Any

import pytest
import pytest_mock

from documents.plugins.date_parsing.base import DateParserConfig
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin


@pytest.mark.date_parsing
class TestParseString:
    """Tests for DateParser._parse_string method via RegexDateParser."""

    @pytest.mark.parametrize(
        ("date_string", "date_order", "expected_year"),
        [
            pytest.param("15/01/2024", "DMY", 2024, id="dmy_slash"),
            pytest.param("01/15/2024", "MDY", 2024, id="mdy_slash"),
            pytest.param("2024/01/15", "YMD", 2024, id="ymd_slash"),
            pytest.param("January 15, 2024", "DMY", 2024, id="month_name_comma"),
            pytest.param("15 Jan 2024", "DMY", 2024, id="day_abbr_month_year"),
            pytest.param("15.01.2024", "DMY", 2024, id="dmy_dot"),
            pytest.param("2024-01-15", "YMD", 2024, id="ymd_dash"),
        ],
    )
    def test_parse_string_valid_formats(
        self,
        regex_parser: RegexDateParserPlugin,
        date_string: str,
        date_order: str,
        expected_year: int,
    ) -> None:
        """Should correctly parse various valid date formats."""
        result = regex_parser._parse_string(date_string, date_order)

        assert result is not None
        assert result.year == expected_year

    @pytest.mark.parametrize(
        "invalid_string",
        [
            pytest.param("not a date", id="plain_text"),
            pytest.param("32/13/2024", id="invalid_day_month"),
            pytest.param("", id="empty_string"),
            pytest.param("abc123xyz", id="alphanumeric_gibberish"),
            pytest.param("99/99/9999", id="out_of_range"),
        ],
    )
    def test_parse_string_invalid_input(
        self,
        regex_parser: RegexDateParserPlugin,
        invalid_string: str,
    ) -> None:
        """Should return None for invalid date strings."""
        result = regex_parser._parse_string(invalid_string, "DMY")

        assert result is None

    def test_parse_string_handles_exceptions(
        self,
        caplog: pytest.LogCaptureFixture,
        mocker: pytest_mock.MockerFixture,
        regex_parser: RegexDateParserPlugin,
    ) -> None:
        """Should handle and log exceptions from dateparser gracefully."""
        with caplog.at_level(
            logging.ERROR,
            logger="documents.plugins.date_parsing.base",
        ):
            # We still need to mock dateparser.parse to force the exception
            mocker.patch(
                "documents.plugins.date_parsing.base.dateparser.parse",
                side_effect=ValueError(
                    "Parsing error: 01/01/2024",
                ),
            )

            # 1. Execute the function under test
            result = regex_parser._parse_string("01/01/2024", "DMY")

        assert result is None

        # Check if an error was logged
        assert len(caplog.records) == 1
        assert caplog.records[0].levelname == "ERROR"

        # Check if the specific error message is present
        assert "Error while parsing date string" in caplog.text
        # Optional: Check for the exact exception message if it's included in the log
        assert "Parsing error: 01/01/2024" in caplog.text


@pytest.mark.date_parsing
class TestFilterDate:
    """Tests for DateParser._filter_date method via RegexDateParser."""

    @pytest.mark.parametrize(
        ("date", "expected_output"),
        [
            # Valid Dates
            pytest.param(
                datetime.datetime(2024, 1, 10, tzinfo=datetime.timezone.utc),
                datetime.datetime(2024, 1, 10, tzinfo=datetime.timezone.utc),
                id="valid_past_date",
            ),
            pytest.param(
                datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.timezone.utc),
                datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.timezone.utc),
                id="exactly_at_reference",
            ),
            pytest.param(
                datetime.datetime(1901, 1, 1, tzinfo=datetime.timezone.utc),
                datetime.datetime(1901, 1, 1, tzinfo=datetime.timezone.utc),
                id="year_1901_valid",
            ),
            # Date is > reference_time
            pytest.param(
                datetime.datetime(2024, 1, 16, tzinfo=datetime.timezone.utc),
                None,
                id="future_date_day_after",
            ),
            # date.date() in ignore_dates
            pytest.param(
                datetime.datetime(2024, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc),
                None,
                id="ignored_date_midnight_jan1",
            ),
            pytest.param(
                datetime.datetime(2024, 1, 1, 10, 30, 0, tzinfo=datetime.timezone.utc),
                None,
                id="ignored_date_midday_jan1",
            ),
            pytest.param(
                datetime.datetime(2024, 12, 25, 15, 0, 0, tzinfo=datetime.timezone.utc),
                None,
                id="ignored_date_dec25_future",
            ),
            # date.year <= 1900
            pytest.param(
                datetime.datetime(1899, 12, 31, tzinfo=datetime.timezone.utc),
                None,
                id="year_1899",
            ),
            pytest.param(
                datetime.datetime(1900, 1, 1, tzinfo=datetime.timezone.utc),
                None,
                id="year_1900_boundary",
            ),
            # date is None
            pytest.param(None, None, id="none_input"),
        ],
    )
    def test_filter_date_validation_rules(
        self,
        config_with_ignore_dates: DateParserConfig,
        date: datetime.datetime | None,
        expected_output: datetime.datetime | None,
    ) -> None:
        """Should correctly validate dates against various rules."""
        parser = RegexDateParserPlugin(config_with_ignore_dates)
        result = parser._filter_date(date)
        assert result == expected_output

    def test_filter_date_respects_ignore_dates(
        self,
        config_with_ignore_dates: DateParserConfig,
    ) -> None:
        """Should filter out dates in the ignore_dates set."""
        parser = RegexDateParserPlugin(config_with_ignore_dates)

        ignored_date = datetime.datetime(
            2024,
            1,
            1,
            12,
            0,
            tzinfo=datetime.timezone.utc,
        )
        another_ignored = datetime.datetime(
            2024,
            12,
            25,
            15,
            30,
            tzinfo=datetime.timezone.utc,
        )
        allowed_date = datetime.datetime(
            2024,
            1,
            2,
            12,
            0,
            tzinfo=datetime.timezone.utc,
        )

        assert parser._filter_date(ignored_date) is None
        assert parser._filter_date(another_ignored) is None
        assert parser._filter_date(allowed_date) == allowed_date

    def test_filter_date_timezone_aware(
        self,
        regex_parser: RegexDateParserPlugin,
    ) -> None:
        """Should work with timezone-aware datetimes."""
        date_utc = datetime.datetime(2024, 1, 10, 12, 0, tzinfo=datetime.timezone.utc)

        result = regex_parser._filter_date(date_utc)

        assert result is not None
        assert result.tzinfo is not None


@pytest.mark.date_parsing
class TestRegexDateParser:
    @pytest.mark.parametrize(
        ("filename", "content", "expected"),
        [
            pytest.param(
                "report-2023-12-25.txt",
                "Event recorded on 25/12/2022.",
                [
                    datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc),
                    datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
                ],
                id="filename-y-m-d_and_content-d-m-y",
            ),
            pytest.param(
                "img_2023.01.02.jpg",
                "Taken on 01/02/2023",
                [
                    datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc),
                    datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc),
                ],
                id="ambiguous-dates-respect-orders",
            ),
            pytest.param(
                "notes.txt",
                "bad date 99/99/9999 and 25/12/2022",
                [
                    datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
                ],
                id="parse-exception-skips-bad-and-yields-good",
            ),
        ],
    )
    def test_parse_returns_expected_dates(
        self,
        base_config: DateParserConfig,
        mocker: pytest_mock.MockerFixture,
        filename: str,
        content: str,
        expected: list[datetime.datetime],
    ) -> None:
        """
        High-level tests that exercise RegexDateParser.parse only.
        dateparser.parse is mocked so tests are deterministic.
        """
        parser = RegexDateParserPlugin(base_config)

        # Patch the dateparser.parse
        target = "documents.plugins.date_parsing.base.dateparser.parse"

        def fake_parse(
            date_string: str,
            settings: dict[str, Any] | None = None,
            locales: None = None,
        ) -> datetime.datetime | None:
            date_order = settings.get("DATE_ORDER") if settings else None

            # Filename-style YYYY-MM-DD / YYYY.MM.DD
            if "2023-12-25" in date_string or "2023.12.25" in date_string:
                return datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc)

            # content DMY 25/12/2022
            if "25/12/2022" in date_string or "25-12-2022" in date_string:
                return datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc)

            # filename YMD 2023.01.02
            if "2023.01.02" in date_string or "2023-01-02" in date_string:
                return datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc)

            # ambiguous 01/02/2023 -> respect DATE_ORDER setting
            if "01/02/2023" in date_string:
                if date_order == "DMY":
                    return datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc)
                if date_order == "YMD":
                    return datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc)
                # fallback
                return datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc)

            # simulate parse failure for malformed input
            if "99/99/9999" in date_string or "bad date" in date_string:
                raise Exception("parse failed for malformed date")

            return None

        mocker.patch(target, side_effect=fake_parse)

        results = list(parser.parse(filename, content))

        assert results == expected
        for dt in results:
            assert dt.tzinfo is not None

    def test_parse_filters_future_and_ignored_dates(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        """
        Ensure parser filters out:
        - dates after reference_time
        - dates whose .date() are in ignore_dates
        """
        cfg = DateParserConfig(
            languages=["en"],
            timezone_str="UTC",
            ignore_dates={datetime.date(2023, 12, 10)},
            reference_time=datetime.datetime(
                2024,
                1,
                15,
                12,
                0,
                0,
                tzinfo=datetime.timezone.utc,
            ),
            filename_date_order="YMD",
            content_date_order="DMY",
        )
        parser = RegexDateParserPlugin(cfg)

        target = "documents.plugins.date_parsing.base.dateparser.parse"

        def fake_parse(
            date_string: str,
            settings: dict[str, Any] | None = None,
            locales: None = None,
        ) -> datetime.datetime | None:
            if "10/12/2023" in date_string or "10-12-2023" in date_string:
                # ignored date
                return datetime.datetime(2023, 12, 10, tzinfo=datetime.timezone.utc)
            if "01/02/2024" in date_string or "01-02-2024" in date_string:
                # future relative to reference_time -> filtered
                return datetime.datetime(2024, 2, 1, tzinfo=datetime.timezone.utc)
            if "05/01/2023" in date_string or "05-01-2023" in date_string:
                # valid
                return datetime.datetime(2023, 1, 5, tzinfo=datetime.timezone.utc)
            return None

        mocker.patch(target, side_effect=fake_parse)

        content = "Ignored: 10/12/2023, Future: 01/02/2024, Keep: 05/01/2023"
        results = list(parser.parse("whatever.txt", content))

        assert results == [datetime.datetime(2023, 1, 5, tzinfo=datetime.timezone.utc)]

    def test_parse_handles_no_matches_and_returns_empty_list(
        self,
        base_config: DateParserConfig,
    ) -> None:
        """
        When there are no matching date-like substrings, parse should yield nothing.
        """
        parser = RegexDateParserPlugin(base_config)
        results = list(
            parser.parse("no-dates.txt", "this has no dates whatsoever"),
        )
        assert results == []

    def test_parse_skips_filename_when_filename_date_order_none(
        self,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        """
        When filename_date_order is None the parser must not attempt to parse the filename.
        Only dates found in the content should be passed to dateparser.parse.
        """
        cfg = DateParserConfig(
            languages=["en"],
            timezone_str="UTC",
            ignore_dates=set(),
            reference_time=datetime.datetime(
                2024,
                1,
                15,
                12,
                0,
                0,
                tzinfo=datetime.timezone.utc,
            ),
            filename_date_order=None,
            content_date_order="DMY",
        )
        parser = RegexDateParserPlugin(cfg)

        # Patch the module's dateparser.parse so we can inspect calls
        target = "documents.plugins.date_parsing.base.dateparser.parse"

        def fake_parse(
            date_string: str,
            settings: dict[str, Any] | None = None,
            locales: None = None,
        ) -> datetime.datetime | None:
            # return distinct datetimes so we can tell which source was parsed
            if "25/12/2022" in date_string:
                return datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc)
            if "2023-12-25" in date_string:
                return datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc)
            return None

        mock = mocker.patch(target, side_effect=fake_parse)

        filename = "report-2023-12-25.txt"
        content = "Event recorded on 25/12/2022."

        results = list(parser.parse(filename, content))

        # Only the content date should have been parsed -> one call
        assert mock.call_count == 1

        # first call, first positional arg
        called_date_string = mock.call_args_list[0][0][0]
        assert "25/12/2022" in called_date_string
        # And the parser should have yielded the corresponding datetime
        assert results == [
            datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
        ]
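
Condensed from the fixtures and tests above, direct use of the default parser looks like this (nothing here goes beyond what the tests already exercise):

```python
import datetime

from documents.plugins.date_parsing.base import DateParserConfig
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin

config = DateParserConfig(
    languages=["en"],
    timezone_str="UTC",
    ignore_dates=set(),
    reference_time=datetime.datetime(2024, 1, 15, 12, 0, tzinfo=datetime.timezone.utc),
    filename_date_order="YMD",
    content_date_order="DMY",
)
parser = RegexDateParserPlugin(config)

# Yields timezone-aware datetimes from the filename first, then the content,
# skipping future dates and anything listed in ignore_dates.
for dt in parser.parse("Scan-2024-01-10.pdf", "Issued 05/01/2024"):
    print(dt.isoformat())
```
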
@@ -131,6 +131,10 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIn("content", results_full[0])
        self.assertIn("id", results_full[0])

+       # Content length is used internally for performance reasons.
+       # No need to expose this field.
+       self.assertNotIn("content_length", results_full[0])
+
        response = self.client.get("/api/documents/?fields=id", format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        results = response.data["results"]
@@ -1974,11 +1978,11 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        response = self.client.get(f"/api/documents/{doc.pk}/suggestions/")
        self.assertEqual(response.status_code, status.HTTP_200_OK)

-   @mock.patch("documents.parsers.parse_date_generator")
+   @mock.patch("documents.views.get_date_parser")
    @override_settings(NUMBER_OF_SUGGESTED_DATES=0)
    def test_get_suggestions_dates_disabled(
        self,
-       parse_date_generator,
+       mock_get_date_parser: mock.MagicMock,
    ):
        """
        GIVEN:
@@ -1995,7 +1999,8 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        )

        self.client.get(f"/api/documents/{doc.pk}/suggestions/")
-       self.assertFalse(parse_date_generator.called)
+
+       mock_get_date_parser.assert_not_called()

    def test_saved_views(self):
        u1 = User.objects.create_superuser("user1")
@@ -7,6 +7,7 @@ from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APITestCase

+from documents.models import Document
from documents.models import PaperlessTask
from documents.tests.utils import DirectoriesMixin
from documents.views import TasksViewSet
@@ -258,7 +259,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
            task_id=str(uuid.uuid4()),
            task_file_name="task_one.pdf",
            status=celery.states.FAILURE,
-           result="test.pdf: Not consuming test.pdf: It is a duplicate.",
+           result="test.pdf: Unexpected error during ingestion.",
        )

        response = self.client.get(self.ENDPOINT)
@@ -270,7 +271,7 @@ class TestTasks(DirectoriesMixin, APITestCase):

        self.assertEqual(
            returned_data["result"],
-           "test.pdf: Not consuming test.pdf: It is a duplicate.",
+           "test.pdf: Unexpected error during ingestion.",
        )

    def test_task_name_webui(self):
@@ -325,20 +326,34 @@ class TestTasks(DirectoriesMixin, APITestCase):

        self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")

-   def test_task_result_failed_duplicate_includes_related_doc(self):
+   def test_task_result_duplicate_warning_includes_count(self):
        """
        GIVEN:
-           - A celery task failed with a duplicate error
+           - A celery task succeeds, but a duplicate exists
        WHEN:
            - API call is made to get tasks
        THEN:
-           - The returned data includes a related document link
+           - The returned data includes duplicate warning metadata
        """
+       checksum = "duplicate-checksum"
+       Document.objects.create(
+           title="Existing",
+           content="",
+           mime_type="application/pdf",
+           checksum=checksum,
+       )
+       created_doc = Document.objects.create(
+           title="Created",
+           content="",
+           mime_type="application/pdf",
+           checksum=checksum,
+           archive_checksum="another-checksum",
+       )
        PaperlessTask.objects.create(
            task_id=str(uuid.uuid4()),
            task_file_name="task_one.pdf",
-           status=celery.states.FAILURE,
-           result="Not consuming task_one.pdf: It is a duplicate of task_one_existing.pdf (#1234).",
+           status=celery.states.SUCCESS,
+           result=f"Success. New document id {created_doc.pk} created",
        )

        response = self.client.get(self.ENDPOINT)
@@ -348,7 +363,7 @@ class TestTasks(DirectoriesMixin, APITestCase):

        returned_data = response.data[0]

-       self.assertEqual(returned_data["related_document"], "1234")
+       self.assertEqual(returned_data["related_document"], str(created_doc.pk))

    def test_run_train_classifier_task(self):
        """
@@ -485,21 +485,21 @@ class TestConsumer(
        with self.get_consumer(self.get_test_file()) as consumer:
            consumer.run()

-       with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
-           with self.get_consumer(self.get_test_file()) as consumer:
-               consumer.run()
+       with self.get_consumer(self.get_test_file()) as consumer:
+           consumer.run()

-       self._assert_first_last_send_progress(last_status="FAILED")
+       self.assertEqual(Document.objects.count(), 2)
+       self._assert_first_last_send_progress()

    def testDuplicates2(self):
        with self.get_consumer(self.get_test_file()) as consumer:
            consumer.run()

-       with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
-           with self.get_consumer(self.get_test_archive_file()) as consumer:
-               consumer.run()
+       with self.get_consumer(self.get_test_archive_file()) as consumer:
+           consumer.run()

-       self._assert_first_last_send_progress(last_status="FAILED")
+       self.assertEqual(Document.objects.count(), 2)
+       self._assert_first_last_send_progress()

    def testDuplicates3(self):
        with self.get_consumer(self.get_test_archive_file()) as consumer:
@@ -513,9 +513,10 @@ class TestConsumer(

        Document.objects.all().delete()

-       with self.assertRaisesMessage(ConsumerError, "document is in the trash"):
-           with self.get_consumer(self.get_test_file()) as consumer:
-               consumer.run()
+       with self.get_consumer(self.get_test_file()) as consumer:
+           consumer.run()
+
+       self.assertEqual(Document.objects.count(), 1)

    def testAsnExists(self):
        with self.get_consumer(
@@ -718,12 +719,45 @@ class TestConsumer(
        dst = self.get_test_file()
        self.assertIsFile(dst)

-       with self.assertRaises(ConsumerError):
+       expected_message = (
+           f"{dst.name}: Not consuming {dst.name}: "
+           f"It is a duplicate of {document.title} (#{document.pk})"
+       )
+
+       with self.assertRaisesMessage(ConsumerError, expected_message):
            with self.get_consumer(dst) as consumer:
                consumer.run()

        self.assertIsNotFile(dst)
-       self._assert_first_last_send_progress(last_status="FAILED")
        self.assertEqual(Document.objects.count(), 1)
+       self._assert_first_last_send_progress(last_status=ProgressStatusOptions.FAILED)

+   @override_settings(CONSUMER_DELETE_DUPLICATES=True)
+   def test_delete_duplicate_in_trash(self):
+       dst = self.get_test_file()
+       with self.get_consumer(dst) as consumer:
+           consumer.run()
+
+       # Move the existing document to trash
+       document = Document.objects.first()
+       document.delete()
+
+       dst = self.get_test_file()
+       self.assertIsFile(dst)
+
+       expected_message = (
+           f"{dst.name}: Not consuming {dst.name}: "
+           f"It is a duplicate of {document.title} (#{document.pk})"
+           f" Note: existing document is in the trash."
+       )
+
+       with self.assertRaisesMessage(ConsumerError, expected_message):
+           with self.get_consumer(dst) as consumer:
+               consumer.run()
+
+       self.assertIsNotFile(dst)
+       self.assertEqual(Document.global_objects.count(), 1)
+       self.assertEqual(Document.objects.count(), 0)

    @override_settings(CONSUMER_DELETE_DUPLICATES=False)
    def test_no_delete_duplicate(self):
@@ -743,15 +777,12 @@ class TestConsumer(
        dst = self.get_test_file()
        self.assertIsFile(dst)

-       with self.assertRaisesRegex(
-           ConsumerError,
-           r"sample\.pdf: Not consuming sample\.pdf: It is a duplicate of sample \(#\d+\)",
-       ):
-           with self.get_consumer(dst) as consumer:
-               consumer.run()
+       with self.get_consumer(dst) as consumer:
+           consumer.run()

-       self.assertIsFile(dst)
-       self._assert_first_last_send_progress(last_status="FAILED")
+       self.assertIsNotFile(dst)
+       self.assertEqual(Document.objects.count(), 2)
+       self._assert_first_last_send_progress()

    @override_settings(FILENAME_FORMAT="{title}")
    @mock.patch("documents.parsers.document_consumer_declaration.send")
@@ -1,538 +0,0 @@
import datetime
from zoneinfo import ZoneInfo

import pytest
from pytest_django.fixtures import SettingsWrapper

from documents.parsers import parse_date
from documents.parsers import parse_date_generator


@pytest.mark.django_db()
class TestDate:
    def test_date_format_1(self):
        text = "lorem ipsum 130218 lorem ipsum"
        assert parse_date("", text) is None

    def test_date_format_2(self):
        text = "lorem ipsum 2018 lorem ipsum"
        assert parse_date("", text) is None

    def test_date_format_3(self):
        text = "lorem ipsum 20180213 lorem ipsum"
        assert parse_date("", text) is None

    def test_date_format_4(self, settings_timezone: ZoneInfo):
        text = "lorem ipsum 13.02.2018 lorem ipsum"
        date = parse_date("", text)
        assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)

    def test_date_format_5(self, settings_timezone: ZoneInfo):
        text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
        date = parse_date("", text)
        assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)

    def test_date_format_6(self):
        text = (
            "lorem ipsum\n"
            "Wohnort\n"
            "3100\n"
            "IBAN\n"
            "AT87 4534\n"
            "1234\n"
            "1234 5678\n"
            "BIC\n"
            "lorem ipsum"
        )
        assert parse_date("", text) is None

    def test_date_format_7(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        settings.DATE_PARSER_LANGUAGES = ["de"]
        text = "lorem ipsum\nMärz 2019\nlorem ipsum"
        date = parse_date("", text)
        assert date == datetime.datetime(2019, 3, 1, 0, 0, tzinfo=settings_timezone)

    def test_date_format_8(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        settings.DATE_PARSER_LANGUAGES = ["de"]
        text = (
            "lorem ipsum\n"
            "Wohnort\n"
            "3100\n"
            "IBAN\n"
            "AT87 4534\n"
            "1234\n"
            "1234 5678\n"
            "BIC\n"
            "lorem ipsum\n"
            "März 2020"
        )
        assert parse_date("", text) == datetime.datetime(
            2020,
            3,
            1,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_9(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        settings.DATE_PARSER_LANGUAGES = ["de"]
        text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
        assert parse_date("", text) == datetime.datetime(
            2020,
            3,
            1,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_10(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 22-MAR-2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            22,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_11(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 22 MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            22,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_12(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 22/MAR/2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            22,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_13(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 22.MAR.2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            22,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_14(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 22.MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            22,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_15(self):
        text = "Customer Number Currency 22.MAR.22 Credit Card 1934829304"
        assert parse_date("", text) is None

    def test_date_format_16(self):
        text = "Customer Number Currency 22.MAR,22 Credit Card 1934829304"
        assert parse_date("", text) is None

    def test_date_format_17(self):
        text = "Customer Number Currency 22,MAR,2022 Credit Card 1934829304"
        assert parse_date("", text) is None

    def test_date_format_18(self):
        text = "Customer Number Currency 22 MAR,2022 Credit Card 1934829304"
        assert parse_date("", text) is None

    def test_date_format_19(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 21st MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            21,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_20(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 22nd March 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            22,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_21(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 2nd MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            2,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_22(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 23rd MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            23,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_23(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 24th MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            24,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_24(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 21-MAR-2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            21,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_25(self, settings_timezone: ZoneInfo):
        text = "Customer Number Currency 25TH MAR 2022 Credit Card 1934829304"
        assert parse_date("", text) == datetime.datetime(
            2022,
            3,
            25,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_date_format_26(self, settings_timezone: ZoneInfo):
        text = "CHASE 0 September 25, 2019 JPMorgan Chase Bank, NA. P0 Box 182051"
        assert parse_date("", text) == datetime.datetime(
            2019,
            9,
            25,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_crazy_date_past(self):
        assert parse_date("", "01-07-0590 00:00:00") is None

    def test_crazy_date_future(self):
        assert parse_date("", "01-07-2350 00:00:00") is None

    def test_crazy_date_with_spaces(self):
        assert parse_date("", "20 408000l 2475") is None

    def test_utf_month_names(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        settings.DATE_PARSER_LANGUAGES = ["fr", "de", "hr", "cs", "pl", "tr"]
        assert parse_date("", "13 décembre 2023") == datetime.datetime(
            2023,
            12,
            13,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "13 août 2022") == datetime.datetime(
            2022,
            8,
            13,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "11 März 2020") == datetime.datetime(
            2020,
            3,
            11,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "17. ožujka 2018.") == datetime.datetime(
            2018,
            3,
            17,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "1. veljače 2016.") == datetime.datetime(
            2016,
            2,
            1,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "15. února 1985") == datetime.datetime(
            1985,
            2,
            15,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "30. září 2011") == datetime.datetime(
            2011,
            9,
            30,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "28. května 1990") == datetime.datetime(
            1990,
            5,
            28,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "1. grudzień 1997") == datetime.datetime(
            1997,
            12,
            1,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "17 Şubat 2024") == datetime.datetime(
            2024,
            2,
            17,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "30 Ağustos 2012") == datetime.datetime(
            2012,
            8,
            30,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "17 Eylül 2000") == datetime.datetime(
            2000,
            9,
            17,
            0,
            0,
            tzinfo=settings_timezone,
        )
        assert parse_date("", "5. október 1992") == datetime.datetime(
            1992,
            10,
            5,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_multiple_dates(self, settings_timezone: ZoneInfo):
        text = """This text has multiple dates.
        For example 02.02.2018, 22 July 2022 and December 2021.
        But not 24-12-9999 because it's in the future..."""
        dates = list(parse_date_generator("", text))

        assert dates == [
            datetime.datetime(2018, 2, 2, 0, 0, tzinfo=settings_timezone),
            datetime.datetime(
                2022,
                7,
                22,
                0,
                0,
                tzinfo=settings_timezone,
            ),
            datetime.datetime(
                2021,
                12,
                1,
                0,
                0,
                tzinfo=settings_timezone,
            ),
        ]

    def test_filename_date_parse_valid_ymd(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        """
        GIVEN:
            - Date parsing from the filename is enabled
            - Filename date format is with Year Month Day (YMD)
            - Filename contains date matching the format

        THEN:
            - Should parse the date from the filename
        """
        settings.FILENAME_DATE_ORDER = "YMD"

        assert parse_date(
            "/tmp/Scan-2022-04-01.pdf",
            "No date in here",
        ) == datetime.datetime(2022, 4, 1, 0, 0, tzinfo=settings_timezone)

    def test_filename_date_parse_valid_dmy(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        """
        GIVEN:
            - Date parsing from the filename is enabled
            - Filename date format is with Day Month Year (DMY)
            - Filename contains date matching the format

        THEN:
            - Should parse the date from the filename
        """
        settings.FILENAME_DATE_ORDER = "DMY"
        assert parse_date(
            "/tmp/Scan-10.01.2021.pdf",
            "No date in here",
        ) == datetime.datetime(2021, 1, 10, 0, 0, tzinfo=settings_timezone)

    def test_filename_date_parse_invalid(self, settings: SettingsWrapper):
        """
        GIVEN:
            - Date parsing from the filename is enabled
            - Filename includes no date
            - File content includes no date

        THEN:
            - No date is parsed
        """
        settings.FILENAME_DATE_ORDER = "YMD"
        assert parse_date("/tmp/20 408000l 2475 - test.pdf", "No date in here") is None

    def test_filename_date_ignored_use_content(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        """
        GIVEN:
            - Date parsing from the filename is enabled
            - Filename date format is with Year Month Day (YMD)
            - Date order is Day Month Year (DMY, the default)
            - Filename contains date matching the format
            - Filename date is an ignored date
            - File content includes a date

        THEN:
            - Should parse the date from the content, not the filename
        """
        settings.FILENAME_DATE_ORDER = "YMD"
        settings.IGNORE_DATES = (datetime.date(2022, 4, 1),)
        assert parse_date(
            "/tmp/Scan-2022-04-01.pdf",
            "The matching date is 24.03.2022",
        ) == datetime.datetime(2022, 3, 24, 0, 0, tzinfo=settings_timezone)

    def test_ignored_dates_default_order(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        """
        GIVEN:
            - Ignore dates have been set
            - File content includes ignored dates
            - File content includes 1 non-ignored date

        THEN:
            - Should parse the non-ignored date from the content
        """
        settings.IGNORE_DATES = (datetime.date(2019, 11, 3), datetime.date(2020, 1, 17))
        text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem ipsum"
        assert parse_date("", text) == datetime.datetime(
            2018,
            2,
            13,
            0,
            0,
            tzinfo=settings_timezone,
        )

    def test_ignored_dates_order_ymd(
        self,
        settings: SettingsWrapper,
        settings_timezone: ZoneInfo,
    ):
        """
        GIVEN:
            - Ignore dates have been set
            - Date order is Year Month Day (YMD)
            - File content includes ignored dates
            - File content includes 1 non-ignored date

        THEN:
            - Should parse the non-ignored date from the content
        """

        settings.FILENAME_DATE_ORDER = "YMD"
        settings.IGNORE_DATES = (datetime.date(2019, 11, 3), datetime.date(2020, 1, 17))

        text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem ipsum"

        assert parse_date("", text) == datetime.datetime(
            2018,
            2,
            13,
            0,
            0,
            tzinfo=settings_timezone,
        )
@@ -224,17 +224,18 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        THEN:
            - The collated file gets put into foo/bar
        """
+       # TODO: parameterize this instead
        for path in [
            Path("foo") / "bar" / "double-sided",
            Path("double-sided") / "foo" / "bar",
        ]:
-           with self.subTest(path=path):
+           with self.subTest(path=str(path)):
                # Ensure we get fresh directories for each run
                self.tearDown()
                self.setUp()

                self.create_staging_file()
-               self.consume_file("double-sided-odd.pdf", path / "foo.pdf")
+               self.consume_file("double-sided-odd.pdf", Path(path) / "foo.pdf")
                self.assertIsFile(
                    self.dirs.consumption_dir / "foo" / "bar" / "foo-collated.pdf",
                )
@@ -114,6 +114,30 @@ def mock_supported_extensions(mocker: MockerFixture) -> MagicMock:
    )


def wait_for_mock_call(
    mock_obj: MagicMock,
    timeout_s: float = 5.0,
    poll_interval_s: float = 0.1,
) -> bool:
    """
    Actively wait for a mock to be called.

    Args:
        mock_obj: The mock object to check (e.g., mock.delay)
        timeout_s: Maximum time to wait in seconds
        poll_interval_s: How often to check in seconds

    Returns:
        True if mock was called within timeout, False otherwise
    """
    start_time = monotonic()
    while monotonic() - start_time < timeout_s:
        if mock_obj.called:
            return True
        sleep(poll_interval_s)
    return False
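
In the tests below the helper's boolean return is mostly ignored; a slightly stricter pattern (a sketch, not taken from this diff) turns a missed call into an immediate, readable failure:

```python
# Sketch: assert on the helper's return value for a clearer failure message.
assert wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0), (
    "consume_file.delay was not called within 2 seconds"
)
```
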
class TestTrackedFile:
    """Tests for the TrackedFile dataclass."""

@@ -724,7 +748,7 @@ def start_consumer(
        thread = ConsumerThread(consumption_dir, scratch_dir, **kwargs)
        threads.append(thread)
        thread.start()
-        sleep(0.5)  # Give thread time to start
+        sleep(2.0)  # Give thread time to start
        return thread

    try:
@@ -767,7 +791,8 @@ class TestCommandWatch:

        target = consumption_dir / "document.pdf"
        shutil.copy(sample_pdf, target)
-        sleep(0.5)
+
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)

        if thread.exception:
            raise thread.exception
@@ -788,9 +813,12 @@ class TestCommandWatch:

        thread = start_consumer()

+        sleep(0.5)
+
        target = consumption_dir / "document.pdf"
        shutil.move(temp_location, target)
-        sleep(0.5)
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)

        if thread.exception:
            raise thread.exception
@@ -816,7 +844,7 @@ class TestCommandWatch:
            f.flush()
            sleep(0.05)

-        sleep(0.5)
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)

        if thread.exception:
            raise thread.exception
@@ -837,7 +865,7 @@ class TestCommandWatch:
        (consumption_dir / "._document.pdf").write_bytes(b"test")
        shutil.copy(sample_pdf, consumption_dir / "valid.pdf")

-        sleep(0.5)
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)

        if thread.exception:
            raise thread.exception
@@ -868,11 +896,10 @@ class TestCommandWatch:
        assert not thread.is_alive()


@pytest.mark.django_db
class TestCommandWatchPolling:
    """Tests for polling mode."""

-    @pytest.mark.django_db
+    @pytest.mark.flaky(reruns=2)
    def test_polling_mode_works(
        self,
        consumption_dir: Path,
@@ -882,7 +909,8 @@ class TestCommandWatchPolling:
    ) -> None:
        """
        Test polling mode detects files.
        Note: At times, there appears to be a timing issue, where delay has not yet been called, hence this is marked as flaky.

        Uses active waiting with timeout to handle CI delays and polling timing.
        """
        # Use shorter polling interval for faster test
        thread = start_consumer(polling_interval=0.5, stability_delay=0.1)
@@ -890,9 +918,9 @@ class TestCommandWatchPolling:
        target = consumption_dir / "document.pdf"
        shutil.copy(sample_pdf, target)

-        # Wait for: poll interval + stability delay + another poll + margin
-        # CI can be slow, so use generous timeout
-        sleep(3.0)
+        # Actively wait for consumption
+        # Polling needs: interval (0.5s) + stability (0.1s) + next poll (0.5s) + margin
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=5.0)

        if thread.exception:
            raise thread.exception
@@ -919,7 +947,8 @@ class TestCommandWatchRecursive:

        target = subdir / "document.pdf"
        shutil.copy(sample_pdf, target)
-        sleep(0.5)
+
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)

        if thread.exception:
            raise thread.exception
@@ -948,7 +977,8 @@ class TestCommandWatchRecursive:

        target = subdir / "document.pdf"
        shutil.copy(sample_pdf, target)
-        sleep(0.5)
+
+        wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)

        if thread.exception:
            raise thread.exception

@@ -241,6 +241,10 @@ class TestExportImport(
            checksum = hashlib.md5(f.read()).hexdigest()
            self.assertEqual(checksum, element["fields"]["checksum"])

+            # Generated field "content_length" should not be exported,
+            # it is automatically computed during import.
+            self.assertNotIn("content_length", element["fields"])
+
            if document_exporter.EXPORTER_ARCHIVE_NAME in element:
                fname = (
                    self.target / element[document_exporter.EXPORTER_ARCHIVE_NAME]
src/documents/tests/test_migration_share_link_bundle.py (new file, 51 lines)
@@ -0,0 +1,51 @@
from documents.tests.utils import TestMigrations


class TestMigrateShareLinkBundlePermissions(TestMigrations):
    migrate_from = "0007_document_content_length"
    migrate_to = "0008_sharelinkbundle"

    def setUpBeforeMigration(self, apps):
        User = apps.get_model("auth", "User")
        Group = apps.get_model("auth", "Group")
        self.Permission = apps.get_model("auth", "Permission")
        self.user = User.objects.create(username="user1")
        self.group = Group.objects.create(name="group1")
        add_document = self.Permission.objects.get(codename="add_document")
        self.user.user_permissions.add(add_document.id)
        self.group.permissions.add(add_document.id)

    def test_share_link_permissions_granted_to_add_document_holders(self):
        share_perms = self.Permission.objects.filter(
            codename__contains="sharelinkbundle",
        )
        self.assertTrue(self.user.user_permissions.filter(pk__in=share_perms).exists())
        self.assertTrue(self.group.permissions.filter(pk__in=share_perms).exists())


class TestReverseMigrateShareLinkBundlePermissions(TestMigrations):
    migrate_from = "0008_sharelinkbundle"
    migrate_to = "0007_document_content_length"

    def setUpBeforeMigration(self, apps):
        User = apps.get_model("auth", "User")
        Group = apps.get_model("auth", "Group")
        self.Permission = apps.get_model("auth", "Permission")
        self.user = User.objects.create(username="user1")
        self.group = Group.objects.create(name="group1")
        add_document = self.Permission.objects.get(codename="add_document")
        share_perms = self.Permission.objects.filter(
            codename__contains="sharelinkbundle",
        )
        self.share_perm_ids = list(share_perms.values_list("id", flat=True))

        self.user.user_permissions.add(add_document.id, *self.share_perm_ids)
        self.group.permissions.add(add_document.id, *self.share_perm_ids)

    def test_share_link_permissions_revoked_on_reverse(self):
        self.assertFalse(
            self.user.user_permissions.filter(pk__in=self.share_perm_ids).exists(),
        )
        self.assertFalse(
            self.group.permissions.filter(pk__in=self.share_perm_ids).exists(),
        )
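These tests imply the 0008 data migration grants the new sharelinkbundle permissions to every holder of add_document. A minimal sketch of such a forward migration function, written from the tests alone (the function name and structure are assumptions, not the shipped code):

    def grant_share_link_bundle_permissions(apps, schema_editor):
        # Hypothetical sketch: copy the new sharelinkbundle permissions to
        # every user and group that already holds add_document.
        Permission = apps.get_model("auth", "Permission")
        User = apps.get_model("auth", "User")
        Group = apps.get_model("auth", "Group")

        share_perms = list(
            Permission.objects.filter(codename__contains="sharelinkbundle"),
        )
        add_document = Permission.objects.get(codename="add_document")

        for user in User.objects.filter(user_permissions=add_document):
            user.user_permissions.add(*share_perms)
        for group in Group.objects.filter(permissions=add_document):
            group.permissions.add(*share_perms)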
src/documents/tests/test_share_link_bundles.py (new file, 536 lines)
@@ -0,0 +1,536 @@
from __future__ import annotations

import zipfile
from datetime import timedelta
from pathlib import Path
from unittest import mock

from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone
from rest_framework import serializers
from rest_framework import status
from rest_framework.test import APITestCase

from documents.filters import ShareLinkBundleFilterSet
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.serialisers import ShareLinkBundleSerializer
from documents.tasks import build_share_link_bundle
from documents.tasks import cleanup_expired_share_link_bundles
from documents.tests.factories import DocumentFactory
from documents.tests.utils import DirectoriesMixin


class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
    ENDPOINT = "/api/share_link_bundles/"

    def setUp(self):
        super().setUp()
        self.user = User.objects.create_superuser(username="bundle_admin")
        self.client.force_authenticate(self.user)
        self.document = DocumentFactory.create()

    @mock.patch("documents.views.build_share_link_bundle.delay")
    def test_create_bundle_triggers_build_job(self, delay_mock):
        payload = {
            "document_ids": [self.document.pk],
            "file_version": ShareLink.FileVersion.ARCHIVE,
            "expiration_days": 7,
        }

        response = self.client.post(self.ENDPOINT, payload, format="json")

        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        bundle = ShareLinkBundle.objects.get(pk=response.data["id"])
        self.assertEqual(bundle.documents.count(), 1)
        self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
        delay_mock.assert_called_once_with(bundle.pk)

    def test_create_bundle_rejects_missing_documents(self):
        payload = {
            "document_ids": [9999],
            "file_version": ShareLink.FileVersion.ARCHIVE,
            "expiration_days": 7,
        }

        response = self.client.post(self.ENDPOINT, payload, format="json")

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn("document_ids", response.data)

    @mock.patch("documents.views.has_perms_owner_aware", return_value=False)
    def test_create_bundle_rejects_insufficient_permissions(self, perms_mock):
        payload = {
            "document_ids": [self.document.pk],
            "file_version": ShareLink.FileVersion.ARCHIVE,
            "expiration_days": 7,
        }

        response = self.client.post(self.ENDPOINT, payload, format="json")

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn("document_ids", response.data)
        perms_mock.assert_called()

    @mock.patch("documents.views.build_share_link_bundle.delay")
    def test_rebuild_bundle_resets_state(self, delay_mock):
        bundle = ShareLinkBundle.objects.create(
            slug="rebuild-slug",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.FAILED,
        )
        bundle.documents.set([self.document])
        bundle.last_error = {"message": "Something went wrong"}
        bundle.size_bytes = 100
        bundle.file_path = "path/to/file.zip"
        bundle.save()

        response = self.client.post(f"{self.ENDPOINT}{bundle.pk}/rebuild/")

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        bundle.refresh_from_db()
        self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
        self.assertIsNone(bundle.last_error)
        self.assertIsNone(bundle.size_bytes)
        self.assertEqual(bundle.file_path, "")
        delay_mock.assert_called_once_with(bundle.pk)

    def test_rebuild_bundle_rejects_processing_status(self):
        bundle = ShareLinkBundle.objects.create(
            slug="processing-slug",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.PROCESSING,
        )
        bundle.documents.set([self.document])

        response = self.client.post(f"{self.ENDPOINT}{bundle.pk}/rebuild/")

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn("detail", response.data)

    def test_create_bundle_rejects_duplicate_documents(self):
        payload = {
            "document_ids": [self.document.pk, self.document.pk],
            "file_version": ShareLink.FileVersion.ARCHIVE,
            "expiration_days": 7,
        }

        response = self.client.post(self.ENDPOINT, payload, format="json")

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn("document_ids", response.data)

    def test_download_ready_bundle_streams_file(self):
        bundle_file = Path(self.dirs.media_dir) / "bundles" / "ready.zip"
        bundle_file.parent.mkdir(parents=True, exist_ok=True)
        bundle_file.write_bytes(b"binary-zip-content")

        bundle = ShareLinkBundle.objects.create(
            slug="readyslug",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.READY,
            file_path=str(bundle_file),
        )
        bundle.documents.set([self.document])

        self.client.logout()
        response = self.client.get(f"/share/{bundle.slug}/")
        content = b"".join(response.streaming_content)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response["Content-Type"], "application/zip")
        self.assertEqual(content, b"binary-zip-content")
        self.assertIn("attachment;", response["Content-Disposition"])

    def test_download_pending_bundle_returns_202(self):
        bundle = ShareLinkBundle.objects.create(
            slug="pendingslug",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.PENDING,
        )
        bundle.documents.set([self.document])

        self.client.logout()
        response = self.client.get(f"/share/{bundle.slug}/")

        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

    def test_download_failed_bundle_returns_503(self):
        bundle = ShareLinkBundle.objects.create(
            slug="failedslug",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.FAILED,
        )
        bundle.documents.set([self.document])

        self.client.logout()
        response = self.client.get(f"/share/{bundle.slug}/")

        self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)

    def test_expired_share_link_redirects(self):
        share_link = ShareLink.objects.create(
            slug="expiredlink",
            document=self.document,
            file_version=ShareLink.FileVersion.ORIGINAL,
            expiration=timezone.now() - timedelta(hours=1),
        )

        self.client.logout()
        response = self.client.get(f"/share/{share_link.slug}/")

        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertIn("sharelink_expired=1", response["Location"])

    def test_unknown_share_link_redirects(self):
        self.client.logout()
        response = self.client.get("/share/unknownsharelink/")

        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertIn("sharelink_notfound=1", response["Location"])

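For reference, the request shape these tests exercise, as a hypothetical call against a running instance (host and token are placeholders, not taken from this diff):

    import httpx

    # Hypothetical session: create a bundle of two documents as a zip of
    # the archive versions, expiring in a week.
    response = httpx.post(
        "http://localhost:8000/api/share_link_bundles/",
        headers={"Authorization": "Token <api-token>"},
        json={
            "document_ids": [1, 2],
            "file_version": "archive",
            "expiration_days": 7,
        },
    )
    assert response.status_code == 201  # bundle created, build job queued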
class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
    def setUp(self):
        super().setUp()
        self.document = DocumentFactory.create()

    def test_cleanup_expired_share_link_bundles(self):
        expired_path = Path(self.dirs.media_dir) / "expired.zip"
        expired_path.parent.mkdir(parents=True, exist_ok=True)
        expired_path.write_bytes(b"expired")

        active_path = Path(self.dirs.media_dir) / "active.zip"
        active_path.write_bytes(b"active")

        expired_bundle = ShareLinkBundle.objects.create(
            slug="expired-bundle",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.READY,
            expiration=timezone.now() - timedelta(days=1),
            file_path=str(expired_path),
        )
        expired_bundle.documents.set([self.document])

        active_bundle = ShareLinkBundle.objects.create(
            slug="active-bundle",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.READY,
            expiration=timezone.now() + timedelta(days=1),
            file_path=str(active_path),
        )
        active_bundle.documents.set([self.document])

        cleanup_expired_share_link_bundles()

        self.assertFalse(ShareLinkBundle.objects.filter(pk=expired_bundle.pk).exists())
        self.assertTrue(ShareLinkBundle.objects.filter(pk=active_bundle.pk).exists())
        self.assertFalse(expired_path.exists())
        self.assertTrue(active_path.exists())

    def test_cleanup_expired_share_link_bundles_logs_on_failure(self):
        expired_bundle = ShareLinkBundle.objects.create(
            slug="expired-bundle",
            file_version=ShareLink.FileVersion.ARCHIVE,
            status=ShareLinkBundle.Status.READY,
            expiration=timezone.now() - timedelta(days=1),
        )
        expired_bundle.documents.set([self.document])

        with mock.patch.object(
            ShareLinkBundle,
            "delete",
            side_effect=RuntimeError("fail"),
        ):
            with self.assertLogs("paperless.tasks", level="WARNING") as logs:
                cleanup_expired_share_link_bundles()

        self.assertTrue(
            any(
                "Failed to delete expired share link bundle" in msg
                for msg in logs.output
            ),
        )

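The two cleanup tests pin down the task's contract: expired bundles (and their zip files) are deleted, and a failed deletion is logged at WARNING rather than raised. A minimal sketch of a task satisfying them, written from the tests (the actual implementation may differ):

    import logging

    from django.utils import timezone

    from documents.models import ShareLinkBundle

    logger = logging.getLogger("paperless.tasks")


    def cleanup_expired_share_link_bundles_sketch() -> None:
        # Hypothetical sketch: Model.delete() also removes the zip file,
        # so deleting the row is enough to satisfy both assertions.
        for bundle in ShareLinkBundle.objects.filter(
            expiration__isnull=False,
            expiration__lt=timezone.now(),
        ):
            try:
                bundle.delete()
            except Exception:
                logger.warning(
                    "Failed to delete expired share link bundle %s",
                    bundle.pk,
                    exc_info=True,
                )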
class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
    def setUp(self):
        super().setUp()
        self.document = DocumentFactory.create(
            mime_type="application/pdf",
            checksum="123",
        )
        self.document.archive_checksum = ""
        self.document.save()
        self.addCleanup(
            setattr,
            settings,
            "SHARE_LINK_BUNDLE_DIR",
            settings.SHARE_LINK_BUNDLE_DIR,
        )
        settings.SHARE_LINK_BUNDLE_DIR = (
            Path(settings.MEDIA_ROOT) / "documents" / "share_link_bundles"
        )

    def _write_document_file(self, *, archive: bool, content: bytes) -> Path:
        if archive:
            self.document.archive_filename = f"{self.document.pk:07}.pdf"
            self.document.save()
            path = self.document.archive_path
        else:
            path = self.document.source_path
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_bytes(content)
        return path

    def test_build_share_link_bundle_creates_zip_and_sets_metadata(self):
        self._write_document_file(archive=False, content=b"source")
        archive_path = self._write_document_file(archive=True, content=b"archive")
        bundle = ShareLinkBundle.objects.create(
            slug="build-archive",
            file_version=ShareLink.FileVersion.ARCHIVE,
        )
        bundle.documents.set([self.document])

        build_share_link_bundle(bundle.pk)

        bundle.refresh_from_db()
        self.assertEqual(bundle.status, ShareLinkBundle.Status.READY)
        self.assertIsNone(bundle.last_error)
        self.assertIsNotNone(bundle.built_at)
        self.assertGreater(bundle.size_bytes or 0, 0)
        final_path = bundle.absolute_file_path
        self.assertIsNotNone(final_path)
        self.assertTrue(final_path.exists())
        with zipfile.ZipFile(final_path) as zipf:
            names = zipf.namelist()
            self.assertEqual(len(names), 1)
            self.assertEqual(zipf.read(names[0]), archive_path.read_bytes())

    def test_build_share_link_bundle_overwrites_existing_file(self):
        self._write_document_file(archive=False, content=b"source")
        bundle = ShareLinkBundle.objects.create(
            slug="overwrite",
            file_version=ShareLink.FileVersion.ORIGINAL,
        )
        bundle.documents.set([self.document])

        existing = settings.SHARE_LINK_BUNDLE_DIR / "overwrite.zip"
        existing.parent.mkdir(parents=True, exist_ok=True)
        existing.write_bytes(b"old")

        build_share_link_bundle(bundle.pk)

        bundle.refresh_from_db()
        final_path = bundle.absolute_file_path
        self.assertIsNotNone(final_path)
        self.assertTrue(final_path.exists())
        self.assertNotEqual(final_path.read_bytes(), b"old")

    def test_build_share_link_bundle_failure_marks_failed(self):
        self._write_document_file(archive=False, content=b"source")
        bundle = ShareLinkBundle.objects.create(
            slug="fail-bundle",
            file_version=ShareLink.FileVersion.ORIGINAL,
        )
        bundle.documents.set([self.document])

        with (
            mock.patch(
                "documents.tasks.OriginalsOnlyStrategy.add_document",
                side_effect=RuntimeError("zip failure"),
            ),
            mock.patch("pathlib.Path.unlink") as unlink_mock,
        ):
            unlink_mock.side_effect = [OSError("unlink"), OSError("unlink-finally")] + [
                None,
            ] * 5
            with self.assertRaises(RuntimeError):
                build_share_link_bundle(bundle.pk)

        bundle.refresh_from_db()
        self.assertEqual(bundle.status, ShareLinkBundle.Status.FAILED)
        self.assertIsInstance(bundle.last_error, dict)
        self.assertEqual(bundle.last_error.get("message"), "zip failure")
        self.assertEqual(bundle.last_error.get("exception_type"), "RuntimeError")
        scratch_zips = list(Path(settings.SCRATCH_DIR).glob("*.zip"))
        self.assertTrue(scratch_zips)
        for path in scratch_zips:
            path.unlink(missing_ok=True)

    def test_build_share_link_bundle_missing_bundle_noop(self):
        # Should not raise when bundle does not exist
        build_share_link_bundle(99999)


class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
    def setUp(self):
        super().setUp()
        self.document = DocumentFactory.create()
        self.document.checksum = "doc1checksum"
        self.document.save()
        self.other_document = DocumentFactory.create()
        self.other_document.checksum = "doc2checksum"
        self.other_document.save()
        self.bundle_one = ShareLinkBundle.objects.create(
            slug="bundle-one",
            file_version=ShareLink.FileVersion.ORIGINAL,
        )
        self.bundle_one.documents.set([self.document])
        self.bundle_two = ShareLinkBundle.objects.create(
            slug="bundle-two",
            file_version=ShareLink.FileVersion.ORIGINAL,
        )
        self.bundle_two.documents.set([self.other_document])

    def test_filter_documents_returns_all_for_empty_value(self):
        filterset = ShareLinkBundleFilterSet(
            data={"documents": ""},
            queryset=ShareLinkBundle.objects.all(),
        )

        self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])

    def test_filter_documents_handles_invalid_input(self):
        filterset = ShareLinkBundleFilterSet(
            data={"documents": "invalid"},
            queryset=ShareLinkBundle.objects.all(),
        )

        self.assertFalse(filterset.qs.exists())

    def test_filter_documents_filters_by_multiple_ids(self):
        filterset = ShareLinkBundleFilterSet(
            data={"documents": f"{self.document.pk},{self.other_document.pk}"},
            queryset=ShareLinkBundle.objects.all(),
        )

        self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])

    def test_filter_documents_returns_queryset_for_empty_ids(self):
        filterset = ShareLinkBundleFilterSet(
            data={"documents": ","},
            queryset=ShareLinkBundle.objects.all(),
        )

        self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])

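Read together, the four filter tests fix the behavior of the documents filter: empty or comma-only input keeps the queryset, non-numeric input yields no results, and a comma-separated id list filters on membership. A filter method consistent with them (an assumed sketch, not the actual source):

    def filter_documents(self, queryset, name, value):
        # Hypothetical sketch matching the tested contract.
        parts = [part.strip() for part in value.split(",") if part.strip()]
        if not parts:
            return queryset
        try:
            id_list = [int(part) for part in parts]
        except ValueError:
            return queryset.none()
        return queryset.filter(documents__id__in=id_list).distinct()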
class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
    def test_absolute_file_path_handles_relative_and_absolute(self):
        relative_path = Path("relative.zip")
        bundle = ShareLinkBundle.objects.create(
            slug="relative-bundle",
            file_version=ShareLink.FileVersion.ORIGINAL,
            file_path=str(relative_path),
        )

        self.assertEqual(
            bundle.absolute_file_path,
            (settings.SHARE_LINK_BUNDLE_DIR / relative_path).resolve(),
        )

        absolute_path = Path(self.dirs.media_dir) / "absolute.zip"
        bundle.file_path = str(absolute_path)

        self.assertEqual(bundle.absolute_file_path.resolve(), absolute_path.resolve())

    def test_str_returns_translated_slug(self):
        bundle = ShareLinkBundle.objects.create(
            slug="string-slug",
            file_version=ShareLink.FileVersion.ORIGINAL,
        )

        self.assertIn("string-slug", str(bundle))

    def test_remove_file_deletes_existing_file(self):
        bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove.zip"
        bundle_path.parent.mkdir(parents=True, exist_ok=True)
        bundle_path.write_bytes(b"remove-me")
        bundle = ShareLinkBundle.objects.create(
            slug="remove-bundle",
            file_version=ShareLink.FileVersion.ORIGINAL,
            file_path=str(bundle_path.relative_to(settings.SHARE_LINK_BUNDLE_DIR)),
        )

        bundle.remove_file()

        self.assertFalse(bundle_path.exists())

    def test_remove_file_handles_oserror(self):
        bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove-error.zip"
        bundle_path.parent.mkdir(parents=True, exist_ok=True)
        bundle_path.write_bytes(b"remove-me")
        bundle = ShareLinkBundle.objects.create(
            slug="remove-error",
            file_version=ShareLink.FileVersion.ORIGINAL,
            file_path=str(bundle_path.relative_to(settings.SHARE_LINK_BUNDLE_DIR)),
        )

        with mock.patch("pathlib.Path.unlink", side_effect=OSError("fail")):
            bundle.remove_file()

        self.assertTrue(bundle_path.exists())

    def test_delete_calls_remove_file(self):
        bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "delete.zip"
        bundle_path.parent.mkdir(parents=True, exist_ok=True)
        bundle_path.write_bytes(b"remove-me")
        bundle = ShareLinkBundle.objects.create(
            slug="delete-bundle",
            file_version=ShareLink.FileVersion.ORIGINAL,
            file_path=str(bundle_path.relative_to(settings.SHARE_LINK_BUNDLE_DIR)),
        )

        bundle.delete()
        self.assertFalse(bundle_path.exists())

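The model tests fix the contract of absolute_file_path: relative paths resolve under SHARE_LINK_BUNDLE_DIR, absolute paths pass through. A property sketch consistent with that behavior (an assumption inferred from the tests, not the shipped model code):

    @property
    def absolute_file_path(self) -> Path | None:
        # Hypothetical sketch matching the tested behavior.
        if not self.file_path:
            return None
        path = Path(self.file_path)
        if path.is_absolute():
            return path
        return (settings.SHARE_LINK_BUNDLE_DIR / path).resolve()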
class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
    def setUp(self):
        super().setUp()
        self.document = DocumentFactory.create()

    def test_validate_document_ids_rejects_duplicates(self):
        serializer = ShareLinkBundleSerializer(
            data={
                "document_ids": [self.document.pk, self.document.pk],
                "file_version": ShareLink.FileVersion.ORIGINAL,
            },
        )

        self.assertFalse(serializer.is_valid())
        self.assertIn("document_ids", serializer.errors)

    def test_create_assigns_documents_and_expiration(self):
        serializer = ShareLinkBundleSerializer(
            data={
                "document_ids": [self.document.pk],
                "file_version": ShareLink.FileVersion.ORIGINAL,
                "expiration_days": 3,
            },
        )

        self.assertTrue(serializer.is_valid(), serializer.errors)
        bundle = serializer.save()

        self.assertEqual(list(bundle.documents.all()), [self.document])
        expected_expiration = timezone.now() + timedelta(days=3)
        self.assertAlmostEqual(
            bundle.expiration,
            expected_expiration,
            delta=timedelta(seconds=10),
        )

    def test_create_raises_when_missing_documents(self):
        serializer = ShareLinkBundleSerializer(
            data={
                "document_ids": [self.document.pk, 9999],
                "file_version": ShareLink.FileVersion.ORIGINAL,
            },
        )

        self.assertTrue(serializer.is_valid(), serializer.errors)
        with self.assertRaises(serializers.ValidationError):
            serializer.save(documents=[self.document])
@@ -35,7 +35,6 @@ from django.db.models import Model
from django.db.models import Q
from django.db.models import Sum
from django.db.models import When
-from django.db.models.functions import Length
from django.db.models.functions import Lower
from django.db.models.manager import Manager
from django.http import FileResponse
@@ -51,6 +50,7 @@ from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.timezone import make_aware
from django.utils.translation import get_language
+from django.utils.translation import gettext_lazy as _
from django.views import View
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
@@ -71,6 +71,7 @@ from packaging import version as packaging_version
from redis import Redis
from rest_framework import parsers
+from rest_framework import serializers
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound
from rest_framework.exceptions import ValidationError
@@ -121,6 +122,7 @@ from documents.filters import DocumentTypeFilterSet
from documents.filters import ObjectOwnedOrGrantedPermissionsFilter
from documents.filters import ObjectOwnedPermissionsFilter
from documents.filters import PaperlessTaskFilterSet
+from documents.filters import ShareLinkBundleFilterSet
from documents.filters import ShareLinkFilterSet
from documents.filters import StoragePathFilterSet
from documents.filters import TagFilterSet
@@ -138,6 +140,7 @@ from documents.models import Note
from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import ShareLink
+from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import UiSettings
@@ -145,7 +148,6 @@ from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.parsers import get_parser_class_for_mime_type
-from documents.parsers import parse_date_generator
from documents.permissions import AcknowledgeTasksPermissions
from documents.permissions import PaperlessAdminPermissions
from documents.permissions import PaperlessNotePermissions
@@ -155,6 +157,7 @@ from documents.permissions import get_document_count_filter_for_user
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import has_perms_owner_aware
from documents.permissions import set_permissions_for_object
+from documents.plugins.date_parsing import get_date_parser
from documents.schema import generate_object_with_permissions_schema
from documents.serialisers import AcknowledgeTasksViewSerializer
from documents.serialisers import BulkDownloadSerializer
@@ -171,6 +174,7 @@ from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RunTaskViewSerializer
from documents.serialisers import SavedViewSerializer
from documents.serialisers import SearchResultSerializer
+from documents.serialisers import ShareLinkBundleSerializer
from documents.serialisers import ShareLinkSerializer
from documents.serialisers import StoragePathSerializer
from documents.serialisers import StoragePathTestSerializer
@@ -183,6 +187,7 @@ from documents.serialisers import WorkflowActionSerializer
from documents.serialisers import WorkflowSerializer
from documents.serialisers import WorkflowTriggerSerializer
from documents.signals import document_updated
+from documents.tasks import build_share_link_bundle
from documents.tasks import consume_file
from documents.tasks import empty_trash
from documents.tasks import index_optimize
@@ -1018,16 +1023,17 @@ class DocumentViewSet(

        dates = []
        if settings.NUMBER_OF_SUGGESTED_DATES > 0:
-            gen = parse_date_generator(doc.filename, doc.content)
-            dates = sorted(
-                {
-                    i
-                    for i in itertools.islice(
-                        gen,
-                        settings.NUMBER_OF_SUGGESTED_DATES,
-                    )
-                },
-            )
+            with get_date_parser() as date_parser:
+                gen = date_parser.parse(doc.filename, doc.content)
+                dates = sorted(
+                    {
+                        i
+                        for i in itertools.islice(
+                            gen,
+                            settings.NUMBER_OF_SUGGESTED_DATES,
+                        )
+                    },
+                )

        resp_data = {
            "correspondents": [
@@ -2326,23 +2332,19 @@ class StatisticsView(GenericAPIView):
        user = request.user if request.user is not None else None

        documents = (
-            (
-                Document.objects.all()
-                if user is None
-                else get_objects_for_user_owner_aware(
-                    user,
-                    "documents.view_document",
-                    Document,
-                )
+            Document.objects.all()
+            if user is None
+            else get_objects_for_user_owner_aware(
+                user,
+                "documents.view_document",
+                Document,
            )
-            .only("mime_type", "content")
-            .prefetch_related("tags")
        )
        tags = (
            Tag.objects.all()
            if user is None
            else get_objects_for_user_owner_aware(user, "documents.view_tag", Tag)
-        ).only("id", "is_inbox_tag")
+        )
        correspondent_count = (
            Correspondent.objects.count()
            if user is None
@@ -2371,31 +2373,33 @@ class StatisticsView(GenericAPIView):
            ).count()
        )

-        documents_total = documents.count()
-
-        inbox_tags = tags.filter(is_inbox_tag=True)
+        inbox_tag_pks = list(
+            tags.filter(is_inbox_tag=True).values_list("pk", flat=True),
+        )

        documents_inbox = (
-            documents.filter(tags__id__in=inbox_tags).distinct().count()
-            if inbox_tags.exists()
+            documents.filter(tags__id__in=inbox_tag_pks).values("id").distinct().count()
+            if inbox_tag_pks
            else None
        )

-        document_file_type_counts = (
+        # Single SQL request for document stats and mime type counts
+        mime_type_stats = list(
            documents.values("mime_type")
-            .annotate(mime_type_count=Count("mime_type"))
-            .order_by("-mime_type_count")
-            if documents_total > 0
-            else []
+            .annotate(
+                mime_type_count=Count("id"),
+                mime_type_chars=Sum("content_length"),
+            )
+            .order_by("-mime_type_count"),
        )

-        character_count = (
-            documents.annotate(
-                characters=Length("content"),
-            )
-            .aggregate(Sum("characters"))
-            .get("characters__sum")
-        )
+        # Calculate totals from grouped results
+        documents_total = sum(row["mime_type_count"] for row in mime_type_stats)
+        character_count = sum(row["mime_type_chars"] or 0 for row in mime_type_stats)
+        document_file_type_counts = [
+            {"mime_type": row["mime_type"], "mime_type_count": row["mime_type_count"]}
+            for row in mime_type_stats
+        ]

        current_asn = Document.objects.aggregate(
            Max("archive_serial_number", default=0),
@@ -2408,11 +2412,9 @@ class StatisticsView(GenericAPIView):
            "documents_total": documents_total,
            "documents_inbox": documents_inbox,
            "inbox_tag": (
-                inbox_tags.first().pk if inbox_tags.exists() else None
+                inbox_tag_pks[0] if inbox_tag_pks else None
            ),  # backwards compatibility
-            "inbox_tags": (
-                [tag.pk for tag in inbox_tags] if inbox_tags.exists() else None
-            ),
+            "inbox_tags": (inbox_tag_pks if inbox_tag_pks else None),
            "document_file_type_counts": document_file_type_counts,
            "character_count": character_count,
            "tag_count": len(tags),
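Each element of mime_type_stats is a plain dict produced by values()/annotate(), so both totals fall out of one grouped query. For illustration, one aggregated row might look like (numbers invented):

    {"mime_type": "application/pdf", "mime_type_count": 42, "mime_type_chars": 123456}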
@@ -2440,7 +2442,7 @@ class BulkDownloadView(GenericAPIView):
        follow_filename_format = serializer.validated_data.get("follow_formatting")

        for document in documents:
-            if not has_perms_owner_aware(request.user, "view_document", document):
+            if not has_perms_owner_aware(request.user, "change_document", document):
                return HttpResponseForbidden("Insufficient permissions")

        settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
@@ -2795,21 +2797,187 @@ class ShareLinkViewSet(ModelViewSet, PassUserMixin):
    ordering_fields = ("created", "expiration", "document")


class ShareLinkBundleViewSet(ModelViewSet, PassUserMixin):
    model = ShareLinkBundle

    queryset = ShareLinkBundle.objects.all()

    serializer_class = ShareLinkBundleSerializer
    pagination_class = StandardPagination
    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
    filter_backends = (
        DjangoFilterBackend,
        OrderingFilter,
        ObjectOwnedOrGrantedPermissionsFilter,
    )
    filterset_class = ShareLinkBundleFilterSet
    ordering_fields = ("created", "expiration", "status")

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .prefetch_related("documents")
            .annotate(document_total=Count("documents", distinct=True))
        )

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        document_ids = serializer.validated_data["document_ids"]
        documents_qs = Document.objects.filter(pk__in=document_ids).select_related(
            "owner",
        )
        found_ids = set(documents_qs.values_list("pk", flat=True))
        missing = sorted(set(document_ids) - found_ids)
        if missing:
            raise ValidationError(
                {
                    "document_ids": _(
                        "Documents not found: %(ids)s",
                    )
                    % {"ids": ", ".join(str(item) for item in missing)},
                },
            )

        documents = list(documents_qs)
        for document in documents:
            if not has_perms_owner_aware(request.user, "view_document", document):
                raise ValidationError(
                    {
                        "document_ids": _(
                            "Insufficient permissions to share document %(id)s.",
                        )
                        % {"id": document.pk},
                    },
                )

        document_map = {document.pk: document for document in documents}
        ordered_documents = [document_map[doc_id] for doc_id in document_ids]

        bundle = serializer.save(
            owner=request.user,
            documents=ordered_documents,
        )
        bundle.remove_file()
        bundle.status = ShareLinkBundle.Status.PENDING
        bundle.last_error = None
        bundle.size_bytes = None
        bundle.built_at = None
        bundle.file_path = ""
        bundle.save(
            update_fields=[
                "status",
                "last_error",
                "size_bytes",
                "built_at",
                "file_path",
            ],
        )
        build_share_link_bundle.delay(bundle.pk)
        bundle.document_total = len(ordered_documents)
        response_serializer = self.get_serializer(bundle)
        headers = self.get_success_headers(response_serializer.data)
        return Response(
            response_serializer.data,
            status=status.HTTP_201_CREATED,
            headers=headers,
        )

    @action(detail=True, methods=["post"])
    def rebuild(self, request, pk=None):
        bundle = self.get_object()
        if bundle.status == ShareLinkBundle.Status.PROCESSING:
            return Response(
                {"detail": _("Bundle is already being processed.")},
                status=status.HTTP_400_BAD_REQUEST,
            )
        bundle.remove_file()
        bundle.status = ShareLinkBundle.Status.PENDING
        bundle.last_error = None
        bundle.size_bytes = None
        bundle.built_at = None
        bundle.file_path = ""
        bundle.save(
            update_fields=[
                "status",
                "last_error",
                "size_bytes",
                "built_at",
                "file_path",
            ],
        )
        build_share_link_bundle.delay(bundle.pk)
        bundle.document_total = (
            getattr(bundle, "document_total", None) or bundle.documents.count()
        )
        serializer = self.get_serializer(bundle)
        return Response(serializer.data)


class SharedLinkView(View):
    authentication_classes = []
    permission_classes = []

    def get(self, request, slug):
        share_link = ShareLink.objects.filter(slug=slug).first()
-        if share_link is None:
-            return HttpResponseRedirect("/accounts/login/?sharelink_notfound=1")
-        if share_link.expiration is not None and share_link.expiration < timezone.now():
-            return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")
-        return serve_file(
-            doc=share_link.document,
-            use_archive=share_link.file_version == "archive",
-            disposition="inline",
-        )
+        if share_link is not None:
+            if (
+                share_link.expiration is not None
+                and share_link.expiration < timezone.now()
+            ):
+                return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")
+            return serve_file(
+                doc=share_link.document,
+                use_archive=share_link.file_version == "archive",
+                disposition="inline",
+            )
+
+        bundle = ShareLinkBundle.objects.filter(slug=slug).first()
+        if bundle is None:
+            return HttpResponseRedirect("/accounts/login/?sharelink_notfound=1")
+
+        if bundle.expiration is not None and bundle.expiration < timezone.now():
+            return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")
+
+        if bundle.status in {
+            ShareLinkBundle.Status.PENDING,
+            ShareLinkBundle.Status.PROCESSING,
+        }:
+            return HttpResponse(
+                _(
+                    "The share link bundle is still being prepared. Please try again later.",
+                ),
+                status=status.HTTP_202_ACCEPTED,
+            )
+
+        file_path = bundle.absolute_file_path
+
+        if bundle.status == ShareLinkBundle.Status.FAILED or file_path is None:
+            return HttpResponse(
+                _(
+                    "The share link bundle is unavailable.",
+                ),
+                status=status.HTTP_503_SERVICE_UNAVAILABLE,
+            )
+
+        response = FileResponse(file_path.open("rb"), content_type="application/zip")
+        short_slug = bundle.slug[:12]
+        download_name = f"paperless-share-{short_slug}.zip"
+        filename_normalized = (
+            normalize("NFKD", download_name)
+            .encode(
+                "ascii",
+                "ignore",
+            )
+            .decode("ascii")
+        )
+        filename_encoded = quote(download_name)
+        response["Content-Disposition"] = (
+            f"attachment; filename='{filename_normalized}'; "
+            f"filename*=utf-8''{filename_encoded}"
+        )
+        return response

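For a bundle whose slug starts with "readyslugabcdef", the header assembled above would come out roughly as (illustrative, slug truncated to 12 characters by the code):

    Content-Disposition: attachment; filename='paperless-share-readyslugabc.zip'; filename*=utf-8''paperless-share-readyslugabc.zip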
def serve_file(*, doc: Document, use_archive: bool, disposition: str):

(File diff suppressed because it is too large)
@@ -3,7 +3,7 @@ from urllib.parse import quote

from allauth.account.adapter import DefaultAccountAdapter
from allauth.core import context
-from allauth.headless.tokens.sessions import SessionTokenStrategy
+from allauth.headless.tokens.strategies.sessions import SessionTokenStrategy
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.conf import settings
from django.contrib.auth.models import Group
@@ -241,6 +241,17 @@ def _parse_beat_schedule() -> dict:
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
+        {
+            "name": "Cleanup expired share link bundles",
+            "env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
+            # Default daily at 02:00
+            "env_default": "0 2 * * *",
+            "task": "documents.tasks.cleanup_expired_share_link_bundles",
+            "options": {
+                # 1 hour before default schedule sends again
+                "expires": 23.0 * 60.0 * 60.0,
+            },
+        },
    ]
    for task in tasks:
        # Either get the environment setting or use the default
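Like the other scheduled tasks, the cleanup cron can be overridden or disabled through its environment key; for example (the "disable" value is exercised by the schedule tests further below):

    PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON="30 3 * * *"   # run at 03:30 instead of 02:00
    PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON="disable"      # turn the task off entirely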
@@ -279,6 +290,7 @@ MEDIA_ROOT = __get_path("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
ORIGINALS_DIR = MEDIA_ROOT / "documents" / "originals"
ARCHIVE_DIR = MEDIA_ROOT / "documents" / "archive"
THUMBNAIL_DIR = MEDIA_ROOT / "documents" / "thumbnails"
+SHARE_LINK_BUNDLE_DIR = MEDIA_ROOT / "documents" / "share_link_bundles"

DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
@@ -161,6 +161,7 @@ class TestCeleryScheduleParsing(TestCase):
    EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0
    LLM_INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
+    CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0

    def test_schedule_configuration_default(self):
        """
@@ -212,6 +213,13 @@ class TestCeleryScheduleParsing(TestCase):
                    "expires": self.LLM_INDEX_EXPIRE_TIME,
                },
            },
+            "Cleanup expired share link bundles": {
+                "task": "documents.tasks.cleanup_expired_share_link_bundles",
+                "schedule": crontab(minute=0, hour=2),
+                "options": {
+                    "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
+                },
+            },
            },
            schedule,
        )
@@ -271,6 +279,13 @@ class TestCeleryScheduleParsing(TestCase):
                    "expires": self.LLM_INDEX_EXPIRE_TIME,
                },
            },
+            "Cleanup expired share link bundles": {
+                "task": "documents.tasks.cleanup_expired_share_link_bundles",
+                "schedule": crontab(minute=0, hour=2),
+                "options": {
+                    "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
+                },
+            },
            },
            schedule,
        )
@@ -322,6 +337,13 @@ class TestCeleryScheduleParsing(TestCase):
                    "expires": self.LLM_INDEX_EXPIRE_TIME,
                },
            },
+            "Cleanup expired share link bundles": {
+                "task": "documents.tasks.cleanup_expired_share_link_bundles",
+                "schedule": crontab(minute=0, hour=2),
+                "options": {
+                    "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
+                },
+            },
            },
            schedule,
        )
@@ -345,6 +367,7 @@ class TestCeleryScheduleParsing(TestCase):
                "PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
                "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
                "PAPERLESS_LLM_INDEX_TASK_CRON": "disable",
+                "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
            },
        ):
            schedule = _parse_beat_schedule()
@@ -31,6 +31,7 @@ from documents.views import SavedViewViewSet
from documents.views import SearchAutoCompleteView
from documents.views import SelectionDataView
from documents.views import SharedLinkView
+from documents.views import ShareLinkBundleViewSet
from documents.views import ShareLinkViewSet
from documents.views import StatisticsView
from documents.views import StoragePathViewSet
@@ -73,6 +74,7 @@ api_router.register(r"users", UserViewSet, basename="users")
api_router.register(r"groups", GroupViewSet, basename="groups")
api_router.register(r"mail_accounts", MailAccountViewSet)
api_router.register(r"mail_rules", MailRuleViewSet)
+api_router.register(r"share_link_bundles", ShareLinkBundleViewSet)
api_router.register(r"share_links", ShareLinkViewSet)
api_router.register(r"workflow_triggers", WorkflowTriggerViewSet)
api_router.register(r"workflow_actions", WorkflowActionViewSet)
@@ -1,11 +1,14 @@
import logging
import shutil
+from datetime import timedelta
from pathlib import Path

import faiss
import llama_index.core.settings as llama_settings
import tqdm
+from celery import states
from django.conf import settings
+from django.utils import timezone
from llama_index.core import Document as LlamaDocument
from llama_index.core import StorageContext
from llama_index.core import VectorStoreIndex
@@ -21,6 +24,7 @@ from llama_index.core.text_splitter import TokenTextSplitter
from llama_index.vector_stores.faiss import FaissVectorStore

from documents.models import Document
+from documents.models import PaperlessTask
from paperless_ai.embedding import build_llm_index_text
from paperless_ai.embedding import get_embedding_dim
from paperless_ai.embedding import get_embedding_model
@@ -28,6 +32,29 @@ from paperless_ai.embedding import get_embedding_model

logger = logging.getLogger("paperless_ai.indexing")


def queue_llm_index_update_if_needed(*, rebuild: bool, reason: str) -> bool:
    from documents.tasks import llmindex_index

    has_running = PaperlessTask.objects.filter(
        task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
        status__in=[states.PENDING, states.STARTED],
    ).exists()
    has_recent = PaperlessTask.objects.filter(
        task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
        date_created__gte=(timezone.now() - timedelta(minutes=5)),
    ).exists()
    if has_running or has_recent:
        return False

    llmindex_index.delay(rebuild=rebuild, scheduled=False, auto=True)
    logger.warning(
        "Queued LLM index update%s: %s",
        " (rebuild)" if rebuild else "",
        reason,
    )
    return True

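Worth noting before the call sites further below: the helper debounces itself, returning False without enqueueing when an index update is already pending or started, or when one was created in the last five minutes. A representative call, mirroring how the query path uses it:

    # Returns True only if a new llmindex_index task was actually queued.
    queued = queue_llm_index_update_if_needed(
        rebuild=False,
        reason="LLM index not found for similarity query.",
    )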
def get_or_create_storage_context(*, rebuild=False):
    """
    Loads or creates the StorageContext (vector store, docstore, index store).
@@ -93,6 +120,10 @@ def load_or_build_index(nodes=None):
    except ValueError as e:
        logger.warning("Failed to load index from storage: %s", e)
        if not nodes:
+            queue_llm_index_update_if_needed(
+                rebuild=vector_store_file_exists(),
+                reason="LLM index missing or invalid while loading.",
+            )
            logger.info("No nodes provided for index creation.")
            raise
    return VectorStoreIndex(
@@ -250,6 +281,13 @@ def query_similar_documents(
    """
    Runs a similarity query and returns top-k similar Document objects.
    """
+    if not vector_store_file_exists():
+        queue_llm_index_update_if_needed(
+            rebuild=False,
+            reason="LLM index not found for similarity query.",
+        )
+        return []
+
    index = load_or_build_index()

    # constrain only the node(s) that match the document IDs, if given
@@ -3,11 +3,13 @@ from unittest.mock import MagicMock
from unittest.mock import patch

import pytest
+from celery import states
from django.test import override_settings
+from django.utils import timezone
from llama_index.core.base.embeddings.base import BaseEmbedding

from documents.models import Document
+from documents.models import PaperlessTask
from paperless_ai import indexing
@@ -288,6 +290,36 @@ def test_update_llm_index_no_documents(
    )


@pytest.mark.django_db
def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent():
    # No existing tasks
    with patch("documents.tasks.llmindex_index") as mock_task:
        result = indexing.queue_llm_index_update_if_needed(
            rebuild=True,
            reason="test enqueue",
        )

        assert result is True
        mock_task.delay.assert_called_once_with(rebuild=True, scheduled=False, auto=True)

    PaperlessTask.objects.create(
        task_id="task-1",
        task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
        status=states.STARTED,
        date_created=timezone.now(),
    )

    # Existing running task
    with patch("documents.tasks.llmindex_index") as mock_task:
        result = indexing.queue_llm_index_update_if_needed(
            rebuild=False,
            reason="should skip",
        )

        assert result is False
        mock_task.delay.assert_not_called()


@override_settings(
    LLM_EMBEDDING_BACKEND="huggingface",
    LLM_BACKEND="ollama",
@@ -299,11 +331,15 @@ def test_query_similar_documents(
    with (
        patch("paperless_ai.indexing.get_or_create_storage_context") as mock_storage,
        patch("paperless_ai.indexing.load_or_build_index") as mock_load_or_build_index,
+        patch(
+            "paperless_ai.indexing.vector_store_file_exists",
+        ) as mock_vector_store_exists,
        patch("paperless_ai.indexing.VectorIndexRetriever") as mock_retriever_cls,
        patch("paperless_ai.indexing.Document.objects.filter") as mock_filter,
    ):
        mock_storage.return_value = MagicMock()
        mock_storage.return_value.persist_dir = temp_llm_index_dir
+        mock_vector_store_exists.return_value = True

        mock_index = MagicMock()
        mock_load_or_build_index.return_value = mock_index
@@ -332,3 +368,31 @@ def test_query_similar_documents(
    mock_filter.assert_called_once_with(pk__in=[1, 2])

    assert result == mock_filtered_docs


@pytest.mark.django_db
def test_query_similar_documents_triggers_update_when_index_missing(
    temp_llm_index_dir,
    real_document,
):
    with (
        patch(
            "paperless_ai.indexing.vector_store_file_exists",
            return_value=False,
        ),
        patch(
            "paperless_ai.indexing.queue_llm_index_update_if_needed",
        ) as mock_queue,
        patch("paperless_ai.indexing.load_or_build_index") as mock_load,
    ):
        result = indexing.query_similar_documents(
            real_document,
            top_k=2,
        )

        mock_queue.assert_called_once_with(
            rebuild=False,
            reason="LLM index not found for similarity query.",
        )
        mock_load.assert_not_called()
        assert result == []
@@ -1,4 +1,3 @@
-import os
from collections.abc import Generator
from pathlib import Path
@@ -70,20 +69,31 @@ def mail_parser() -> MailDocumentParser:


@pytest.fixture()
-def live_mail_account() -> Generator[MailAccount, None, None]:
-    try:
-        account = MailAccount.objects.create(
-            name="test",
-            imap_server=os.environ["PAPERLESS_MAIL_TEST_HOST"],
-            username=os.environ["PAPERLESS_MAIL_TEST_USER"],
-            password=os.environ["PAPERLESS_MAIL_TEST_PASSWD"],
-            imap_port=993,
-        )
-        yield account
-    finally:
-        account.delete()
+def greenmail_mail_account(db: None) -> Generator[MailAccount, None, None]:
+    """
+    Create a mail account configured for local Greenmail server.
+    """
+    account = MailAccount.objects.create(
+        name="Greenmail Test",
+        imap_server="localhost",
+        imap_port=3143,
+        imap_security=MailAccount.ImapSecurity.NONE,
+        username="test@localhost",
+        password="test",
+        character_set="UTF-8",
+    )
+    yield account
+    account.delete()


@pytest.fixture()
def mail_account_handler() -> MailAccountHandler:
    return MailAccountHandler()


+@pytest.fixture(scope="session")
+def nginx_base_url() -> Generator[str, None, None]:
+    """
+    The base URL for the nginx HTTP server we expect to be alive
+    """
+    yield "http://localhost:8080"
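These fixtures expect two local services: a Greenmail IMAP server on localhost:3143 and an HTTP server on localhost:8080. One possible way to provide them for a local run (image names, ports, and mounts are assumptions on my part, not taken from this diff):

    # Greenmail's standalone image exposes plain IMAP on 3143 by default
    docker run --rm -p 3143:3143 greenmail/standalone
    # Any static server on 8080 works; nginx serving the expected assets
    docker run --rm -p 8080:80 nginx:alpine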
@@ -55,7 +55,7 @@ Content-Transfer-Encoding: 7bit
<p>Some Text</p>
<p>
<img src="cid:part1.pNdUSz0s.D3NqVtPg@example.de" alt="Has to be rewritten to work..">
-<img src="https://docs.paperless-ngx.com/assets/logo_full_white.svg" alt="This image should not be shown.">
+<img src="http://localhost:8080/assets/logo_full_white.svg" alt="This image should not be shown.">
</p>

<p>and an embedded image.<br>
@@ -6,7 +6,7 @@
<p>Some Text</p>
<p>
<img src="cid:part1.pNdUSz0s.D3NqVtPg@example.de" alt="Has to be rewritten to work..">
-<img src="https://docs.paperless-ngx.com/assets/logo_full_white.svg" alt="This image should not be shown.">
+<img src="http://localhost:8080/assets/logo_full_white.svg" alt="This image should not be shown.">
</p>

<p>and an embedded image.<br>
@@ -1,6 +1,3 @@
 import os
-import warnings
-
 import pytest
-
 from paperless_mail.mail import MailAccountHandler
@@ -9,53 +6,53 @@ from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule


-# Only run if the environment is setup
-# And the environment is not empty (forks, I think)
-@pytest.mark.skipif(
-    "PAPERLESS_MAIL_TEST_HOST" not in os.environ
-    or not len(os.environ["PAPERLESS_MAIL_TEST_HOST"]),
-    reason="Live server testing not enabled",
-)
-@pytest.mark.django_db()
-class TestMailLiveServer:
-    def test_process_non_gmail_server_flag(
+@pytest.mark.live
+@pytest.mark.greenmail
+@pytest.mark.django_db
+class TestMailGreenmail:
+    """
+    Mail tests using local Greenmail server
+    """
+
+    def test_process_flag(
         self,
         mail_account_handler: MailAccountHandler,
-        live_mail_account: MailAccount,
-    ):
+        greenmail_mail_account: MailAccount,
+    ) -> None:
+        """
+        Test processing mail with FLAG action.
+        """
+        rule = MailRule.objects.create(
+            name="testrule",
+            account=greenmail_mail_account,
+            action=MailRule.MailAction.FLAG,
+        )
+
         try:
-            rule1 = MailRule.objects.create(
-                name="testrule",
-                account=live_mail_account,
-                action=MailRule.MailAction.FLAG,
-            )
-
-            mail_account_handler.handle_mail_account(live_mail_account)
-
-            rule1.delete()
-
+            mail_account_handler.handle_mail_account(greenmail_mail_account)
         except MailError as e:
             pytest.fail(f"Failure: {e}")
-        except Exception as e:
-            warnings.warn(f"Unhandled exception: {e}")
+        finally:
+            rule.delete()

-    def test_process_non_gmail_server_tag(
+    def test_process_tag(
         self,
         mail_account_handler: MailAccountHandler,
-        live_mail_account: MailAccount,
-    ):
+        greenmail_mail_account: MailAccount,
+    ) -> None:
+        """
+        Test processing mail with TAG action.
+        """
+        rule = MailRule.objects.create(
+            name="testrule",
+            account=greenmail_mail_account,
+            action=MailRule.MailAction.TAG,
+            action_parameter="TestTag",
+        )
+
         try:
-            rule2 = MailRule.objects.create(
-                name="testrule",
-                account=live_mail_account,
-                action=MailRule.MailAction.TAG,
-            )
-
-            mail_account_handler.handle_mail_account(live_mail_account)
-
-            rule2.delete()
-
+            mail_account_handler.handle_mail_account(greenmail_mail_account)
         except MailError as e:
             pytest.fail(f"Failure: {e}")
-        except Exception as e:
-            warnings.warn(f"Unhandled exception: {e}")
+        finally:
+            rule.delete()

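These tests assume the Greenmail inbox already holds mail for `handle_mail_account` to flag or tag. The seeding step is not part of this diff; a sketch of what it could look like over SMTP, assuming Greenmail's default SMTP port 3025 (the IMAP port 3143 is confirmed by the fixture above, the SMTP port is not):

```python
# Hypothetical inbox seeding for the Greenmail tests above. Port 3025 is
# Greenmail's usual SMTP default, but this diff does not show the actual
# seeding mechanism, so treat this as an assumption.
import smtplib
from email.message import EmailMessage

msg = EmailMessage()
msg["From"] = "sender@localhost"
msg["To"] = "test@localhost"
msg["Subject"] = "Invoice for processing"
msg.set_content("A body for the FLAG/TAG rules to act on.")

with smtplib.SMTP("localhost", 3025) as smtp:
    smtp.send_message(msg)
```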
@@ -17,7 +17,7 @@ from paperless_mail.parsers import MailDocumentParser
 def extract_text(pdf_path: Path) -> str:
     """
     Using pdftotext from poppler, extracts the text of a PDF into a file,
-    then reads the file contents and returns it
+    then reads the file contents and returns it.
     """
     with tempfile.NamedTemporaryFile(
         mode="w+",
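The docstring describes the helper's approach: have poppler's `pdftotext` write into a temporary file and read it back. The rest of the function falls outside this hunk; a self-contained sketch of that approach (the exact `pdftotext` flags are an assumption):

```python
# Sketch of the extract_text() approach described above: run poppler's
# pdftotext into a temp file and return the contents. The real helper's
# flags and error handling are not visible in this hunk.
import subprocess
import tempfile
from pathlib import Path


def extract_text_sketch(pdf_path: Path) -> str:
    with tempfile.NamedTemporaryFile(mode="w+", suffix=".txt") as tmp:
        subprocess.run(
            ["pdftotext", "-q", "-layout", str(pdf_path), tmp.name],
            check=True,
        )
        return Path(tmp.name).read_text()
```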
@@ -38,71 +38,107 @@ def extract_text(pdf_path: Path) -> str:


 class MailAttachmentMock:
-    def __init__(self, payload, content_id):
+    def __init__(self, payload: bytes, content_id: str) -> None:
         self.payload = payload
         self.content_id = content_id
         self.content_type = "image/png"


+@pytest.mark.live
+@pytest.mark.nginx
 @pytest.mark.skipif(
     "PAPERLESS_CI_TEST" not in os.environ,
     reason="No Gotenberg/Tika servers to test with",
 )
-class TestUrlCanary:
+class TestNginxService:
     """
-    Verify certain URLs are still available so testing is valid still
+    Verify the local nginx server is responding correctly.
+
+    These tests validate that the test infrastructure is working properly
+    before running the actual parser tests that depend on HTTP resources.
     """

-    def test_online_image_exception_on_not_available(self):
+    def test_non_existent_resource_returns_404(
+        self,
+        nginx_base_url: str,
+    ) -> None:
         """
         GIVEN:
-            - Fresh start
+            - Local nginx server is running
         WHEN:
-            - nonexistent image is requested
+            - A non-existent resource is requested
         THEN:
-            - An exception shall be thrown
-        """
-        """
-        A public image is used in the html sample file. We have no control
-        whether this image stays online forever, so here we check if we can detect if is not
-        available anymore.
+            - An HTTP 404 status code shall be returned
         """
         resp = httpx.get(
-            "https://docs.paperless-ngx.com/assets/non-existent.png",
+            f"{nginx_base_url}/assets/non-existent.png",
             timeout=5.0,
         )
         with pytest.raises(httpx.HTTPStatusError) as exec_info:
             resp.raise_for_status()

         assert exec_info.value.response.status_code == httpx.codes.NOT_FOUND

-    def test_is_online_image_still_available(self):
+    def test_valid_resource_is_available(
+        self,
+        nginx_base_url: str,
+    ) -> None:
         """
         GIVEN:
-            - Fresh start
+            - Local nginx server is running
         WHEN:
-            - A public image used in the html sample file is requested
+            - A valid test fixture resource is requested
         THEN:
-            - No exception shall be thrown
+            - The resource shall be returned with HTTP 200 status code
+            - The response shall contain the expected content type
         """
-        """
-        A public image is used in the html sample file. We have no control
-        whether this image stays online forever, so here we check if it is still there
-        """
-
-        # Now check the URL used in samples/sample.html
         resp = httpx.get(
-            "https://docs.paperless-ngx.com/assets/logo_full_white.svg",
+            f"{nginx_base_url}/assets/logo_full_white.svg",
             timeout=5.0,
         )
         resp.raise_for_status()
+
+        assert resp.status_code == httpx.codes.OK
+        assert "svg" in resp.headers.get("content-type", "").lower()
+
+    def test_server_connectivity(
+        self,
+        nginx_base_url: str,
+    ) -> None:
+        """
+        GIVEN:
+            - Local test fixtures server should be running
+        WHEN:
+            - A request is made to the server root
+        THEN:
+            - The server shall respond without connection errors
+        """
+        try:
+            resp = httpx.get(
+                nginx_base_url,
+                timeout=5.0,
+                follow_redirects=True,
+            )
+            # We don't care about the status code, just that we can connect
+            assert resp.status_code in {200, 404, 403}
+        except httpx.ConnectError as e:
+            pytest.fail(
+                f"Cannot connect to nginx server at {nginx_base_url}. "
+                f"Ensure the nginx container is running via docker-compose.ci-test.yml. "
+                f"Error: {e}",
+            )


+@pytest.mark.live
+@pytest.mark.gotenberg
+@pytest.mark.tika
+@pytest.mark.nginx
 @pytest.mark.skipif(
     "PAPERLESS_CI_TEST" not in os.environ,
     reason="No Gotenberg/Tika servers to test with",
 )
 class TestParserLive:
     @staticmethod
-    def imagehash(file, hash_size=18):
+    def imagehash(file: Path, hash_size: int = 18) -> str:
         return f"{average_hash(Image.open(file), hash_size)}"

     def test_get_thumbnail(
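`imagehash()` compares thumbnails by perceptual average hash rather than by bytes, so renders that merely differ in compression still match. A small sketch of why that works, using the same `imagehash`/Pillow calls as the static method above:

```python
# Average-hash comparison as used by TestParserLive.imagehash above:
# perceptually identical images yield equal hashes even when the files
# differ byte-for-byte. Subtracting two ImageHash values gives their
# Hamming distance, so 0 means "looks the same".
from imagehash import average_hash
from PIL import Image


def looks_the_same(first: str, second: str, hash_size: int = 18) -> bool:
    return (
        average_hash(Image.open(first), hash_size)
        - average_hash(Image.open(second), hash_size)
    ) == 0
```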
@@ -112,14 +148,15 @@ class TestParserLive:
         simple_txt_email_file: Path,
         simple_txt_email_pdf_file: Path,
         simple_txt_email_thumbnail_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
-            - Fresh start
+            - A simple text email file
+            - Mocked PDF generation returning a known PDF
         WHEN:
-            - The Thumbnail is requested
+            - The thumbnail is requested
         THEN:
-            - The returned thumbnail image file is as expected
+            - The returned thumbnail image file shall match the expected hash
         """
         mock_generate_pdf = mocker.patch(
             "paperless_mail.parsers.MailDocumentParser.generate_pdf",
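`test_get_thumbnail` stubs out `generate_pdf` so only the thumbnailing path runs against a known-good PDF. The pattern, in isolation (the `return_value` wiring is inferred from the surrounding test, not shown in this hunk):

```python
# The pytest-mock pattern used above, in isolation: patch the parser's
# generate_pdf so the thumbnail path consumes a known fixture PDF.
# The return_value wiring is inferred, not shown in this hunk.
def test_get_thumbnail_sketch(mocker, mail_parser, simple_txt_email_pdf_file):
    mock_generate_pdf = mocker.patch(
        "paperless_mail.parsers.MailDocumentParser.generate_pdf",
    )
    mock_generate_pdf.return_value = simple_txt_email_pdf_file
```

The test then invokes the parser's thumbnail generation and compares average hashes, as in the assertion shown in the next hunk.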
@@ -134,22 +171,28 @@ class TestParserLive:
         assert self.imagehash(thumb) == self.imagehash(
             simple_txt_email_thumbnail_file,
         ), (
-            f"Created Thumbnail {thumb} differs from expected file {simple_txt_email_thumbnail_file}"
+            f"Created thumbnail {thumb} differs from expected file "
+            f"{simple_txt_email_thumbnail_file}"
         )

-    def test_tika_parse_successful(self, mail_parser: MailDocumentParser):
+    def test_tika_parse_successful(self, mail_parser: MailDocumentParser) -> None:
         """
         GIVEN:
-            - Fresh start
+            - HTML content to parse
+            - Tika server is running
         WHEN:
-            - tika parsing is called
+            - Tika parsing is called
         THEN:
-            - a web request to tika shall be done and the reply es returned
+            - A web request to Tika shall be made
+            - The parsed text content shall be returned
         """
-        html = '<html><head><meta http-equiv="content-type" content="text/html; charset=UTF-8"></head><body><p>Some Text</p></body></html>'
+        html = (
+            '<html><head><meta http-equiv="content-type" '
+            'content="text/html; charset=UTF-8"></head>'
+            "<body><p>Some Text</p></body></html>"
+        )
         expected_text = "Some Text"

         # Check successful parsing
         parsed = mail_parser.tika_parse(html)
         assert expected_text == parsed.strip()
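`tika_parse` ultimately hands the HTML to a Tika server and gets plain text back. Roughly what that round-trip looks like at the HTTP level, using tika-server's documented `/tika` route (this is not necessarily how `MailDocumentParser` implements it, and the port is Tika's default, not taken from this diff):

```python
# Roughly the HTTP round-trip behind tika_parse: PUT the document to the
# tika-server /tika route and ask for plain text back. Endpoint and port
# are Tika defaults; the parser's actual client code is not shown here.
import httpx


def tika_parse_sketch(html: str, tika_url: str = "http://localhost:9998") -> str:
    resp = httpx.put(
        f"{tika_url}/tika",
        content=html.encode("utf-8"),
        headers={"Content-Type": "text/html", "Accept": "text/plain"},
        timeout=30.0,
    )
    resp.raise_for_status()
    return resp.text
```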
@@ -160,14 +203,17 @@ class TestParserLive:
         html_email_file: Path,
         merged_pdf_first: Path,
         merged_pdf_second: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
-            - Intermediary pdfs to be merged
+            - Intermediary PDFs to be merged
+            - An HTML email file
         WHEN:
-            - pdf generation is requested with html file requiring merging of pdfs
+            - PDF generation is requested with HTML file requiring merging
         THEN:
-            - gotenberg is called to merge files and the resulting file is returned
+            - Gotenberg shall be called to merge files
+            - The resulting merged PDF shall be returned
+            - The merged PDF shall contain text from both source PDFs
         """
         mock_generate_pdf_from_html = mocker.patch(
             "paperless_mail.parsers.MailDocumentParser.generate_pdf_from_html",
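The merge test exercises Gotenberg's PDF-merge route. At the HTTP level that call looks roughly like this (route per Gotenberg 7+'s documented API; URL and field names here are illustrative, not taken from the test):

```python
# Roughly the Gotenberg call behind the merge test: POST the PDFs to
# /forms/pdfengines/merge; Gotenberg merges them alphabetically by
# upload name and returns one PDF. URL and names are illustrative.
from pathlib import Path

import httpx


def merge_pdfs_sketch(first: Path, second: Path, out: Path) -> None:
    resp = httpx.post(
        "http://localhost:3000/forms/pdfengines/merge",
        files={
            "a_first.pdf": (first.name, first.read_bytes(), "application/pdf"),
            "z_second.pdf": (second.name, second.read_bytes(), "application/pdf"),
        },
        timeout=30.0,
    )
    resp.raise_for_status()
    out.write_bytes(resp.content)
```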
@@ -200,16 +246,17 @@ class TestParserLive:
         html_email_file: Path,
         html_email_pdf_file: Path,
         html_email_thumbnail_file: Path,
-    ):
+    ) -> None:
         """
         GIVEN:
-            - Fresh start
+            - An HTML email file
         WHEN:
-            - pdf generation from simple eml file is requested
+            - PDF generation from the email file is requested
         THEN:
-            - Gotenberg is called and the resulting file is returned and look as expected.
+            - Gotenberg shall be called to generate the PDF
+            - The archive PDF shall contain the expected content
+            - The generated thumbnail shall match the expected image hash
         """

         util_call_with_backoff(mail_parser.parse, [html_email_file, "message/rfc822"])

         # Check the archive PDF
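`util_call_with_backoff` retries the parse while Gotenberg and Tika finish warming up; its body is not part of this diff. A plausible shape for such a helper, matching the `func, args` call style used above:

```python
# Hypothetical shape of util_call_with_backoff(): retry a callable with
# exponential backoff while dependent services warm up. Only the call
# style above is known; the real implementation is not in this diff.
import time
from collections.abc import Callable


def util_call_with_backoff_sketch(func: Callable, args: list, *, attempts: int = 5) -> None:
    delay = 1.0
    for attempt in range(attempts):
        try:
            func(*args)
            return
        except Exception:
            if attempt == attempts - 1:
                raise
            time.sleep(delay)
            delay *= 2
```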
@@ -217,7 +264,7 @@ class TestParserLive:
         archive_text = extract_text(archive_path)
         expected_archive_text = extract_text(html_email_pdf_file)

-        # Archive includes the HTML content, so use in
+        # Archive includes the HTML content
         assert expected_archive_text in archive_text

         # Check the thumbnail
@@ -227,9 +274,12 @@ class TestParserLive:
         )
         generated_thumbnail_hash = self.imagehash(generated_thumbnail)

-        # The created pdf is not reproducible. But the converted image should always look the same.
+        # The created PDF is not reproducible, but the converted image
+        # should always look the same
        expected_hash = self.imagehash(html_email_thumbnail_file)

         assert generated_thumbnail_hash == expected_hash, (
-            f"PDF looks different. Check if {generated_thumbnail} looks weird."
+            f"PDF thumbnail differs from expected. "
+            f"Generated: {generated_thumbnail}, "
+            f"Hash: {generated_thumbnail_hash} vs {expected_hash}"
         )
Some files were not shown because too many files have changed in this diff.