Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-28 22:59:03 -06:00)

Compare commits: 5 commits, dev ... feature/pe

| Author | SHA1 | Date |
|---|---|---|
| | c9bb8dacfd | |
| | 13eac46f1c | |
| | 4d16d1c812 | |
| | d76b600a94 | |
| | 4ce07602a8 | |

@@ -89,18 +89,6 @@ Additional tasks are available for common maintenance operations:

- **Migrate Database**: To apply database migrations.
- **Create Superuser**: To create an admin user for the application.

## Committing from the Host Machine

The DevContainer automatically installs pre-commit hooks during setup. However, these hooks are configured for use inside the container.

If you want to commit changes from your host machine (outside the DevContainer), you need to set up pre-commit on your host. This installs it as a standalone tool.

```bash
uv tool install pre-commit && pre-commit install
```

After this, you can commit either from inside the DevContainer or from your host machine.
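
To confirm that the host-side hooks work before your first commit, you can run them manually (an optional sanity check, not part of the original setup steps):

```bash
# Run every configured pre-commit hook once over all tracked files on the host
pre-commit run --all-files
```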

## Let's Get Started!

Follow the steps above to get your development environment up and running. Happy coding!

@@ -3,31 +3,26 @@
"dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
"service": "paperless-development",
"workspaceFolder": "/usr/src/paperless/paperless-ngx",
"forwardPorts": [4200, 8000],
"containerEnv": {
"UV_CACHE_DIR": "/usr/src/paperless/paperless-ngx/.uv-cache"
},
"postCreateCommand": "/bin/bash -c 'rm -rf .venv/.* && uv sync --group dev && uv run pre-commit install'",
"postCreateCommand": "/bin/bash -c 'rm -rf .venv/.* && uv sync --group dev && uv run pre-commit install'",
"customizations": {
"vscode": {
"extensions": [
"mhutchie.git-graph",
"ms-python.python",
"ms-vscode.js-debug-nightly",
"eamodio.gitlens",
"yzhang.markdown-all-in-one",
"pnpm.pnpm"
],
"settings": {
"python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
"extensions": [
"mhutchie.git-graph",
"ms-python.python",
"ms-vscode.js-debug-nightly",
"eamodio.gitlens",
"yzhang.markdown-all-in-one"
],
"settings": {
"python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
}
},
"remoteUser": "paperless"
}
},
"remoteUser": "paperless"
}

@@ -33,7 +33,7 @@
"label": "Start: Frontend Angular",
"description": "Start the Frontend Angular Dev Server",
"type": "shell",
"command": "pnpm exec ng serve --host 0.0.0.0",
"command": "pnpm start",
"isBackground": true,
"options": {
"cwd": "${workspaceFolder}/src-ui"

@@ -174,22 +174,12 @@
{
"label": "Maintenance: Install Frontend Dependencies",
"description": "Install frontend (pnpm) dependencies",
"type": "shell",
"command": "pnpm install",
"type": "pnpm",
"script": "install",
"path": "src-ui",
"group": "clean",
"problemMatcher": [],
"options": {
"cwd": "${workspaceFolder}/src-ui"
},
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
}
"detail": "install dependencies from package"
},
{
"description": "Clean install frontend dependencies and build the frontend for production",

3 .github/workflows/ci-backend.yml vendored
@@ -75,6 +75,9 @@ jobs:
env:
NLTK_DATA: ${{ env.NLTK_DATA }}
PAPERLESS_CI_TEST: 1
PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \

19 .github/workflows/ci-docker.yml vendored
@@ -46,13 +46,14 @@ jobs:
id: ref
run: |
ref_name="${GITHUB_HEAD_REF:-$GITHUB_REF_NAME}"
# Sanitize by replacing / with - for use in tags and cache keys
sanitized_ref="${ref_name//\//-}"
# Sanitize by replacing / with - for cache keys
cache_ref="${ref_name//\//-}"

echo "ref_name=${ref_name}"
echo "sanitized_ref=${sanitized_ref}"
echo "cache_ref=${cache_ref}"

echo "name=${sanitized_ref}" >> $GITHUB_OUTPUT
echo "name=${ref_name}" >> $GITHUB_OUTPUT
echo "cache-ref=${cache_ref}" >> $GITHUB_OUTPUT
- name: Check push permissions
id: check-push
env:
@@ -61,14 +62,12 @@
# should-push: Should we push to GHCR?
# True for:
# 1. Pushes (tags/dev/beta) - filtered via the workflow triggers
# 2. Manual dispatch - always push to GHCR
# 3. Internal PRs where the branch name starts with 'feature-' or 'fix-'
# 2. Internal PRs where the branch name starts with 'feature-' - filtered here when a PR is synced

should_push="false"

if [[ "${{ github.event_name }}" == "push" ]]; then
should_push="true"
elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
should_push="true"
elif [[ "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.head.repo.full_name }}" == "${{ github.repository }}" ]]; then
if [[ "${REF_NAME}" == feature-* || "${REF_NAME}" == fix-* ]]; then
should_push="true"
@@ -140,9 +139,9 @@
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.should-push }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.name }}-${{ matrix.arch }}
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
cache-to: ${{ steps.check-push.outputs.should-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.name, matrix.arch) || '' }}
cache-to: ${{ steps.check-push.outputs.should-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
- name: Export digest
if: steps.check-push.outputs.should-push == 'true'
run: |

1 .gitignore vendored
@@ -40,7 +40,6 @@ htmlcov/
.coverage
.coverage.*
.cache
.uv-cache
nosetests.xml
coverage.xml
*,cover

@@ -37,7 +37,7 @@ repos:
- json
# See https://github.com/prettier/prettier/issues/15742 for the fork reason
- repo: https://github.com/rbubley/mirrors-prettier
rev: 'v3.8.1'
rev: 'v3.6.2'
hooks:
- id: prettier
types_or:
@@ -49,7 +49,7 @@ repos:
- 'prettier-plugin-organize-imports@4.1.0'
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.14
rev: v0.14.5
hooks:
- id: ruff-check
- id: ruff-format
@@ -76,7 +76,7 @@ repos:
hooks:
- id: shellcheck
- repo: https://github.com/google/yamlfmt
rev: v0.21.0
rev: v0.20.0
hooks:
- id: yamlfmt
exclude: "^src-ui/pnpm-lock.yaml"

@@ -23,24 +23,3 @@ services:
container_name: tika
network_mode: host
restart: unless-stopped
greenmail:
image: greenmail/standalone:2.1.8
hostname: greenmail
container_name: greenmail
environment:
# Enable only IMAP for now (SMTP available via 3025 if needed later)
GREENMAIL_OPTS: >-
-Dgreenmail.setup.test.imap -Dgreenmail.users=test@localhost:test -Dgreenmail.users.login=test@localhost -Dgreenmail.verbose
ports:
- "3143:3143" # IMAP
restart: unless-stopped
nginx:
image: docker.io/nginx:1.29-alpine
hostname: nginx
container_name: nginx
ports:
- "8080:8080"
restart: unless-stopped
volumes:
- ../../docs/assets:/usr/share/nginx/html/assets:ro
- ./test-nginx.conf:/etc/nginx/conf.d/default.conf:ro

@@ -1,14 +0,0 @@
server {
listen 8080;
server_name localhost;

root /usr/share/nginx/html;

# Enable CORS for test requests
add_header 'Access-Control-Allow-Origin' '*' always;
add_header 'Access-Control-Allow-Methods' 'GET, HEAD, OPTIONS' always;

location / {
try_files $uri $uri/ =404;
}
}

@@ -582,7 +582,7 @@ document.

### Detecting duplicates {#fuzzy_duplicate}

Paperless-ngx already catches and warns of exactly matching documents,
Paperless already catches and prevents upload of exactly matching documents,
however a new scan of an existing document may not produce an exact bit for bit
duplicate. But the content should be exact or close, allowing detection.

@@ -1617,16 +1617,6 @@ processing. This only has an effect if

Defaults to `0 1 * * *`, once per day.

## Share links

#### [`PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON=<cron expression>`](#PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON) {#PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON}

: Controls how often Paperless-ngx removes expired share link bundles (and their generated ZIP archives).

: If set to the string "disable", expired bundles are not cleaned up automatically.

Defaults to `0 2 * * *`, once per day at 02:00.
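
For example, to run the cleanup every six hours instead of once per day, the variable could be set like this (an illustrative value, not a recommendation from the documentation):

```bash
# e.g. in the environment passed to the webserver/worker containers
PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON="0 */6 * * *"
```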

## Binaries

There are a few external software packages that Paperless expects to

@@ -308,14 +308,12 @@ or using [email](#workflow-action-email) or [webhook](#workflow-action-webhook)

### Share Links

"Share links" are public links to files (or an archive of files) and can be created and managed under the 'Send' button on the document detail screen or from the bulk editor.
"Share links" are shareable public links to files and can be created and managed under the 'Send' button on the document detail screen.

- Share links do not require a user to login and thus link directly to a file or bundled download.
- Share links do not require a user to login and thus link directly to a file.
- Links are unique and are of the form `{paperless-url}/share/{randomly-generated-slug}`.
- Links can optionally have an expiration time set.
- After a link expires or is deleted users will be redirected to the regular paperless-ngx login.
- From the document detail screen you can create a share link for that single document.
- From the bulk editor you can create a **share link bundle** for any selection. Paperless-ngx prepares a ZIP archive in the background and exposes a single share link. You can revisit the "Manage share link bundles" dialog to monitor progress, retry failed bundles, or delete links.
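
Because share links need no authentication, the resulting URL can be fetched directly, for example with curl (a minimal sketch; substitute your own instance URL and the generated slug):

```bash
# Download the shared file (or the bundle ZIP) using the server-provided filename
curl -OJL "https://paperless.example.com/share/<randomly-generated-slug>"
```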

!!! tip

@@ -33,8 +33,6 @@
"**/coverage.json": true
},
"python.defaultInterpreterPath": ".venv/bin/python3",
"python.analysis.inlayHints.pytestParameters": true,
"python.testing.pytestEnabled": true,
},
"extensions": {
"recommendations": ["ms-python.python", "charliermarsh.ruff", "editorconfig.editorconfig"],

@@ -19,14 +19,14 @@ dependencies = [
"azure-ai-documentintelligence>=1.0.2",
"babel>=2.17",
"bleach~=6.3.0",
"celery[redis]~=5.6.2",
"celery[redis]~=5.5.1",
"channels~=4.2",
"channels-redis~=4.2",
"concurrent-log-handler~=0.9.25",
"dateparser~=1.2",
# WARNING: django does not use semver.
# Only patch versions are guaranteed to not introduce breaking changes.
"django~=5.2.10",
"django~=5.2.5",
"django-allauth[mfa,socialaccount]~=65.13.1",
"django-auditlog~=3.4.1",
"django-cachalot~=2.8.0",
@@ -79,7 +79,7 @@ dependencies = [
"torch~=2.9.1",
"tqdm~=4.67.1",
"watchfiles>=1.1.1",
"whitenoise~=6.11",
"whitenoise~=6.9",
"whoosh-reloaded>=2.7.5",
"zxing-cpp~=2.3.0",
]
@@ -88,13 +88,13 @@ optional-dependencies.mariadb = [
"mysqlclient~=2.2.7",
]
optional-dependencies.postgres = [
"psycopg[c,pool]==3.3",
"psycopg[c,pool]==3.2.12",
# Direct dependency for proper resolution of the pre-built wheels
"psycopg-c==3.3",
"psycopg-c==3.2.12",
"psycopg-pool==3.3",
]
optional-dependencies.webserver = [
"granian[uvloop]~=2.6.0",
"granian[uvloop]~=2.5.1",
]

[dependency-groups]
@@ -114,16 +114,15 @@ testing = [
"daphne",
"factory-boy~=3.3.1",
"imagehash",
"pytest~=9.0.0",
"pytest~=8.4.1",
"pytest-cov~=7.0.0",
"pytest-django~=4.11.1",
"pytest-env~=1.2.0",
"pytest-env",
"pytest-httpx",
"pytest-mock~=3.15.1",
#"pytest-randomly~=4.0.1",
"pytest-rerunfailures~=16.1",
"pytest-mock",
"pytest-rerunfailures",
"pytest-sugar",
"pytest-xdist~=3.8.0",
"pytest-xdist",
]

lint = [
@@ -152,7 +151,7 @@ typing = [
]

[tool.uv]
required-version = ">=0.9.0"
required-version = ">=0.5.14"
package = false
environments = [
"sys_platform == 'darwin'",
@@ -162,8 +161,8 @@ environments = [
[tool.uv.sources]
# Markers are chosen to select these almost exclusively when building the Docker image
psycopg-c = [
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
{ url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
]
zxing-cpp = [
{ url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
@@ -261,15 +260,11 @@ write-changes = true
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober,commitish"
skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"

[tool.pytest]
minversion = "9.0"
pythonpath = [ "src" ]

strict_config = true
strict_markers = true
strict_parametrization_ids = true
strict_xfail = true

[tool.pytest.ini_options]
minversion = "8.0"
pythonpath = [
"src",
]
testpaths = [
"src/documents/tests/",
"src/paperless/tests/",
@@ -280,7 +275,6 @@ testpaths = [
"src/paperless_remote/tests/",
"src/paperless_ai/tests",
]

addopts = [
"--pythonwarnings=all",
"--cov",
@@ -288,26 +282,15 @@ addopts = [
"--cov-report=xml",
"--numprocesses=auto",
"--maxprocesses=16",
"--dist=loadscope",
"--quiet",
"--durations=50",
"--durations-min=0.5",
"--junitxml=junit.xml",
"-o",
"junit_family=legacy",
"-o junit_family=legacy",
]

norecursedirs = [ "src/locale/", ".venv/", "src-ui/" ]

DJANGO_SETTINGS_MODULE = "paperless.settings"

markers = [
"live: Integration tests requiring external services (Gotenberg, Tika, nginx, etc)",
"nginx: Tests that make HTTP requests to the local nginx service",
"gotenberg: Tests requiring Gotenberg service",
"tika: Tests requiring Tika service",
"greenmail: Tests requiring Greenmail service",
]

[tool.pytest_env]
PAPERLESS_DISABLE_DBHANDLER = "true"
PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"

File diff suppressed because it is too large

@@ -259,7 +259,6 @@
</div>
</div>
</div>

<div class="col-xl-6 ps-xl-5">
<h5 class="mt-3" i18n>Bulk editing</h5>
<div class="row mb-3">
@@ -269,19 +268,6 @@
</div>
</div>

<h5 class="mt-3" i18n>PDF Editor</h5>
<div class="row">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Default editing mode</span>
</div>
<div class="col">
<select class="form-select" formControlName="pdfEditorDefaultEditMode">
<option [ngValue]="PdfEditorEditMode.Create" i18n>Create new document(s)</option>
<option [ngValue]="PdfEditorEditMode.Update" i18n>Update existing document</option>
</select>
</div>
</div>

<h5 class="mt-3" i18n>Notes</h5>
<div class="row mb-3">
<div class="col">

@@ -251,7 +251,7 @@ describe('SettingsComponent', () => {
expect(toastErrorSpy).toHaveBeenCalled()
expect(storeSpy).toHaveBeenCalled()
expect(appearanceSettingsSpy).not.toHaveBeenCalled()
expect(setSpy).toHaveBeenCalledTimes(32)
expect(setSpy).toHaveBeenCalledTimes(31)

// succeed
storeSpy.mockReturnValueOnce(of(true))

@@ -64,9 +64,8 @@ import { PermissionsGroupComponent } from '../../common/input/permissions/permis
import { PermissionsUserComponent } from '../../common/input/permissions/permissions-user/permissions-user.component'
import { SelectComponent } from '../../common/input/select/select.component'
import { PageHeaderComponent } from '../../common/page-header/page-header.component'
import { PdfEditorEditMode } from '../../common/pdf-editor/pdf-editor-edit-mode'
import { SystemStatusDialogComponent } from '../../common/system-status-dialog/system-status-dialog.component'
import { ZoomSetting } from '../../document-detail/zoom-setting'
import { ZoomSetting } from '../../document-detail/document-detail.component'
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'

enum SettingsNavIDs {
@@ -164,7 +163,6 @@ export class SettingsComponent
defaultPermsEditGroups: new FormControl(null),
useNativePdfViewer: new FormControl(null),
pdfViewerDefaultZoom: new FormControl(null),
pdfEditorDefaultEditMode: new FormControl(null),
documentEditingRemoveInboxTags: new FormControl(null),
documentEditingOverlayThumbnail: new FormControl(null),
documentDetailsHiddenFields: new FormControl([]),
@@ -198,8 +196,6 @@ export class SettingsComponent

public readonly ZoomSetting = ZoomSetting

public readonly PdfEditorEditMode = PdfEditorEditMode

public readonly documentDetailFieldOptions = documentDetailFieldOptions

get systemStatusHasErrors(): boolean {
@@ -318,9 +314,6 @@ export class SettingsComponent
pdfViewerDefaultZoom: this.settings.get(
SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING
),
pdfEditorDefaultEditMode: this.settings.get(
SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE
),
displayLanguage: this.settings.getLanguage(),
dateLocale: this.settings.get(SETTINGS_KEYS.DATE_LOCALE),
dateFormat: this.settings.get(SETTINGS_KEYS.DATE_FORMAT),
@@ -490,10 +483,6 @@ export class SettingsComponent
SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING,
this.settingsForm.value.pdfViewerDefaultZoom
)
this.settings.set(
SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE,
this.settingsForm.value.pdfEditorDefaultEditMode
)
this.settings.set(
SETTINGS_KEYS.DATE_LOCALE,
this.settingsForm.value.dateLocale

@@ -248,7 +248,7 @@ main {
}
}

@media screen and (min-width: 376px) and (max-width: 768px) {
@media screen and (min-width: 366px) and (max-width: 768px) {
.navbar-toggler {
// compensate for 2 buttons on the right
margin-right: 45px;

@@ -1,4 +0,0 @@
export enum PdfEditorEditMode {
Update = 'update',
Create = 'create',
}

@@ -8,11 +8,8 @@ import { FormsModule } from '@angular/forms'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { PDFDocumentProxy, PdfViewerModule } from 'ng2-pdf-viewer'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { DocumentService } from 'src/app/services/rest/document.service'
import { SettingsService } from 'src/app/services/settings.service'
import { ConfirmDialogComponent } from '../confirm-dialog/confirm-dialog.component'
import { PdfEditorEditMode } from './pdf-editor-edit-mode'

interface PageOperation {
page: number
@@ -22,6 +19,11 @@ interface PageOperation {
loaded?: boolean
}

export enum PdfEditorEditMode {
Update = 'update',
Create = 'create',
}

@Component({
selector: 'pngx-pdf-editor',
templateUrl: './pdf-editor.component.html',
@@ -37,15 +39,12 @@ export class PDFEditorComponent extends ConfirmDialogComponent {
public PdfEditorEditMode = PdfEditorEditMode

private documentService = inject(DocumentService)
private readonly settingsService = inject(SettingsService)
activeModal: NgbActiveModal = inject(NgbActiveModal)

documentID: number
pages: PageOperation[] = []
totalPages = 0
editMode: PdfEditorEditMode = this.settingsService.get(
SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE
)
editMode: PdfEditorEditMode = PdfEditorEditMode.Create
deleteOriginal: boolean = false
includeMetadata: boolean = true
@@ -1,129 +0,0 @@
|
||||
<div class="modal-header">
|
||||
<h4 class="modal-title">{{ title }}</h4>
|
||||
<button type="button" class="btn-close" aria-label="Close" (click)="cancel()"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
@if (!createdBundle) {
|
||||
<form [formGroup]="form" class="d-flex flex-column gap-3">
|
||||
<div>
|
||||
<p class="mb-1">
|
||||
<ng-container i18n>Selected documents:</ng-container>
|
||||
{{ selectionCount }}
|
||||
</p>
|
||||
@if (documentPreview.length > 0) {
|
||||
<ul class="list-unstyled small mb-0">
|
||||
@for (doc of documentPreview; track doc.id) {
|
||||
<li>
|
||||
<strong>{{ doc.title | documentTitle }}</strong>
|
||||
</li>
|
||||
}
|
||||
@if (selectionCount > documentPreview.length) {
|
||||
<li>
|
||||
<ng-container i18n>+ {{ selectionCount - documentPreview.length }} more…</ng-container>
|
||||
</li>
|
||||
}
|
||||
</ul>
|
||||
}
|
||||
</div>
|
||||
|
||||
<div class="d-flex align-items-center justify-content-between">
|
||||
<div class="input-group">
|
||||
<label class="input-group-text" for="expirationDays"><ng-container i18n>Expires</ng-container>:</label>
|
||||
<select class="form-select" id="expirationDays" formControlName="expirationDays">
|
||||
@for (option of expirationOptions; track option.value) {
|
||||
<option [ngValue]="option.value">{{ option.label }}</option>
|
||||
}
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-check form-switch w-100 ms-3">
|
||||
<input
|
||||
class="form-check-input"
|
||||
type="checkbox"
|
||||
role="switch"
|
||||
id="shareArchiveSwitch"
|
||||
formControlName="shareArchiveVersion"
|
||||
aria-checked="{{ shareArchiveVersion }}"
|
||||
/>
|
||||
<label class="form-check-label" for="shareArchiveSwitch" i18n>Share archive version (if available)</label>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
} @else {
|
||||
<div class="d-flex flex-column gap-3">
|
||||
<div class="alert alert-success mb-0" role="status">
|
||||
<h6 class="alert-heading mb-1" i18n>Share link bundle requested</h6>
|
||||
<p class="mb-0 small" i18n>
|
||||
You can copy the share link below or open the manager to monitor progress. The link will start working once the bundle is ready.
|
||||
</p>
|
||||
</div>
|
||||
<dl class="row mb-0 small">
|
||||
<dt class="col-sm-4" i18n>Status</dt>
|
||||
<dd class="col-sm-8">
|
||||
<span class="badge text-bg-secondary text-uppercase">{{ statusLabel(createdBundle.status) }}</span>
|
||||
</dd>
|
||||
<dt class="col-sm-4" i18n>Slug</dt>
|
||||
<dd class="col-sm-8"><code>{{ createdBundle.slug }}</code></dd>
|
||||
<dt class="col-sm-4" i18n>Link</dt>
|
||||
<dd class="col-sm-8">
|
||||
<div class="input-group input-group-sm">
|
||||
<input class="form-control" type="text" [value]="getShareUrl(createdBundle)" readonly>
|
||||
<button
|
||||
class="btn btn-outline-primary"
|
||||
type="button"
|
||||
(click)="copy(createdBundle)"
|
||||
>
|
||||
@if (copied) {
|
||||
<i-bs name="clipboard-check"></i-bs>
|
||||
}
|
||||
@if (!copied) {
|
||||
<i-bs name="clipboard"></i-bs>
|
||||
}
|
||||
<span class="visually-hidden" i18n>Copy link</span>
|
||||
</button>
|
||||
</div>
|
||||
</dd>
|
||||
<dt class="col-sm-4" i18n>Documents</dt>
|
||||
<dd class="col-sm-8">{{ createdBundle.document_count }}</dd>
|
||||
<dt class="col-sm-4" i18n>Expires</dt>
|
||||
<dd class="col-sm-8">
|
||||
@if (createdBundle.expiration) {
|
||||
{{ createdBundle.expiration | date: 'short' }}
|
||||
}
|
||||
@if (!createdBundle.expiration) {
|
||||
<span i18n>Never</span>
|
||||
}
|
||||
</dd>
|
||||
<dt class="col-sm-4" i18n>File version</dt>
|
||||
<dd class="col-sm-8">{{ fileVersionLabel(createdBundle.file_version) }}</dd>
|
||||
@if (createdBundle.size_bytes !== undefined && createdBundle.size_bytes !== null) {
|
||||
<dt class="col-sm-4" i18n>Size</dt>
|
||||
<dd class="col-sm-8">{{ createdBundle.size_bytes | fileSize }}</dd>
|
||||
}
|
||||
</dl>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<div class="d-flex align-items-center gap-2 w-100">
|
||||
<div class="text-light fst-italic small">
|
||||
<ng-container i18n>A zip file containing the selected documents will be created for this share link bundle. This process happens in the background and may take some time, especially for large bundles.</ng-container>
|
||||
</div>
|
||||
<button type="button" class="btn btn-outline-secondary btn-sm ms-auto" (click)="cancel()">{{ cancelBtnCaption }}</button>
|
||||
@if (createdBundle) {
|
||||
<button type="button" class="btn btn-outline-secondary btn-sm text-nowrap" (click)="openManage()" i18n>Manage share link bundles</button>
|
||||
}
|
||||
|
||||
@if (!createdBundle) {
|
||||
<button
|
||||
type="button"
|
||||
class="btn btn-primary btn-sm d-inline-flex align-items-center gap-2 text-nowrap"
|
||||
(click)="submit()"
|
||||
[disabled]="loading || !buttonsEnabled">
|
||||
@if (loading) {
|
||||
<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true"></span>
|
||||
}
|
||||
{{ btnCaption }}
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
@@ -1,161 +0,0 @@
|
||||
import { Clipboard } from '@angular/cdk/clipboard'
|
||||
import {
|
||||
ComponentFixture,
|
||||
TestBed,
|
||||
fakeAsync,
|
||||
tick,
|
||||
} from '@angular/core/testing'
|
||||
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
|
||||
import { FileVersion } from 'src/app/data/share-link'
|
||||
import {
|
||||
ShareLinkBundleStatus,
|
||||
ShareLinkBundleSummary,
|
||||
} from 'src/app/data/share-link-bundle'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ShareLinkBundleDialogComponent } from './share-link-bundle-dialog.component'
|
||||
|
||||
class MockToastService {
|
||||
showInfo = jest.fn()
|
||||
showError = jest.fn()
|
||||
}
|
||||
|
||||
describe('ShareLinkBundleDialogComponent', () => {
|
||||
let component: ShareLinkBundleDialogComponent
|
||||
let fixture: ComponentFixture<ShareLinkBundleDialogComponent>
|
||||
let clipboard: Clipboard
|
||||
let toastService: MockToastService
|
||||
let activeModal: NgbActiveModal
|
||||
let originalApiBaseUrl: string
|
||||
|
||||
beforeEach(() => {
|
||||
originalApiBaseUrl = environment.apiBaseUrl
|
||||
toastService = new MockToastService()
|
||||
|
||||
TestBed.configureTestingModule({
|
||||
imports: [
|
||||
ShareLinkBundleDialogComponent,
|
||||
NgxBootstrapIconsModule.pick(allIcons),
|
||||
],
|
||||
providers: [
|
||||
NgbActiveModal,
|
||||
{ provide: ToastService, useValue: toastService },
|
||||
],
|
||||
})
|
||||
|
||||
fixture = TestBed.createComponent(ShareLinkBundleDialogComponent)
|
||||
component = fixture.componentInstance
|
||||
clipboard = TestBed.inject(Clipboard)
|
||||
activeModal = TestBed.inject(NgbActiveModal)
|
||||
fixture.detectChanges()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllTimers()
|
||||
environment.apiBaseUrl = originalApiBaseUrl
|
||||
})
|
||||
|
||||
it('builds payload and emits confirm on submit', () => {
|
||||
const confirmSpy = jest.spyOn(component.confirmClicked, 'emit')
|
||||
component.documents = [
|
||||
{ id: 1, title: 'Doc 1' } as any,
|
||||
{ id: 2, title: 'Doc 2' } as any,
|
||||
]
|
||||
component.form.setValue({
|
||||
shareArchiveVersion: false,
|
||||
expirationDays: 3,
|
||||
})
|
||||
|
||||
component.submit()
|
||||
|
||||
expect(component.payload).toEqual({
|
||||
document_ids: [1, 2],
|
||||
file_version: FileVersion.Original,
|
||||
expiration_days: 3,
|
||||
})
|
||||
expect(component.buttonsEnabled).toBe(false)
|
||||
expect(confirmSpy).toHaveBeenCalled()
|
||||
|
||||
component.form.setValue({
|
||||
shareArchiveVersion: true,
|
||||
expirationDays: 7,
|
||||
})
|
||||
component.submit()
|
||||
|
||||
expect(component.payload).toEqual({
|
||||
document_ids: [1, 2],
|
||||
file_version: FileVersion.Archive,
|
||||
expiration_days: 7,
|
||||
})
|
||||
})
|
||||
|
||||
it('ignores submit when bundle already created', () => {
|
||||
component.createdBundle = { id: 1 } as ShareLinkBundleSummary
|
||||
const confirmSpy = jest.spyOn(component, 'confirm')
|
||||
component.submit()
|
||||
expect(confirmSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('limits preview to ten documents', () => {
|
||||
const docs = Array.from({ length: 12 }).map((_, index) => ({
|
||||
id: index + 1,
|
||||
}))
|
||||
component.documents = docs as any
|
||||
|
||||
expect(component.selectionCount).toBe(12)
|
||||
expect(component.documentPreview).toHaveLength(10)
|
||||
expect(component.documentPreview[0].id).toBe(1)
|
||||
})
|
||||
|
||||
it('copies share link and resets state after timeout', fakeAsync(() => {
|
||||
const copySpy = jest.spyOn(clipboard, 'copy').mockReturnValue(true)
|
||||
const bundle = {
|
||||
slug: 'bundle-slug',
|
||||
status: ShareLinkBundleStatus.Ready,
|
||||
} as ShareLinkBundleSummary
|
||||
|
||||
component.copy(bundle)
|
||||
|
||||
expect(copySpy).toHaveBeenCalledWith(component.getShareUrl(bundle))
|
||||
expect(component.copied).toBe(true)
|
||||
expect(toastService.showInfo).toHaveBeenCalled()
|
||||
|
||||
tick(3000)
|
||||
expect(component.copied).toBe(false)
|
||||
}))
|
||||
|
||||
it('generates share URLs based on API base URL', () => {
|
||||
environment.apiBaseUrl = 'https://example.com/api/'
|
||||
expect(
|
||||
component.getShareUrl({ slug: 'abc' } as ShareLinkBundleSummary)
|
||||
).toBe('https://example.com/share/abc')
|
||||
})
|
||||
|
||||
it('opens manage dialog when callback provided', () => {
|
||||
const manageSpy = jest.fn()
|
||||
component.onOpenManage = manageSpy
|
||||
component.openManage()
|
||||
expect(manageSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('falls back to cancel when manage callback missing', () => {
|
||||
const cancelSpy = jest.spyOn(component, 'cancel')
|
||||
component.onOpenManage = undefined
|
||||
component.openManage()
|
||||
expect(cancelSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('maps status and file version labels', () => {
|
||||
expect(component.statusLabel(ShareLinkBundleStatus.Processing)).toContain(
|
||||
'Processing'
|
||||
)
|
||||
expect(component.fileVersionLabel(FileVersion.Archive)).toContain('Archive')
|
||||
})
|
||||
|
||||
it('closes dialog when cancel invoked', () => {
|
||||
const closeSpy = jest.spyOn(activeModal, 'close')
|
||||
component.cancel()
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -1,118 +0,0 @@
|
||||
import { Clipboard } from '@angular/cdk/clipboard'
|
||||
import { CommonModule } from '@angular/common'
|
||||
import { Component, Input, inject } from '@angular/core'
|
||||
import { FormBuilder, FormGroup, ReactiveFormsModule } from '@angular/forms'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { Document } from 'src/app/data/document'
|
||||
import {
|
||||
FileVersion,
|
||||
SHARE_LINK_EXPIRATION_OPTIONS,
|
||||
} from 'src/app/data/share-link'
|
||||
import {
|
||||
SHARE_LINK_BUNDLE_FILE_VERSION_LABELS,
|
||||
SHARE_LINK_BUNDLE_STATUS_LABELS,
|
||||
ShareLinkBundleCreatePayload,
|
||||
ShareLinkBundleStatus,
|
||||
ShareLinkBundleSummary,
|
||||
} from 'src/app/data/share-link-bundle'
|
||||
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
|
||||
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ConfirmDialogComponent } from '../confirm-dialog/confirm-dialog.component'
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-share-link-bundle-dialog',
|
||||
templateUrl: './share-link-bundle-dialog.component.html',
|
||||
imports: [
|
||||
CommonModule,
|
||||
ReactiveFormsModule,
|
||||
NgxBootstrapIconsModule,
|
||||
FileSizePipe,
|
||||
DocumentTitlePipe,
|
||||
],
|
||||
providers: [],
|
||||
})
|
||||
export class ShareLinkBundleDialogComponent extends ConfirmDialogComponent {
|
||||
private readonly formBuilder = inject(FormBuilder)
|
||||
private readonly clipboard = inject(Clipboard)
|
||||
private readonly toastService = inject(ToastService)
|
||||
|
||||
private _documents: Document[] = []
|
||||
|
||||
selectionCount = 0
|
||||
documentPreview: Document[] = []
|
||||
form: FormGroup = this.formBuilder.group({
|
||||
shareArchiveVersion: true,
|
||||
expirationDays: [7],
|
||||
})
|
||||
payload: ShareLinkBundleCreatePayload | null = null
|
||||
|
||||
readonly expirationOptions = SHARE_LINK_EXPIRATION_OPTIONS
|
||||
|
||||
createdBundle: ShareLinkBundleSummary | null = null
|
||||
copied = false
|
||||
onOpenManage?: () => void
|
||||
readonly statuses = ShareLinkBundleStatus
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.loading = false
|
||||
this.title = $localize`Create share link bundle`
|
||||
this.btnCaption = $localize`Create link`
|
||||
}
|
||||
|
||||
@Input()
|
||||
set documents(docs: Document[]) {
|
||||
this._documents = docs.concat()
|
||||
this.selectionCount = this._documents.length
|
||||
this.documentPreview = this._documents.slice(0, 10)
|
||||
}
|
||||
|
||||
submit() {
|
||||
if (this.createdBundle) return
|
||||
this.payload = {
|
||||
document_ids: this._documents.map((doc) => doc.id),
|
||||
file_version: this.form.value.shareArchiveVersion
|
||||
? FileVersion.Archive
|
||||
: FileVersion.Original,
|
||||
expiration_days: this.form.value.expirationDays,
|
||||
}
|
||||
this.buttonsEnabled = false
|
||||
super.confirm()
|
||||
}
|
||||
|
||||
getShareUrl(bundle: ShareLinkBundleSummary): string {
|
||||
const apiURL = new URL(environment.apiBaseUrl)
|
||||
return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${
|
||||
bundle.slug
|
||||
}`
|
||||
}
|
||||
|
||||
copy(bundle: ShareLinkBundleSummary): void {
|
||||
const success = this.clipboard.copy(this.getShareUrl(bundle))
|
||||
if (success) {
|
||||
this.copied = true
|
||||
this.toastService.showInfo($localize`Share link copied to clipboard.`)
|
||||
setTimeout(() => {
|
||||
this.copied = false
|
||||
}, 3000)
|
||||
}
|
||||
}
|
||||
|
||||
openManage(): void {
|
||||
if (this.onOpenManage) {
|
||||
this.onOpenManage()
|
||||
} else {
|
||||
this.cancel()
|
||||
}
|
||||
}
|
||||
|
||||
statusLabel(status: ShareLinkBundleSummary['status']): string {
|
||||
return SHARE_LINK_BUNDLE_STATUS_LABELS[status] ?? status
|
||||
}
|
||||
|
||||
fileVersionLabel(version: FileVersion): string {
|
||||
return SHARE_LINK_BUNDLE_FILE_VERSION_LABELS[version] ?? version
|
||||
}
|
||||
}
|
||||
@@ -1,156 +0,0 @@
|
||||
<div class="modal-header">
|
||||
<h4 class="modal-title">{{ title }}</h4>
|
||||
<button type="button" class="btn-close" aria-label="Close" (click)="close()"></button>
|
||||
</div>
|
||||
|
||||
<div class="modal-body">
|
||||
@if (loading) {
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<div class="spinner-border spinner-border-sm" role="status"></div>
|
||||
<span i18n>Loading share link bundles…</span>
|
||||
</div>
|
||||
}
|
||||
@if (!loading && error) {
|
||||
<div class="alert alert-danger mb-0" role="alert">
|
||||
{{ error }}
|
||||
</div>
|
||||
}
|
||||
@if (!loading && !error) {
|
||||
<div class="d-flex justify-content-between align-items-center mb-2">
|
||||
<p class="mb-0 text-muted small">
|
||||
<ng-container i18n>Status updates every few seconds while bundles are being prepared.</ng-container>
|
||||
</p>
|
||||
</div>
|
||||
@if (bundles.length === 0) {
|
||||
<p class="mb-0 text-muted fst-italic" i18n>No share link bundles currently exist.</p>
|
||||
}
|
||||
@if (bundles.length > 0) {
|
||||
<div class="table-responsive">
|
||||
<table class="table table-sm align-middle mb-0">
|
||||
<thead>
|
||||
<tr>
|
||||
<th scope="col" i18n>Created</th>
|
||||
<th scope="col" i18n>Status</th>
|
||||
<th scope="col" i18n>Size</th>
|
||||
<th scope="col" i18n>Expires</th>
|
||||
<th scope="col" i18n>Documents</th>
|
||||
<th scope="col" i18n>File version</th>
|
||||
<th scope="col" class="text-end" i18n>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
@for (bundle of bundles; track bundle.id) {
|
||||
<tr>
|
||||
<td>
|
||||
<div>{{ bundle.created | date: 'short' }}</div>
|
||||
@if (bundle.built_at) {
|
||||
<div class="small text-muted">
|
||||
<ng-container i18n>Built:</ng-container> {{ bundle.built_at | date: 'short' }}
|
||||
</div>
|
||||
}
|
||||
</td>
|
||||
<td>
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
@if (bundle.status === statuses.Failed && bundle.last_error) {
|
||||
<button
|
||||
type="button"
|
||||
class="btn btn-link p-0 text-danger"
|
||||
[ngbPopover]="errorDetail"
|
||||
popoverClass="popover-sm"
|
||||
triggers="mouseover:mouseleave"
|
||||
placement="auto"
|
||||
aria-label="View error details"
|
||||
i18n-aria-label
|
||||
>
|
||||
<span class="badge text-bg-warning text-uppercase me-2">{{ statusLabel(bundle.status) }}</span>
|
||||
<i-bs name="exclamation-triangle-fill" class="text-warning"></i-bs>
|
||||
</button>
|
||||
<ng-template #errorDetail>
|
||||
@if (bundle.last_error.timestamp) {
|
||||
<div class="text-muted small mb-1">
|
||||
{{ bundle.last_error.timestamp | date: 'short' }}
|
||||
</div>
|
||||
}
|
||||
<h6>{{ bundle.last_error.exception_type || ($localize`Unknown error`) }}</h6>
|
||||
@if (bundle.last_error.message) {
|
||||
<pre class="text-muted small"><code>{{ bundle.last_error.message }}</code></pre>
|
||||
}
|
||||
</ng-template>
|
||||
}
|
||||
@if (bundle.status === statuses.Processing || bundle.status === statuses.Pending) {
|
||||
<span class="spinner-border spinner-border-sm" role="status"></span>
|
||||
}
|
||||
@if (bundle.status !== statuses.Failed) {
|
||||
<span class="badge text-bg-secondary text-uppercase">{{ statusLabel(bundle.status) }}</span>
|
||||
}
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
@if (bundle.size_bytes !== undefined && bundle.size_bytes !== null) {
|
||||
{{ bundle.size_bytes | fileSize }}
|
||||
}
|
||||
@if (bundle.size_bytes === undefined || bundle.size_bytes === null) {
|
||||
<span class="text-muted">—</span>
|
||||
}
|
||||
</td>
|
||||
<td>
|
||||
@if (bundle.expiration) {
|
||||
{{ bundle.expiration | date: 'short' }}
|
||||
}
|
||||
@if (!bundle.expiration) {
|
||||
<span i18n>Never</span>
|
||||
}
|
||||
</td>
|
||||
<td>{{ bundle.document_count }}</td>
|
||||
<td>{{ fileVersionLabel(bundle.file_version) }}</td>
|
||||
<td class="text-end">
|
||||
<div class="btn-group btn-group-sm">
|
||||
<button
|
||||
type="button"
|
||||
class="btn btn-outline-primary"
|
||||
[disabled]="bundle.status !== statuses.Ready"
|
||||
(click)="copy(bundle)"
|
||||
title="Copy share link"
|
||||
i18n-title
|
||||
>
|
||||
@if (copiedSlug === bundle.slug) {
|
||||
<i-bs name="clipboard-check"></i-bs>
|
||||
}
|
||||
@if (copiedSlug !== bundle.slug) {
|
||||
<i-bs name="clipboard"></i-bs>
|
||||
}
|
||||
<span class="visually-hidden" i18n>Copy share link</span>
|
||||
</button>
|
||||
@if (bundle.status === statuses.Failed) {
|
||||
<button
|
||||
type="button"
|
||||
class="btn btn-outline-warning"
|
||||
[disabled]="loading"
|
||||
(click)="retry(bundle)"
|
||||
>
|
||||
<i-bs name="arrow-clockwise"></i-bs>
|
||||
<span class="visually-hidden" i18n>Retry</span>
|
||||
</button>
|
||||
}
|
||||
<pngx-confirm-button
|
||||
buttonClasses="btn btn-sm btn-outline-danger"
|
||||
[disabled]="loading"
|
||||
(confirm)="delete(bundle)"
|
||||
iconName="trash"
|
||||
>
|
||||
<span class="visually-hidden" i18n>Delete share link bundle</span>
|
||||
</pngx-confirm-button>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
}
|
||||
}
|
||||
</div>
|
||||
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-outline-secondary btn-sm" (click)="close()" i18n>Close</button>
|
||||
</div>
|
||||

@@ -1,4 +0,0 @@
:host ::ng-deep .popover {
min-width: 300px;
max-width: 400px;
}
@@ -1,251 +0,0 @@
|
||||
import { Clipboard } from '@angular/cdk/clipboard'
|
||||
import {
|
||||
ComponentFixture,
|
||||
TestBed,
|
||||
fakeAsync,
|
||||
tick,
|
||||
} from '@angular/core/testing'
|
||||
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
|
||||
import { of, throwError } from 'rxjs'
|
||||
import { FileVersion } from 'src/app/data/share-link'
|
||||
import {
|
||||
ShareLinkBundleStatus,
|
||||
ShareLinkBundleSummary,
|
||||
} from 'src/app/data/share-link-bundle'
|
||||
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ShareLinkBundleManageDialogComponent } from './share-link-bundle-manage-dialog.component'
|
||||
|
||||
class MockShareLinkBundleService {
|
||||
listAllBundles = jest.fn()
|
||||
delete = jest.fn()
|
||||
rebuildBundle = jest.fn()
|
||||
}
|
||||
|
||||
class MockToastService {
|
||||
showInfo = jest.fn()
|
||||
showError = jest.fn()
|
||||
}
|
||||
|
||||
describe('ShareLinkBundleManageDialogComponent', () => {
|
||||
let component: ShareLinkBundleManageDialogComponent
|
||||
let fixture: ComponentFixture<ShareLinkBundleManageDialogComponent>
|
||||
let service: MockShareLinkBundleService
|
||||
let toastService: MockToastService
|
||||
let clipboard: Clipboard
|
||||
let activeModal: NgbActiveModal
|
||||
let originalApiBaseUrl: string
|
||||
|
||||
beforeEach(() => {
|
||||
service = new MockShareLinkBundleService()
|
||||
toastService = new MockToastService()
|
||||
originalApiBaseUrl = environment.apiBaseUrl
|
||||
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
service.delete.mockReturnValue(of(true))
|
||||
service.rebuildBundle.mockReturnValue(of(sampleBundle()))
|
||||
|
||||
TestBed.configureTestingModule({
|
||||
imports: [
|
||||
ShareLinkBundleManageDialogComponent,
|
||||
NgxBootstrapIconsModule.pick(allIcons),
|
||||
],
|
||||
providers: [
|
||||
NgbActiveModal,
|
||||
{ provide: ShareLinkBundleService, useValue: service },
|
||||
{ provide: ToastService, useValue: toastService },
|
||||
],
|
||||
})
|
||||
|
||||
fixture = TestBed.createComponent(ShareLinkBundleManageDialogComponent)
|
||||
component = fixture.componentInstance
|
||||
clipboard = TestBed.inject(Clipboard)
|
||||
activeModal = TestBed.inject(NgbActiveModal)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
component.ngOnDestroy()
|
||||
fixture.destroy()
|
||||
environment.apiBaseUrl = originalApiBaseUrl
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
const sampleBundle = (overrides: Partial<ShareLinkBundleSummary> = {}) =>
|
||||
({
|
||||
id: 1,
|
||||
slug: 'bundle-slug',
|
||||
created: new Date().toISOString(),
|
||||
document_count: 1,
|
||||
documents: [1],
|
||||
status: ShareLinkBundleStatus.Pending,
|
||||
file_version: FileVersion.Archive,
|
||||
last_error: undefined,
|
||||
...overrides,
|
||||
}) as ShareLinkBundleSummary
|
||||
|
||||
it('loads bundles on init and polls periodically', fakeAsync(() => {
|
||||
const bundles = [sampleBundle({ status: ShareLinkBundleStatus.Ready })]
|
||||
service.listAllBundles.mockReset()
|
||||
service.listAllBundles
|
||||
.mockReturnValueOnce(of(bundles))
|
||||
.mockReturnValue(of(bundles))
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
expect(service.listAllBundles).toHaveBeenCalledTimes(1)
|
||||
expect(component.bundles).toEqual(bundles)
|
||||
expect(component.loading).toBe(false)
|
||||
expect(component.error).toBeNull()
|
||||
|
||||
tick(5000)
|
||||
expect(service.listAllBundles).toHaveBeenCalledTimes(2)
|
||||
}))
|
||||
|
||||
it('handles errors when loading bundles', fakeAsync(() => {
|
||||
service.listAllBundles.mockReset()
|
||||
service.listAllBundles
|
||||
.mockReturnValueOnce(throwError(() => new Error('load fail')))
|
||||
.mockReturnValue(of([]))
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
expect(component.error).toContain('Failed to load share link bundles.')
|
||||
expect(toastService.showError).toHaveBeenCalled()
|
||||
expect(component.loading).toBe(false)
|
||||
|
||||
tick(5000)
|
||||
expect(service.listAllBundles).toHaveBeenCalledTimes(2)
|
||||
}))
|
||||
|
||||
it('copies bundle links when ready', fakeAsync(() => {
|
||||
jest.spyOn(clipboard, 'copy').mockReturnValue(true)
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
const readyBundle = sampleBundle({
|
||||
slug: 'ready-slug',
|
||||
status: ShareLinkBundleStatus.Ready,
|
||||
})
|
||||
component.copy(readyBundle)
|
||||
|
||||
expect(clipboard.copy).toHaveBeenCalledWith(
|
||||
component.getShareUrl(readyBundle)
|
||||
)
|
||||
expect(component.copiedSlug).toBe('ready-slug')
|
||||
expect(toastService.showInfo).toHaveBeenCalled()
|
||||
|
||||
tick(3000)
|
||||
expect(component.copiedSlug).toBeNull()
|
||||
}))
|
||||
|
||||
it('ignores copy requests for non-ready bundles', fakeAsync(() => {
|
||||
const copySpy = jest.spyOn(clipboard, 'copy')
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
component.copy(sampleBundle({ status: ShareLinkBundleStatus.Pending }))
|
||||
expect(copySpy).not.toHaveBeenCalled()
|
||||
}))
|
||||
|
||||
it('deletes bundles and refreshes list', fakeAsync(() => {
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
service.delete.mockReturnValue(of(true))
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
component.delete(sampleBundle())
|
||||
tick()
|
||||
|
||||
expect(service.delete).toHaveBeenCalled()
|
||||
expect(toastService.showInfo).toHaveBeenCalledWith(
|
||||
expect.stringContaining('deleted.')
|
||||
)
|
||||
expect(service.listAllBundles).toHaveBeenCalledTimes(2)
|
||||
expect(component.loading).toBe(false)
|
||||
}))
|
||||
|
||||
it('handles delete errors gracefully', fakeAsync(() => {
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
service.delete.mockReturnValue(throwError(() => new Error('delete fail')))
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
component.delete(sampleBundle())
|
||||
tick()
|
||||
|
||||
expect(toastService.showError).toHaveBeenCalled()
|
||||
expect(component.loading).toBe(false)
|
||||
}))
|
||||
|
||||
it('retries bundle build and replaces existing entry', fakeAsync(() => {
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
const updated = sampleBundle({ status: ShareLinkBundleStatus.Ready })
|
||||
service.rebuildBundle.mockReturnValue(of(updated))
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
component.bundles = [sampleBundle()]
|
||||
component.retry(component.bundles[0])
|
||||
tick()
|
||||
|
||||
expect(service.rebuildBundle).toHaveBeenCalledWith(updated.id)
|
||||
expect(component.bundles[0].status).toBe(ShareLinkBundleStatus.Ready)
|
||||
expect(toastService.showInfo).toHaveBeenCalled()
|
||||
}))
|
||||
|
||||
it('adds new bundle when retry returns unknown entry', fakeAsync(() => {
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
service.rebuildBundle.mockReturnValue(
|
||||
of(sampleBundle({ id: 99, slug: 'new-slug' }))
|
||||
)
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
component.bundles = [sampleBundle()]
|
||||
component.retry({ id: 99 } as ShareLinkBundleSummary)
|
||||
tick()
|
||||
|
||||
expect(component.bundles.find((bundle) => bundle.id === 99)).toBeTruthy()
|
||||
}))
|
||||
|
||||
it('handles retry errors', fakeAsync(() => {
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
service.rebuildBundle.mockReturnValue(throwError(() => new Error('fail')))
|
||||
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
component.retry(sampleBundle())
|
||||
tick()
|
||||
|
||||
expect(toastService.showError).toHaveBeenCalled()
|
||||
}))
|
||||
|
||||
it('maps helpers and closes dialog', fakeAsync(() => {
|
||||
service.listAllBundles.mockReturnValue(of([]))
|
||||
fixture.detectChanges()
|
||||
tick()
|
||||
|
||||
expect(component.statusLabel(ShareLinkBundleStatus.Processing)).toContain(
|
||||
'Processing'
|
||||
)
|
||||
expect(component.fileVersionLabel(FileVersion.Original)).toContain(
|
||||
'Original'
|
||||
)
|
||||
|
||||
environment.apiBaseUrl = 'https://example.com/api/'
|
||||
const url = component.getShareUrl(sampleBundle({ slug: 'sluggy' }))
|
||||
expect(url).toBe('https://example.com/share/sluggy')
|
||||
|
||||
const closeSpy = jest.spyOn(activeModal, 'close')
|
||||
component.close()
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
}))
|
||||
})
|
||||
@@ -1,177 +0,0 @@
|
||||
import { Clipboard } from '@angular/cdk/clipboard'
|
||||
import { CommonModule } from '@angular/common'
|
||||
import { Component, OnDestroy, OnInit, inject } from '@angular/core'
|
||||
import { NgbActiveModal, NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { Subject, catchError, of, switchMap, takeUntil, timer } from 'rxjs'
|
||||
import { FileVersion } from 'src/app/data/share-link'
|
||||
import {
|
||||
SHARE_LINK_BUNDLE_FILE_VERSION_LABELS,
|
||||
SHARE_LINK_BUNDLE_STATUS_LABELS,
|
||||
ShareLinkBundleStatus,
|
||||
ShareLinkBundleSummary,
|
||||
} from 'src/app/data/share-link-bundle'
|
||||
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
|
||||
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { LoadingComponentWithPermissions } from '../../loading-component/loading.component'
|
||||
import { ConfirmButtonComponent } from '../confirm-button/confirm-button.component'
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-share-link-bundle-manage-dialog',
|
||||
templateUrl: './share-link-bundle-manage-dialog.component.html',
|
||||
styleUrls: ['./share-link-bundle-manage-dialog.component.scss'],
|
||||
imports: [
|
||||
ConfirmButtonComponent,
|
||||
CommonModule,
|
||||
NgbPopoverModule,
|
||||
NgxBootstrapIconsModule,
|
||||
FileSizePipe,
|
||||
],
|
||||
})
|
||||
export class ShareLinkBundleManageDialogComponent
|
||||
extends LoadingComponentWithPermissions
|
||||
implements OnInit, OnDestroy
|
||||
{
|
||||
private readonly activeModal = inject(NgbActiveModal)
|
||||
private readonly shareLinkBundleService = inject(ShareLinkBundleService)
|
||||
private readonly toastService = inject(ToastService)
|
||||
private readonly clipboard = inject(Clipboard)
|
||||
|
||||
title = $localize`Share link bundles`
|
||||
|
||||
bundles: ShareLinkBundleSummary[] = []
|
||||
error: string | null = null
|
||||
copiedSlug: string | null = null
|
||||
|
||||
readonly statuses = ShareLinkBundleStatus
|
||||
readonly fileVersions = FileVersion
|
||||
|
||||
private readonly refresh$ = new Subject<boolean>()
|
||||
|
||||
ngOnInit(): void {
|
||||
this.refresh$
|
||||
.pipe(
|
||||
switchMap((silent) => {
|
||||
if (!silent) {
|
||||
this.loading = true
|
||||
}
|
||||
this.error = null
|
||||
return this.shareLinkBundleService.listAllBundles().pipe(
|
||||
catchError((error) => {
|
||||
if (!silent) {
|
||||
this.loading = false
|
||||
}
|
||||
this.error = $localize`Failed to load share link bundles.`
|
||||
this.toastService.showError(
|
||||
$localize`Error retrieving share link bundles.`,
|
||||
error
|
||||
)
|
||||
return of(null)
|
||||
})
|
||||
)
|
||||
}),
|
||||
takeUntil(this.unsubscribeNotifier)
|
||||
)
|
||||
.subscribe((results) => {
|
||||
if (results) {
|
||||
this.bundles = results
|
||||
this.copiedSlug = null
|
||||
}
|
||||
this.loading = false
|
||||
})
|
||||
|
||||
this.triggerRefresh(false)
|
||||
timer(5000, 5000)
|
||||
.pipe(takeUntil(this.unsubscribeNotifier))
|
||||
.subscribe(() => this.triggerRefresh(true))
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
super.ngOnDestroy()
|
||||
}
|
||||
|
||||
getShareUrl(bundle: ShareLinkBundleSummary): string {
|
||||
const apiURL = new URL(environment.apiBaseUrl)
|
||||
return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${
|
||||
bundle.slug
|
||||
}`
|
||||
}
|
||||
|
||||
copy(bundle: ShareLinkBundleSummary): void {
|
||||
if (bundle.status !== ShareLinkBundleStatus.Ready) {
|
||||
return
|
||||
}
|
||||
const success = this.clipboard.copy(this.getShareUrl(bundle))
|
||||
if (success) {
|
||||
this.copiedSlug = bundle.slug
|
||||
setTimeout(() => {
|
||||
this.copiedSlug = null
|
||||
}, 3000)
|
||||
this.toastService.showInfo($localize`Share link copied to clipboard.`)
|
||||
}
|
||||
}
|
||||
|
||||
delete(bundle: ShareLinkBundleSummary): void {
|
||||
this.error = null
|
||||
this.loading = true
|
||||
this.shareLinkBundleService.delete(bundle).subscribe({
|
||||
next: () => {
|
||||
this.toastService.showInfo($localize`Share link bundle deleted.`)
|
||||
this.triggerRefresh(false)
|
||||
},
|
||||
error: (e) => {
|
||||
this.loading = false
|
||||
this.toastService.showError(
|
||||
$localize`Error deleting share link bundle.`,
|
||||
e
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
retry(bundle: ShareLinkBundleSummary): void {
|
||||
this.error = null
|
||||
this.shareLinkBundleService.rebuildBundle(bundle.id).subscribe({
|
||||
next: (updated) => {
|
||||
this.toastService.showInfo(
|
||||
$localize`Share link bundle rebuild requested.`
|
||||
)
|
||||
this.replaceBundle(updated)
|
||||
},
|
||||
error: (e) => {
|
||||
this.toastService.showError($localize`Error requesting rebuild.`, e)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
statusLabel(status: ShareLinkBundleStatus): string {
|
||||
return SHARE_LINK_BUNDLE_STATUS_LABELS[status] ?? status
|
||||
}
|
||||
|
||||
fileVersionLabel(version: FileVersion): string {
|
||||
return SHARE_LINK_BUNDLE_FILE_VERSION_LABELS[version] ?? version
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.activeModal.close()
|
||||
}
|
||||
|
||||
private replaceBundle(updated: ShareLinkBundleSummary): void {
|
||||
const index = this.bundles.findIndex((bundle) => bundle.id === updated.id)
|
||||
if (index >= 0) {
|
||||
this.bundles = [
|
||||
...this.bundles.slice(0, index),
|
||||
updated,
|
||||
...this.bundles.slice(index + 1),
|
||||
]
|
||||
} else {
|
||||
this.bundles = [updated, ...this.bundles]
|
||||
}
|
||||
}
|
||||
|
||||
private triggerRefresh(silent: boolean): void {
|
||||
this.refresh$.next(silent)
|
||||
}
|
||||
}
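The component above drives its list with a refresh Subject piped through switchMap plus a 5-second timer, so background polls skip the loading spinner. A minimal standalone sketch of that silent-refresh pattern (the fetchBundles stub below is illustrative, not part of the branch):

```typescript
import { Subject, of, switchMap, timer } from 'rxjs'

// Hypothetical stand-in for any "fetch the latest list" request.
const fetchBundles = () => of([{ id: 1, status: 'ready' }])

const refresh$ = new Subject<boolean>() // payload = "silent" flag
let loading = false

refresh$
  .pipe(
    switchMap((silent) => {
      // Only user-triggered refreshes show the spinner; the periodic
      // timer below always passes silent = true.
      if (!silent) loading = true
      return fetchBundles()
    })
  )
  .subscribe((bundles) => {
    loading = false
    console.log(`refreshed ${bundles.length} bundles`)
  })

refresh$.next(false) // initial, visible load
timer(5000, 5000).subscribe(() => refresh$.next(true)) // silent background polls
```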
@@ -51,7 +51,7 @@
|
||||
<div class="input-group w-100 mt-2">
|
||||
<label class="input-group-text" for="addLink"><ng-container i18n>Expires</ng-container>:</label>
|
||||
<select class="form-select fs-6" [(ngModel)]="expirationDays">
|
||||
@for (option of expirationOptions; track option) {
|
||||
@for (option of EXPIRATION_OPTIONS; track option) {
|
||||
<option [ngValue]="option.value">{{ option.label }}</option>
|
||||
}
|
||||
</select>
|
||||
|
||||
@@ -4,11 +4,7 @@ import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { first } from 'rxjs'
|
||||
import {
|
||||
FileVersion,
|
||||
SHARE_LINK_EXPIRATION_OPTIONS,
|
||||
ShareLink,
|
||||
} from 'src/app/data/share-link'
|
||||
import { FileVersion, ShareLink } from 'src/app/data/share-link'
|
||||
import { ShareLinkService } from 'src/app/services/rest/share-link.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
@@ -25,7 +21,12 @@ export class ShareLinksDialogComponent implements OnInit {
|
||||
private toastService = inject(ToastService)
|
||||
private clipboard = inject(Clipboard)
|
||||
|
||||
readonly expirationOptions = SHARE_LINK_EXPIRATION_OPTIONS
|
||||
EXPIRATION_OPTIONS = [
|
||||
{ label: $localize`1 day`, value: 1 },
|
||||
{ label: $localize`7 days`, value: 7 },
|
||||
{ label: $localize`30 days`, value: 30 },
|
||||
{ label: $localize`Never`, value: null },
|
||||
]
|
||||
|
||||
@Input()
|
||||
title = $localize`Share Links`
|
||||
|
||||
@@ -69,8 +69,10 @@ import { environment } from 'src/environments/environment'
|
||||
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
|
||||
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
|
||||
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
|
||||
import { DocumentDetailComponent } from './document-detail.component'
|
||||
import { ZoomSetting } from './zoom-setting'
|
||||
import {
|
||||
DocumentDetailComponent,
|
||||
ZoomSetting,
|
||||
} from './document-detail.component'
|
||||
|
||||
const doc: Document = {
|
||||
id: 3,
|
||||
|
||||
@@ -106,15 +106,16 @@ import { TextComponent } from '../common/input/text/text.component'
|
||||
import { TextAreaComponent } from '../common/input/textarea/textarea.component'
|
||||
import { UrlComponent } from '../common/input/url/url.component'
|
||||
import { PageHeaderComponent } from '../common/page-header/page-header.component'
|
||||
import { PdfEditorEditMode } from '../common/pdf-editor/pdf-editor-edit-mode'
|
||||
import { PDFEditorComponent } from '../common/pdf-editor/pdf-editor.component'
|
||||
import {
|
||||
PDFEditorComponent,
|
||||
PdfEditorEditMode,
|
||||
} from '../common/pdf-editor/pdf-editor.component'
|
||||
import { ShareLinksDialogComponent } from '../common/share-links-dialog/share-links-dialog.component'
|
||||
import { SuggestionsDropdownComponent } from '../common/suggestions-dropdown/suggestions-dropdown.component'
|
||||
import { DocumentHistoryComponent } from '../document-history/document-history.component'
|
||||
import { DocumentNotesComponent } from '../document-notes/document-notes.component'
|
||||
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
|
||||
import { MetadataCollapseComponent } from './metadata-collapse/metadata-collapse.component'
|
||||
import { ZoomSetting } from './zoom-setting'
|
||||
|
||||
enum DocumentDetailNavIDs {
|
||||
Details = 1,
|
||||
@@ -136,6 +137,18 @@ enum ContentRenderType {
|
||||
TIFF = 'tiff',
|
||||
}
|
||||
|
||||
export enum ZoomSetting {
|
||||
PageFit = 'page-fit',
|
||||
PageWidth = 'page-width',
|
||||
Quarter = '.25',
|
||||
Half = '.5',
|
||||
ThreeQuarters = '.75',
|
||||
One = '1',
|
||||
OneAndHalf = '1.5',
|
||||
Two = '2',
|
||||
Three = '3',
|
||||
}
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-document-detail',
|
||||
templateUrl: './document-detail.component.html',
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
export enum ZoomSetting {
|
||||
PageFit = 'page-fit',
|
||||
PageWidth = 'page-width',
|
||||
Quarter = '.25',
|
||||
Half = '.5',
|
||||
ThreeQuarters = '.75',
|
||||
One = '1',
|
||||
OneAndHalf = '1.5',
|
||||
Two = '2',
|
||||
Three = '3',
|
||||
}
|
||||
@@ -96,36 +96,14 @@
|
||||
<button ngbDropdownItem (click)="mergeSelected()" [disabled]="!userCanAdd || list.selected.size < 2">
|
||||
<i-bs name="journals"></i-bs> <ng-container i18n>Merge</ng-container>
|
||||
</button>
|
||||
@if (emailEnabled) {
|
||||
<button ngbDropdownItem (click)="emailSelected()">
|
||||
<i-bs name="envelope"></i-bs> <ng-container i18n>Email</ng-container>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="btn-toolbar" ngbDropdown>
|
||||
<button
|
||||
class="btn btn-sm btn-outline-primary"
|
||||
id="dropdownSend"
|
||||
ngbDropdownToggle
|
||||
[disabled]="disabled || list.selected.size === 0"
|
||||
>
|
||||
<i-bs name="send"></i-bs>
|
||||
<div class="d-none d-sm-inline">
|
||||
<ng-container i18n>Send</ng-container>
|
||||
</div>
|
||||
</button>
|
||||
<div ngbDropdownMenu aria-labelledby="dropdownSend" class="shadow">
|
||||
<button ngbDropdownItem (click)="createShareLinkBundle()">
|
||||
<i-bs name="link"></i-bs> <ng-container i18n>Create a share link bundle</ng-container>
|
||||
</button>
|
||||
<button ngbDropdownItem (click)="manageShareLinkBundles()">
|
||||
<i-bs name="list-ul"></i-bs> <ng-container i18n>Manage share link bundles</ng-container>
|
||||
</button>
|
||||
<div class="dropdown-divider"></div>
|
||||
@if (emailEnabled) {
|
||||
<button ngbDropdownItem (click)="emailSelected()">
|
||||
<i-bs name="envelope"></i-bs> <ng-container i18n>Email</ng-container>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
<div class="btn-group btn-group-sm">
|
||||
<button class="btn btn-sm btn-outline-primary" [disabled]="awaitingDownload" (click)="downloadSelected()">
|
||||
@if (!awaitingDownload) {
|
||||
|
||||
@@ -3,7 +3,6 @@ import {
|
||||
HttpTestingController,
|
||||
provideHttpClientTesting,
|
||||
} from '@angular/common/http/testing'
|
||||
import { EventEmitter } from '@angular/core'
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing'
|
||||
import { By } from '@angular/platform-browser'
|
||||
import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap'
|
||||
@@ -26,7 +25,6 @@ import {
|
||||
SelectionData,
|
||||
} from 'src/app/services/rest/document.service'
|
||||
import { GroupService } from 'src/app/services/rest/group.service'
|
||||
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
|
||||
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
|
||||
import { TagService } from 'src/app/services/rest/tag.service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
@@ -40,8 +38,6 @@ import { EditDialogMode } from '../../common/edit-dialog/edit-dialog.component'
|
||||
import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
|
||||
import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
|
||||
import { FilterableDropdownComponent } from '../../common/filterable-dropdown/filterable-dropdown.component'
|
||||
import { ShareLinkBundleDialogComponent } from '../../common/share-link-bundle-dialog/share-link-bundle-dialog.component'
|
||||
import { ShareLinkBundleManageDialogComponent } from '../../common/share-link-bundle-manage-dialog/share-link-bundle-manage-dialog.component'
|
||||
import { BulkEditorComponent } from './bulk-editor.component'
|
||||
|
||||
const selectionData: SelectionData = {
|
||||
@@ -76,7 +72,6 @@ describe('BulkEditorComponent', () => {
|
||||
let storagePathService: StoragePathService
|
||||
let customFieldsService: CustomFieldsService
|
||||
let httpTestingController: HttpTestingController
|
||||
let shareLinkBundleService: ShareLinkBundleService
|
||||
|
||||
beforeEach(async () => {
|
||||
TestBed.configureTestingModule({
|
||||
@@ -157,15 +152,6 @@ describe('BulkEditorComponent', () => {
|
||||
}),
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: ShareLinkBundleService,
|
||||
useValue: {
|
||||
createBundle: jest.fn(),
|
||||
listAllBundles: jest.fn(),
|
||||
rebuildBundle: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
},
|
||||
},
|
||||
provideHttpClient(withInterceptorsFromDi()),
|
||||
provideHttpClientTesting(),
|
||||
],
|
||||
@@ -182,7 +168,6 @@ describe('BulkEditorComponent', () => {
|
||||
storagePathService = TestBed.inject(StoragePathService)
|
||||
customFieldsService = TestBed.inject(CustomFieldsService)
|
||||
httpTestingController = TestBed.inject(HttpTestingController)
|
||||
shareLinkBundleService = TestBed.inject(ShareLinkBundleService)
|
||||
|
||||
fixture = TestBed.createComponent(BulkEditorComponent)
|
||||
component = fixture.componentInstance
|
||||
@@ -1469,130 +1454,4 @@ describe('BulkEditorComponent', () => {
|
||||
`${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
|
||||
) // listAllFilteredIds
|
||||
})
|
||||
|
||||
it('should create share link bundle and enable manage callback', () => {
|
||||
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
|
||||
jest
|
||||
.spyOn(documentListViewService, 'documents', 'get')
|
||||
.mockReturnValue([{ id: 5 }, { id: 7 }] as any)
|
||||
jest
|
||||
.spyOn(documentListViewService, 'selected', 'get')
|
||||
.mockReturnValue(new Set([5, 7]))
|
||||
|
||||
const confirmClicked = new EventEmitter<void>()
|
||||
const modalRef: Partial<NgbModalRef> = {
|
||||
close: jest.fn(),
|
||||
componentInstance: {
|
||||
documents: [],
|
||||
confirmClicked,
|
||||
payload: {
|
||||
document_ids: [5, 7],
|
||||
file_version: 'archive',
|
||||
expiration_days: 7,
|
||||
},
|
||||
loading: false,
|
||||
buttonsEnabled: true,
|
||||
copied: false,
|
||||
},
|
||||
}
|
||||
|
||||
const openSpy = jest.spyOn(modalService, 'open')
|
||||
openSpy.mockReturnValueOnce(modalRef as NgbModalRef)
|
||||
openSpy.mockReturnValueOnce({} as NgbModalRef)
|
||||
;(shareLinkBundleService.createBundle as jest.Mock).mockReturnValueOnce(
|
||||
of({ id: 42 })
|
||||
)
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
|
||||
component.createShareLinkBundle()
|
||||
|
||||
expect(openSpy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
ShareLinkBundleDialogComponent,
|
||||
expect.objectContaining({ backdrop: 'static', size: 'lg' })
|
||||
)
|
||||
|
||||
const dialogInstance = modalRef.componentInstance as any
|
||||
expect(dialogInstance.documents).toEqual([{ id: 5 }, { id: 7 }])
|
||||
|
||||
confirmClicked.emit()
|
||||
|
||||
expect(shareLinkBundleService.createBundle).toHaveBeenCalledWith({
|
||||
document_ids: [5, 7],
|
||||
file_version: 'archive',
|
||||
expiration_days: 7,
|
||||
})
|
||||
expect(dialogInstance.loading).toBe(false)
|
||||
expect(dialogInstance.buttonsEnabled).toBe(false)
|
||||
expect(dialogInstance.createdBundle).toEqual({ id: 42 })
|
||||
expect(typeof dialogInstance.onOpenManage).toBe('function')
|
||||
expect(toastInfoSpy).toHaveBeenCalledWith(
|
||||
$localize`Share link bundle creation requested.`
|
||||
)
|
||||
|
||||
dialogInstance.onOpenManage()
|
||||
expect(modalRef.close).toHaveBeenCalled()
|
||||
expect(openSpy).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
ShareLinkBundleManageDialogComponent,
|
||||
expect.objectContaining({ backdrop: 'static', size: 'lg' })
|
||||
)
|
||||
openSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('should handle share link bundle creation errors', () => {
|
||||
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
|
||||
jest
|
||||
.spyOn(documentListViewService, 'documents', 'get')
|
||||
.mockReturnValue([{ id: 9 }] as any)
|
||||
jest
|
||||
.spyOn(documentListViewService, 'selected', 'get')
|
||||
.mockReturnValue(new Set([9]))
|
||||
|
||||
const confirmClicked = new EventEmitter<void>()
|
||||
const modalRef: Partial<NgbModalRef> = {
|
||||
componentInstance: {
|
||||
documents: [],
|
||||
confirmClicked,
|
||||
payload: {
|
||||
document_ids: [9],
|
||||
file_version: 'original',
|
||||
expiration_days: null,
|
||||
},
|
||||
loading: false,
|
||||
buttonsEnabled: true,
|
||||
},
|
||||
}
|
||||
|
||||
const openSpy = jest
|
||||
.spyOn(modalService, 'open')
|
||||
.mockReturnValue(modalRef as NgbModalRef)
|
||||
;(shareLinkBundleService.createBundle as jest.Mock).mockReturnValueOnce(
|
||||
throwError(() => new Error('bundle failure'))
|
||||
)
|
||||
const toastErrorSpy = jest.spyOn(toastService, 'showError')
|
||||
|
||||
component.createShareLinkBundle()
|
||||
|
||||
const dialogInstance = modalRef.componentInstance as any
|
||||
confirmClicked.emit()
|
||||
|
||||
expect(toastErrorSpy).toHaveBeenCalledWith(
|
||||
$localize`Share link bundle creation is not available yet.`,
|
||||
expect.any(Error)
|
||||
)
|
||||
expect(dialogInstance.loading).toBe(false)
|
||||
expect(dialogInstance.buttonsEnabled).toBe(true)
|
||||
openSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('should open share link bundle management dialog', () => {
|
||||
const openSpy = jest.spyOn(modalService, 'open')
|
||||
component.manageShareLinkBundles()
|
||||
expect(openSpy).toHaveBeenCalledWith(
|
||||
ShareLinkBundleManageDialogComponent,
|
||||
expect.objectContaining({ backdrop: 'static', size: 'lg' })
|
||||
)
|
||||
openSpy.mockRestore()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -33,7 +33,6 @@ import {
|
||||
SelectionDataItem,
|
||||
} from 'src/app/services/rest/document.service'
|
||||
import { SavedViewService } from 'src/app/services/rest/saved-view.service'
|
||||
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
|
||||
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
|
||||
import { TagService } from 'src/app/services/rest/tag.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
@@ -55,8 +54,6 @@ import {
|
||||
} from '../../common/filterable-dropdown/filterable-dropdown.component'
|
||||
import { ToggleableItemState } from '../../common/filterable-dropdown/toggleable-dropdown-button/toggleable-dropdown-button.component'
|
||||
import { PermissionsDialogComponent } from '../../common/permissions-dialog/permissions-dialog.component'
|
||||
import { ShareLinkBundleDialogComponent } from '../../common/share-link-bundle-dialog/share-link-bundle-dialog.component'
|
||||
import { ShareLinkBundleManageDialogComponent } from '../../common/share-link-bundle-manage-dialog/share-link-bundle-manage-dialog.component'
|
||||
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
|
||||
import { CustomFieldsBulkEditDialogComponent } from './custom-fields-bulk-edit-dialog/custom-fields-bulk-edit-dialog.component'
|
||||
|
||||
@@ -90,7 +87,6 @@ export class BulkEditorComponent
|
||||
private customFieldService = inject(CustomFieldsService)
|
||||
private permissionService = inject(PermissionsService)
|
||||
private savedViewService = inject(SavedViewService)
|
||||
private readonly shareLinkBundleService = inject(ShareLinkBundleService)
|
||||
|
||||
tagSelectionModel = new FilterableDropdownSelectionModel(true)
|
||||
correspondentSelectionModel = new FilterableDropdownSelectionModel()
|
||||
@@ -912,58 +908,6 @@ export class BulkEditorComponent
|
||||
return this.settings.get(SETTINGS_KEYS.EMAIL_ENABLED)
|
||||
}
|
||||
|
||||
createShareLinkBundle() {
|
||||
const modal = this.modalService.open(ShareLinkBundleDialogComponent, {
|
||||
backdrop: 'static',
|
||||
size: 'lg',
|
||||
})
|
||||
const dialog = modal.componentInstance as ShareLinkBundleDialogComponent
|
||||
const selectedDocuments = this.list.documents.filter((d) =>
|
||||
this.list.selected.has(d.id)
|
||||
)
|
||||
dialog.documents = selectedDocuments
|
||||
dialog.confirmClicked
|
||||
.pipe(takeUntil(this.unsubscribeNotifier))
|
||||
.subscribe(() => {
|
||||
dialog.loading = true
|
||||
dialog.buttonsEnabled = false
|
||||
this.shareLinkBundleService
|
||||
.createBundle(dialog.payload)
|
||||
.pipe(first())
|
||||
.subscribe({
|
||||
next: (result) => {
|
||||
dialog.loading = false
|
||||
dialog.buttonsEnabled = false
|
||||
dialog.createdBundle = result
|
||||
dialog.copied = false
|
||||
dialog.payload = null
|
||||
dialog.onOpenManage = () => {
|
||||
modal.close()
|
||||
this.manageShareLinkBundles()
|
||||
}
|
||||
this.toastService.showInfo(
|
||||
$localize`Share link bundle creation requested.`
|
||||
)
|
||||
},
|
||||
error: (error) => {
|
||||
dialog.loading = false
|
||||
dialog.buttonsEnabled = true
|
||||
this.toastService.showError(
|
||||
$localize`Share link bundle creation is not available yet.`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
manageShareLinkBundles() {
|
||||
this.modalService.open(ShareLinkBundleManageDialogComponent, {
|
||||
backdrop: 'static',
|
||||
size: 'lg',
|
||||
})
|
||||
}
|
||||
|
||||
emailSelected() {
|
||||
const allHaveArchiveVersion = this.list.documents
|
||||
.filter((d) => this.list.selected.has(d.id))
|
||||
|
||||
@@ -1,53 +0,0 @@
import { FileVersion } from './share-link'

export enum ShareLinkBundleStatus {
  Pending = 'pending',
  Processing = 'processing',
  Ready = 'ready',
  Failed = 'failed',
}

export type ShareLinkBundleError = {
  bundle_id: number
  message?: string
  exception_type?: string
  timestamp?: string
}

export interface ShareLinkBundleSummary {
  id: number
  slug: string
  created: string // Date
  expiration?: string // Date
  documents: number[]
  document_count: number
  file_version: FileVersion
  status: ShareLinkBundleStatus
  built_at?: string
  size_bytes?: number
  last_error?: ShareLinkBundleError
}

export interface ShareLinkBundleCreatePayload {
  document_ids: number[]
  file_version: FileVersion
  expiration_days: number | null
}

export const SHARE_LINK_BUNDLE_STATUS_LABELS: Record<
  ShareLinkBundleStatus,
  string
> = {
  [ShareLinkBundleStatus.Pending]: $localize`Pending`,
  [ShareLinkBundleStatus.Processing]: $localize`Processing`,
  [ShareLinkBundleStatus.Ready]: $localize`Ready`,
  [ShareLinkBundleStatus.Failed]: $localize`Failed`,
}

export const SHARE_LINK_BUNDLE_FILE_VERSION_LABELS: Record<
  FileVersion,
  string
> = {
  [FileVersion.Archive]: $localize`Archive`,
  [FileVersion.Original]: $localize`Original`,
}
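For orientation, a ShareLinkBundleSummary as consumed by the dialog above would look roughly like the made-up sample below; the label maps translate the enum values for display (this assumes Angular's $localize runtime is loaded, as it is in the app):

```typescript
import { FileVersion } from 'src/app/data/share-link'
import {
  SHARE_LINK_BUNDLE_STATUS_LABELS,
  ShareLinkBundleStatus,
  ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'

// Made-up sample data shaped like a ShareLinkBundleSummary from the API.
const bundle: ShareLinkBundleSummary = {
  id: 42,
  slug: 'abc123',
  created: '2024-01-01T00:00:00Z',
  documents: [5, 7],
  document_count: 2,
  file_version: FileVersion.Archive,
  status: ShareLinkBundleStatus.Ready,
  size_bytes: 123456,
}

// Enum values are mapped to translated labels for display.
console.log(SHARE_LINK_BUNDLE_STATUS_LABELS[bundle.status]) // "Ready"
```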
@@ -5,18 +5,6 @@ export enum FileVersion {
  Original = 'original',
}

export interface ShareLinkExpirationOption {
  label: string
  value: number | null
}

export const SHARE_LINK_EXPIRATION_OPTIONS: ShareLinkExpirationOption[] = [
  { label: $localize`1 day`, value: 1 },
  { label: $localize`7 days`, value: 7 },
  { label: $localize`30 days`, value: 30 },
  { label: $localize`Never`, value: null },
]

export interface ShareLink extends ObjectWithPermissions {
  created: string // Date

@@ -1,5 +1,3 @@
|
||||
import { PdfEditorEditMode } from '../components/common/pdf-editor/pdf-editor-edit-mode'
|
||||
import { ZoomSetting } from '../components/document-detail/zoom-setting'
|
||||
import { User } from './user'
|
||||
|
||||
export interface UiSettings {
|
||||
@@ -76,8 +74,6 @@ export const SETTINGS_KEYS = {
|
||||
'general-settings:document-details:hidden-fields',
|
||||
SEARCH_DB_ONLY: 'general-settings:search:db-only',
|
||||
SEARCH_FULL_TYPE: 'general-settings:search:more-link',
|
||||
PDF_EDITOR_DEFAULT_EDIT_MODE:
|
||||
'general-settings:document-editing:default-edit-mode',
|
||||
EMPTY_TRASH_DELAY: 'trash_delay',
|
||||
GMAIL_OAUTH_URL: 'gmail_oauth_url',
|
||||
OUTLOOK_OAUTH_URL: 'outlook_oauth_url',
|
||||
@@ -299,16 +295,11 @@ export const SETTINGS: UiSetting[] = [
|
||||
{
|
||||
key: SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING,
|
||||
type: 'string',
|
||||
default: ZoomSetting.PageWidth,
|
||||
default: 'page-width', // ZoomSetting from 'document-detail.component'
|
||||
},
|
||||
{
|
||||
key: SETTINGS_KEYS.AI_ENABLED,
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
key: SETTINGS_KEYS.PDF_EDITOR_DEFAULT_EDIT_MODE,
|
||||
type: 'string',
|
||||
default: PdfEditorEditMode.Create,
|
||||
},
|
||||
]
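Since ZoomSetting is a string enum, replacing the imported enum member with the plain 'page-width' literal above stores an identical value; a quick illustration of why the two are interchangeable at runtime:

```typescript
// String enum members compile to their literal values, so persisting
// 'page-width' directly round-trips cleanly with ZoomSetting.PageWidth.
enum ZoomSetting {
  PageFit = 'page-fit',
  PageWidth = 'page-width',
}

const defaultZoom: string = ZoomSetting.PageWidth
console.log(defaultZoom === 'page-width') // true
```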
@@ -1,41 +1,30 @@
|
||||
import {
|
||||
HttpClient,
|
||||
provideHttpClient,
|
||||
withInterceptors,
|
||||
} from '@angular/common/http'
|
||||
import {
|
||||
HttpTestingController,
|
||||
provideHttpClientTesting,
|
||||
} from '@angular/common/http/testing'
|
||||
import { HttpEvent, HttpRequest } from '@angular/common/http'
|
||||
import { TestBed } from '@angular/core/testing'
|
||||
import { of } from 'rxjs'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { withApiVersionInterceptor } from './api-version.interceptor'
|
||||
import { ApiVersionInterceptor } from './api-version.interceptor'
|
||||
|
||||
describe('ApiVersionInterceptor', () => {
|
||||
let httpClient: HttpClient
|
||||
let httpMock: HttpTestingController
|
||||
let interceptor: ApiVersionInterceptor
|
||||
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
providers: [
|
||||
provideHttpClient(withInterceptors([withApiVersionInterceptor])),
|
||||
provideHttpClientTesting(),
|
||||
],
|
||||
providers: [ApiVersionInterceptor],
|
||||
})
|
||||
|
||||
httpClient = TestBed.inject(HttpClient)
|
||||
httpMock = TestBed.inject(HttpTestingController)
|
||||
interceptor = TestBed.inject(ApiVersionInterceptor)
|
||||
})
|
||||
|
||||
it('should add api version to headers', () => {
|
||||
httpClient.get('https://example.com').subscribe()
|
||||
const request = httpMock.expectOne('https://example.com')
|
||||
const header = request.request.headers['lazyUpdate'][0]
|
||||
|
||||
expect(header.name).toEqual('Accept')
|
||||
expect(header.value).toEqual(
|
||||
`application/json; version=${environment.apiVersion}`
|
||||
)
|
||||
request.flush({})
|
||||
interceptor.intercept(new HttpRequest('GET', 'https://example.com'), {
|
||||
handle: (request) => {
|
||||
const header = request.headers['lazyUpdate'][0]
|
||||
expect(header.name).toEqual('Accept')
|
||||
expect(header.value).toEqual(
|
||||
`application/json; version=${environment.apiVersion}`
|
||||
)
|
||||
return of({} as HttpEvent<any>)
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,20 +1,27 @@
|
||||
import {
|
||||
HttpEvent,
|
||||
HttpHandlerFn,
|
||||
HttpInterceptorFn,
|
||||
HttpHandler,
|
||||
HttpInterceptor,
|
||||
HttpRequest,
|
||||
} from '@angular/common/http'
|
||||
import { Injectable } from '@angular/core'
|
||||
import { Observable } from 'rxjs'
|
||||
import { environment } from 'src/environments/environment'
|
||||
|
||||
export const withApiVersionInterceptor: HttpInterceptorFn = (
|
||||
request: HttpRequest<unknown>,
|
||||
next: HttpHandlerFn
|
||||
): Observable<HttpEvent<unknown>> => {
|
||||
request = request.clone({
|
||||
setHeaders: {
|
||||
Accept: `application/json; version=${environment.apiVersion}`,
|
||||
},
|
||||
})
|
||||
return next(request)
|
||||
@Injectable()
|
||||
export class ApiVersionInterceptor implements HttpInterceptor {
|
||||
constructor() {}
|
||||
|
||||
intercept(
|
||||
request: HttpRequest<unknown>,
|
||||
next: HttpHandler
|
||||
): Observable<HttpEvent<unknown>> {
|
||||
request = request.clone({
|
||||
setHeaders: {
|
||||
Accept: `application/json; version=${environment.apiVersion}`,
|
||||
},
|
||||
})
|
||||
|
||||
return next.handle(request)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,52 +1,35 @@
|
||||
import {
|
||||
HttpClient,
|
||||
provideHttpClient,
|
||||
withInterceptors,
|
||||
} from '@angular/common/http'
|
||||
import {
|
||||
HttpTestingController,
|
||||
provideHttpClientTesting,
|
||||
} from '@angular/common/http/testing'
|
||||
import { HttpEvent, HttpRequest } from '@angular/common/http'
|
||||
import { TestBed } from '@angular/core/testing'
|
||||
import { Meta } from '@angular/platform-browser'
|
||||
import { CookieService } from 'ngx-cookie-service'
|
||||
import { withCsrfInterceptor } from './csrf.interceptor'
|
||||
import { of } from 'rxjs'
|
||||
import { CsrfInterceptor } from './csrf.interceptor'
|
||||
|
||||
describe('CsrfInterceptor', () => {
|
||||
let interceptor: CsrfInterceptor
|
||||
let meta: Meta
|
||||
let cookieService: CookieService
|
||||
let httpClient: HttpClient
|
||||
let httpMock: HttpTestingController
|
||||
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
providers: [
|
||||
Meta,
|
||||
CookieService,
|
||||
provideHttpClient(withInterceptors([withCsrfInterceptor])),
|
||||
provideHttpClientTesting(),
|
||||
],
|
||||
providers: [CsrfInterceptor, Meta, CookieService],
|
||||
})
|
||||
|
||||
meta = TestBed.inject(Meta)
|
||||
cookieService = TestBed.inject(CookieService)
|
||||
httpClient = TestBed.inject(HttpClient)
|
||||
httpMock = TestBed.inject(HttpTestingController)
|
||||
interceptor = TestBed.inject(CsrfInterceptor)
|
||||
})
|
||||
|
||||
it('should get csrf token', () => {
|
||||
meta.addTag({ name: 'cookie_prefix', content: 'ngx-' }, true)
|
||||
|
||||
const cookieServiceSpy = jest.spyOn(cookieService, 'get')
|
||||
cookieServiceSpy.mockReturnValue('csrftoken')
|
||||
|
||||
httpClient.get('https://example.com').subscribe()
|
||||
const request = httpMock.expectOne('https://example.com')
|
||||
|
||||
expect(request.request.headers['lazyUpdate'][0]['name']).toEqual(
|
||||
'X-CSRFToken'
|
||||
)
|
||||
interceptor.intercept(new HttpRequest('GET', 'https://example.com'), {
|
||||
handle: (request) => {
|
||||
expect(request.headers['lazyUpdate'][0]['name']).toEqual('X-CSRFToken')
|
||||
return of({} as HttpEvent<any>)
|
||||
},
|
||||
})
|
||||
expect(cookieServiceSpy).toHaveBeenCalled()
|
||||
request.flush({})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,32 +1,36 @@
|
||||
import {
|
||||
HttpEvent,
|
||||
HttpHandlerFn,
|
||||
HttpInterceptorFn,
|
||||
HttpHandler,
|
||||
HttpInterceptor,
|
||||
HttpRequest,
|
||||
} from '@angular/common/http'
|
||||
import { inject } from '@angular/core'
|
||||
import { inject, Injectable } from '@angular/core'
|
||||
import { Meta } from '@angular/platform-browser'
|
||||
import { CookieService } from 'ngx-cookie-service'
|
||||
import { Observable } from 'rxjs'
|
||||
|
||||
export const withCsrfInterceptor: HttpInterceptorFn = (
|
||||
request: HttpRequest<unknown>,
|
||||
next: HttpHandlerFn
|
||||
): Observable<HttpEvent<unknown>> => {
|
||||
const cookieService: CookieService = inject(CookieService)
|
||||
const meta: Meta = inject(Meta)
|
||||
@Injectable()
|
||||
export class CsrfInterceptor implements HttpInterceptor {
|
||||
private cookieService: CookieService = inject(CookieService)
|
||||
private meta: Meta = inject(Meta)
|
||||
|
||||
let prefix = ''
|
||||
if (meta.getTag('name=cookie_prefix')) {
|
||||
prefix = meta.getTag('name=cookie_prefix').content
|
||||
intercept(
|
||||
request: HttpRequest<unknown>,
|
||||
next: HttpHandler
|
||||
): Observable<HttpEvent<unknown>> {
|
||||
let prefix = ''
|
||||
if (this.meta.getTag('name=cookie_prefix')) {
|
||||
prefix = this.meta.getTag('name=cookie_prefix').content
|
||||
}
|
||||
let csrfToken = this.cookieService.get(`${prefix}csrftoken`)
|
||||
if (csrfToken) {
|
||||
request = request.clone({
|
||||
setHeaders: {
|
||||
'X-CSRFToken': csrfToken,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return next.handle(request)
|
||||
}
|
||||
let csrfToken = cookieService.get(`${prefix}csrftoken`)
|
||||
if (csrfToken) {
|
||||
request = request.clone({
|
||||
setHeaders: {
|
||||
'X-CSRFToken': csrfToken,
|
||||
},
|
||||
})
|
||||
}
|
||||
return next(request)
|
||||
}
|
||||
|
||||
@@ -1,60 +0,0 @@
|
||||
import { HttpTestingController } from '@angular/common/http/testing'
|
||||
import { TestBed } from '@angular/core/testing'
|
||||
import { Subscription } from 'rxjs'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { commonAbstractPaperlessServiceTests } from './abstract-paperless-service.spec'
|
||||
import { ShareLinkBundleService } from './share-link-bundle.service'
|
||||
|
||||
const endpoint = 'share_link_bundles'
|
||||
|
||||
commonAbstractPaperlessServiceTests(endpoint, ShareLinkBundleService)
|
||||
|
||||
describe('ShareLinkBundleService', () => {
|
||||
let httpTestingController: HttpTestingController
|
||||
let service: ShareLinkBundleService
|
||||
let subscription: Subscription | undefined
|
||||
|
||||
beforeEach(() => {
|
||||
httpTestingController = TestBed.inject(HttpTestingController)
|
||||
service = TestBed.inject(ShareLinkBundleService)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
subscription?.unsubscribe()
|
||||
httpTestingController.verify()
|
||||
})
|
||||
|
||||
it('creates bundled share links', () => {
|
||||
const payload = {
|
||||
document_ids: [1, 2],
|
||||
file_version: 'archive',
|
||||
expiration_days: 7,
|
||||
}
|
||||
subscription = service.createBundle(payload as any).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/`
|
||||
)
|
||||
expect(req.request.method).toBe('POST')
|
||||
expect(req.request.body).toEqual(payload)
|
||||
req.flush({})
|
||||
})
|
||||
|
||||
it('rebuilds bundles', () => {
|
||||
subscription = service.rebuildBundle(12).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/12/rebuild/`
|
||||
)
|
||||
expect(req.request.method).toBe('POST')
|
||||
expect(req.request.body).toEqual({})
|
||||
req.flush({})
|
||||
})
|
||||
|
||||
it('lists bundles with expected parameters', () => {
|
||||
subscription = service.listAllBundles().subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/?page=1&page_size=1000&ordering=-created`
|
||||
)
|
||||
expect(req.request.method).toBe('GET')
|
||||
req.flush({ results: [] })
|
||||
})
|
||||
})
|
||||
@@ -1,41 +0,0 @@
import { Injectable } from '@angular/core'
import { Observable } from 'rxjs'
import { map } from 'rxjs/operators'
import {
  ShareLinkBundleCreatePayload,
  ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { AbstractNameFilterService } from './abstract-name-filter-service'

@Injectable({
  providedIn: 'root',
})
export class ShareLinkBundleService extends AbstractNameFilterService<ShareLinkBundleSummary> {
  constructor() {
    super()
    this.resourceName = 'share_link_bundles'
  }

  createBundle(
    payload: ShareLinkBundleCreatePayload
  ): Observable<ShareLinkBundleSummary> {
    this.clearCache()
    return this.http.post<ShareLinkBundleSummary>(
      this.getResourceUrl(),
      payload
    )
  }

  rebuildBundle(bundleId: number): Observable<ShareLinkBundleSummary> {
    this.clearCache()
    return this.http.post<ShareLinkBundleSummary>(
      this.getResourceUrl(bundleId, 'rebuild'),
      {}
    )
  }

  listAllBundles(): Observable<ShareLinkBundleSummary[]> {
    return this.list(1, 1000, 'created', true).pipe(
      map((response) => response.results)
    )
  }
}
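A rough end-to-end sketch of how a caller might use this service, assuming the create/rebuild/list endpoints behave as declared above; error handling is omitted and the polling is simplified:

```typescript
import { inject } from '@angular/core'
import { FileVersion } from 'src/app/data/share-link'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'

// Must run inside an Angular injection context (component constructor/field).
const bundles = inject(ShareLinkBundleService)

// Request a bundle of two documents (archive versions, expiring in 7 days).
bundles
  .createBundle({
    document_ids: [5, 7],
    file_version: FileVersion.Archive,
    expiration_days: 7,
  })
  .subscribe((created) => {
    // The backend builds the zip asynchronously, so re-list until the
    // bundle's status becomes 'ready' before surfacing its share URL.
    bundles.listAllBundles().subscribe((all) => {
      const mine = all.find((b) => b.id === created.id)
      console.log(mine?.status)
    })
  })
```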
@@ -1,16 +1,16 @@
|
||||
import {
|
||||
APP_INITIALIZER,
|
||||
enableProdMode,
|
||||
importProvidersFrom,
|
||||
inject,
|
||||
provideAppInitializer,
|
||||
provideZoneChangeDetection,
|
||||
} from '@angular/core'
|
||||
|
||||
import { DragDropModule } from '@angular/cdk/drag-drop'
|
||||
import { DatePipe, registerLocaleData } from '@angular/common'
|
||||
import {
|
||||
HTTP_INTERCEPTORS,
|
||||
provideHttpClient,
|
||||
withFetch,
|
||||
withInterceptors,
|
||||
withInterceptorsFromDi,
|
||||
} from '@angular/common/http'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
@@ -151,14 +151,15 @@ import { AppComponent } from './app/app.component'
|
||||
import { DirtyDocGuard } from './app/guards/dirty-doc.guard'
|
||||
import { DirtySavedViewGuard } from './app/guards/dirty-saved-view.guard'
|
||||
import { PermissionsGuard } from './app/guards/permissions.guard'
|
||||
import { withApiVersionInterceptor } from './app/interceptors/api-version.interceptor'
|
||||
import { withCsrfInterceptor } from './app/interceptors/csrf.interceptor'
|
||||
import { ApiVersionInterceptor } from './app/interceptors/api-version.interceptor'
|
||||
import { CsrfInterceptor } from './app/interceptors/csrf.interceptor'
|
||||
import { DocumentTitlePipe } from './app/pipes/document-title.pipe'
|
||||
import { FilterPipe } from './app/pipes/filter.pipe'
|
||||
import { UsernamePipe } from './app/pipes/username.pipe'
|
||||
import { SettingsService } from './app/services/settings.service'
|
||||
import { LocalizedDateParserFormatter } from './app/utils/ngb-date-parser-formatter'
|
||||
import { ISODateAdapter } from './app/utils/ngb-iso-date-adapter'
|
||||
import { environment } from './environments/environment'
|
||||
|
||||
import localeAf from '@angular/common/locales/af'
|
||||
import localeAr from '@angular/common/locales/ar'
|
||||
@@ -236,11 +237,11 @@ registerLocaleData(localeUk)
|
||||
registerLocaleData(localeZh)
|
||||
registerLocaleData(localeZhHant)
|
||||
|
||||
function initializeApp() {
|
||||
const settings = inject(SettingsService)
|
||||
return settings.initializeSettings()
|
||||
function initializeApp(settings: SettingsService) {
|
||||
return () => {
|
||||
return settings.initializeSettings()
|
||||
}
|
||||
}
|
||||
|
||||
const icons = {
|
||||
airplane,
|
||||
archive,
|
||||
@@ -362,6 +363,10 @@ const icons = {
|
||||
xLg,
|
||||
}
|
||||
|
||||
if (environment.production) {
|
||||
enableProdMode()
|
||||
}
|
||||
|
||||
bootstrapApplication(AppComponent, {
|
||||
providers: [
|
||||
provideZoneChangeDetection(),
|
||||
@@ -378,9 +383,24 @@ bootstrapApplication(AppComponent, {
|
||||
DragDropModule,
|
||||
NgxBootstrapIconsModule.pick(icons)
|
||||
),
|
||||
provideAppInitializer(initializeApp),
|
||||
{
|
||||
provide: APP_INITIALIZER,
|
||||
useFactory: initializeApp,
|
||||
deps: [SettingsService],
|
||||
multi: true,
|
||||
},
|
||||
DatePipe,
|
||||
CookieService,
|
||||
{
|
||||
provide: HTTP_INTERCEPTORS,
|
||||
useClass: CsrfInterceptor,
|
||||
multi: true,
|
||||
},
|
||||
{
|
||||
provide: HTTP_INTERCEPTORS,
|
||||
useClass: ApiVersionInterceptor,
|
||||
multi: true,
|
||||
},
|
||||
FilterPipe,
|
||||
DocumentTitlePipe,
|
||||
{ provide: NgbDateAdapter, useClass: ISODateAdapter },
|
||||
@@ -392,10 +412,6 @@ bootstrapApplication(AppComponent, {
|
||||
CorrespondentNamePipe,
|
||||
DocumentTypeNamePipe,
|
||||
StoragePathNamePipe,
|
||||
provideHttpClient(
|
||||
withInterceptorsFromDi(),
|
||||
withInterceptors([withCsrfInterceptor, withApiVersionInterceptor]),
|
||||
withFetch()
|
||||
),
|
||||
provideHttpClient(withInterceptorsFromDi(), withFetch()),
|
||||
],
|
||||
}).catch((err) => console.error(err))
|
||||
|
||||
@@ -13,7 +13,6 @@ from documents.models import PaperlessTask
|
||||
from documents.models import SavedView
|
||||
from documents.models import SavedViewFilterRule
|
||||
from documents.models import ShareLink
|
||||
from documents.models import ShareLinkBundle
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Tag
|
||||
from documents.tasks import update_document_parent_tags
|
||||
@@ -185,22 +184,6 @@ class ShareLinksAdmin(GuardedModelAdmin):
|
||||
return super().get_queryset(request).select_related("document__correspondent")
|
||||
|
||||
|
||||
class ShareLinkBundleAdmin(GuardedModelAdmin):
|
||||
list_display = ("created", "status", "expiration", "owner", "slug")
|
||||
list_filter = ("status", "created", "expiration", "owner")
|
||||
search_fields = ("slug",)
|
||||
|
||||
def get_queryset(self, request): # pragma: no cover
|
||||
return (
|
||||
super()
|
||||
.get_queryset(request)
|
||||
.select_related("owner")
|
||||
.prefetch_related(
|
||||
"documents",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class CustomFieldsAdmin(GuardedModelAdmin):
|
||||
fields = ("name", "created", "data_type")
|
||||
readonly_fields = ("created", "data_type")
|
||||
@@ -232,7 +215,6 @@ admin.site.register(StoragePath, StoragePathAdmin)
|
||||
admin.site.register(PaperlessTask, TaskAdmin)
|
||||
admin.site.register(Note, NotesAdmin)
|
||||
admin.site.register(ShareLink, ShareLinksAdmin)
|
||||
admin.site.register(ShareLinkBundle, ShareLinkBundleAdmin)
|
||||
admin.site.register(CustomField, CustomFieldsAdmin)
|
||||
admin.site.register(CustomFieldInstance, CustomFieldInstancesAdmin)
|
||||
|
||||
|
||||
@@ -39,7 +39,6 @@ from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import PaperlessTask
|
||||
from documents.models import ShareLink
|
||||
from documents.models import ShareLinkBundle
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Tag
|
||||
|
||||
@@ -797,29 +796,6 @@ class ShareLinkFilterSet(FilterSet):
|
||||
}
|
||||
|
||||
|
||||
class ShareLinkBundleFilterSet(FilterSet):
|
||||
documents = Filter(method="filter_documents")
|
||||
|
||||
class Meta:
|
||||
model = ShareLinkBundle
|
||||
fields = {
|
||||
"created": DATETIME_KWARGS,
|
||||
"expiration": DATETIME_KWARGS,
|
||||
"status": ["exact"],
|
||||
}
|
||||
|
||||
def filter_documents(self, queryset, name, value):
|
||||
ids = []
|
||||
if value:
|
||||
try:
|
||||
ids = [int(item) for item in value.split(",") if item]
|
||||
except ValueError:
|
||||
return queryset.none()
|
||||
if not ids:
|
||||
return queryset
|
||||
return queryset.filter(documents__in=ids).distinct()
|
||||
|
||||
|
||||
class PaperlessTaskFilterSet(FilterSet):
|
||||
acknowledged = BooleanFilter(
|
||||
label="Acknowledged",
|
||||
|
||||
@@ -501,22 +501,9 @@ class Command(BaseCommand):
|
||||
stability_timeout_ms = int(stability_delay * 1000)
|
||||
testing_timeout_ms = int(self.testing_timeout_s * 1000)
|
||||
|
||||
# Calculate appropriate timeout for watch loop
|
||||
# In polling mode, rust_timeout must be significantly longer than poll_delay_ms
|
||||
# to ensure poll cycles can complete before timing out
|
||||
if is_testing:
|
||||
if use_polling:
|
||||
# For polling: timeout must be at least 3x the poll interval to allow
|
||||
# multiple poll cycles. This prevents timeouts from interfering with
|
||||
# the polling mechanism.
|
||||
min_polling_timeout_ms = poll_delay_ms * 3
|
||||
timeout_ms = max(min_polling_timeout_ms, testing_timeout_ms)
|
||||
else:
|
||||
# For native watching, use short timeout to check stop flag
|
||||
timeout_ms = testing_timeout_ms
|
||||
else:
|
||||
# Not testing, wait indefinitely for first event
|
||||
timeout_ms = 0
|
||||
# Start with no timeout (wait indefinitely for first event)
|
||||
# unless in testing mode
|
||||
timeout_ms = testing_timeout_ms if is_testing else 0
|
||||
|
||||
self.stop_flag.clear()
|
||||
|
||||
@@ -556,14 +543,8 @@ class Command(BaseCommand):
|
||||
# Check pending files at stability interval
|
||||
timeout_ms = stability_timeout_ms
|
||||
elif is_testing:
|
||||
# In testing, use appropriate timeout based on watch mode
|
||||
if use_polling:
|
||||
# For polling: ensure timeout allows polls to complete
|
||||
min_polling_timeout_ms = poll_delay_ms * 3
|
||||
timeout_ms = max(min_polling_timeout_ms, testing_timeout_ms)
|
||||
else:
|
||||
# For native watching, use short timeout to check stop flag
|
||||
timeout_ms = testing_timeout_ms
|
||||
# In testing, use short timeout to check stop flag
|
||||
timeout_ms = testing_timeout_ms
|
||||
else: # pragma: nocover
|
||||
# No pending files, wait indefinitely
|
||||
timeout_ms = 0
|
||||
|
||||
src/documents/management/commands/document_perf_benchmark.py (new file, 598 lines)
@@ -0,0 +1,598 @@
|
||||
import math
|
||||
import uuid
|
||||
from time import perf_counter
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.base import CommandError
|
||||
from django.db import reset_queries
|
||||
from django.db.models import Count
|
||||
from django.db.models import Q
|
||||
from django.db.models import Subquery
|
||||
from guardian.shortcuts import assign_perm
|
||||
|
||||
from documents.models import CustomField
|
||||
from documents.models import CustomFieldInstance
|
||||
from documents.models import Document
|
||||
from documents.models import Tag
|
||||
from documents.permissions import get_objects_for_user_owner_aware
|
||||
from documents.permissions import permitted_document_ids
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
# e.g. manage.py document_perf_benchmark --documents 500000 --chunk-size 5000 --tags 40 --tags-per-doc 3 --custom-fields 6 --custom-fields-per-doc 2
|
||||
help = (
|
||||
"Seed a synthetic dataset and benchmark permission-filtered document queries "
|
||||
"for superusers vs non-superusers."
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--documents",
|
||||
type=int,
|
||||
default=10000,
|
||||
help="Total documents to generate (default: 10,000)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--owner-ratio",
|
||||
type=float,
|
||||
default=0.6,
|
||||
help="Fraction owned by the benchmarked user (default: 0.6)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--unowned-ratio",
|
||||
type=float,
|
||||
default=0.1,
|
||||
help="Fraction of unowned documents (default: 0.1)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--shared-ratio",
|
||||
type=float,
|
||||
default=0.25,
|
||||
help=(
|
||||
"Fraction of other-user documents that are shared via object perms "
|
||||
"with the benchmarked user (default: 0.25)"
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--chunk-size",
|
||||
type=int,
|
||||
default=2000,
|
||||
help="Bulk create size for documents (default: 2000)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--iterations",
|
||||
type=int,
|
||||
default=3,
|
||||
help="Number of timing runs per query shape (default: 3)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--prefix",
|
||||
default="perf-benchmark",
|
||||
help="Title prefix used to mark generated documents (default: perf-benchmark)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--username",
|
||||
default="perf_user",
|
||||
help="Username of the non-superuser to benchmark (default: perf_user)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--other-username",
|
||||
default="perf_owner",
|
||||
help="Username used for documents not owned by the benchmarked user (default: perf_owner)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--super-username",
|
||||
default="perf_admin",
|
||||
help="Username of the superuser baseline (default: perf_admin)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tags",
|
||||
type=int,
|
||||
default=0,
|
||||
help="Number of tags to create and assign (default: 0)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tags-per-doc",
|
||||
type=int,
|
||||
default=1,
|
||||
help="How many tags to attach to each document (default: 1)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--custom-fields",
|
||||
type=int,
|
||||
default=0,
|
||||
help="Number of string custom fields to create (default: 0)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--custom-fields-per-doc",
|
||||
type=int,
|
||||
default=1,
|
||||
help="How many custom field instances per document (default: 1)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--skip-tags",
|
||||
action="store_true",
|
||||
help="Skip tag document_count benchmarks (useful for large datasets on Postgres)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--skip-custom-fields",
|
||||
action="store_true",
|
||||
help="Skip custom field document_count benchmarks",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--reuse-existing",
|
||||
action="store_true",
|
||||
help="Keep previously generated documents with the given prefix instead of recreating",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--cleanup",
|
||||
action="store_true",
|
||||
help="Delete previously generated documents with the given prefix and exit",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# keep options for downstream checks
|
||||
self.options = options
|
||||
|
||||
document_total = options["documents"]
|
||||
owner_ratio = options["owner_ratio"]
|
||||
unowned_ratio = options["unowned_ratio"]
|
||||
shared_ratio = options["shared_ratio"]
|
||||
chunk_size = options["chunk_size"]
|
||||
iterations = options["iterations"]
|
||||
prefix = options["prefix"]
|
||||
tags = options["tags"]
|
||||
tags_per_doc = options["tags_per_doc"]
|
||||
custom_fields = options["custom_fields"]
|
||||
custom_fields_per_doc = options["custom_fields_per_doc"]
|
||||
|
||||
self._validate_ratios(owner_ratio, unowned_ratio)
|
||||
if tags_per_doc < 0 or custom_fields_per_doc < 0:
|
||||
raise CommandError("Per-document counts must be non-negative")
|
||||
|
||||
target_user, other_user, superuser = self._ensure_users(options)
|
||||
|
||||
skip_seed = False
|
||||
|
||||
if options["cleanup"]:
|
||||
removed = self._cleanup(prefix)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Removed {removed} generated documents"),
|
||||
)
|
||||
return
|
||||
|
||||
if not options["reuse_existing"]:
|
||||
removed = self._cleanup(prefix)
|
||||
if removed:
|
||||
self.stdout.write(f"Removed existing generated documents: {removed}")
|
||||
else:
|
||||
existing = Document.objects.filter(title__startswith=prefix).count()
|
||||
if existing:
|
||||
skip_seed = True
|
||||
self.stdout.write(
|
||||
f"Reusing existing dataset with prefix '{prefix}': {existing} docs",
|
||||
)
|
||||
|
||||
if skip_seed:
|
||||
dataset_size = Document.objects.filter(title__startswith=prefix).count()
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"Dataset ready (reused): {dataset_size} docs | prefix={prefix}",
|
||||
),
|
||||
)
|
||||
else:
|
||||
self.stdout.write(
|
||||
f"Seeding {document_total} documents (owner_ratio={owner_ratio}, "
|
||||
f"unowned_ratio={unowned_ratio}, shared_ratio={shared_ratio})",
|
||||
)
|
||||
created_counts = self._seed_documents(
|
||||
total=document_total,
|
||||
owner_ratio=owner_ratio,
|
||||
unowned_ratio=unowned_ratio,
|
||||
shared_ratio=shared_ratio,
|
||||
chunk_size=chunk_size,
|
||||
prefix=prefix,
|
||||
target_user=target_user,
|
||||
other_user=other_user,
|
||||
)
|
||||
|
||||
created_tags = []
|
||||
if tags:
|
||||
created_tags = self._seed_tags(prefix=prefix, count=tags)
|
||||
if tags_per_doc and created_tags:
|
||||
self._assign_tags_to_documents(
|
||||
prefix=prefix,
|
||||
tags=created_tags,
|
||||
tags_per_doc=tags_per_doc,
|
||||
chunk_size=chunk_size,
|
||||
)
|
||||
|
||||
created_custom_fields = []
|
||||
if custom_fields:
|
||||
created_custom_fields = self._seed_custom_fields(prefix, custom_fields)
|
||||
if custom_fields_per_doc and created_custom_fields:
|
||||
self._seed_custom_field_instances(
|
||||
prefix=prefix,
|
||||
custom_fields=created_custom_fields,
|
||||
per_doc=custom_fields_per_doc,
|
||||
chunk_size=chunk_size,
|
||||
)
|
||||
|
||||
dataset_size = Document.objects.filter(title__startswith=prefix).count()
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"Dataset ready: {dataset_size} docs | owned by target {created_counts['owned']} | "
|
||||
f"owned by other {created_counts['other_owned']} | unowned {created_counts['unowned']} | "
|
||||
f"shared-perms {created_counts['shared']} | tags {len(created_tags)} | "
|
||||
f"custom fields {len(created_custom_fields)}",
|
||||
),
|
||||
)
|
||||
|
||||
self.stdout.write("\nRunning benchmarks...\n")
|
||||
self._run_benchmarks(
|
||||
iterations=iterations,
|
||||
target_user=target_user,
|
||||
superuser=superuser,
|
||||
prefix=prefix,
|
||||
)
|
||||
|
||||
def _validate_ratios(self, owner_ratio: float, unowned_ratio: float):
|
||||
if owner_ratio < 0 or unowned_ratio < 0:
|
||||
raise CommandError("Ratios must be non-negative")
|
||||
if owner_ratio + unowned_ratio > 1:
|
||||
raise CommandError("owner-ratio + unowned-ratio cannot exceed 1.0")
|
||||
|
||||
def _ensure_users(self, options):
|
||||
User = get_user_model()
|
||||
target_user, _ = User.objects.get_or_create(
|
||||
username=options["username"],
|
||||
defaults={"email": "perf_user@example.com"},
|
||||
)
|
||||
other_user, _ = User.objects.get_or_create(
|
||||
username=options["other_username"],
|
||||
defaults={"email": "perf_owner@example.com"},
|
||||
)
|
||||
superuser, _ = User.objects.get_or_create(
|
||||
username=options["super_username"],
|
||||
defaults={
|
||||
"email": "perf_admin@example.com",
|
||||
"is_staff": True,
|
||||
"is_superuser": True,
|
||||
},
|
||||
)
|
||||
return target_user, other_user, superuser
|
||||
|
||||
def _cleanup(self, prefix: str) -> int:
|
||||
docs_qs = Document.global_objects.filter(title__startswith=prefix)
|
||||
doc_count = docs_qs.count()
|
||||
if doc_count:
|
||||
docs_qs.hard_delete()
|
||||
|
||||
tag_count = Tag.objects.filter(name__startswith=prefix).count()
|
||||
if tag_count:
|
||||
Tag.objects.filter(name__startswith=prefix).delete()
|
||||
|
||||
cf_qs = CustomField.objects.filter(name__startswith=prefix)
|
||||
cf_count = cf_qs.count()
|
||||
if cf_count:
|
||||
cf_qs.delete()
|
||||
|
||||
cfi_qs = CustomFieldInstance.global_objects.filter(
|
||||
document__title__startswith=prefix,
|
||||
)
|
||||
cfi_count = cfi_qs.count()
|
||||
if cfi_count:
|
||||
cfi_qs.hard_delete()
|
||||
|
||||
return doc_count + tag_count + cf_count + cfi_count
|
||||
|
||||
def _seed_documents(
|
||||
self,
|
||||
*,
|
||||
total: int,
|
||||
owner_ratio: float,
|
||||
unowned_ratio: float,
|
||||
shared_ratio: float,
|
||||
chunk_size: int,
|
||||
prefix: str,
|
||||
target_user,
|
||||
other_user,
|
||||
) -> dict[str, int]:
|
||||
target_count = math.floor(total * owner_ratio)
|
||||
unowned_count = math.floor(total * unowned_ratio)
|
||||
other_count = total - target_count - unowned_count
|
||||
|
||||
documents: list[Document] = []
|
||||
other_docs: list[Document] = []
|
||||
|
||||
for idx in range(total):
|
||||
if idx < target_count:
|
||||
owner = target_user
|
||||
elif idx < target_count + other_count:
|
||||
owner = other_user
|
||||
else:
|
||||
owner = None
|
||||
|
||||
doc = Document(
|
||||
owner=owner,
|
||||
title=f"{prefix}-{idx:07d}",
|
||||
mime_type="application/pdf",
|
||||
checksum=self._unique_checksum(idx),
|
||||
page_count=1,
|
||||
)
|
||||
|
||||
if owner is other_user:
|
||||
other_docs.append(doc)
|
||||
|
||||
documents.append(doc)
|
||||
|
||||
if len(documents) >= chunk_size:
|
||||
Document.objects.bulk_create(documents, batch_size=chunk_size)
|
||||
documents.clear()
|
||||
|
||||
if documents:
|
||||
Document.objects.bulk_create(documents, batch_size=chunk_size)
|
||||
|
||||
shared_target = math.floor(len(other_docs) * shared_ratio)
|
||||
for doc in other_docs[:shared_target]:
|
||||
assign_perm("documents.view_document", target_user, doc)
|
||||
|
||||
return {
|
||||
"owned": target_count,
|
||||
"other_owned": other_count,
|
||||
"unowned": unowned_count,
|
||||
"shared": shared_target,
|
||||
}
|
||||
|
||||
def _seed_tags(self, *, prefix: str, count: int) -> list[Tag]:
|
||||
tags = [
|
||||
Tag(
|
||||
name=f"{prefix}-tag-{idx:03d}",
|
||||
)
|
||||
for idx in range(count)
|
||||
]
|
||||
Tag.objects.bulk_create(tags, ignore_conflicts=True)
|
||||
return list(Tag.objects.filter(name__startswith=prefix))
|
||||
|
||||
def _assign_tags_to_documents(
|
||||
self,
|
||||
*,
|
||||
prefix: str,
|
||||
tags: list[Tag],
|
||||
tags_per_doc: int,
|
||||
chunk_size: int,
|
||||
):
|
||||
if not tags or tags_per_doc < 1:
|
||||
return
|
||||
|
||||
rels = []
|
||||
through = Document.tags.through
|
||||
tag_ids = [t.id for t in tags]
|
||||
tag_count = len(tag_ids)
|
||||
iterator = (
|
||||
Document.objects.filter(title__startswith=prefix)
|
||||
.values_list(
|
||||
"id",
|
||||
flat=True,
|
||||
)
|
||||
.iterator()
|
||||
)
|
||||
|
||||
for idx, doc_id in enumerate(iterator):
|
||||
start = idx % tag_count
|
||||
chosen = set()
|
||||
for offset in range(tags_per_doc):
|
||||
tag_id = tag_ids[(start + offset) % tag_count]
|
||||
if tag_id in chosen:
|
||||
continue
|
||||
chosen.add(tag_id)
|
||||
rels.append(through(document_id=doc_id, tag_id=tag_id))
|
||||
if len(rels) >= chunk_size:
|
||||
through.objects.bulk_create(rels, ignore_conflicts=True)
|
||||
rels.clear()
|
||||
|
||||
if rels:
|
||||
through.objects.bulk_create(rels, ignore_conflicts=True)
|
||||
|
||||
def _seed_custom_fields(self, prefix: str, count: int) -> list[CustomField]:
|
||||
fields = [
|
||||
CustomField(
|
||||
name=f"{prefix}-cf-{idx:03d}",
|
||||
data_type=CustomField.FieldDataType.STRING,
|
||||
)
|
||||
for idx in range(count)
|
||||
]
|
||||
CustomField.objects.bulk_create(fields, ignore_conflicts=True)
|
||||
return list(CustomField.objects.filter(name__startswith=prefix))
|
||||
|
||||
def _seed_custom_field_instances(
|
||||
self,
|
||||
*,
|
||||
prefix: str,
|
||||
custom_fields: list[CustomField],
|
||||
per_doc: int,
|
||||
chunk_size: int,
|
||||
):
|
||||
if not custom_fields or per_doc < 1:
|
||||
return
|
||||
|
||||
instances = []
|
||||
cf_ids = [cf.id for cf in custom_fields]
|
||||
cf_count = len(cf_ids)
|
||||
iterator = (
|
||||
Document.objects.filter(title__startswith=prefix)
|
||||
.values_list(
|
||||
"id",
|
||||
flat=True,
|
||||
)
|
||||
.iterator()
|
||||
)
|
||||
|
||||
for idx, doc_id in enumerate(iterator):
|
||||
start = idx % cf_count
|
||||
for offset in range(per_doc):
|
||||
cf_id = cf_ids[(start + offset) % cf_count]
|
||||
instances.append(
|
||||
CustomFieldInstance(
|
||||
document_id=doc_id,
|
||||
field_id=cf_id,
|
||||
value_text=f"val-{doc_id}-{cf_id}",
|
||||
),
|
||||
)
|
||||
if len(instances) >= chunk_size:
|
||||
CustomFieldInstance.objects.bulk_create(
|
||||
instances,
|
||||
batch_size=chunk_size,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
instances.clear()
|
||||
|
||||
if instances:
|
||||
CustomFieldInstance.objects.bulk_create(
|
||||
instances,
|
||||
batch_size=chunk_size,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
def _run_benchmarks(self, *, iterations: int, target_user, superuser, prefix: str):
|
||||
self.stdout.write("-> doc counts")
|
||||
self._time_query(
|
||||
label="non-superuser: id__in(values_list flat=True)",
|
||||
iterations=iterations,
|
||||
fn=lambda: self._count_with_values_list(target_user),
|
||||
)
|
||||
self._time_query(
|
||||
label="non-superuser: id__in(Subquery(values_list))",
|
||||
iterations=iterations,
|
||||
fn=lambda: self._count_with_subquery(target_user),
|
||||
)
|
||||
self._time_query(
|
||||
label="superuser baseline",
|
||||
iterations=iterations,
|
||||
fn=lambda: Document.objects.count(),
|
||||
)
|
||||
if not self.options.get("skip_tags"):
|
||||
self.stdout.write("-> tag counts")
|
||||
self._time_tag_counts(
|
||||
iterations=iterations,
|
||||
prefix=prefix,
|
||||
user=target_user,
|
||||
)
|
||||
if not self.options.get("skip_custom_fields"):
|
||||
self.stdout.write("-> custom field counts")
|
||||
self._time_custom_field_counts(
|
||||
iterations=iterations,
|
||||
prefix=prefix,
|
||||
user=target_user,
|
||||
superuser=superuser,
|
||||
)
|
||||
|
||||
def _count_with_values_list(self, user) -> int:
|
||||
qs = get_objects_for_user_owner_aware(
|
||||
user,
|
||||
"documents.view_document",
|
||||
Document,
|
||||
)
|
||||
return Document.objects.filter(id__in=qs.values_list("id", flat=True)).count()
|
||||
|
||||
def _count_with_subquery(self, user) -> int:
|
||||
qs = get_objects_for_user_owner_aware(
|
||||
user,
|
||||
"documents.view_document",
|
||||
Document,
|
||||
)
|
||||
subquery = Subquery(qs.values_list("id"))
|
||||
return Document.objects.filter(id__in=subquery).count()
|
||||
|
||||
def _document_filter(self, user, *, use_subquery: bool):
|
||||
if user is None or getattr(user, "is_superuser", False):
|
||||
return Q(documents__deleted_at__isnull=True)
|
||||
|
||||
qs = get_objects_for_user_owner_aware(
|
||||
user,
|
||||
"documents.view_document",
|
||||
Document,
|
||||
)
|
||||
ids = (
|
||||
Subquery(qs.values_list("id"))
|
||||
if use_subquery
|
||||
else qs.values_list("id", flat=True)
|
||||
)
|
||||
return Q(documents__deleted_at__isnull=True, documents__id__in=ids)
|
||||
|
||||
def _tag_queryset(self, *, prefix: str, filter_q: Q):
|
||||
return Tag.objects.filter(name__startswith=prefix).annotate(
|
||||
document_count=Count("documents", filter=filter_q),
|
||||
)
|
||||
|
||||
def _time_tag_counts(self, *, iterations: int, prefix: str, user):
|
||||
if not Tag.objects.filter(name__startswith=prefix).exists():
|
||||
return
|
||||
|
||||
self._time_query(
|
||||
label="tag document_count (grouped)",
|
||||
iterations=iterations,
|
||||
fn=lambda: list(
|
||||
Tag.documents.through.objects.filter(
|
||||
document_id__in=Subquery(permitted_document_ids(user)),
|
||||
)
|
||||
.values("tag_id")
|
||||
.annotate(c=Count("document_id"))
|
||||
.values_list("tag_id", "c"),
|
||||
),
|
||||
)
|
||||
|
||||
def _time_custom_field_counts(
|
||||
self,
|
||||
*,
|
||||
iterations: int,
|
||||
prefix: str,
|
||||
user,
|
||||
superuser,
|
||||
):
|
||||
if not CustomField.objects.filter(name__startswith=prefix).exists():
|
||||
return
|
||||
|
||||
permitted = Subquery(permitted_document_ids(user))
|
||||
super_permitted = CustomFieldInstance.objects.filter(
|
||||
document__deleted_at__isnull=True,
|
||||
).values_list("document_id")
|
||||
|
||||
def _run(ids_subquery):
|
||||
return list(
|
||||
CustomFieldInstance.objects.filter(
|
||||
document_id__in=ids_subquery,
|
||||
field__name__startswith=prefix,
|
||||
)
|
||||
.values("field_id")
|
||||
.annotate(c=Count("document_id"))
|
||||
.values_list("field_id", "c"),
|
||||
)
|
||||
|
||||
self._time_query(
|
||||
label="custom fields document_count (grouped permitted)",
|
||||
iterations=iterations,
|
||||
fn=lambda: _run(permitted),
|
||||
)
|
||||
self._time_query(
|
||||
label="custom fields document_count superuser baseline",
|
||||
iterations=iterations,
|
||||
fn=lambda: _run(super_permitted),
|
||||
)
|
||||
|
||||
def _time_query(self, *, label: str, iterations: int, fn):
|
||||
durations = []
|
||||
for _ in range(iterations):
|
||||
reset_queries()
|
||||
start = perf_counter()
|
||||
fn()
|
||||
durations.append(perf_counter() - start)
|
||||
|
||||
avg = sum(durations) / len(durations)
|
||||
self.stdout.write(
|
||||
f"{label}: min={min(durations):.4f}s avg={avg:.4f}s max={max(durations):.4f}s",
|
||||
)
|
||||
|
||||
def _unique_checksum(self, idx: int) -> str:
|
||||
return f"{uuid.uuid4().hex}{idx:08d}"[:32]
|
||||
@@ -1,177 +0,0 @@
|
||||
# Generated by Django 5.2.9 on 2026-01-27 01:09
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.db.models.functions.text
|
||||
import django.utils.timezone
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.management import create_permissions
|
||||
from django.contrib.auth.models import Group
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.auth.models import User
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
def grant_share_link_bundle_permissions(apps, schema_editor):
|
||||
# Ensure newly introduced permissions are created for all apps
|
||||
for app_config in apps.get_app_configs():
|
||||
app_config.models_module = True
|
||||
create_permissions(app_config, apps=apps, verbosity=0)
|
||||
app_config.models_module = None
|
||||
|
||||
add_document_perm = Permission.objects.filter(codename="add_document").first()
|
||||
share_bundle_permissions = Permission.objects.filter(
|
||||
codename__contains="sharelinkbundle",
|
||||
)
|
||||
|
||||
users = User.objects.filter(user_permissions=add_document_perm).distinct()
|
||||
for user in users:
|
||||
user.user_permissions.add(*share_bundle_permissions)
|
||||
|
||||
groups = Group.objects.filter(permissions=add_document_perm).distinct()
|
||||
for group in groups:
|
||||
group.permissions.add(*share_bundle_permissions)
|
||||
|
||||
|
||||
def revoke_share_link_bundle_permissions(apps, schema_editor):
|
||||
share_bundle_permissions = Permission.objects.filter(
|
||||
codename__contains="sharelinkbundle",
|
||||
)
|
||||
for user in User.objects.all():
|
||||
user.user_permissions.remove(*share_bundle_permissions)
|
||||
for group in Group.objects.all():
|
||||
group.permissions.remove(*share_bundle_permissions)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("documents", "0007_document_content_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ShareLinkBundle",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"created",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"expiration",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
null=True,
|
||||
verbose_name="expiration",
|
||||
),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
blank=True,
|
||||
editable=False,
|
||||
unique=True,
|
||||
verbose_name="slug",
|
||||
),
|
||||
),
|
||||
(
|
||||
"file_version",
|
||||
models.CharField(
|
||||
choices=[("archive", "Archive"), ("original", "Original")],
|
||||
default="archive",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending"),
|
||||
("processing", "Processing"),
|
||||
("ready", "Ready"),
|
||||
("failed", "Failed"),
|
||||
],
|
||||
default="pending",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"size_bytes",
|
||||
models.PositiveIntegerField(
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name="size (bytes)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"last_error",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
verbose_name="last error",
|
||||
),
|
||||
),
|
||||
(
|
||||
"file_path",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
max_length=512,
|
||||
verbose_name="file path",
|
||||
),
|
||||
),
|
||||
(
|
||||
"built_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name="built at",
|
||||
),
|
||||
),
|
||||
(
|
||||
"documents",
|
||||
models.ManyToManyField(
|
||||
related_name="share_link_bundles",
|
||||
to="documents.document",
|
||||
verbose_name="documents",
|
||||
),
|
||||
),
|
||||
(
|
||||
"owner",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="share_link_bundles",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="owner",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"ordering": ("-created",),
|
||||
"verbose_name": "share link bundle",
|
||||
"verbose_name_plural": "share link bundles",
|
||||
},
|
||||
),
|
||||
migrations.RunPython(
|
||||
grant_share_link_bundle_permissions,
|
||||
reverse_code=revoke_share_link_bundle_permissions,
|
||||
),
|
||||
]
|
||||
@@ -766,114 +766,6 @@ class ShareLink(SoftDeleteModel):
|
||||
return f"Share Link for {self.document.title}"
|
||||
|
||||
|
||||
class ShareLinkBundle(models.Model):
|
||||
class Status(models.TextChoices):
|
||||
PENDING = ("pending", _("Pending"))
|
||||
PROCESSING = ("processing", _("Processing"))
|
||||
READY = ("ready", _("Ready"))
|
||||
FAILED = ("failed", _("Failed"))
|
||||
|
||||
created = models.DateTimeField(
|
||||
_("created"),
|
||||
default=timezone.now,
|
||||
db_index=True,
|
||||
blank=True,
|
||||
editable=False,
|
||||
)
|
||||
|
||||
expiration = models.DateTimeField(
|
||||
_("expiration"),
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
)
|
||||
|
||||
slug = models.SlugField(
|
||||
_("slug"),
|
||||
db_index=True,
|
||||
unique=True,
|
||||
blank=True,
|
||||
editable=False,
|
||||
)
|
||||
|
||||
owner = models.ForeignKey(
|
||||
User,
|
||||
blank=True,
|
||||
null=True,
|
||||
related_name="share_link_bundles",
|
||||
on_delete=models.SET_NULL,
|
||||
verbose_name=_("owner"),
|
||||
)
|
||||
|
||||
file_version = models.CharField(
|
||||
max_length=50,
|
||||
choices=ShareLink.FileVersion.choices,
|
||||
default=ShareLink.FileVersion.ARCHIVE,
|
||||
)
|
||||
|
||||
status = models.CharField(
|
||||
max_length=50,
|
||||
choices=Status.choices,
|
||||
default=Status.PENDING,
|
||||
)
|
||||
|
||||
size_bytes = models.PositiveIntegerField(
|
||||
_("size (bytes)"),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
last_error = models.JSONField(
|
||||
_("last error"),
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
)
|
||||
|
||||
file_path = models.CharField(
|
||||
_("file path"),
|
||||
max_length=512,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
built_at = models.DateTimeField(
|
||||
_("built at"),
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
documents = models.ManyToManyField(
|
||||
"documents.Document",
|
||||
related_name="share_link_bundles",
|
||||
verbose_name=_("documents"),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ("-created",)
|
||||
verbose_name = _("share link bundle")
|
||||
verbose_name_plural = _("share link bundles")
|
||||
|
||||
def __str__(self):
|
||||
return _("Share link bundle %(slug)s") % {"slug": self.slug}
|
||||
|
||||
@property
|
||||
def absolute_file_path(self) -> Path | None:
|
||||
if not self.file_path:
|
||||
return None
|
||||
return (settings.SHARE_LINK_BUNDLE_DIR / Path(self.file_path)).resolve()
|
||||
|
||||
def remove_file(self):
|
||||
if self.absolute_file_path is not None and self.absolute_file_path.exists():
|
||||
try:
|
||||
self.absolute_file_path.unlink()
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def delete(self, using=None, *, keep_parents=False):
|
||||
self.remove_file()
|
||||
return super().delete(using=using, keep_parents=keep_parents)
|
||||
|
||||
|
||||
class CustomField(models.Model):
|
||||
"""
|
||||
Defines the name and type of a custom field
|
||||
|
||||
@@ -139,15 +139,25 @@ def get_document_count_filter_for_user(user):
|
||||
if getattr(user, "is_superuser", False):
|
||||
return Q(documents__deleted_at__isnull=True)
|
||||
return Q(
|
||||
documents__deleted_at__isnull=True,
|
||||
documents__id__in=get_objects_for_user_owner_aware(
|
||||
user,
|
||||
"documents.view_document",
|
||||
Document,
|
||||
).values_list("id", flat=True),
|
||||
documents__id__in=permitted_document_ids(user),
|
||||
)
|
||||
|
||||
|
||||
def permitted_document_ids(user):
|
||||
"""
|
||||
Return a Subquery of permitted, non-deleted document IDs for the user.
|
||||
Used to avoid repeated joins to the Document table in count annotations.
|
||||
"""
|
||||
if user is None or not getattr(user, "is_authenticated", False):
|
||||
return Document.objects.none().values_list("id")
|
||||
qs = get_objects_for_user_owner_aware(
|
||||
user,
|
||||
"documents.view_document",
|
||||
Document,
|
||||
).filter(deleted_at__isnull=True)
|
||||
return qs.values_list("id")
|
||||
|
||||
|
||||
def get_objects_for_user_owner_aware(
|
||||
user,
|
||||
perms,
|
||||
|
||||
@@ -4,7 +4,6 @@ import logging
|
||||
import math
|
||||
import re
|
||||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Literal
|
||||
@@ -25,8 +24,8 @@ from django.core.validators import RegexValidator
|
||||
from django.core.validators import integer_validator
|
||||
from django.db.models import Count
|
||||
from django.db.models import Q
|
||||
from django.db.models import Subquery
|
||||
from django.db.models.functions import Lower
|
||||
from django.utils import timezone
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.utils.text import slugify
|
||||
@@ -64,7 +63,6 @@ from documents.models import PaperlessTask
|
||||
from documents.models import SavedView
|
||||
from documents.models import SavedViewFilterRule
|
||||
from documents.models import ShareLink
|
||||
from documents.models import ShareLinkBundle
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Tag
|
||||
from documents.models import UiSettings
|
||||
@@ -74,9 +72,9 @@ from documents.models import WorkflowActionEmail
|
||||
from documents.models import WorkflowActionWebhook
|
||||
from documents.models import WorkflowTrigger
|
||||
from documents.parsers import is_mime_type_supported
|
||||
from documents.permissions import get_document_count_filter_for_user
|
||||
from documents.permissions import get_groups_with_only_permission
|
||||
from documents.permissions import get_objects_for_user_owner_aware
|
||||
from documents.permissions import permitted_document_ids
|
||||
from documents.permissions import set_permissions_for_object
|
||||
from documents.regex import validate_regex_pattern
|
||||
from documents.templating.filepath import validate_filepath_template_and_render
|
||||
@@ -592,18 +590,41 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
|
||||
if children_map is not None:
|
||||
children = children_map.get(obj.pk, [])
|
||||
else:
|
||||
filter_q = self.context.get("document_count_filter")
|
||||
request = self.context.get("request")
|
||||
if filter_q is None:
|
||||
user = getattr(request, "user", None) if request else None
|
||||
filter_q = get_document_count_filter_for_user(user)
|
||||
self.context["document_count_filter"] = filter_q
|
||||
user = getattr(request, "user", None) if request else None
|
||||
|
||||
children = (
|
||||
obj.get_children_queryset()
|
||||
.select_related("owner")
|
||||
.annotate(document_count=Count("documents", filter=filter_q))
|
||||
)
|
||||
filter_kind = self.context.get("document_count_filter")
|
||||
if filter_kind is None:
|
||||
filter_kind = (
|
||||
"superuser"
|
||||
if user and getattr(user, "is_superuser", False)
|
||||
else "restricted"
|
||||
)
|
||||
self.context["document_count_filter"] = filter_kind
|
||||
|
||||
queryset = obj.get_children_queryset().select_related("owner")
|
||||
|
||||
if filter_kind == "superuser":
|
||||
children = queryset.annotate(
|
||||
document_count=Count(
|
||||
"documents",
|
||||
filter=Q(documents__deleted_at__isnull=True),
|
||||
distinct=True,
|
||||
),
|
||||
)
|
||||
else:
|
||||
permitted_ids = Subquery(permitted_document_ids(user))
|
||||
counts = dict(
|
||||
Tag.documents.through.objects.filter(
|
||||
document_id__in=permitted_ids,
|
||||
)
|
||||
.values("tag_id")
|
||||
.annotate(c=Count("document_id"))
|
||||
.values_list("tag_id", "c"),
|
||||
)
|
||||
children = list(queryset)
|
||||
for child in children:
|
||||
child.document_count = counts.get(child.id, 0)
|
||||
|
||||
view = self.context.get("view")
|
||||
ordering = (
|
||||
@@ -612,7 +633,11 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
|
||||
else None
|
||||
)
|
||||
ordering = ordering or (Lower("name"),)
|
||||
children = children.order_by(*ordering)
|
||||
if hasattr(children, "order_by"):
|
||||
children = children.order_by(*ordering)
|
||||
else:
|
||||
# children is a list (pre-fetched); apply basic ordering on name
|
||||
children = sorted(children, key=lambda c: (c.name or "").lower())
|
||||
|
||||
serializer = TagSerializer(
|
||||
children,
|
||||
@@ -2231,104 +2256,6 @@ class ShareLinkSerializer(OwnedObjectSerializer):
|
||||
return super().create(validated_data)
|
||||
|
||||
|
||||
class ShareLinkBundleSerializer(OwnedObjectSerializer):
|
||||
document_ids = serializers.ListField(
|
||||
child=serializers.IntegerField(min_value=1),
|
||||
allow_empty=False,
|
||||
write_only=True,
|
||||
)
|
||||
expiration_days = serializers.IntegerField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
min_value=1,
|
||||
write_only=True,
|
||||
)
|
||||
documents = serializers.PrimaryKeyRelatedField(
|
||||
many=True,
|
||||
read_only=True,
|
||||
)
|
||||
document_count = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = ShareLinkBundle
|
||||
fields = (
|
||||
"id",
|
||||
"created",
|
||||
"expiration",
|
||||
"expiration_days",
|
||||
"slug",
|
||||
"file_version",
|
||||
"status",
|
||||
"size_bytes",
|
||||
"last_error",
|
||||
"built_at",
|
||||
"documents",
|
||||
"document_ids",
|
||||
"document_count",
|
||||
)
|
||||
read_only_fields = (
|
||||
"id",
|
||||
"created",
|
||||
"expiration",
|
||||
"slug",
|
||||
"status",
|
||||
"size_bytes",
|
||||
"last_error",
|
||||
"built_at",
|
||||
"documents",
|
||||
"document_count",
|
||||
)
|
||||
|
||||
def validate_document_ids(self, value):
|
||||
unique_ids = set(value)
|
||||
if len(unique_ids) != len(value):
|
||||
raise serializers.ValidationError(
|
||||
_("Duplicate document identifiers are not allowed."),
|
||||
)
|
||||
return value
|
||||
|
||||
def create(self, validated_data):
|
||||
document_ids = validated_data.pop("document_ids")
|
||||
expiration_days = validated_data.pop("expiration_days", None)
|
||||
validated_data["slug"] = get_random_string(50)
|
||||
if expiration_days:
|
||||
validated_data["expiration"] = timezone.now() + timedelta(
|
||||
days=expiration_days,
|
||||
)
|
||||
else:
|
||||
validated_data["expiration"] = None
|
||||
|
||||
share_link_bundle = super().create(validated_data)
|
||||
|
||||
documents = list(
|
||||
Document.objects.filter(pk__in=document_ids).only(
|
||||
"pk",
|
||||
),
|
||||
)
|
||||
documents_by_id = {doc.pk: doc for doc in documents}
|
||||
missing = [
|
||||
str(doc_id) for doc_id in document_ids if doc_id not in documents_by_id
|
||||
]
|
||||
if missing:
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"document_ids": _(
|
||||
"Documents not found: %(ids)s",
|
||||
)
|
||||
% {"ids": ", ".join(missing)},
|
||||
},
|
||||
)
|
||||
|
||||
ordered_documents = [documents_by_id[doc_id] for doc_id in document_ids]
|
||||
share_link_bundle.documents.set(ordered_documents)
|
||||
share_link_bundle.document_total = len(ordered_documents)
|
||||
|
||||
return share_link_bundle
|
||||
|
||||
def get_document_count(self, obj: ShareLinkBundle) -> int:
|
||||
return getattr(obj, "document_total") or obj.documents.count()
|
||||
|
||||
|
||||
class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
|
||||
objects = serializers.ListField(
|
||||
required=True,
|
||||
|
||||
@@ -3,10 +3,8 @@ import hashlib
|
||||
import logging
|
||||
import shutil
|
||||
import uuid
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from tempfile import mkstemp
|
||||
|
||||
import tqdm
|
||||
from celery import Task
|
||||
@@ -24,8 +22,6 @@ from whoosh.writing import AsyncWriter
|
||||
from documents import index
|
||||
from documents import sanity_checker
|
||||
from documents.barcodes import BarcodePlugin
|
||||
from documents.bulk_download import ArchiveOnlyStrategy
|
||||
from documents.bulk_download import OriginalsOnlyStrategy
|
||||
from documents.caching import clear_document_caches
|
||||
from documents.classifier import DocumentClassifier
|
||||
from documents.classifier import load_classifier
|
||||
@@ -43,8 +39,6 @@ from documents.models import CustomFieldInstance
|
||||
from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import PaperlessTask
|
||||
from documents.models import ShareLink
|
||||
from documents.models import ShareLinkBundle
|
||||
from documents.models import StoragePath
|
||||
from documents.models import Tag
|
||||
from documents.models import WorkflowRun
|
||||
@@ -631,117 +625,3 @@ def update_document_in_llm_index(document):
|
||||
@shared_task
|
||||
def remove_document_from_llm_index(document):
|
||||
llm_index_remove_document(document)
|
||||
|
||||
|
||||
@shared_task
|
||||
def build_share_link_bundle(bundle_id: int):
|
||||
try:
|
||||
bundle = (
|
||||
ShareLinkBundle.objects.filter(pk=bundle_id)
|
||||
.prefetch_related("documents")
|
||||
.get()
|
||||
)
|
||||
except ShareLinkBundle.DoesNotExist:
|
||||
logger.warning("Share link bundle %s no longer exists.", bundle_id)
|
||||
return
|
||||
|
||||
bundle.remove_file()
|
||||
bundle.status = ShareLinkBundle.Status.PROCESSING
|
||||
bundle.last_error = None
|
||||
bundle.size_bytes = None
|
||||
bundle.built_at = None
|
||||
bundle.file_path = ""
|
||||
bundle.save(
|
||||
update_fields=[
|
||||
"status",
|
||||
"last_error",
|
||||
"size_bytes",
|
||||
"built_at",
|
||||
"file_path",
|
||||
],
|
||||
)
|
||||
|
||||
documents = list(bundle.documents.all().order_by("pk"))
|
||||
|
||||
_, temp_zip_path_str = mkstemp(suffix=".zip", dir=settings.SCRATCH_DIR)
|
||||
temp_zip_path = Path(temp_zip_path_str)
|
||||
|
||||
try:
|
||||
strategy_class = (
|
||||
ArchiveOnlyStrategy
|
||||
if bundle.file_version == ShareLink.FileVersion.ARCHIVE
|
||||
else OriginalsOnlyStrategy
|
||||
)
|
||||
with zipfile.ZipFile(temp_zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
|
||||
strategy = strategy_class(zipf)
|
||||
for document in documents:
|
||||
strategy.add_document(document)
|
||||
|
||||
output_dir = settings.SHARE_LINK_BUNDLE_DIR
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
final_path = (output_dir / f"{bundle.slug}.zip").resolve()
|
||||
if final_path.exists():
|
||||
final_path.unlink()
|
||||
shutil.move(temp_zip_path, final_path)
|
||||
|
||||
bundle.file_path = f"{bundle.slug}.zip"
|
||||
bundle.size_bytes = final_path.stat().st_size
|
||||
bundle.status = ShareLinkBundle.Status.READY
|
||||
bundle.built_at = timezone.now()
|
||||
bundle.last_error = None
|
||||
bundle.save(
|
||||
update_fields=[
|
||||
"file_path",
|
||||
"size_bytes",
|
||||
"status",
|
||||
"built_at",
|
||||
"last_error",
|
||||
],
|
||||
)
|
||||
logger.info("Built share link bundle %s", bundle.pk)
|
||||
except Exception as exc:
|
||||
logger.exception(
|
||||
"Failed to build share link bundle %s: %s",
|
||||
bundle_id,
|
||||
exc,
|
||||
)
|
||||
bundle.status = ShareLinkBundle.Status.FAILED
|
||||
bundle.last_error = {
|
||||
"bundle_id": bundle_id,
|
||||
"exception_type": exc.__class__.__name__,
|
||||
"message": str(exc),
|
||||
"timestamp": timezone.now().isoformat(),
|
||||
}
|
||||
bundle.save(update_fields=["status", "last_error"])
|
||||
try:
|
||||
temp_zip_path.unlink()
|
||||
except OSError:
|
||||
pass
|
||||
raise
|
||||
finally:
|
||||
try:
|
||||
temp_zip_path.unlink(missing_ok=True)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
@shared_task
|
||||
def cleanup_expired_share_link_bundles():
|
||||
now = timezone.now()
|
||||
expired_qs = ShareLinkBundle.objects.filter(
|
||||
expiration__isnull=False,
|
||||
expiration__lt=now,
|
||||
)
|
||||
count = 0
|
||||
for bundle in expired_qs.iterator():
|
||||
count += 1
|
||||
try:
|
||||
bundle.delete()
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Failed to delete expired share link bundle %s: %s",
|
||||
bundle.pk,
|
||||
exc,
|
||||
)
|
||||
if count:
|
||||
logger.info("Deleted %s expired share link bundle(s)", count)
|
||||
|
||||
@@ -224,18 +224,17 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
THEN:
|
||||
- The collated file gets put into foo/bar
|
||||
"""
|
||||
# TODO: parameterize this instead
|
||||
for path in [
|
||||
Path("foo") / "bar" / "double-sided",
|
||||
Path("double-sided") / "foo" / "bar",
|
||||
]:
|
||||
with self.subTest(path=str(path)):
|
||||
with self.subTest(path=path):
|
||||
# Ensure we get fresh directories for each run
|
||||
self.tearDown()
|
||||
self.setUp()
|
||||
|
||||
self.create_staging_file()
|
||||
self.consume_file("double-sided-odd.pdf", Path(path) / "foo.pdf")
|
||||
self.consume_file("double-sided-odd.pdf", path / "foo.pdf")
|
||||
self.assertIsFile(
|
||||
self.dirs.consumption_dir / "foo" / "bar" / "foo-collated.pdf",
|
||||
)
|
||||
|
||||
@@ -114,30 +114,6 @@ def mock_supported_extensions(mocker: MockerFixture) -> MagicMock:
|
||||
)
|
||||
|
||||
|
||||
def wait_for_mock_call(
|
||||
mock_obj: MagicMock,
|
||||
timeout_s: float = 5.0,
|
||||
poll_interval_s: float = 0.1,
|
||||
) -> bool:
|
||||
"""
|
||||
Actively wait for a mock to be called.
|
||||
|
||||
Args:
|
||||
mock_obj: The mock object to check (e.g., mock.delay)
|
||||
timeout_s: Maximum time to wait in seconds
|
||||
poll_interval_s: How often to check in seconds
|
||||
|
||||
Returns:
|
||||
True if mock was called within timeout, False otherwise
|
||||
"""
|
||||
start_time = monotonic()
|
||||
while monotonic() - start_time < timeout_s:
|
||||
if mock_obj.called:
|
||||
return True
|
||||
sleep(poll_interval_s)
|
||||
return False
|
||||
|
||||
|
||||
class TestTrackedFile:
|
||||
"""Tests for the TrackedFile dataclass."""
|
||||
|
||||
@@ -748,7 +724,7 @@ def start_consumer(
|
||||
thread = ConsumerThread(consumption_dir, scratch_dir, **kwargs)
|
||||
threads.append(thread)
|
||||
thread.start()
|
||||
sleep(2.0) # Give thread time to start
|
||||
sleep(0.5) # Give thread time to start
|
||||
return thread
|
||||
|
||||
try:
|
||||
@@ -791,8 +767,7 @@ class TestCommandWatch:
|
||||
|
||||
target = consumption_dir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
sleep(0.5)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
@@ -813,12 +788,9 @@ class TestCommandWatch:
|
||||
|
||||
thread = start_consumer()
|
||||
|
||||
sleep(0.5)
|
||||
|
||||
target = consumption_dir / "document.pdf"
|
||||
shutil.move(temp_location, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
sleep(0.5)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
@@ -844,7 +816,7 @@ class TestCommandWatch:
|
||||
f.flush()
|
||||
sleep(0.05)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
sleep(0.5)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
@@ -865,7 +837,7 @@ class TestCommandWatch:
|
||||
(consumption_dir / "._document.pdf").write_bytes(b"test")
|
||||
shutil.copy(sample_pdf, consumption_dir / "valid.pdf")
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
sleep(0.5)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
@@ -896,10 +868,11 @@ class TestCommandWatch:
|
||||
assert not thread.is_alive()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestCommandWatchPolling:
|
||||
"""Tests for polling mode."""
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.flaky(reruns=2)
|
||||
def test_polling_mode_works(
|
||||
self,
|
||||
consumption_dir: Path,
|
||||
@@ -909,8 +882,7 @@ class TestCommandWatchPolling:
|
||||
) -> None:
|
||||
"""
|
||||
Test polling mode detects files.
|
||||
|
||||
Uses active waiting with timeout to handle CI delays and polling timing.
|
||||
Note: At times, there appears to be a timing issue, where delay has not yet been called, hence this is marked as flaky.
|
||||
"""
|
||||
# Use shorter polling interval for faster test
|
||||
thread = start_consumer(polling_interval=0.5, stability_delay=0.1)
|
||||
@@ -918,9 +890,9 @@ class TestCommandWatchPolling:
|
||||
target = consumption_dir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
# Actively wait for consumption
|
||||
# Polling needs: interval (0.5s) + stability (0.1s) + next poll (0.5s) + margin
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=5.0)
|
||||
# Wait for: poll interval + stability delay + another poll + margin
|
||||
# CI can be slow, so use generous timeout
|
||||
sleep(3.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
@@ -947,8 +919,7 @@ class TestCommandWatchRecursive:
|
||||
|
||||
target = subdir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
sleep(0.5)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
@@ -977,8 +948,7 @@ class TestCommandWatchRecursive:
|
||||
|
||||
target = subdir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
sleep(0.5)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
from documents.tests.utils import TestMigrations
|
||||
|
||||
|
||||
class TestMigrateShareLinkBundlePermissions(TestMigrations):
|
||||
migrate_from = "0007_document_content_length"
|
||||
migrate_to = "0008_sharelinkbundle"
|
||||
|
||||
def setUpBeforeMigration(self, apps):
|
||||
User = apps.get_model("auth", "User")
|
||||
Group = apps.get_model("auth", "Group")
|
||||
self.Permission = apps.get_model("auth", "Permission")
|
||||
self.user = User.objects.create(username="user1")
|
||||
self.group = Group.objects.create(name="group1")
|
||||
add_document = self.Permission.objects.get(codename="add_document")
|
||||
self.user.user_permissions.add(add_document.id)
|
||||
self.group.permissions.add(add_document.id)
|
||||
|
||||
def test_share_link_permissions_granted_to_add_document_holders(self):
|
||||
share_perms = self.Permission.objects.filter(
|
||||
codename__contains="sharelinkbundle",
|
||||
)
|
||||
self.assertTrue(self.user.user_permissions.filter(pk__in=share_perms).exists())
|
||||
self.assertTrue(self.group.permissions.filter(pk__in=share_perms).exists())
|
||||
|
||||
|
||||
class TestReverseMigrateShareLinkBundlePermissions(TestMigrations):
|
||||
migrate_from = "0008_sharelinkbundle"
|
||||
migrate_to = "0007_document_content_length"
|
||||
|
||||
def setUpBeforeMigration(self, apps):
|
||||
User = apps.get_model("auth", "User")
|
||||
Group = apps.get_model("auth", "Group")
|
||||
self.Permission = apps.get_model("auth", "Permission")
|
||||
self.user = User.objects.create(username="user1")
|
||||
self.group = Group.objects.create(name="group1")
|
||||
add_document = self.Permission.objects.get(codename="add_document")
|
||||
share_perms = self.Permission.objects.filter(
|
||||
codename__contains="sharelinkbundle",
|
||||
)
|
||||
self.share_perm_ids = list(share_perms.values_list("id", flat=True))
|
||||
|
||||
self.user.user_permissions.add(add_document.id, *self.share_perm_ids)
|
||||
self.group.permissions.add(add_document.id, *self.share_perm_ids)
|
||||
|
||||
def test_share_link_permissions_revoked_on_reverse(self):
|
||||
self.assertFalse(
|
||||
self.user.user_permissions.filter(pk__in=self.share_perm_ids).exists(),
|
||||
)
|
||||
self.assertFalse(
|
||||
self.group.permissions.filter(pk__in=self.share_perm_ids).exists(),
|
||||
)
|
||||
@@ -1,536 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import zipfile
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.utils import timezone
|
||||
from rest_framework import serializers
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from documents.filters import ShareLinkBundleFilterSet
|
||||
from documents.models import ShareLink
|
||||
from documents.models import ShareLinkBundle
|
||||
from documents.serialisers import ShareLinkBundleSerializer
|
||||
from documents.tasks import build_share_link_bundle
|
||||
from documents.tasks import cleanup_expired_share_link_bundles
|
||||
from documents.tests.factories import DocumentFactory
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
|
||||
|
||||
class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
|
||||
ENDPOINT = "/api/share_link_bundles/"
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.user = User.objects.create_superuser(username="bundle_admin")
|
||||
self.client.force_authenticate(self.user)
|
||||
self.document = DocumentFactory.create()
|
||||
|
||||
@mock.patch("documents.views.build_share_link_bundle.delay")
|
||||
def test_create_bundle_triggers_build_job(self, delay_mock):
|
||||
payload = {
|
||||
"document_ids": [self.document.pk],
|
||||
"file_version": ShareLink.FileVersion.ARCHIVE,
|
||||
"expiration_days": 7,
|
||||
}
|
||||
|
||||
response = self.client.post(self.ENDPOINT, payload, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
bundle = ShareLinkBundle.objects.get(pk=response.data["id"])
|
||||
self.assertEqual(bundle.documents.count(), 1)
|
||||
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
|
||||
delay_mock.assert_called_once_with(bundle.pk)
|
||||
|
||||
def test_create_bundle_rejects_missing_documents(self):
|
||||
payload = {
|
||||
"document_ids": [9999],
|
||||
"file_version": ShareLink.FileVersion.ARCHIVE,
|
||||
"expiration_days": 7,
|
||||
}
|
||||
|
||||
response = self.client.post(self.ENDPOINT, payload, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("document_ids", response.data)
|
||||
|
||||
@mock.patch("documents.views.has_perms_owner_aware", return_value=False)
|
||||
def test_create_bundle_rejects_insufficient_permissions(self, perms_mock):
|
||||
payload = {
|
||||
"document_ids": [self.document.pk],
|
||||
"file_version": ShareLink.FileVersion.ARCHIVE,
|
||||
"expiration_days": 7,
|
||||
}
|
||||
|
||||
response = self.client.post(self.ENDPOINT, payload, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("document_ids", response.data)
|
||||
perms_mock.assert_called()
|
||||
|
||||
@mock.patch("documents.views.build_share_link_bundle.delay")
|
||||
def test_rebuild_bundle_resets_state(self, delay_mock):
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="rebuild-slug",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.FAILED,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
bundle.last_error = {"message": "Something went wrong"}
|
||||
bundle.size_bytes = 100
|
||||
bundle.file_path = "path/to/file.zip"
|
||||
bundle.save()
|
||||
|
||||
response = self.client.post(f"{self.ENDPOINT}{bundle.pk}/rebuild/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
bundle.refresh_from_db()
|
||||
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
|
||||
self.assertIsNone(bundle.last_error)
|
||||
self.assertIsNone(bundle.size_bytes)
|
||||
self.assertEqual(bundle.file_path, "")
|
||||
delay_mock.assert_called_once_with(bundle.pk)
|
||||
|
||||
def test_rebuild_bundle_rejects_processing_status(self):
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="processing-slug",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.PROCESSING,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
response = self.client.post(f"{self.ENDPOINT}{bundle.pk}/rebuild/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("detail", response.data)
|
||||
|
||||
def test_create_bundle_rejects_duplicate_documents(self):
|
||||
payload = {
|
||||
"document_ids": [self.document.pk, self.document.pk],
|
||||
"file_version": ShareLink.FileVersion.ARCHIVE,
|
||||
"expiration_days": 7,
|
||||
}
|
||||
|
||||
response = self.client.post(self.ENDPOINT, payload, format="json")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("document_ids", response.data)
|
||||
|
||||
def test_download_ready_bundle_streams_file(self):
|
||||
bundle_file = Path(self.dirs.media_dir) / "bundles" / "ready.zip"
|
||||
bundle_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
bundle_file.write_bytes(b"binary-zip-content")
|
||||
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="readyslug",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.READY,
|
||||
file_path=str(bundle_file),
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
self.client.logout()
|
||||
response = self.client.get(f"/share/{bundle.slug}/")
|
||||
content = b"".join(response.streaming_content)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response["Content-Type"], "application/zip")
|
||||
self.assertEqual(content, b"binary-zip-content")
|
||||
self.assertIn("attachment;", response["Content-Disposition"])
|
||||
|
||||
def test_download_pending_bundle_returns_202(self):
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="pendingslug",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.PENDING,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
self.client.logout()
|
||||
response = self.client.get(f"/share/{bundle.slug}/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
|
||||
|
||||
def test_download_failed_bundle_returns_503(self):
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="failedslug",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.FAILED,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
self.client.logout()
|
||||
response = self.client.get(f"/share/{bundle.slug}/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)
|
||||
|
||||
def test_expired_share_link_redirects(self):
|
||||
share_link = ShareLink.objects.create(
|
||||
slug="expiredlink",
|
||||
document=self.document,
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
expiration=timezone.now() - timedelta(hours=1),
|
||||
)
|
||||
|
||||
self.client.logout()
|
||||
response = self.client.get(f"/share/{share_link.slug}/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
|
||||
self.assertIn("sharelink_expired=1", response["Location"])
|
||||
|
||||
def test_unknown_share_link_redirects(self):
|
||||
self.client.logout()
|
||||
response = self.client.get("/share/unknownsharelink/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
|
||||
self.assertIn("sharelink_notfound=1", response["Location"])
|
||||
|
||||
|
||||
class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.document = DocumentFactory.create()
|
||||
|
||||
def test_cleanup_expired_share_link_bundles(self):
|
||||
expired_path = Path(self.dirs.media_dir) / "expired.zip"
|
||||
expired_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
expired_path.write_bytes(b"expired")
|
||||
|
||||
active_path = Path(self.dirs.media_dir) / "active.zip"
|
||||
active_path.write_bytes(b"active")
|
||||
|
||||
expired_bundle = ShareLinkBundle.objects.create(
|
||||
slug="expired-bundle",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.READY,
|
||||
expiration=timezone.now() - timedelta(days=1),
|
||||
file_path=str(expired_path),
|
||||
)
|
||||
expired_bundle.documents.set([self.document])
|
||||
|
||||
active_bundle = ShareLinkBundle.objects.create(
|
||||
slug="active-bundle",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.READY,
|
||||
expiration=timezone.now() + timedelta(days=1),
|
||||
file_path=str(active_path),
|
||||
)
|
||||
active_bundle.documents.set([self.document])
|
||||
|
||||
cleanup_expired_share_link_bundles()
|
||||
|
||||
self.assertFalse(ShareLinkBundle.objects.filter(pk=expired_bundle.pk).exists())
|
||||
self.assertTrue(ShareLinkBundle.objects.filter(pk=active_bundle.pk).exists())
|
||||
self.assertFalse(expired_path.exists())
|
||||
self.assertTrue(active_path.exists())
|
||||
|
||||
def test_cleanup_expired_share_link_bundles_logs_on_failure(self):
|
||||
expired_bundle = ShareLinkBundle.objects.create(
|
||||
slug="expired-bundle",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
status=ShareLinkBundle.Status.READY,
|
||||
expiration=timezone.now() - timedelta(days=1),
|
||||
)
|
||||
expired_bundle.documents.set([self.document])
|
||||
|
||||
with mock.patch.object(
|
||||
ShareLinkBundle,
|
||||
"delete",
|
||||
side_effect=RuntimeError("fail"),
|
||||
):
|
||||
with self.assertLogs("paperless.tasks", level="WARNING") as logs:
|
||||
cleanup_expired_share_link_bundles()
|
||||
|
||||
self.assertTrue(
|
||||
any(
|
||||
"Failed to delete expired share link bundle" in msg
|
||||
for msg in logs.output
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.document = DocumentFactory.create(
|
||||
mime_type="application/pdf",
|
||||
checksum="123",
|
||||
)
|
||||
self.document.archive_checksum = ""
|
||||
self.document.save()
|
||||
self.addCleanup(
|
||||
setattr,
|
||||
settings,
|
||||
"SHARE_LINK_BUNDLE_DIR",
|
||||
settings.SHARE_LINK_BUNDLE_DIR,
|
||||
)
|
||||
settings.SHARE_LINK_BUNDLE_DIR = (
|
||||
Path(settings.MEDIA_ROOT) / "documents" / "share_link_bundles"
|
||||
)
|
||||
|
||||
def _write_document_file(self, *, archive: bool, content: bytes) -> Path:
|
||||
if archive:
|
||||
self.document.archive_filename = f"{self.document.pk:07}.pdf"
|
||||
self.document.save()
|
||||
path = self.document.archive_path
|
||||
else:
|
||||
path = self.document.source_path
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
path.write_bytes(content)
|
||||
return path
|
||||
|
||||
def test_build_share_link_bundle_creates_zip_and_sets_metadata(self):
|
||||
self._write_document_file(archive=False, content=b"source")
|
||||
archive_path = self._write_document_file(archive=True, content=b"archive")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="build-archive",
|
||||
file_version=ShareLink.FileVersion.ARCHIVE,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
build_share_link_bundle(bundle.pk)
|
||||
|
||||
bundle.refresh_from_db()
|
||||
self.assertEqual(bundle.status, ShareLinkBundle.Status.READY)
|
||||
self.assertIsNone(bundle.last_error)
|
||||
self.assertIsNotNone(bundle.built_at)
|
||||
self.assertGreater(bundle.size_bytes or 0, 0)
|
||||
final_path = bundle.absolute_file_path
|
||||
self.assertIsNotNone(final_path)
|
||||
self.assertTrue(final_path.exists())
|
||||
with zipfile.ZipFile(final_path) as zipf:
|
||||
names = zipf.namelist()
|
||||
self.assertEqual(len(names), 1)
|
||||
self.assertEqual(zipf.read(names[0]), archive_path.read_bytes())
|
||||
|
||||
def test_build_share_link_bundle_overwrites_existing_file(self):
|
||||
self._write_document_file(archive=False, content=b"source")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="overwrite",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
existing = settings.SHARE_LINK_BUNDLE_DIR / "overwrite.zip"
|
||||
existing.parent.mkdir(parents=True, exist_ok=True)
|
||||
existing.write_bytes(b"old")
|
||||
|
||||
build_share_link_bundle(bundle.pk)
|
||||
|
||||
bundle.refresh_from_db()
|
||||
final_path = bundle.absolute_file_path
|
||||
self.assertIsNotNone(final_path)
|
||||
self.assertTrue(final_path.exists())
|
||||
self.assertNotEqual(final_path.read_bytes(), b"old")
|
||||
|
||||
def test_build_share_link_bundle_failure_marks_failed(self):
|
||||
self._write_document_file(archive=False, content=b"source")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="fail-bundle",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
)
|
||||
bundle.documents.set([self.document])
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"documents.tasks.OriginalsOnlyStrategy.add_document",
|
||||
side_effect=RuntimeError("zip failure"),
|
||||
),
|
||||
mock.patch("pathlib.Path.unlink") as unlink_mock,
|
||||
):
|
||||
unlink_mock.side_effect = [OSError("unlink"), OSError("unlink-finally")] + [
|
||||
None,
|
||||
] * 5
|
||||
with self.assertRaises(RuntimeError):
|
||||
build_share_link_bundle(bundle.pk)
|
||||
|
||||
bundle.refresh_from_db()
|
||||
self.assertEqual(bundle.status, ShareLinkBundle.Status.FAILED)
|
||||
self.assertIsInstance(bundle.last_error, dict)
|
||||
self.assertEqual(bundle.last_error.get("message"), "zip failure")
|
||||
self.assertEqual(bundle.last_error.get("exception_type"), "RuntimeError")
|
||||
scratch_zips = list(Path(settings.SCRATCH_DIR).glob("*.zip"))
|
||||
self.assertTrue(scratch_zips)
|
||||
for path in scratch_zips:
|
||||
path.unlink(missing_ok=True)
|
||||
|
||||
def test_build_share_link_bundle_missing_bundle_noop(self):
|
||||
# Should not raise when bundle does not exist
|
||||
build_share_link_bundle(99999)
|
||||
|
||||
|
||||
class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.document = DocumentFactory.create()
|
||||
self.document.checksum = "doc1checksum"
|
||||
self.document.save()
|
||||
self.other_document = DocumentFactory.create()
|
||||
self.other_document.checksum = "doc2checksum"
|
||||
self.other_document.save()
|
||||
self.bundle_one = ShareLinkBundle.objects.create(
|
||||
slug="bundle-one",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
)
|
||||
self.bundle_one.documents.set([self.document])
|
||||
self.bundle_two = ShareLinkBundle.objects.create(
|
||||
slug="bundle-two",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
)
|
||||
self.bundle_two.documents.set([self.other_document])
|
||||
|
||||
def test_filter_documents_returns_all_for_empty_value(self):
|
||||
filterset = ShareLinkBundleFilterSet(
|
||||
data={"documents": ""},
|
||||
queryset=ShareLinkBundle.objects.all(),
|
||||
)
|
||||
|
||||
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
|
||||
|
||||
def test_filter_documents_handles_invalid_input(self):
|
||||
filterset = ShareLinkBundleFilterSet(
|
||||
data={"documents": "invalid"},
|
||||
queryset=ShareLinkBundle.objects.all(),
|
||||
)
|
||||
|
||||
self.assertFalse(filterset.qs.exists())
|
||||
|
||||
def test_filter_documents_filters_by_multiple_ids(self):
|
||||
filterset = ShareLinkBundleFilterSet(
|
||||
data={"documents": f"{self.document.pk},{self.other_document.pk}"},
|
||||
queryset=ShareLinkBundle.objects.all(),
|
||||
)
|
||||
|
||||
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
|
||||
|
||||
def test_filter_documents_returns_queryset_for_empty_ids(self):
|
||||
filterset = ShareLinkBundleFilterSet(
|
||||
data={"documents": ","},
|
||||
queryset=ShareLinkBundle.objects.all(),
|
||||
)
|
||||
|
||||
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
|
||||
|
||||
|
||||
class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
|
||||
def test_absolute_file_path_handles_relative_and_absolute(self):
|
||||
relative_path = Path("relative.zip")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="relative-bundle",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
file_path=str(relative_path),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
bundle.absolute_file_path,
|
||||
(settings.SHARE_LINK_BUNDLE_DIR / relative_path).resolve(),
|
||||
)
|
||||
|
||||
absolute_path = Path(self.dirs.media_dir) / "absolute.zip"
|
||||
bundle.file_path = str(absolute_path)
|
||||
|
||||
self.assertEqual(bundle.absolute_file_path.resolve(), absolute_path.resolve())
|
||||
|
||||
def test_str_returns_translated_slug(self):
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="string-slug",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
)
|
||||
|
||||
self.assertIn("string-slug", str(bundle))
|
||||
|
||||
def test_remove_file_deletes_existing_file(self):
|
||||
bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove.zip"
|
||||
bundle_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
bundle_path.write_bytes(b"remove-me")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="remove-bundle",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
file_path=str(bundle_path.relative_to(settings.SHARE_LINK_BUNDLE_DIR)),
|
||||
)
|
||||
|
||||
bundle.remove_file()
|
||||
|
||||
self.assertFalse(bundle_path.exists())
|
||||
|
||||
def test_remove_file_handles_oserror(self):
|
||||
bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "remove-error.zip"
|
||||
bundle_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
bundle_path.write_bytes(b"remove-me")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="remove-error",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
file_path=str(bundle_path.relative_to(settings.SHARE_LINK_BUNDLE_DIR)),
|
||||
)
|
||||
|
||||
with mock.patch("pathlib.Path.unlink", side_effect=OSError("fail")):
|
||||
bundle.remove_file()
|
||||
|
||||
self.assertTrue(bundle_path.exists())
|
||||
|
||||
def test_delete_calls_remove_file(self):
|
||||
bundle_path = settings.SHARE_LINK_BUNDLE_DIR / "delete.zip"
|
||||
bundle_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
bundle_path.write_bytes(b"remove-me")
|
||||
bundle = ShareLinkBundle.objects.create(
|
||||
slug="delete-bundle",
|
||||
file_version=ShareLink.FileVersion.ORIGINAL,
|
||||
file_path=str(bundle_path.relative_to(settings.SHARE_LINK_BUNDLE_DIR)),
|
||||
)
|
||||
|
||||
bundle.delete()
|
||||
self.assertFalse(bundle_path.exists())
|
||||
|
||||
|
||||
class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.document = DocumentFactory.create()
|
||||
|
||||
def test_validate_document_ids_rejects_duplicates(self):
|
||||
serializer = ShareLinkBundleSerializer(
|
||||
data={
|
||||
"document_ids": [self.document.pk, self.document.pk],
|
||||
"file_version": ShareLink.FileVersion.ORIGINAL,
|
||||
},
|
||||
)
|
||||
|
||||
self.assertFalse(serializer.is_valid())
|
||||
self.assertIn("document_ids", serializer.errors)
|
||||
|
||||
def test_create_assigns_documents_and_expiration(self):
|
||||
serializer = ShareLinkBundleSerializer(
|
||||
data={
|
||||
"document_ids": [self.document.pk],
|
||||
"file_version": ShareLink.FileVersion.ORIGINAL,
|
||||
"expiration_days": 3,
|
||||
},
|
||||
)
|
||||
|
||||
self.assertTrue(serializer.is_valid(), serializer.errors)
|
||||
bundle = serializer.save()
|
||||
|
||||
self.assertEqual(list(bundle.documents.all()), [self.document])
|
||||
expected_expiration = timezone.now() + timedelta(days=3)
|
||||
self.assertAlmostEqual(
|
||||
bundle.expiration,
|
||||
expected_expiration,
|
||||
delta=timedelta(seconds=10),
|
||||
)
|
||||
|
||||
def test_create_raises_when_missing_documents(self):
|
||||
serializer = ShareLinkBundleSerializer(
|
||||
data={
|
||||
"document_ids": [self.document.pk, 9999],
|
||||
"file_version": ShareLink.FileVersion.ORIGINAL,
|
||||
},
|
||||
)
|
||||
|
||||
self.assertTrue(serializer.is_valid(), serializer.errors)
|
||||
with self.assertRaises(serializers.ValidationError):
|
||||
serializer.save(documents=[self.document])
|
||||
@@ -33,6 +33,7 @@ from django.db.models import IntegerField
from django.db.models import Max
from django.db.models import Model
from django.db.models import Q
from django.db.models import Subquery
from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Lower
@@ -50,7 +51,6 @@ from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.timezone import make_aware
from django.utils.translation import get_language
from django.utils.translation import gettext_lazy as _
from django.views import View
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
@@ -71,7 +71,6 @@ from packaging import version as packaging_version
from redis import Redis
from rest_framework import parsers
from rest_framework import serializers
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound
from rest_framework.exceptions import ValidationError
@@ -122,7 +121,6 @@ from documents.filters import DocumentTypeFilterSet
from documents.filters import ObjectOwnedOrGrantedPermissionsFilter
from documents.filters import ObjectOwnedPermissionsFilter
from documents.filters import PaperlessTaskFilterSet
from documents.filters import ShareLinkBundleFilterSet
from documents.filters import ShareLinkFilterSet
from documents.filters import StoragePathFilterSet
from documents.filters import TagFilterSet
@@ -140,7 +138,6 @@ from documents.models import Note
from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import UiSettings
@@ -157,6 +154,7 @@ from documents.permissions import ViewDocumentsPermissions
from documents.permissions import get_document_count_filter_for_user
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import has_perms_owner_aware
from documents.permissions import permitted_document_ids
from documents.permissions import set_permissions_for_object
from documents.schema import generate_object_with_permissions_schema
from documents.serialisers import AcknowledgeTasksViewSerializer
@@ -174,7 +172,6 @@ from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RunTaskViewSerializer
from documents.serialisers import SavedViewSerializer
from documents.serialisers import SearchResultSerializer
from documents.serialisers import ShareLinkBundleSerializer
from documents.serialisers import ShareLinkSerializer
from documents.serialisers import StoragePathSerializer
from documents.serialisers import StoragePathTestSerializer
@@ -187,7 +184,6 @@ from documents.serialisers import WorkflowActionSerializer
from documents.serialisers import WorkflowSerializer
from documents.serialisers import WorkflowTriggerSerializer
from documents.signals import document_updated
from documents.tasks import build_share_link_bundle
from documents.tasks import consume_file
from documents.tasks import empty_trash
from documents.tasks import index_optimize
@@ -2441,7 +2437,7 @@ class BulkDownloadView(GenericAPIView):
        follow_filename_format = serializer.validated_data.get("follow_formatting")

        for document in documents:
            if not has_perms_owner_aware(request.user, "change_document", document):
            if not has_perms_owner_aware(request.user, "view_document", document):
                return HttpResponseForbidden("Insufficient permissions")

        settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
@@ -2796,187 +2792,21 @@ class ShareLinkViewSet(ModelViewSet, PassUserMixin):
    ordering_fields = ("created", "expiration", "document")


class ShareLinkBundleViewSet(ModelViewSet, PassUserMixin):
    model = ShareLinkBundle

    queryset = ShareLinkBundle.objects.all()

    serializer_class = ShareLinkBundleSerializer
    pagination_class = StandardPagination
    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
    filter_backends = (
        DjangoFilterBackend,
        OrderingFilter,
        ObjectOwnedOrGrantedPermissionsFilter,
    )
    filterset_class = ShareLinkBundleFilterSet
    ordering_fields = ("created", "expiration", "status")

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .prefetch_related("documents")
            .annotate(document_total=Count("documents", distinct=True))
        )

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        document_ids = serializer.validated_data["document_ids"]
        documents_qs = Document.objects.filter(pk__in=document_ids).select_related(
            "owner",
        )
        found_ids = set(documents_qs.values_list("pk", flat=True))
        missing = sorted(set(document_ids) - found_ids)
        if missing:
            raise ValidationError(
                {
                    "document_ids": _(
                        "Documents not found: %(ids)s",
                    )
                    % {"ids": ", ".join(str(item) for item in missing)},
                },
            )

        documents = list(documents_qs)
        for document in documents:
            if not has_perms_owner_aware(request.user, "view_document", document):
                raise ValidationError(
                    {
                        "document_ids": _(
                            "Insufficient permissions to share document %(id)s.",
                        )
                        % {"id": document.pk},
                    },
                )

        document_map = {document.pk: document for document in documents}
        ordered_documents = [document_map[doc_id] for doc_id in document_ids]

        bundle = serializer.save(
            owner=request.user,
            documents=ordered_documents,
        )
        bundle.remove_file()
        bundle.status = ShareLinkBundle.Status.PENDING
        bundle.last_error = None
        bundle.size_bytes = None
        bundle.built_at = None
        bundle.file_path = ""
        bundle.save(
            update_fields=[
                "status",
                "last_error",
                "size_bytes",
                "built_at",
                "file_path",
            ],
        )
        build_share_link_bundle.delay(bundle.pk)
        bundle.document_total = len(ordered_documents)
        response_serializer = self.get_serializer(bundle)
        headers = self.get_success_headers(response_serializer.data)
        return Response(
            response_serializer.data,
            status=status.HTTP_201_CREATED,
            headers=headers,
        )

    @action(detail=True, methods=["post"])
    def rebuild(self, request, pk=None):
        bundle = self.get_object()
        if bundle.status == ShareLinkBundle.Status.PROCESSING:
            return Response(
                {"detail": _("Bundle is already being processed.")},
                status=status.HTTP_400_BAD_REQUEST,
            )
        bundle.remove_file()
        bundle.status = ShareLinkBundle.Status.PENDING
        bundle.last_error = None
        bundle.size_bytes = None
        bundle.built_at = None
        bundle.file_path = ""
        bundle.save(
            update_fields=[
                "status",
                "last_error",
                "size_bytes",
                "built_at",
                "file_path",
            ],
        )
        build_share_link_bundle.delay(bundle.pk)
        bundle.document_total = (
            getattr(bundle, "document_total", None) or bundle.documents.count()
        )
        serializer = self.get_serializer(bundle)
        return Response(serializer.data)

class SharedLinkView(View):
    authentication_classes = []
    permission_classes = []

    def get(self, request, slug):
        share_link = ShareLink.objects.filter(slug=slug).first()
        if share_link is not None:
            if (
                share_link.expiration is not None
                and share_link.expiration < timezone.now()
            ):
                return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")
            return serve_file(
                doc=share_link.document,
                use_archive=share_link.file_version == "archive",
                disposition="inline",
            )

        bundle = ShareLinkBundle.objects.filter(slug=slug).first()
        if bundle is None:
        if share_link is None:
            return HttpResponseRedirect("/accounts/login/?sharelink_notfound=1")

        if bundle.expiration is not None and bundle.expiration < timezone.now():
        if share_link.expiration is not None and share_link.expiration < timezone.now():
            return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")

        if bundle.status in {
            ShareLinkBundle.Status.PENDING,
            ShareLinkBundle.Status.PROCESSING,
        }:
            return HttpResponse(
                _(
                    "The share link bundle is still being prepared. Please try again later.",
                ),
                status=status.HTTP_202_ACCEPTED,
            )

        file_path = bundle.absolute_file_path

        if bundle.status == ShareLinkBundle.Status.FAILED or file_path is None:
            return HttpResponse(
                _(
                    "The share link bundle is unavailable.",
                ),
                status=status.HTTP_503_SERVICE_UNAVAILABLE,
            )

        response = FileResponse(file_path.open("rb"), content_type="application/zip")
        short_slug = bundle.slug[:12]
        download_name = f"paperless-share-{short_slug}.zip"
        filename_normalized = (
            normalize("NFKD", download_name)
            .encode(
                "ascii",
                "ignore",
            )
            .decode("ascii")
        return serve_file(
            doc=share_link.document,
            use_archive=share_link.file_version == "archive",
            disposition="inline",
        )
        filename_encoded = quote(download_name)
        response["Content-Disposition"] = (
            f"attachment; filename='{filename_normalized}'; "
            f"filename*=utf-8''{filename_encoded}"
        )
        return response


def serve_file(*, doc: Document, use_archive: bool, disposition: str):
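Reviewer note, not part of the diff: a minimal sketch of how a client could exercise the new bundle endpoints. The `/api/share_link_bundles/` route and the `document_ids`/`file_version` payload fields come from this change; the base URL, token, document IDs and the `id` response field are assumptions.

```python
# Hypothetical client sketch; host, token, IDs and the "id" field are assumptions.
import httpx

BASE_URL = "http://localhost:8000"
HEADERS = {"Authorization": "Token REPLACE_ME"}

# Create a bundle; the viewset queues build_share_link_bundle and returns 201.
created = httpx.post(
    f"{BASE_URL}/api/share_link_bundles/",
    headers=HEADERS,
    json={"document_ids": [1, 2], "file_version": "original"},
)
created.raise_for_status()
bundle = created.json()

# Later, force a rebuild through the detail action shown above.
rebuilt = httpx.post(
    f"{BASE_URL}/api/share_link_bundles/{bundle['id']}/rebuild/",
    headers=HEADERS,
)
print(rebuilt.status_code)
```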
@@ -3179,27 +3009,32 @@ class CustomFieldViewSet(ModelViewSet):
    queryset = CustomField.objects.all().order_by("-created")

    def get_queryset(self):
        filter = (
            Q(fields__document__deleted_at__isnull=True)
            if self.request.user is None or self.request.user.is_superuser
            else (
                Q(
                    fields__document__deleted_at__isnull=True,
                    fields__document__id__in=get_objects_for_user_owner_aware(
                        self.request.user,
                        "documents.view_document",
                        Document,
                    ).values_list("id", flat=True),
        user = self.request.user
        if user is None or user.is_superuser:
            return (
                super()
                .get_queryset()
                .annotate(
                    document_count=Count(
                        "fields",
                        filter=Q(fields__document__deleted_at__isnull=True),
                        distinct=True,
                    ),
                )
            )
        )

        permitted_ids = Subquery(permitted_document_ids(user))
        return (
            super()
            .get_queryset()
            .annotate(
                document_count=Count(
                    "fields",
                    filter=filter,
                    filter=Q(
                        fields__document__deleted_at__isnull=True,
                        fields__document_id__in=permitted_ids,
                    ),
                    distinct=True,
                ),
            )
        )

File diff suppressed because it is too large
@@ -3,7 +3,7 @@ from urllib.parse import quote

from allauth.account.adapter import DefaultAccountAdapter
from allauth.core import context
from allauth.headless.tokens.strategies.sessions import SessionTokenStrategy
from allauth.headless.tokens.sessions import SessionTokenStrategy
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.conf import settings
from django.contrib.auth.models import Group
@@ -241,17 +241,6 @@ def _parse_beat_schedule() -> dict:
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Cleanup expired share link bundles",
            "env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
            # Default daily at 02:00
            "env_default": "0 2 * * *",
            "task": "documents.tasks.cleanup_expired_share_link_bundles",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
    ]
    for task in tasks:
        # Either get the environment setting or use the default
@@ -290,7 +279,6 @@ MEDIA_ROOT = __get_path("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
ORIGINALS_DIR = MEDIA_ROOT / "documents" / "originals"
ARCHIVE_DIR = MEDIA_ROOT / "documents" / "archive"
THUMBNAIL_DIR = MEDIA_ROOT / "documents" / "thumbnails"
SHARE_LINK_BUNDLE_DIR = MEDIA_ROOT / "documents" / "share_link_bundles"

DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")

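Not part of the change, but for context: the default `0 2 * * *` that `PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON` falls back to corresponds to a Celery crontab firing daily at 02:00, roughly as sketched below (field order follows standard cron).

```python
# Illustration of how a cron string such as "0 2 * * *" maps onto a Celery crontab.
from celery.schedules import crontab

minute, hour, day_of_month, month, day_of_week = "0 2 * * *".split()
cleanup_schedule = crontab(
    minute=minute,
    hour=hour,
    day_of_month=day_of_month,
    month_of_year=month,
    day_of_week=day_of_week,
)
print(cleanup_schedule)  # runs once per day at 02:00
```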
@@ -161,7 +161,6 @@ class TestCeleryScheduleParsing(TestCase):
    EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0
    LLM_INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0

    def test_schedule_configuration_default(self):
        """
@@ -213,13 +212,6 @@ class TestCeleryScheduleParsing(TestCase):
                        "expires": self.LLM_INDEX_EXPIRE_TIME,
                    },
                },
                "Cleanup expired share link bundles": {
                    "task": "documents.tasks.cleanup_expired_share_link_bundles",
                    "schedule": crontab(minute=0, hour=2),
                    "options": {
                        "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
                    },
                },
            },
            schedule,
        )
@@ -279,13 +271,6 @@ class TestCeleryScheduleParsing(TestCase):
                        "expires": self.LLM_INDEX_EXPIRE_TIME,
                    },
                },
                "Cleanup expired share link bundles": {
                    "task": "documents.tasks.cleanup_expired_share_link_bundles",
                    "schedule": crontab(minute=0, hour=2),
                    "options": {
                        "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
                    },
                },
            },
            schedule,
        )
@@ -337,13 +322,6 @@ class TestCeleryScheduleParsing(TestCase):
                        "expires": self.LLM_INDEX_EXPIRE_TIME,
                    },
                },
                "Cleanup expired share link bundles": {
                    "task": "documents.tasks.cleanup_expired_share_link_bundles",
                    "schedule": crontab(minute=0, hour=2),
                    "options": {
                        "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
                    },
                },
            },
            schedule,
        )
@@ -367,7 +345,6 @@ class TestCeleryScheduleParsing(TestCase):
                "PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
                "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
                "PAPERLESS_LLM_INDEX_TASK_CRON": "disable",
                "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
            },
        ):
            schedule = _parse_beat_schedule()

@@ -31,7 +31,6 @@ from documents.views import SavedViewViewSet
from documents.views import SearchAutoCompleteView
from documents.views import SelectionDataView
from documents.views import SharedLinkView
from documents.views import ShareLinkBundleViewSet
from documents.views import ShareLinkViewSet
from documents.views import StatisticsView
from documents.views import StoragePathViewSet
@@ -74,7 +73,6 @@ api_router.register(r"users", UserViewSet, basename="users")
api_router.register(r"groups", GroupViewSet, basename="groups")
api_router.register(r"mail_accounts", MailAccountViewSet)
api_router.register(r"mail_rules", MailRuleViewSet)
api_router.register(r"share_link_bundles", ShareLinkBundleViewSet)
api_router.register(r"share_links", ShareLinkViewSet)
api_router.register(r"workflow_triggers", WorkflowTriggerViewSet)
api_router.register(r"workflow_actions", WorkflowActionViewSet)

@@ -1,14 +1,11 @@
import logging
import shutil
from datetime import timedelta
from pathlib import Path

import faiss
import llama_index.core.settings as llama_settings
import tqdm
from celery import states
from django.conf import settings
from django.utils import timezone
from llama_index.core import Document as LlamaDocument
from llama_index.core import StorageContext
from llama_index.core import VectorStoreIndex
@@ -24,7 +21,6 @@ from llama_index.core.text_splitter import TokenTextSplitter
from llama_index.vector_stores.faiss import FaissVectorStore

from documents.models import Document
from documents.models import PaperlessTask
from paperless_ai.embedding import build_llm_index_text
from paperless_ai.embedding import get_embedding_dim
from paperless_ai.embedding import get_embedding_model
@@ -32,29 +28,6 @@ from paperless_ai.embedding import get_embedding_model
logger = logging.getLogger("paperless_ai.indexing")


def queue_llm_index_update_if_needed(*, rebuild: bool, reason: str) -> bool:
    from documents.tasks import llmindex_index

    has_running = PaperlessTask.objects.filter(
        task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
        status__in=[states.PENDING, states.STARTED],
    ).exists()
    has_recent = PaperlessTask.objects.filter(
        task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
        date_created__gte=(timezone.now() - timedelta(minutes=5)),
    ).exists()
    if has_running or has_recent:
        return False

    llmindex_index.delay(rebuild=rebuild, scheduled=False, auto=True)
    logger.warning(
        "Queued LLM index update%s: %s",
        " (rebuild)" if rebuild else "",
        reason,
    )
    return True


def get_or_create_storage_context(*, rebuild=False):
    """
    Loads or creates the StorageContext (vector store, docstore, index store).
@@ -120,10 +93,6 @@ def load_or_build_index(nodes=None):
    except ValueError as e:
        logger.warning("Failed to load index from storage: %s", e)
        if not nodes:
            queue_llm_index_update_if_needed(
                rebuild=vector_store_file_exists(),
                reason="LLM index missing or invalid while loading.",
            )
            logger.info("No nodes provided for index creation.")
            raise
    return VectorStoreIndex(
@@ -281,13 +250,6 @@ def query_similar_documents(
    """
    Runs a similarity query and returns top-k similar Document objects.
    """
    if not vector_store_file_exists():
        queue_llm_index_update_if_needed(
            rebuild=False,
            reason="LLM index not found for similarity query.",
        )
        return []

    index = load_or_build_index()

    # constrain only the node(s) that match the document IDs, if given

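A hedged illustration, not part of the diff, of the debouncing that `queue_llm_index_update_if_needed()` introduces: while an update task is pending/started, or one was created within the last five minutes, further calls return `False` and enqueue nothing. This assumes a configured Django environment in which queued Celery runs are recorded as `PaperlessTask` rows.

```python
# Sketch only: behaviour depends on PaperlessTask records existing for queued runs.
from paperless_ai import indexing

first = indexing.queue_llm_index_update_if_needed(
    rebuild=False,
    reason="manual trigger",
)
second = indexing.queue_llm_index_update_if_needed(
    rebuild=False,
    reason="called again right away",
)
# If the first call's task has been recorded, the second call is skipped.
print(first, second)
```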
@@ -3,13 +3,11 @@ from unittest.mock import MagicMock
from unittest.mock import patch

import pytest
from celery import states
from django.test import override_settings
from django.utils import timezone
from llama_index.core.base.embeddings.base import BaseEmbedding

from documents.models import Document
from documents.models import PaperlessTask
from paperless_ai import indexing


@@ -290,36 +288,6 @@ def test_update_llm_index_no_documents(
    )


@pytest.mark.django_db
def test_queue_llm_index_update_if_needed_enqueues_when_idle_or_skips_recent():
    # No existing tasks
    with patch("documents.tasks.llmindex_index") as mock_task:
        result = indexing.queue_llm_index_update_if_needed(
            rebuild=True,
            reason="test enqueue",
        )

    assert result is True
    mock_task.delay.assert_called_once_with(rebuild=True, scheduled=False, auto=True)

    PaperlessTask.objects.create(
        task_id="task-1",
        task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
        status=states.STARTED,
        date_created=timezone.now(),
    )

    # Existing running task
    with patch("documents.tasks.llmindex_index") as mock_task:
        result = indexing.queue_llm_index_update_if_needed(
            rebuild=False,
            reason="should skip",
        )

    assert result is False
    mock_task.delay.assert_not_called()


@override_settings(
    LLM_EMBEDDING_BACKEND="huggingface",
    LLM_BACKEND="ollama",
@@ -331,15 +299,11 @@ def test_query_similar_documents(
    with (
        patch("paperless_ai.indexing.get_or_create_storage_context") as mock_storage,
        patch("paperless_ai.indexing.load_or_build_index") as mock_load_or_build_index,
        patch(
            "paperless_ai.indexing.vector_store_file_exists",
        ) as mock_vector_store_exists,
        patch("paperless_ai.indexing.VectorIndexRetriever") as mock_retriever_cls,
        patch("paperless_ai.indexing.Document.objects.filter") as mock_filter,
    ):
        mock_storage.return_value = MagicMock()
        mock_storage.return_value.persist_dir = temp_llm_index_dir
        mock_vector_store_exists.return_value = True

        mock_index = MagicMock()
        mock_load_or_build_index.return_value = mock_index
@@ -368,31 +332,3 @@ def test_query_similar_documents(
    mock_filter.assert_called_once_with(pk__in=[1, 2])

    assert result == mock_filtered_docs


@pytest.mark.django_db
def test_query_similar_documents_triggers_update_when_index_missing(
    temp_llm_index_dir,
    real_document,
):
    with (
        patch(
            "paperless_ai.indexing.vector_store_file_exists",
            return_value=False,
        ),
        patch(
            "paperless_ai.indexing.queue_llm_index_update_if_needed",
        ) as mock_queue,
        patch("paperless_ai.indexing.load_or_build_index") as mock_load,
    ):
        result = indexing.query_similar_documents(
            real_document,
            top_k=2,
        )

    mock_queue.assert_called_once_with(
        rebuild=False,
        reason="LLM index not found for similarity query.",
    )
    mock_load.assert_not_called()
    assert result == []

@@ -1,3 +1,4 @@
import os
from collections.abc import Generator
from pathlib import Path

@@ -69,31 +70,20 @@ def mail_parser() -> MailDocumentParser:


@pytest.fixture()
def greenmail_mail_account(db: None) -> Generator[MailAccount, None, None]:
    """
    Create a mail account configured for local Greenmail server.
    """
    account = MailAccount.objects.create(
        name="Greenmail Test",
        imap_server="localhost",
        imap_port=3143,
        imap_security=MailAccount.ImapSecurity.NONE,
        username="test@localhost",
        password="test",
        character_set="UTF-8",
    )
    yield account
    account.delete()
def live_mail_account() -> Generator[MailAccount, None, None]:
    try:
        account = MailAccount.objects.create(
            name="test",
            imap_server=os.environ["PAPERLESS_MAIL_TEST_HOST"],
            username=os.environ["PAPERLESS_MAIL_TEST_USER"],
            password=os.environ["PAPERLESS_MAIL_TEST_PASSWD"],
            imap_port=993,
        )
        yield account
    finally:
        account.delete()


@pytest.fixture()
def mail_account_handler() -> MailAccountHandler:
    return MailAccountHandler()


@pytest.fixture(scope="session")
def nginx_base_url() -> Generator[str, None, None]:
    """
    The base URL for the nginx HTTP server we expect to be alive
    """
    yield "http://localhost:8080"

@@ -55,7 +55,7 @@ Content-Transfer-Encoding: 7bit
  <p>Some Text</p>
  <p>
    <img src="cid:part1.pNdUSz0s.D3NqVtPg@example.de" alt="Has to be rewritten to work..">
    <img src="http://localhost:8080/assets/logo_full_white.svg" alt="This image should not be shown.">
    <img src="https://docs.paperless-ngx.com/assets/logo_full_white.svg" alt="This image should not be shown.">
  </p>

  <p>and an embedded image.<br>

@@ -6,7 +6,7 @@
  <p>Some Text</p>
  <p>
    <img src="cid:part1.pNdUSz0s.D3NqVtPg@example.de" alt="Has to be rewritten to work..">
    <img src="http://localhost:8080/assets/logo_full_white.svg" alt="This image should not be shown.">
    <img src="https://docs.paperless-ngx.com/assets/logo_full_white.svg" alt="This image should not be shown.">
  </p>

  <p>and an embedded image.<br>

@@ -1,3 +1,6 @@
import os
import warnings

import pytest

from paperless_mail.mail import MailAccountHandler
@@ -6,53 +9,53 @@ from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule


@pytest.mark.live
@pytest.mark.greenmail
@pytest.mark.django_db
class TestMailGreenmail:
    """
    Mail tests using local Greenmail server
    """

    def test_process_flag(
# Only run if the environment is setup
# And the environment is not empty (forks, I think)
@pytest.mark.skipif(
    "PAPERLESS_MAIL_TEST_HOST" not in os.environ
    or not len(os.environ["PAPERLESS_MAIL_TEST_HOST"]),
    reason="Live server testing not enabled",
)
@pytest.mark.django_db()
class TestMailLiveServer:
    def test_process_non_gmail_server_flag(
        self,
        mail_account_handler: MailAccountHandler,
        greenmail_mail_account: MailAccount,
    ) -> None:
        """
        Test processing mail with FLAG action.
        """
        rule = MailRule.objects.create(
            name="testrule",
            account=greenmail_mail_account,
            action=MailRule.MailAction.FLAG,
        )

        live_mail_account: MailAccount,
    ):
        try:
            mail_account_handler.handle_mail_account(greenmail_mail_account)
            rule1 = MailRule.objects.create(
                name="testrule",
                account=live_mail_account,
                action=MailRule.MailAction.FLAG,
            )

            mail_account_handler.handle_mail_account(live_mail_account)

            rule1.delete()

        except MailError as e:
            pytest.fail(f"Failure: {e}")
        finally:
            rule.delete()
        except Exception as e:
            warnings.warn(f"Unhandled exception: {e}")

    def test_process_tag(
    def test_process_non_gmail_server_tag(
        self,
        mail_account_handler: MailAccountHandler,
        greenmail_mail_account: MailAccount,
    ) -> None:
        """
        Test processing mail with TAG action.
        """
        rule = MailRule.objects.create(
            name="testrule",
            account=greenmail_mail_account,
            action=MailRule.MailAction.TAG,
            action_parameter="TestTag",
        )

        live_mail_account: MailAccount,
    ):
        try:
            mail_account_handler.handle_mail_account(greenmail_mail_account)
            rule2 = MailRule.objects.create(
                name="testrule",
                account=live_mail_account,
                action=MailRule.MailAction.TAG,
            )

            mail_account_handler.handle_mail_account(live_mail_account)

            rule2.delete()

        except MailError as e:
            pytest.fail(f"Failure: {e}")
        finally:
            rule.delete()
        except Exception as e:
            warnings.warn(f"Unhandled exception: {e}")

@@ -17,7 +17,7 @@ from paperless_mail.parsers import MailDocumentParser
def extract_text(pdf_path: Path) -> str:
    """
    Using pdftotext from poppler, extracts the text of a PDF into a file,
    then reads the file contents and returns it.
    then reads the file contents and returns it
    """
    with tempfile.NamedTemporaryFile(
        mode="w+",
@@ -38,107 +38,71 @@ def extract_text(pdf_path: Path) -> str:


class MailAttachmentMock:
    def __init__(self, payload: bytes, content_id: str) -> None:
    def __init__(self, payload, content_id):
        self.payload = payload
        self.content_id = content_id
        self.content_type = "image/png"


@pytest.mark.live
@pytest.mark.nginx
@pytest.mark.skipif(
    "PAPERLESS_CI_TEST" not in os.environ,
    reason="No Gotenberg/Tika servers to test with",
)
class TestNginxService:
class TestUrlCanary:
    """
    Verify the local nginx server is responding correctly.
    These tests validate that the test infrastructure is working properly
    before running the actual parser tests that depend on HTTP resources.
    Verify certain URLs are still available so testing is valid still
    """

    def test_non_existent_resource_returns_404(
        self,
        nginx_base_url: str,
    ) -> None:
    def test_online_image_exception_on_not_available(self):
        """
        GIVEN:
            - Local nginx server is running
            - Fresh start
        WHEN:
            - A non-existent resource is requested
            - nonexistent image is requested
        THEN:
            - An HTTP 404 status code shall be returned
            - An exception shall be thrown
        """
        """
        A public image is used in the html sample file. We have no control
        whether this image stays online forever, so here we check if we can detect if is not
        available anymore.
        """
        resp = httpx.get(
            f"{nginx_base_url}/assets/non-existent.png",
            timeout=5.0,
            "https://docs.paperless-ngx.com/assets/non-existent.png",
        )
        with pytest.raises(httpx.HTTPStatusError) as exec_info:
            resp.raise_for_status()

        assert exec_info.value.response.status_code == httpx.codes.NOT_FOUND

    def test_valid_resource_is_available(
        self,
        nginx_base_url: str,
    ) -> None:
    def test_is_online_image_still_available(self):
        """
        GIVEN:
            - Local nginx server is running
            - Fresh start
        WHEN:
            - A valid test fixture resource is requested
            - A public image used in the html sample file is requested
        THEN:
            - The resource shall be returned with HTTP 200 status code
            - The response shall contain the expected content type
            - No exception shall be thrown
        """
        """
        A public image is used in the html sample file. We have no control
        whether this image stays online forever, so here we check if it is still there
        """

        # Now check the URL used in samples/sample.html
        resp = httpx.get(
            f"{nginx_base_url}/assets/logo_full_white.svg",
            timeout=5.0,
            "https://docs.paperless-ngx.com/assets/logo_full_white.svg",
        )
        resp.raise_for_status()

        assert resp.status_code == httpx.codes.OK
        assert "svg" in resp.headers.get("content-type", "").lower()

    def test_server_connectivity(
        self,
        nginx_base_url: str,
    ) -> None:
        """
        GIVEN:
            - Local test fixtures server should be running
        WHEN:
            - A request is made to the server root
        THEN:
            - The server shall respond without connection errors
        """
        try:
            resp = httpx.get(
                nginx_base_url,
                timeout=5.0,
                follow_redirects=True,
            )
            # We don't care about the status code, just that we can connect
            assert resp.status_code in {200, 404, 403}
        except httpx.ConnectError as e:
            pytest.fail(
                f"Cannot connect to nginx server at {nginx_base_url}. "
                f"Ensure the nginx container is running via docker-compose.ci-test.yml. "
                f"Error: {e}",
            )


@pytest.mark.live
@pytest.mark.gotenberg
@pytest.mark.tika
@pytest.mark.nginx
@pytest.mark.skipif(
    "PAPERLESS_CI_TEST" not in os.environ,
    reason="No Gotenberg/Tika servers to test with",
)
class TestParserLive:
    @staticmethod
    def imagehash(file: Path, hash_size: int = 18) -> str:
    def imagehash(file, hash_size=18):
        return f"{average_hash(Image.open(file), hash_size)}"

    def test_get_thumbnail(
@@ -148,15 +112,14 @@ class TestParserLive:
        simple_txt_email_file: Path,
        simple_txt_email_pdf_file: Path,
        simple_txt_email_thumbnail_file: Path,
    ) -> None:
    ):
        """
        GIVEN:
            - A simple text email file
            - Mocked PDF generation returning a known PDF
            - Fresh start
        WHEN:
            - The thumbnail is requested
            - The Thumbnail is requested
        THEN:
            - The returned thumbnail image file shall match the expected hash
            - The returned thumbnail image file is as expected
        """
        mock_generate_pdf = mocker.patch(
            "paperless_mail.parsers.MailDocumentParser.generate_pdf",
@@ -171,28 +134,22 @@ class TestParserLive:
        assert self.imagehash(thumb) == self.imagehash(
            simple_txt_email_thumbnail_file,
        ), (
            f"Created thumbnail {thumb} differs from expected file "
            f"{simple_txt_email_thumbnail_file}"
            f"Created Thumbnail {thumb} differs from expected file {simple_txt_email_thumbnail_file}"
        )

    def test_tika_parse_successful(self, mail_parser: MailDocumentParser) -> None:
    def test_tika_parse_successful(self, mail_parser: MailDocumentParser):
        """
        GIVEN:
            - HTML content to parse
            - Tika server is running
            - Fresh start
        WHEN:
            - Tika parsing is called
            - tika parsing is called
        THEN:
            - A web request to Tika shall be made
            - The parsed text content shall be returned
            - a web request to tika shall be done and the reply es returned
        """
        html = (
            '<html><head><meta http-equiv="content-type" '
            'content="text/html; charset=UTF-8"></head>'
            "<body><p>Some Text</p></body></html>"
        )
        html = '<html><head><meta http-equiv="content-type" content="text/html; charset=UTF-8"></head><body><p>Some Text</p></body></html>'
        expected_text = "Some Text"

        # Check successful parsing
        parsed = mail_parser.tika_parse(html)
        assert expected_text == parsed.strip()

@@ -203,17 +160,14 @@ class TestParserLive:
        html_email_file: Path,
        merged_pdf_first: Path,
        merged_pdf_second: Path,
    ) -> None:
    ):
        """
        GIVEN:
            - Intermediary PDFs to be merged
            - An HTML email file
            - Intermediary pdfs to be merged
        WHEN:
            - PDF generation is requested with HTML file requiring merging
            - pdf generation is requested with html file requiring merging of pdfs
        THEN:
            - Gotenberg shall be called to merge files
            - The resulting merged PDF shall be returned
            - The merged PDF shall contain text from both source PDFs
            - gotenberg is called to merge files and the resulting file is returned
        """
        mock_generate_pdf_from_html = mocker.patch(
            "paperless_mail.parsers.MailDocumentParser.generate_pdf_from_html",
@@ -246,17 +200,16 @@ class TestParserLive:
        html_email_file: Path,
        html_email_pdf_file: Path,
        html_email_thumbnail_file: Path,
    ) -> None:
    ):
        """
        GIVEN:
            - An HTML email file
            - Fresh start
        WHEN:
            - PDF generation from the email file is requested
            - pdf generation from simple eml file is requested
        THEN:
            - Gotenberg shall be called to generate the PDF
            - The archive PDF shall contain the expected content
            - The generated thumbnail shall match the expected image hash
            - Gotenberg is called and the resulting file is returned and look as expected.
        """

        util_call_with_backoff(mail_parser.parse, [html_email_file, "message/rfc822"])

        # Check the archive PDF
@@ -264,7 +217,7 @@ class TestParserLive:
        archive_text = extract_text(archive_path)
        expected_archive_text = extract_text(html_email_pdf_file)

        # Archive includes the HTML content
        # Archive includes the HTML content, so use in
        assert expected_archive_text in archive_text

        # Check the thumbnail
@@ -274,12 +227,9 @@ class TestParserLive:
        )
        generated_thumbnail_hash = self.imagehash(generated_thumbnail)

        # The created PDF is not reproducible, but the converted image
        # should always look the same
        # The created pdf is not reproducible. But the converted image should always look the same.
        expected_hash = self.imagehash(html_email_thumbnail_file)

        assert generated_thumbnail_hash == expected_hash, (
            f"PDF thumbnail differs from expected. "
            f"Generated: {generated_thumbnail}, "
            f"Hash: {generated_thumbnail_hash} vs {expected_hash}"
            f"PDF looks different. Check if {generated_thumbnail} looks weird."
        )

@@ -12,9 +12,6 @@ from paperless_tika.parsers import TikaDocumentParser
    reason="No Gotenberg/Tika servers to test with",
)
@pytest.mark.django_db()
@pytest.mark.live
@pytest.mark.gotenberg
@pytest.mark.tika
class TestTikaParserAgainstServer:
    """
    This test case tests the Tika parsing against a live tika server,
@@ -128,8 +128,6 @@ class TestTikaParser:

        request = httpx_mock.get_request()

        assert request is not None

        expected_field_name = "pdfa"

        content_type = request.headers["Content-Type"]
Block a user