Compare commits


1 commit

Author: Trenton Holmes
SHA1: 46345376b0
Message: Updates to the Trixie base image instead of Bookworm
Date: 2025-10-09 10:33:36 -07:00
44 changed files with 944 additions and 3437 deletions

View File

@@ -49,12 +49,12 @@ repos:
           - 'prettier-plugin-organize-imports@4.1.0'
   # Python hooks
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.0
+    rev: v0.13.2
     hooks:
       - id: ruff-check
       - id: ruff-format
   - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: "v2.11.0"
+    rev: "v2.6.0"
     hooks:
       - id: pyproject-fmt
   # Dockerfile hooks
@@ -76,9 +76,7 @@ repos:
     hooks:
       - id: shellcheck
   - repo: https://github.com/google/yamlfmt
-    rev: v0.18.0
+    rev: v0.17.2
     hooks:
       - id: yamlfmt
         exclude: "^src-ui/pnpm-lock.yaml"
-        types:
-          - yaml

View File

@@ -5,7 +5,7 @@
 # Purpose: Compiles the frontend
 # Notes:
 #  - Does PNPM stuff with Typescript and such
-FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim AS compile-frontend
+FROM --platform=$BUILDPLATFORM docker.io/node:20-trixie-slim AS compile-frontend

 COPY ./src-ui /src/src-ui
@@ -32,7 +32,7 @@ RUN set -eux \
 # Purpose: Installs s6-overlay and rootfs
 # Comments:
 #  - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.9.2-python3.12-bookworm-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.8.22-python3.12-bookworm-slim AS s6-overlay-base

 WORKDIR /usr/src/s6
@@ -170,20 +170,8 @@ RUN set -eux \
   && apt-get update \
   && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
   && echo "Installing pre-built updates" \
-  && curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
-    https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-    https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
-    https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
-  && echo "Installing qpdf ${QPDF_VERSION}" \
-  && dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-  && dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-  && echo "Installing Ghostscript ${GS_VERSION}" \
-  && dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
-  && dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-  && dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+  && curl --fail --silent --no-progress-meter --show-error --location --remote-name-all \
+    https://github.com/paperless-ngx/builder/releases/download/jbig2enc-v${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
   && echo "Installing jbig2enc" \
   && dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
   && echo "Configuring imagemagick" \

dev.txt (new file, 319 lines)
View File

@@ -0,0 +1,319 @@
adduser 3.134
apt 2.6.1
base-files 12.4+deb12u11
base-passwd 3.6.1
bash 5.2.15-2+b8
bsdutils 1:2.38.1-5+deb12u3
ca-certificates 20230311+deb12u1
coreutils 9.1-1
curl 7.88.1-10+deb12u12
dash 0.5.12-2
debconf 1.5.82
debian-archive-keyring 2023.3+deb12u2
debianutils 5.7-0.5~deb12u1
diffutils 1:3.8-4
dirmngr 2.2.40-1.1
dpkg 1.21.22
e2fsprogs 1.47.0-2
file 1:5.44-3
findutils 4.9.0-4
fontconfig 2.14.1-4
fontconfig-config 2.14.1-4
fonts-liberation 1:1.07.4-11
fonts-urw-base35 20200910-7
gcc-12-base 12.2.0-14+deb12u1
gettext 0.21-12
gettext-base 0.21-12
ghostscript 10.03.1~dfsg-1
gnupg 2.2.40-1.1
gnupg-l10n 2.2.40-1.1
gnupg-utils 2.2.40-1.1
gosu 1.14-1+b10
gpg 2.2.40-1.1
gpg-agent 2.2.40-1.1
gpg-wks-client 2.2.40-1.1
gpg-wks-server 2.2.40-1.1
gpgconf 2.2.40-1.1
gpgsm 2.2.40-1.1
gpgv 2.2.40-1.1
grep 3.8-5
gzip 1.12-1
hicolor-icon-theme 0.17-2
hostname 3.23+nmu1
icc-profiles-free 2.0.1+dfsg-1.1
imagemagick 8:6.9.11.60+dfsg-1.6+deb12u3
imagemagick-6-common 8:6.9.11.60+dfsg-1.6+deb12u3
imagemagick-6.q16 8:6.9.11.60+dfsg-1.6+deb12u3
init-system-helpers 1.65.2
jbig2dec 0.19-3
jbig2enc 0.30-1
libacl1 2.3.1-3
libaom3 3.6.0-1+deb12u1
libapt-pkg6.0 2.6.1
libarchive13 3.6.2-1+deb12u2
libassuan0 2.5.5-5
libattr1 1:2.5.1-4
libaudit-common 1:3.0.9-1
libaudit1 1:3.0.9-1
libavahi-client3 0.8-10+deb12u1
libavahi-common-data 0.8-10+deb12u1
libavahi-common3 0.8-10+deb12u1
libavcodec59 7:5.1.6-0+deb12u1
libavformat59 7:5.1.6-0+deb12u1
libavutil57 7:5.1.6-0+deb12u1
libblkid1 2.38.1-5+deb12u3
libbluray2 1:1.3.4-1
libbrotli1 1.0.9-2+b6
libbsd0 0.11.7-2
libbz2-1.0 1.0.8-5+b1
libc-bin 2.36-9+deb12u10
libc6 2.36-9+deb12u10
libcairo-gobject2 1.16.0-7
libcairo2 1.16.0-7
libcap-ng0 0.8.3-1+b3
libcap2 1:2.66-4+deb12u1
libchromaprint1 1.5.1-2+b1
libcjson1 1.7.15-1+deb12u2
libcodec2-1.0 1.0.5-1
libcom-err2 1.47.0-2
libconfig-inifiles-perl 3.000003-2
libcrypt1 1:4.4.33-2
libcups2 2.4.2-3+deb12u8
libcurl4 7.88.1-10+deb12u12
libdatrie1 0.2.13-2+b1
libdav1d6 1.0.0-2+deb12u1
libdb5.3 5.3.28+dfsg2-1
libdbus-1-3 1.14.10-1~deb12u1
libde265-0 1.0.11-1+deb12u2
libdebconfclient0 0.270
libdeflate0 1.14-1
libdrm-common 2.4.114-1
libdrm2 2.4.114-1+b1
libedit2 3.1-20221030-2
libexpat1 2.5.0-1+deb12u1
libext2fs2 1.47.0-2
libffi8 3.4.4-1
libfftw3-double3 3.3.10-1
libfontconfig1 2.14.1-4
libfontenc1 1:1.1.4-1
libfreetype6 2.12.1+dfsg-5+deb12u4
libfribidi0 1.0.8-2.1
libgcc-s1 12.2.0-14+deb12u1
libgcrypt20 1.10.1-3
libgdbm-compat4 1.23-3
libgdbm6 1.23-3
libgdk-pixbuf-2.0-0 2.42.10+dfsg-1+deb12u2
libgdk-pixbuf2.0-common 2.42.10+dfsg-1+deb12u2
libgif7 5.2.1-2.5
libglib2.0-0 2.74.6-2+deb12u6
libgme0 0.6.3-6
libgmp10 2:6.2.1+dfsg1-1.1
libgnutls30 3.7.9-2+deb12u5
libgomp1 12.2.0-14+deb12u1
libgpg-error0 1.46-1
libgraphite2-3 1.3.14-1
libgs-common 10.0.0~dfsg-11+deb12u7
libgs10 10.03.1~dfsg-1
libgs10-common 10.03.1~dfsg-1
libgsm1 1.0.22-1
libgssapi-krb5-2 1.20.1-2+deb12u3
libharfbuzz0b 6.0.0+dfsg-3
libheif1 1.15.1-1+deb12u1
libhogweed6 3.8.1-2
libhwy1 1.0.3-3+deb12u1
libice6 2:1.0.10-1
libicu72 72.1-3+deb12u1
libidn12 1.41-1
libidn2-0 2.3.3-1+b1
libijs-0.35 0.35-15
libimagequant0 2.17.0-1
libjbig0 2.1-6.1
libjbig2dec0 0.19-3
libjpeg62-turbo 1:2.1.5-2
libjxl0.7 0.7.0-10+deb12u1
libk5crypto3 1.20.1-2+deb12u3
libkeyutils1 1.6.3-2
libkrb5-3 1.20.1-2+deb12u3
libkrb5support0 1.20.1-2+deb12u3
libksba8 1.6.3-2
liblcms2-2 2.14-2
libldap-2.5-0 2.5.13+dfsg-5
liblept5 1.82.0-3+b3
liblerc4 4.0.0+ds-2
liblqr-1-0 0.4.2-2.1
libltdl7 2.4.7-7~deb12u1
liblz4-1 1.9.4-1
liblzma5 5.4.1-1
libmagic-mgc 1:5.44-3
libmagic1 1:5.44-3
libmagickcore-6.q16-6 8:6.9.11.60+dfsg-1.6+deb12u3
libmagickwand-6.q16-6 8:6.9.11.60+dfsg-1.6+deb12u3
libmariadb3 1:10.11.11-0+deb12u1
libmbedcrypto7 2.28.3-1
libmd0 1.0.4-2
libmfx1 22.5.4-1
libmount1 2.38.1-5+deb12u3
libmp3lame0 3.100-6
libmpg123-0 1.31.2-1+deb12u1
libncurses6 6.4-4
libncursesw6 6.4-4
libnettle8 3.8.1-2
libnghttp2-14 1.52.0-1+deb12u2
libnorm1 1.5.9+dfsg-2
libnpth0 1.6-3
libnsl2 1.3.0-2
libnspr4 2:4.35-1
libnss3 2:3.87.1-1+deb12u1
libnuma1 2.0.16-1
libogg0 1.3.5-3
libopenjp2-7 2.5.0-2+deb12u1
libopenmpt0 0.6.9-1
libopus0 1.3.1-3
libp11-kit0 0.24.1-2
libpam-modules 1.5.2-6+deb12u1
libpam-modules-bin 1.5.2-6+deb12u1
libpam-runtime 1.5.2-6+deb12u1
libpam0g 1.5.2-6+deb12u1
libpango-1.0-0 1.50.12+ds-1
libpangocairo-1.0-0 1.50.12+ds-1
libpangoft2-1.0-0 1.50.12+ds-1
libpaper1 1.1.29
libpcre2-8-0 10.42-1
libperl5.36 5.36.0-7+deb12u2
libpgm-5.3-0 5.3.128~dfsg-2
libpixman-1-0 0.42.2-1
libpng16-16 1.6.39-2
libpoppler126 22.12.0-2+deb12u1
libpq5 15.13-0+deb12u1
libpsl5 0.21.2-1
libqpdf29 11.9.0-1
librabbitmq4 0.11.0-1+deb12u1
librav1e0 0.5.1-6
libreadline8 8.2-1.3
librist4 0.2.7+dfsg-1
librsvg2-2 2.54.7+dfsg-1~deb12u1
librtmp1 2.4+20151223.gitfa8646d.1-2+b2
libsasl2-2 2.1.28+dfsg-10
libsasl2-modules-db 2.1.28+dfsg-10
libseccomp2 2.5.4-1+deb12u1
libselinux1 3.4-1+b6
libsemanage-common 3.4-1
libsemanage2 3.4-1+b5
libsepol2 3.4-2.1
libshine3 3.1.1-2
libsm6 2:1.2.3-1
libsmartcols1 2.38.1-5+deb12u3
libsnappy1v5 1.1.9-3
libsodium23 1.0.18-1
libsoxr0 0.1.3-4
libspeex1 1.2.1-2
libsqlite3-0 3.40.1-2+deb12u1
libsrt1.5-gnutls 1.5.1-1+deb12u1
libss2 1.47.0-2
libssh-gcrypt-4 0.10.6-0+deb12u1
libssh2-1 1.10.0-3+b1
libssl3 3.0.17-1~deb12u1
libstdc++6 12.2.0-14+deb12u1
libsvtav1enc1 1.4.1+dfsg-1
libswresample4 7:5.1.6-0+deb12u1
libsystemd0 252.38-1~deb12u1
libtasn1-6 4.19.0-2+deb12u1
libtesseract5 5.3.0-2
libthai-data 0.1.29-1
libthai0 0.1.29-1
libtheora0 1.1.1+dfsg.1-16.1+b1
libtiff6 4.5.0-6+deb12u2
libtinfo6 6.4-4
libtirpc-common 1.3.3+ds-1
libtirpc3 1.3.3+ds-1
libtwolame0 0.4.0-2
libudev1 252.38-1~deb12u1
libudfread0 1.1.2-1
libunistring2 1.0-2
libuuid1 2.38.1-5+deb12u3
libv4l-0 1.22.1-5+b2
libv4lconvert0 1.22.1-5+b2
libva-drm2 2.17.0-1
libva-x11-2 2.17.0-1
libva2 2.17.0-1
libvdpau1 1.5-2
libvorbis0a 1.3.7-1
libvorbisenc2 1.3.7-1
libvorbisfile3 1.3.7-1
libvpx7 1.12.0-1+deb12u4
libwebp7 1.2.4-0.2+deb12u1
libwebpdemux2 1.2.4-0.2+deb12u1
libwebpmux3 1.2.4-0.2+deb12u1
libx11-6 2:1.8.4-2+deb12u2
libx11-data 2:1.8.4-2+deb12u2
libx11-xcb1 2:1.8.4-2+deb12u2
libx264-164 2:0.164.3095+gitbaee400-3
libx265-199 3.5-2+b1
libxau6 1:1.0.9-1
libxcb-dri3-0 1.15-1
libxcb-render0 1.15-1
libxcb-shm0 1.15-1
libxcb1 1.15-1
libxdmcp6 1:1.1.2-3
libxext6 2:1.3.4-1+b1
libxfixes3 1:6.0.0-2
libxml2 2.9.14+dfsg-1.3~deb12u2
libxrender1 1:0.9.10-1.1
libxslt1.1 1.1.35-1+deb12u1
libxt6 1:1.2.1-1.1
libxvidcore4 2:1.3.7-1
libxxhash0 0.8.1-1
libzbar0 0.23.92-7+deb12u1
libzmq5 4.3.4-6
libzstd1 1.5.4+dfsg2-5
libzvbi-common 0.2.41-1
libzvbi0 0.2.41-1
login 1:4.13+dfsg1-1+deb12u1
logsave 1.47.0-2
mariadb-client 1:10.11.11-0+deb12u1
mariadb-client-core 1:10.11.11-0+deb12u1
mariadb-common 1:10.11.11-0+deb12u1
mawk 1.3.4.20200120-3.1
media-types 10.0.0
mount 2.38.1-5+deb12u3
mysql-common 5.8+1.1.0
ncurses-base 6.4-4
ncurses-bin 6.4-4
netbase 6.4
ocl-icd-libopencl1 2.3.1-1
openssl 3.0.17-1~deb12u1
passwd 1:4.13+dfsg1-1+deb12u1
perl 5.36.0-7+deb12u2
perl-base 5.36.0-7+deb12u2
perl-modules-5.36 5.36.0-7+deb12u2
pinentry-curses 1.2.1-1
pngquant 2.17.0-1
poppler-data 0.4.12-1
poppler-utils 22.12.0-2+deb12u1
postgresql-client 15+248
postgresql-client-15 15.13-0+deb12u1
postgresql-client-common 248
qpdf 11.9.0-1
readline-common 8.2-1.3
sed 4.9-1
sensible-utils 0.0.17+nmu1
shared-mime-info 2.2-1
sysvinit-utils 3.06-4
tar 1.34+dfsg-1.2+deb12u1
tesseract-ocr 5.3.0-2
tesseract-ocr-deu 1:4.1.0-2
tesseract-ocr-eng 1:4.1.0-2
tesseract-ocr-fra 1:4.1.0-2
tesseract-ocr-ita 1:4.1.0-2
tesseract-ocr-osd 1:4.1.0-2
tesseract-ocr-spa 1:4.1.0-2
tzdata 2025b-0+deb12u1
ucf 3.0043+nmu1+deb12u1
unpaper 7.0.0-0.1
usr-is-merged 37~deb12u1
util-linux 2.38.1-5+deb12u3
util-linux-extra 2.38.1-5+deb12u3
x11-common 1:7.7+23
xfonts-encodings 1:1.0.4-2.2
xfonts-utils 1:7.7+6
zlib1g 1:1.2.13.dfsg-1

View File

@@ -4,7 +4,7 @@
 # correct networking for the tests
 services:
   gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.24
+    image: docker.io/gotenberg/gotenberg:8.23
     hostname: gotenberg
     container_name: gotenberg
     network_mode: host

View File

@@ -72,7 +72,7 @@ services:
       PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998
   gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.24
+    image: docker.io/gotenberg/gotenberg:8.23
     restart: unless-stopped
     # The gotenberg chromium route is used to convert .eml files. We do not
     # want to allow external content like tracking pixels or even javascript.
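For reference on the comment above: paperless-ngx compose files typically lock the gotenberg Chromium route down with command-line flags. A minimal sketch, assuming the flag names used by recent Gotenberg releases (the flags are not part of this diff; verify them against the deployed version):

    # Sketch of a hardened gotenberg service; the two chromium flags are an
    # assumption based on the project's published compose files.
    gotenberg:
      image: docker.io/gotenberg/gotenberg:8.24
      restart: unless-stopped
      command:
        - 'gotenberg'
        - '--chromium-disable-javascript=true'   # no javascript in converted .eml files
        - '--chromium-allow-list=file:///tmp/.*' # no external content such as tracking pixels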

View File

@@ -35,7 +35,7 @@ services:
     image: docker.io/library/postgres:18
     restart: unless-stopped
     volumes:
-      - pgdata:/var/lib/postgresql
+      - pgdata:/var/lib/postgresql/data
     environment:
       POSTGRES_DB: paperless
       POSTGRES_USER: paperless
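The changed volume target above is not a typo: the official postgres image historically keeps its data under /var/lib/postgresql/data, while the postgres:18 image reworked the on-disk layout so that the whole /var/lib/postgresql tree is the intended mount point (an assumption based on the upstream image change, not something this diff states). A sketch of the two mappings:

    # Which mapping is correct depends on the postgres image major version;
    # check the image documentation before copying either line.
    services:
      db:
        image: docker.io/library/postgres:18
        volumes:
          - pgdata:/var/lib/postgresql        # one side of this compare
          # - pgdata:/var/lib/postgresql/data # the other side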

View File

@@ -38,7 +38,7 @@ services:
     image: docker.io/library/postgres:18
     restart: unless-stopped
     volumes:
-      - pgdata:/var/lib/postgresql
+      - pgdata:/var/lib/postgresql/data
     environment:
       POSTGRES_DB: paperless
       POSTGRES_USER: paperless
@@ -66,7 +66,7 @@ services:
       PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998
   gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.24
+    image: docker.io/gotenberg/gotenberg:8.23
     restart: unless-stopped
     # The gotenberg chromium route is used to convert .eml files. We do not
     # want to allow external content like tracking pixels or even javascript.

View File

@@ -34,7 +34,7 @@ services:
     image: docker.io/library/postgres:18
     restart: unless-stopped
     volumes:
-      - pgdata:/var/lib/postgresql
+      - pgdata:/var/lib/postgresql/data
     environment:
       POSTGRES_DB: paperless
       POSTGRES_USER: paperless

View File

@@ -55,7 +55,7 @@ services:
       PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998
   gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.24
+    image: docker.io/gotenberg/gotenberg:8.23
     restart: unless-stopped
     # The gotenberg chromium route is used to convert .eml files. We do not
     # want to allow external content like tracking pixels or even javascript.

View File

@@ -462,24 +462,15 @@ flowchart TD
 Workflows allow you to filter by:

 - Source, e.g. documents uploaded via consume folder, API (& the web UI) and mail fetch
-- File name, including wildcards e.g. \*.pdf will apply to all pdfs.
+- File name, including wildcards e.g. \*.pdf will apply to all pdfs
 - File path, including wildcards. Note that enabling `PAPERLESS_CONSUMER_RECURSIVE` would allow, for
   example, automatically assigning documents to different owners based on the upload directory.
 - Mail rule. Choosing this option will force 'mail fetch' to be the workflow source.
 - Content matching (`Added`, `Updated` and `Scheduled` triggers only). Filter document content using the matching settings.
-
-There are also 'advanced' filters available for `Added`, `Updated` and `Scheduled` triggers:
-
-- Any Tags: Filter for documents with any of the specified tags.
-- All Tags: Filter for documents with all of the specified tags.
-- No Tags: Filter for documents with none of the specified tags.
-- Document type: Filter documents with this document type.
-- Not Document types: Filter documents without any of these document types.
-- Correspondent: Filter documents with this correspondent.
-- Not Correspondents: Filter documents without any of these correspondents.
-- Storage path: Filter documents with this storage path.
-- Not Storage paths: Filter documents without any of these storage paths.
-- Custom field query: Filter documents with a custom field query (the same as used for the document list filters).
+- Tags (`Added`, `Updated` and `Scheduled` triggers only). Filter for documents with any of the specified tags
+- Document type (`Added`, `Updated` and `Scheduled` triggers only). Filter documents with this doc type
+- Correspondent (`Added`, `Updated` and `Scheduled` triggers only). Filter documents with this correspondent
+- Storage path (`Added`, `Updated` and `Scheduled` triggers only). Filter documents with this storage path

 ### Workflow Actions
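Both versions of the list above describe the same underlying trigger fields, which also show up in the frontend test diff near the end of this compare. An illustrative sketch of those fields on a single trigger, with invented values (the YAML framing is hypothetical; the field names are taken from the diffs on this page):

    # Hypothetical trigger payload for illustration only.
    trigger:
      filter_filename: '*invoice*'
      filter_has_tags: [1]        # any of these tags
      filter_has_all_tags: [2, 3] # all of these tags
      filter_has_not_tags: [4]    # none of these tags
      filter_has_correspondent: 5
      filter_has_document_type: 7
      filter_has_storage_path: 9
      filter_custom_field_query: '["AND", [[1, "exact", "value"]]]'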

View File

@@ -10,7 +10,6 @@ classifiers = [
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
   "Programming Language :: Python :: 3.13",
-  "Programming Language :: Python :: 3.14",
 ]

 # TODO: Move certain things to groups and then utilize that further
 # This will allow testing to not install a webserver, mysql, etc
@@ -26,7 +25,7 @@ dependencies = [
   # WARNING: django does not use semver.
   # Only patch versions are guaranteed to not introduce breaking changes.
   "django~=5.2.5",
-  "django-allauth[mfa,socialaccount]~=65.4.0",
+  "django-allauth[socialaccount,mfa]~=65.4.0",
   "django-auditlog~=3.2.1",
   "django-cachalot~=2.8.0",
   "django-celery-results~=2.6.0",
@@ -43,9 +42,9 @@ dependencies = [
   "drf-spectacular~=0.28",
   "drf-spectacular-sidecar~=2025.9.1",
   "drf-writable-nested~=0.7.1",
-  "filelock~=3.20.0",
+  "filelock~=3.19.1",
   "flower~=2.0.1",
-  "gotenberg-client~=0.12.0",
+  "gotenberg-client~=0.11.0",
   "httpx-oauth~=0.16",
   "imap-tools~=1.11.0",
   "inotifyrecursive~=0.3",
@@ -116,8 +115,8 @@ testing = [
 lint = [
   "pre-commit~=4.3.0",
-  "pre-commit-uv~=4.2.0",
-  "ruff~=0.14.0",
+  "pre-commit-uv~=4.1.3",
+  "ruff~=0.13.0",
 ]

 typing = [
@@ -139,25 +138,6 @@ typing = [
   "types-tqdm",
 ]

-[tool.uv]
-required-version = ">=0.5.14"
-package = false
-environments = [
-  "sys_platform == 'darwin'",
-  "sys_platform == 'linux'",
-]
-
-[tool.uv.sources]
-# Markers are chosen to select these almost exclusively when building the Docker image
-psycopg-c = [
-  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.9/psycopg_c-3.2.9-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
-  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.9/psycopg_c-3.2.9-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
-]
-
-zxing-cpp = [
-  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
-  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
-]
-
 [tool.ruff]
 target-version = "py310"
 line-length = 88
@@ -304,5 +284,24 @@ disallow_untyped_defs = true
 warn_redundant_casts = true
 warn_unused_ignores = true

+[tool.uv]
+required-version = ">=0.5.14"
+package = false
+environments = [
+  "sys_platform == 'darwin'",
+  "sys_platform == 'linux'",
+]
+
+[tool.uv.sources]
+# Markers are chosen to select these almost exclusively when building the Docker image
+psycopg-c = [
+  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.9/psycopg_c-3.2.9-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
+  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.9/psycopg_c-3.2.9-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
+]
+
+zxing-cpp = [
+  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
+  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
+]
+
 [tool.django-stubs]
 django_settings_module = "paperless.settings"

File diff suppressed because it is too large.

View File

@@ -1,36 +1,28 @@
-@if (useDropdown) {
-  <div class="btn-group w-100" role="group" ngbDropdown #dropdown="ngbDropdown" (openChange)="onOpenChange($event)" [popperOptions]="popperOptions">
-    <button class="btn btn-sm btn-outline-primary" id="dropdown_toggle" ngbDropdownToggle [disabled]="disabled">
-      <i-bs name="{{icon}}"></i-bs>
-      <div class="d-none d-sm-inline">&nbsp;{{title}}</div>
-      @if (isActive) {
-        <pngx-clearable-badge [selected]="isActive" (cleared)="reset()"></pngx-clearable-badge>
-      }
-    </button>
-    <div class="px-3 shadow" ngbDropdownMenu attr.aria-labelledby="dropdown_{{name}}">
-      <ng-container *ngTemplateOutlet="list; context: { queries: selectionModel.queries }"></ng-container>
-    </div>
-  </div>
-} @else {
-  <ng-container *ngTemplateOutlet="list; context: { queries: selectionModel.queries }"></ng-container>
-}
-
-<ng-template #list let-queries="queries">
-  <div class="list-group list-group-flush">
-    @for (element of queries; track element.id; let i = $index) {
-      <div class="list-group-item px-0 d-flex flex-nowrap">
-        @switch (element.type) {
-          @case (CustomFieldQueryComponentType.Atom) {
-            <ng-container *ngTemplateOutlet="queryAtom; context: { atom: element }"></ng-container>
-          }
-          @case (CustomFieldQueryComponentType.Expression) {
-            <ng-container *ngTemplateOutlet="queryExpression; context: { expression: element }"></ng-container>
-          }
-        }
-      </div>
-    }
-  </div>
-</ng-template>
+<div class="btn-group w-100" role="group" ngbDropdown #dropdown="ngbDropdown" (openChange)="onOpenChange($event)" [popperOptions]="popperOptions">
+  <button class="btn btn-sm btn-outline-primary" id="dropdown_toggle" ngbDropdownToggle [disabled]="disabled">
+    <i-bs name="{{icon}}"></i-bs>
+    <div class="d-none d-sm-inline">&nbsp;{{title}}</div>
+    @if (isActive) {
+      <pngx-clearable-badge [selected]="isActive" (cleared)="reset()"></pngx-clearable-badge>
+    }
+  </button>
+  <div class="px-3 shadow" ngbDropdownMenu attr.aria-labelledby="dropdown_{{name}}">
+    <div class="list-group list-group-flush">
+      @for (element of selectionModel.queries; track element.id; let i = $index) {
+        <div class="list-group-item px-0 d-flex flex-nowrap">
+          @switch (element.type) {
+            @case (CustomFieldQueryComponentType.Atom) {
+              <ng-container *ngTemplateOutlet="queryAtom; context: { atom: element }"></ng-container>
+            }
+            @case (CustomFieldQueryComponentType.Expression) {
+              <ng-container *ngTemplateOutlet="queryExpression; context: { expression: element }"></ng-container>
+            }
+          }
+        </div>
+      }
+    </div>
+  </div>
+</div>

 <ng-template #comparisonValueTemplate let-atom="atom">
   @if (getCustomFieldByID(atom.field)?.data_type === CustomFieldDataType.Date) {

View File

@@ -120,12 +120,6 @@ export class CustomFieldQueriesModel {
     })
   }

-  addInitialAtom() {
-    this.addAtom(
-      new CustomFieldQueryAtom([null, CustomFieldQueryOperator.Exists, 'true'])
-    )
-  }
-
   private findElement(
     queryElement: CustomFieldQueryElement,
     elements: any[]
@@ -212,9 +206,6 @@ export class CustomFieldsQueryDropdownComponent extends LoadingComponentWithPerm
   @Input()
   applyOnClose = false

-  @Input()
-  useDropdown: boolean = true
-
   get name(): string {
     return this.title ? this.title.replace(/\s/g, '_').toLowerCase() : null
   }
@@ -267,7 +258,13 @@
   public onOpenChange(open: boolean) {
     if (open) {
       if (this.selectionModel.queries.length === 0) {
-        this.selectionModel.addInitialAtom()
+        this.selectionModel.addAtom(
+          new CustomFieldQueryAtom([
+            null,
+            CustomFieldQueryOperator.Exists,
+            'true',
+          ])
+        )
       }
       if (
         this.selectionModel.queries.length === 1 &&

View File

@@ -156,97 +156,31 @@
     <p class="small" i18n>Trigger for documents that match <em>all</em> filters specified below.</p>
     <div class="row">
       <div class="col">
-        <pngx-input-text i18n-title title="Filter filename" formControlName="filter_filename" horizontal="true" i18n-hint hint="Apply to documents that match this filename. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive." [error]="error?.filter_filename"></pngx-input-text>
+        <pngx-input-text i18n-title title="Filter filename" formControlName="filter_filename" i18n-hint hint="Apply to documents that match this filename. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive." [error]="error?.filter_filename"></pngx-input-text>
         @if (formGroup.get('type').value === WorkflowTriggerType.Consumption) {
-          <pngx-input-select i18n-title title="Filter sources" [items]="sourceOptions" horizontal="true" [multiple]="true" formControlName="sources" [error]="error?.sources"></pngx-input-select>
-          <pngx-input-text i18n-title title="Filter path" formControlName="filter_path" horizontal="true" i18n-hint hint="Apply to documents that match this path. Wildcards specified as * are allowed. Case-normalized.</a>" [error]="error?.filter_path"></pngx-input-text>
-          <pngx-input-select i18n-title title="Filter mail rule" [items]="mailRules" horizontal="true" [allowNull]="true" formControlName="filter_mailrule" i18n-hint hint="Apply to documents consumed via this mail rule." [error]="error?.filter_mailrule"></pngx-input-select>
+          <pngx-input-select i18n-title title="Filter sources" [items]="sourceOptions" [multiple]="true" formControlName="sources" [error]="error?.sources"></pngx-input-select>
+          <pngx-input-text i18n-title title="Filter path" formControlName="filter_path" i18n-hint hint="Apply to documents that match this path. Wildcards specified as * are allowed. Case-normalized.</a>" [error]="error?.filter_path"></pngx-input-text>
+          <pngx-input-select i18n-title title="Filter mail rule" [items]="mailRules" [allowNull]="true" formControlName="filter_mailrule" i18n-hint hint="Apply to documents consumed via this mail rule." [error]="error?.filter_mailrule"></pngx-input-select>
         }
         @if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
-          <pngx-input-select i18n-title title="Content matching algorithm" horizontal="true" [items]="getMatchingAlgorithms()" formControlName="matching_algorithm"></pngx-input-select>
-          @if (matchingPatternRequired(formGroup)) {
-            <pngx-input-text i18n-title title="Content matching pattern" horizontal="true" formControlName="match" [error]="error?.match"></pngx-input-text>
+          <pngx-input-select i18n-title title="Content matching algorithm" [items]="getMatchingAlgorithms()" formControlName="matching_algorithm"></pngx-input-select>
+          @if (patternRequired) {
+            <pngx-input-text i18n-title title="Content matching pattern" formControlName="match" [error]="error?.match"></pngx-input-text>
           }
-          @if (matchingPatternRequired(formGroup)) {
-            <pngx-input-check i18n-title title="Case insensitive" horizontal="true" formControlName="is_insensitive"></pngx-input-check>
+          @if (patternRequired) {
+            <pngx-input-check i18n-title title="Case insensitive" formControlName="is_insensitive"></pngx-input-check>
           }
         }
       </div>
-    </div>
-    @if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
-      <div class="row mt-3">
-        <div class="col">
-          <div class="trigger-filters mb-3">
-            <div class="d-flex align-items-center">
-              <label class="form-label mb-0" i18n>Advanced Filters</label>
-              <button
-                type="button"
-                class="btn btn-sm btn-outline-primary ms-auto"
-                (click)="addFilter(formGroup)"
-                [disabled]="!canAddFilter(formGroup)"
-              >
-                <i-bs name="plus-circle"></i-bs>&nbsp;<span i18n>Add filter</span>
-              </button>
-            </div>
-            <ul class="mt-2 list-group filters" formArrayName="filters">
-              @if (getFiltersFormArray(formGroup).length === 0) {
-                <p class="text-muted small" i18n>No advanced workflow filters defined.</p>
-              }
-              @for (filter of getFiltersFormArray(formGroup).controls; track filter; let filterIndex = $index) {
-                <li [formGroupName]="filterIndex" class="list-group-item">
-                  <div class="d-flex align-items-center gap-2">
-                    <div class="w-25">
-                      <pngx-input-select
-                        i18n-title
-                        [items]="getFilterTypeOptions(formGroup, filterIndex)"
-                        formControlName="type"
-                        [allowNull]="false"
-                      ></pngx-input-select>
-                    </div>
-                    <div class="flex-grow-1">
-                      @if (isTagsFilter(filter.get('type').value)) {
-                        <pngx-input-tags
-                          [allowCreate]="false"
-                          [title]="null"
-                          formControlName="values"
-                        ></pngx-input-tags>
-                      } @else if (
-                        isCustomFieldQueryFilter(filter.get('type').value)
-                      ) {
-                        <pngx-custom-fields-query-dropdown
-                          [selectionModel]="getCustomFieldQueryModel(filter)"
-                          (selectionModelChange)="onCustomFieldQuerySelectionChange(filter, $event)"
-                          [useDropdown]="false"
-                        ></pngx-custom-fields-query-dropdown>
-                        @if (!isCustomFieldQueryValid(filter)) {
-                          <div class="text-danger small" i18n>
-                            Complete the custom field query configuration.
-                          </div>
-                        }
-                      } @else {
-                        <pngx-input-select
-                          [items]="getFilterSelectItems(filter.get('type').value)"
-                          [allowNull]="true"
-                          [multiple]="isSelectMultiple(filter.get('type').value)"
-                          formControlName="values"
-                        ></pngx-input-select>
-                      }
-                    </div>
-                    <button
-                      type="button"
-                      class="btn btn-link text-danger p-0"
-                      (click)="removeFilter(formGroup, filterIndex)"
-                    >
-                      <i-bs name="trash"></i-bs><span class="ms-1" i18n>Delete</span>
-                    </button>
-                  </div>
-                </li>
-              }
-            </ul>
-          </div>
-        </div>
-      </div>
-    }
+      @if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
+        <div class="col-md-6">
+          <pngx-input-tags [allowCreate]="false" i18n-title title="Has any of tags" formControlName="filter_has_tags"></pngx-input-tags>
+          <pngx-input-select i18n-title title="Has correspondent" [items]="correspondents" [allowNull]="true" formControlName="filter_has_correspondent"></pngx-input-select>
+          <pngx-input-select i18n-title title="Has document type" [items]="documentTypes" [allowNull]="true" formControlName="filter_has_document_type"></pngx-input-select>
+          <pngx-input-select i18n-title title="Has storage path" [items]="storagePaths" [allowNull]="true" formControlName="filter_has_storage_path"></pngx-input-select>
+        </div>
+      }
+    </div>
   </div>
 </ng-template>

View File

@@ -7,7 +7,3 @@
 .accordion-button {
   font-size: 1rem;
 }
-
-:host ::ng-deep .filters .paperless-input-select.mb-3 {
-  margin-bottom: 0 !important;
-}

View File

@@ -11,14 +11,8 @@ import {
 import { NgbActiveModal, NgbModule } from '@ng-bootstrap/ng-bootstrap'
 import { NgSelectModule } from '@ng-select/ng-select'
 import { of } from 'rxjs'
-import { CustomFieldQueriesModel } from 'src/app/components/common/custom-fields-query-dropdown/custom-fields-query-dropdown.component'
 import { CustomFieldDataType } from 'src/app/data/custom-field'
-import { CustomFieldQueryLogicalOperator } from 'src/app/data/custom-field-query'
-import {
-  MATCHING_ALGORITHMS,
-  MATCH_AUTO,
-  MATCH_NONE,
-} from 'src/app/data/matching-model'
+import { MATCHING_ALGORITHMS, MATCH_AUTO } from 'src/app/data/matching-model'
 import { Workflow } from 'src/app/data/workflow'
 import {
   WorkflowAction,
@@ -37,7 +31,6 @@ import { DocumentTypeService } from 'src/app/services/rest/document-type.service
 import { MailRuleService } from 'src/app/services/rest/mail-rule.service'
 import { StoragePathService } from 'src/app/services/rest/storage-path.service'
 import { SettingsService } from 'src/app/services/settings.service'
-import { CustomFieldQueryExpression } from 'src/app/utils/custom-field-query-element'
 import { ConfirmButtonComponent } from '../../confirm-button/confirm-button.component'
 import { NumberComponent } from '../../input/number/number.component'
 import { PermissionsGroupComponent } from '../../input/permissions/permissions-group/permissions-group.component'
@@ -50,7 +43,6 @@ import { EditDialogMode } from '../edit-dialog.component'
 import {
   DOCUMENT_SOURCE_OPTIONS,
   SCHEDULE_DATE_FIELD_OPTIONS,
-  TriggerFilterType,
   WORKFLOW_ACTION_OPTIONS,
   WORKFLOW_TYPE_OPTIONS,
   WorkflowEditDialogComponent,
@@ -383,562 +375,6 @@ describe('WorkflowEditDialogComponent', () => {
     expect(component.objectForm.get('actions').value[0].webhook).toBeNull()
   })
it('should require matching pattern when algorithm is not none', () => {
const triggerGroup = new FormGroup({
matching_algorithm: new FormControl(MATCH_AUTO),
match: new FormControl(''),
})
expect(component.matchingPatternRequired(triggerGroup)).toBe(true)
triggerGroup.get('matching_algorithm').setValue(MATCHING_ALGORITHMS[0].id)
expect(component.matchingPatternRequired(triggerGroup)).toBe(true)
triggerGroup.get('matching_algorithm').setValue(MATCH_NONE)
expect(component.matchingPatternRequired(triggerGroup)).toBe(false)
})
it('should map filter builder values into trigger filters on save', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0)
component.addFilter(triggerGroup as FormGroup)
component.addFilter(triggerGroup as FormGroup)
component.addFilter(triggerGroup as FormGroup)
const filters = component.getFiltersFormArray(triggerGroup as FormGroup)
expect(filters.length).toBe(3)
filters.at(0).get('values').setValue([1])
filters.at(1).get('values').setValue([2, 3])
filters.at(2).get('values').setValue([4])
const addFilterOfType = (type: TriggerFilterType) => {
const newFilter = component.addFilter(triggerGroup as FormGroup)
newFilter.get('type').setValue(type)
return newFilter
}
const correspondentIs = addFilterOfType(TriggerFilterType.CorrespondentIs)
correspondentIs.get('values').setValue(1)
const correspondentNot = addFilterOfType(TriggerFilterType.CorrespondentNot)
correspondentNot.get('values').setValue([1])
const documentTypeIs = addFilterOfType(TriggerFilterType.DocumentTypeIs)
documentTypeIs.get('values').setValue(1)
const documentTypeNot = addFilterOfType(TriggerFilterType.DocumentTypeNot)
documentTypeNot.get('values').setValue([1])
const storagePathIs = addFilterOfType(TriggerFilterType.StoragePathIs)
storagePathIs.get('values').setValue(1)
const storagePathNot = addFilterOfType(TriggerFilterType.StoragePathNot)
storagePathNot.get('values').setValue([1])
const customFieldFilter = addFilterOfType(
TriggerFilterType.CustomFieldQuery
)
const customFieldQuery = JSON.stringify(['AND', [[1, 'exact', 'test']]])
customFieldFilter.get('values').setValue(customFieldQuery)
const formValues = component['getFormValues']()
expect(formValues.triggers[0].filter_has_tags).toEqual([1])
expect(formValues.triggers[0].filter_has_all_tags).toEqual([2, 3])
expect(formValues.triggers[0].filter_has_not_tags).toEqual([4])
expect(formValues.triggers[0].filter_has_correspondent).toEqual(1)
expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([1])
expect(formValues.triggers[0].filter_has_document_type).toEqual(1)
expect(formValues.triggers[0].filter_has_not_document_types).toEqual([1])
expect(formValues.triggers[0].filter_has_storage_path).toEqual(1)
expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([1])
expect(formValues.triggers[0].filter_custom_field_query).toEqual(
customFieldQuery
)
expect(formValues.triggers[0].filters).toBeUndefined()
})
it('should ignore empty and null filter values when mapping filters', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
const tagsFilter = component.addFilter(triggerGroup)
tagsFilter.get('type').setValue(TriggerFilterType.TagsAny)
tagsFilter.get('values').setValue([])
const correspondentFilter = component.addFilter(triggerGroup)
correspondentFilter.get('type').setValue(TriggerFilterType.CorrespondentIs)
correspondentFilter.get('values').setValue(null)
const formValues = component['getFormValues']()
expect(formValues.triggers[0].filter_has_tags).toEqual([])
expect(formValues.triggers[0].filter_has_correspondent).toBeNull()
})
it('should derive single select filters from array values', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
const addFilterOfType = (type: TriggerFilterType, value: any) => {
const filter = component.addFilter(triggerGroup)
filter.get('type').setValue(type)
filter.get('values').setValue(value)
}
addFilterOfType(TriggerFilterType.CorrespondentIs, [5])
addFilterOfType(TriggerFilterType.DocumentTypeIs, [6])
addFilterOfType(TriggerFilterType.StoragePathIs, [7])
const formValues = component['getFormValues']()
expect(formValues.triggers[0].filter_has_correspondent).toEqual(5)
expect(formValues.triggers[0].filter_has_document_type).toEqual(6)
expect(formValues.triggers[0].filter_has_storage_path).toEqual(7)
})
it('should convert multi-value filter values when aggregating filters', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
const setFilter = (type: TriggerFilterType, value: number): void => {
const filter = component.addFilter(triggerGroup) as FormGroup
filter.get('type').setValue(type)
filter.get('values').setValue(value)
}
setFilter(TriggerFilterType.TagsAll, 11)
setFilter(TriggerFilterType.TagsNone, 12)
setFilter(TriggerFilterType.CorrespondentNot, 13)
setFilter(TriggerFilterType.DocumentTypeNot, 14)
setFilter(TriggerFilterType.StoragePathNot, 15)
const formValues = component['getFormValues']()
expect(formValues.triggers[0].filter_has_all_tags).toEqual([11])
expect(formValues.triggers[0].filter_has_not_tags).toEqual([12])
expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([13])
expect(formValues.triggers[0].filter_has_not_document_types).toEqual([14])
expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([15])
})
it('should reuse filter type options and update disabled state', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
component.addFilter(triggerGroup)
const optionsFirst = component.getFilterTypeOptions(triggerGroup, 0)
const optionsSecond = component.getFilterTypeOptions(triggerGroup, 0)
expect(optionsFirst).toBe(optionsSecond)
// to force disabled flag
component.addFilter(triggerGroup)
const filterArray = component.getFiltersFormArray(triggerGroup)
const firstFilter = filterArray.at(0)
firstFilter.get('type').setValue(TriggerFilterType.CorrespondentIs)
component.addFilter(triggerGroup)
const updatedFilters = component.getFiltersFormArray(triggerGroup)
const secondFilter = updatedFilters.at(1)
const options = component.getFilterTypeOptions(triggerGroup, 1)
const correspondentIsOption = options.find(
(option) => option.id === TriggerFilterType.CorrespondentIs
)
expect(correspondentIsOption.disabled).toBe(true)
firstFilter.get('type').setValue(TriggerFilterType.DocumentTypeNot)
secondFilter.get('type').setValue(TriggerFilterType.TagsAll)
const postChangeOptions = component.getFilterTypeOptions(triggerGroup, 1)
const correspondentOptionAfter = postChangeOptions.find(
(option) => option.id === TriggerFilterType.CorrespondentIs
)
expect(correspondentOptionAfter.disabled).toBe(false)
})
it('should keep multi-entry filter options enabled and allow duplicates', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
component.filterDefinitions = [
{
id: TriggerFilterType.TagsAny,
name: 'Any tags',
inputType: 'tags',
allowMultipleEntries: true,
allowMultipleValues: true,
} as any,
{
id: TriggerFilterType.CorrespondentIs,
name: 'Correspondent is',
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: false,
selectItems: 'correspondents',
} as any,
]
const firstFilter = component.addFilter(triggerGroup)
firstFilter.get('type').setValue(TriggerFilterType.TagsAny)
const secondFilter = component.addFilter(triggerGroup)
expect(secondFilter).not.toBeNull()
const options = component.getFilterTypeOptions(triggerGroup, 1)
const multiEntryOption = options.find(
(option) => option.id === TriggerFilterType.TagsAny
)
expect(multiEntryOption.disabled).toBe(false)
expect(component.canAddFilter(triggerGroup)).toBe(true)
})
it('should return null when no filter definitions remain available', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
component.filterDefinitions = [
{
id: TriggerFilterType.TagsAny,
name: 'Any tags',
inputType: 'tags',
allowMultipleEntries: false,
allowMultipleValues: true,
} as any,
{
id: TriggerFilterType.CorrespondentIs,
name: 'Correspondent is',
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: false,
selectItems: 'correspondents',
} as any,
]
const firstFilter = component.addFilter(triggerGroup)
firstFilter.get('type').setValue(TriggerFilterType.TagsAny)
const secondFilter = component.addFilter(triggerGroup)
secondFilter.get('type').setValue(TriggerFilterType.CorrespondentIs)
expect(component.canAddFilter(triggerGroup)).toBe(false)
expect(component.addFilter(triggerGroup)).toBeNull()
})
it('should skip filter definitions without handlers when building form array', () => {
const originalDefinitions = component.filterDefinitions
component.filterDefinitions = [
{
id: 999,
name: 'Unsupported',
inputType: 'text',
allowMultipleEntries: false,
allowMultipleValues: false,
} as any,
]
const trigger = {
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_not_correspondents: [],
filter_has_not_document_types: [],
filter_has_not_storage_paths: [],
filter_has_correspondent: null,
filter_has_document_type: null,
filter_has_storage_path: null,
filter_custom_field_query: null,
} as any
const filters = component['buildFiltersFormArray'](trigger)
expect(filters.length).toBe(0)
component.filterDefinitions = originalDefinitions
})
it('should return null when adding filter for unknown trigger form group', () => {
expect(component.addFilter(new FormGroup({}) as any)).toBeNull()
})
it('should ignore remove filter calls for unknown trigger form group', () => {
expect(() =>
component.removeFilter(new FormGroup({}) as any, 0)
).not.toThrow()
})
it('should teardown custom field query model when removing a custom field filter', () => {
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
component.addFilter(triggerGroup)
const filters = component.getFiltersFormArray(triggerGroup)
const filterGroup = filters.at(0) as FormGroup
filterGroup.get('type').setValue(TriggerFilterType.CustomFieldQuery)
const model = component.getCustomFieldQueryModel(filterGroup)
expect(model).toBeDefined()
expect(
component['getStoredCustomFieldQueryModel'](filterGroup as any)
).toBe(model)
component.removeFilter(triggerGroup, 0)
expect(
component['getStoredCustomFieldQueryModel'](filterGroup as any)
).toBeNull()
})
it('should return readable filter names', () => {
expect(component.getFilterName(TriggerFilterType.TagsAny)).toBe(
'Has any of these tags'
)
expect(component.getFilterName(999 as any)).toBe('')
})
it('should build filter form array from existing trigger filters', () => {
const trigger = workflow.triggers[0]
trigger.filter_has_tags = [1]
trigger.filter_has_all_tags = [2, 3]
trigger.filter_has_not_tags = [4]
trigger.filter_has_correspondent = 5 as any
trigger.filter_has_not_correspondents = [6] as any
trigger.filter_has_document_type = 7 as any
trigger.filter_has_not_document_types = [8] as any
trigger.filter_has_storage_path = 9 as any
trigger.filter_has_not_storage_paths = [10] as any
trigger.filter_custom_field_query = JSON.stringify([
'AND',
[[1, 'exact', 'value']],
]) as any
component.object = workflow
component.ngOnInit()
const triggerGroup = component.triggerFields.at(0) as FormGroup
const filters = component.getFiltersFormArray(triggerGroup)
expect(filters.length).toBe(10)
const customFieldFilter = filters.at(9) as FormGroup
expect(customFieldFilter.get('type').value).toBe(
TriggerFilterType.CustomFieldQuery
)
const model = component.getCustomFieldQueryModel(customFieldFilter)
expect(model.isValid()).toBe(true)
})
it('should expose select metadata helpers', () => {
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentNot)).toBe(
true
)
expect(component.isSelectMultiple(TriggerFilterType.CorrespondentIs)).toBe(
false
)
component.correspondents = [{ id: 1, name: 'C1' } as any]
component.documentTypes = [{ id: 2, name: 'DT' } as any]
component.storagePaths = [{ id: 3, name: 'SP' } as any]
expect(
component.getFilterSelectItems(TriggerFilterType.CorrespondentIs)
).toEqual(component.correspondents)
expect(
component.getFilterSelectItems(TriggerFilterType.DocumentTypeIs)
).toEqual(component.documentTypes)
expect(
component.getFilterSelectItems(TriggerFilterType.StoragePathIs)
).toEqual(component.storagePaths)
expect(component.getFilterSelectItems(TriggerFilterType.TagsAll)).toEqual(
[]
)
expect(
component.isCustomFieldQueryFilter(TriggerFilterType.CustomFieldQuery)
).toBe(true)
})
it('should return empty select items when definition is missing', () => {
const originalDefinitions = component.filterDefinitions
component.filterDefinitions = []
expect(
component.getFilterSelectItems(TriggerFilterType.CorrespondentIs)
).toEqual([])
component.filterDefinitions = originalDefinitions
})
it('should return empty select items when definition has unknown source', () => {
const originalDefinitions = component.filterDefinitions
component.filterDefinitions = [
{
id: TriggerFilterType.CorrespondentIs,
name: 'Correspondent is',
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: false,
selectItems: 'unknown',
} as any,
]
expect(
component.getFilterSelectItems(TriggerFilterType.CorrespondentIs)
).toEqual([])
component.filterDefinitions = originalDefinitions
})
it('should handle custom field query selection change and validation states', () => {
const formGroup = new FormGroup({
values: new FormControl(null),
})
const model = new CustomFieldQueriesModel()
const changeSpy = jest.spyOn(
component as any,
'onCustomFieldQueryModelChanged'
)
component.onCustomFieldQuerySelectionChange(formGroup, model)
expect(changeSpy).toHaveBeenCalledWith(formGroup, model)
expect(component.isCustomFieldQueryValid(formGroup)).toBe(true)
component['setCustomFieldQueryModel'](formGroup as any, model as any)
const validSpy = jest.spyOn(model, 'isValid').mockReturnValue(false)
const emptySpy = jest.spyOn(model, 'isEmpty').mockReturnValue(false)
expect(component.isCustomFieldQueryValid(formGroup)).toBe(false)
expect(validSpy).toHaveBeenCalled()
validSpy.mockReturnValue(true)
emptySpy.mockReturnValue(true)
expect(component.isCustomFieldQueryValid(formGroup)).toBe(true)
emptySpy.mockReturnValue(false)
expect(component.isCustomFieldQueryValid(formGroup)).toBe(true)
component['clearCustomFieldQueryModel'](formGroup as any)
})
it('should recover from invalid custom field query json and update control on changes', () => {
const filterGroup = new FormGroup({
values: new FormControl('not-json'),
})
component['ensureCustomFieldQueryModel'](filterGroup, 'not-json')
const model = component['getStoredCustomFieldQueryModel'](
filterGroup as any
)
expect(model).toBeDefined()
expect(model.queries.length).toBeGreaterThan(0)
const valuesControl = filterGroup.get('values')
expect(valuesControl.value).toBeNull()
const expression = new CustomFieldQueryExpression([
CustomFieldQueryLogicalOperator.And,
[[1, 'exact', 'value']],
])
model.queries = [expression]
jest.spyOn(model, 'isValid').mockReturnValue(true)
jest.spyOn(model, 'isEmpty').mockReturnValue(false)
model.changed.next(model)
expect(valuesControl.value).toEqual(JSON.stringify(expression.serialize()))
component['clearCustomFieldQueryModel'](filterGroup as any)
})
it('should handle custom field query model change edge cases', () => {
const groupWithoutControl = new FormGroup({})
const dummyModel = {
isValid: jest.fn().mockReturnValue(true),
isEmpty: jest.fn().mockReturnValue(false),
}
expect(() =>
component['onCustomFieldQueryModelChanged'](
groupWithoutControl as any,
dummyModel as any
)
).not.toThrow()
const groupWithControl = new FormGroup({
values: new FormControl('initial'),
})
const emptyModel = {
isValid: jest.fn().mockReturnValue(true),
isEmpty: jest.fn().mockReturnValue(true),
}
component['onCustomFieldQueryModelChanged'](
groupWithControl as any,
emptyModel as any
)
expect(groupWithControl.get('values').value).toBeNull()
})
it('should normalize filter values for single and multi selects', () => {
expect(
component['normalizeFilterValue'](TriggerFilterType.TagsAny)
).toEqual([])
expect(
component['normalizeFilterValue'](TriggerFilterType.TagsAny, 5)
).toEqual([5])
expect(
component['normalizeFilterValue'](TriggerFilterType.TagsAny, [5, 6])
).toEqual([5, 6])
expect(
component['normalizeFilterValue'](TriggerFilterType.CorrespondentIs, [7])
).toEqual(7)
expect(
component['normalizeFilterValue'](TriggerFilterType.CorrespondentIs, 8)
).toEqual(8)
const customFieldJson = JSON.stringify(['AND', [[1, 'exact', 'test']]])
expect(
component['normalizeFilterValue'](
TriggerFilterType.CustomFieldQuery,
customFieldJson
)
).toEqual(customFieldJson)
const customFieldObject = ['AND', [[1, 'exact', 'other']]]
expect(
component['normalizeFilterValue'](
TriggerFilterType.CustomFieldQuery,
customFieldObject
)
).toEqual(JSON.stringify(customFieldObject))
expect(
component['normalizeFilterValue'](
TriggerFilterType.CustomFieldQuery,
false
)
).toBeNull()
})
it('should add and remove filter form groups', () => {
component['changeDetector'] = { detectChanges: jest.fn() } as any
component.object = undefined
component.addTrigger()
const triggerGroup = component.triggerFields.at(0) as FormGroup
component.addFilter(triggerGroup)
component.removeFilter(triggerGroup, 0)
expect(component.getFiltersFormArray(triggerGroup).length).toBe(0)
component.addFilter(triggerGroup)
const filterArrayAfterAdd = component.getFiltersFormArray(triggerGroup)
filterArrayAfterAdd.at(0).get('type').setValue(TriggerFilterType.TagsAll)
expect(component.getFiltersFormArray(triggerGroup).length).toBe(1)
})
   it('should remove selected custom field from the form group', () => {
     const formGroup = new FormGroup({
       assign_custom_fields: new FormControl([1, 2, 3]),

View File

@@ -6,7 +6,6 @@ import {
 import { NgTemplateOutlet } from '@angular/common'
 import { Component, OnInit, inject } from '@angular/core'
 import {
-  AbstractControl,
   FormArray,
   FormControl,
   FormGroup,
@@ -15,7 +14,7 @@ import {
 } from '@angular/forms'
 import { NgbAccordionModule } from '@ng-bootstrap/ng-bootstrap'
 import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
-import { Subscription, first, takeUntil } from 'rxjs'
+import { first } from 'rxjs'
 import { Correspondent } from 'src/app/data/correspondent'
 import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'
 import { DocumentType } from 'src/app/data/document-type'
@@ -46,12 +45,7 @@ import { StoragePathService } from 'src/app/services/rest/storage-path.service'
 import { UserService } from 'src/app/services/rest/user.service'
 import { WorkflowService } from 'src/app/services/rest/workflow.service'
 import { SettingsService } from 'src/app/services/settings.service'
-import { CustomFieldQueryExpression } from 'src/app/utils/custom-field-query-element'
 import { ConfirmButtonComponent } from '../../confirm-button/confirm-button.component'
-import {
-  CustomFieldQueriesModel,
-  CustomFieldsQueryDropdownComponent,
-} from '../../custom-fields-query-dropdown/custom-fields-query-dropdown.component'
 import { CheckComponent } from '../../input/check/check.component'
 import { CustomFieldsValuesComponent } from '../../input/custom-fields-values/custom-fields-values.component'
 import { EntriesComponent } from '../../input/entries/entries.component'
@@ -141,235 +135,10 @@ export const WORKFLOW_ACTION_OPTIONS = [
}, },
] ]
export enum TriggerFilterType {
TagsAny = 'tags_any',
TagsAll = 'tags_all',
TagsNone = 'tags_none',
CorrespondentIs = 'correspondent_is',
CorrespondentNot = 'correspondent_not',
DocumentTypeIs = 'document_type_is',
DocumentTypeNot = 'document_type_not',
StoragePathIs = 'storage_path_is',
StoragePathNot = 'storage_path_not',
CustomFieldQuery = 'custom_field_query',
}
interface TriggerFilterDefinition {
id: TriggerFilterType
name: string
inputType: 'tags' | 'select' | 'customFieldQuery'
allowMultipleEntries: boolean
allowMultipleValues: boolean
selectItems?: 'correspondents' | 'documentTypes' | 'storagePaths'
disabled?: boolean
}
type TriggerFilterOption = TriggerFilterDefinition & {
disabled?: boolean
}
type TriggerFilterAggregate = {
filter_has_tags: number[]
filter_has_all_tags: number[]
filter_has_not_tags: number[]
filter_has_not_correspondents: number[]
filter_has_not_document_types: number[]
filter_has_not_storage_paths: number[]
filter_has_correspondent: number | null
filter_has_document_type: number | null
filter_has_storage_path: number | null
filter_custom_field_query: string | null
}
interface FilterHandler {
apply: (aggregate: TriggerFilterAggregate, values: any) => void
extract: (trigger: WorkflowTrigger) => any
hasValue: (value: any) => boolean
}
const CUSTOM_FIELD_QUERY_MODEL_KEY = Symbol('customFieldQueryModel')
const CUSTOM_FIELD_QUERY_SUBSCRIPTION_KEY = Symbol(
'customFieldQuerySubscription'
)
type CustomFieldFilterGroup = FormGroup & {
[CUSTOM_FIELD_QUERY_MODEL_KEY]?: CustomFieldQueriesModel
[CUSTOM_FIELD_QUERY_SUBSCRIPTION_KEY]?: Subscription
}
const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
{
id: TriggerFilterType.TagsAny,
name: $localize`Has any of these tags`,
inputType: 'tags',
allowMultipleEntries: false,
allowMultipleValues: true,
},
{
id: TriggerFilterType.TagsAll,
name: $localize`Has all of these tags`,
inputType: 'tags',
allowMultipleEntries: false,
allowMultipleValues: true,
},
{
id: TriggerFilterType.TagsNone,
name: $localize`Does not have these tags`,
inputType: 'tags',
allowMultipleEntries: false,
allowMultipleValues: true,
},
{
id: TriggerFilterType.CorrespondentIs,
name: $localize`Has correspondent`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: false,
selectItems: 'correspondents',
},
{
id: TriggerFilterType.CorrespondentNot,
name: $localize`Does not have correspondents`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'correspondents',
},
{
id: TriggerFilterType.DocumentTypeIs,
name: $localize`Has document type`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: false,
selectItems: 'documentTypes',
},
{
id: TriggerFilterType.DocumentTypeNot,
name: $localize`Does not have document types`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'documentTypes',
},
{
id: TriggerFilterType.StoragePathIs,
name: $localize`Has storage path`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: false,
selectItems: 'storagePaths',
},
{
id: TriggerFilterType.StoragePathNot,
name: $localize`Does not have storage paths`,
inputType: 'select',
allowMultipleEntries: false,
allowMultipleValues: true,
selectItems: 'storagePaths',
},
{
id: TriggerFilterType.CustomFieldQuery,
name: $localize`Matches custom field query`,
inputType: 'customFieldQuery',
allowMultipleEntries: false,
allowMultipleValues: false,
},
]
const TRIGGER_MATCHING_ALGORITHMS = MATCHING_ALGORITHMS.filter(
(a) => a.id !== MATCH_AUTO
)
const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
[TriggerFilterType.TagsAny]: {
apply: (aggregate, values) => {
aggregate.filter_has_tags = Array.isArray(values) ? [...values] : [values]
},
extract: (trigger) => trigger.filter_has_tags,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.TagsAll]: {
apply: (aggregate, values) => {
aggregate.filter_has_all_tags = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_all_tags,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.TagsNone]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_tags = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_not_tags,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.CorrespondentIs]: {
apply: (aggregate, values) => {
aggregate.filter_has_correspondent = Array.isArray(values)
? (values[0] ?? null)
: values
},
extract: (trigger) => trigger.filter_has_correspondent,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.CorrespondentNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_correspondents = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_not_correspondents,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.DocumentTypeIs]: {
apply: (aggregate, values) => {
aggregate.filter_has_document_type = Array.isArray(values)
? (values[0] ?? null)
: values
},
extract: (trigger) => trigger.filter_has_document_type,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.DocumentTypeNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_document_types = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_not_document_types,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.StoragePathIs]: {
apply: (aggregate, values) => {
aggregate.filter_has_storage_path = Array.isArray(values)
? (values[0] ?? null)
: values
},
extract: (trigger) => trigger.filter_has_storage_path,
hasValue: (value) => value !== null && value !== undefined,
},
[TriggerFilterType.StoragePathNot]: {
apply: (aggregate, values) => {
aggregate.filter_has_not_storage_paths = Array.isArray(values)
? [...values]
: [values]
},
extract: (trigger) => trigger.filter_has_not_storage_paths,
hasValue: (value) => Array.isArray(value) && value.length > 0,
},
[TriggerFilterType.CustomFieldQuery]: {
apply: (aggregate, values) => {
aggregate.filter_custom_field_query = values as string
},
extract: (trigger) => trigger.filter_custom_field_query,
hasValue: (value) =>
typeof value === 'string' && value !== null && value.trim().length > 0,
},
}
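Each FilterHandler above pairs apply (copy a filter row's value onto the flat trigger fields) with extract and hasValue (decide, when loading a saved trigger, whether a filter row should be rebuilt). A minimal sketch of that round trip, assuming a trimmed-down aggregate and only the tags-any handler; names here are illustrative, not the component's actual API:

// Hypothetical, trimmed restatement of the handler round trip.
type Aggregate = { filter_has_tags: number[] }
interface Handler {
  apply: (agg: Aggregate, values: number[] | number) => void
  extract: (trigger: Partial<Aggregate>) => number[] | undefined
  hasValue: (value: unknown) => boolean
}
const tagsAny: Handler = {
  // saving: form row value -> flat trigger field
  apply: (agg, values) => {
    agg.filter_has_tags = Array.isArray(values) ? [...values] : [values]
  },
  // loading: flat trigger field -> form row value
  extract: (trigger) => trigger.filter_has_tags,
  hasValue: (value) => Array.isArray(value) && value.length > 0,
}
const aggregate: Aggregate = { filter_has_tags: [] }
tagsAny.apply(aggregate, [3, 5]) // aggregate.filter_has_tags is now [3, 5]
const saved = { filter_has_tags: [3, 5] }
if (tagsAny.hasValue(tagsAny.extract(saved))) {
  // buildFiltersFormArray would rebuild a 'tags_any' row at this point
}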
@Component({
selector: 'pngx-workflow-edit-dialog',
templateUrl: './workflow-edit-dialog.component.html',
@@ -384,7 +153,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
TextAreaComponent,
TagsComponent,
CustomFieldsValuesComponent,
CustomFieldsQueryDropdownComponent,
PermissionsGroupComponent,
PermissionsUserComponent,
ConfirmButtonComponent,
@@ -402,8 +170,6 @@ export class WorkflowEditDialogComponent
{
public WorkflowTriggerType = WorkflowTriggerType
public WorkflowActionType = WorkflowActionType
public TriggerFilterType = TriggerFilterType
public filterDefinitions = TRIGGER_FILTER_DEFINITIONS
private correspondentService: CorrespondentService
private documentTypeService: DocumentTypeService
@@ -423,11 +189,6 @@ export class WorkflowEditDialogComponent
private allowedActionTypes = []
private readonly triggerFilterOptionsMap = new WeakMap<
FormArray,
TriggerFilterOption[]
>()
constructor() {
super()
this.service = inject(WorkflowService)
@@ -629,416 +390,6 @@ export class WorkflowEditDialogComponent
return this.objectForm.get('actions') as FormArray
}
protected override getFormValues(): any {
const formValues = super.getFormValues()
if (formValues?.triggers?.length) {
formValues.triggers = formValues.triggers.map(
(trigger: any, index: number) => {
const triggerFormGroup = this.triggerFields.at(index) as FormGroup
const filters = this.getFiltersFormArray(triggerFormGroup)
const aggregate: TriggerFilterAggregate = {
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_not_correspondents: [],
filter_has_not_document_types: [],
filter_has_not_storage_paths: [],
filter_has_correspondent: null,
filter_has_document_type: null,
filter_has_storage_path: null,
filter_custom_field_query: null,
}
for (const control of filters.controls) {
const type = control.get('type').value as TriggerFilterType
const values = control.get('values').value
if (values === null || values === undefined) {
continue
}
if (Array.isArray(values) && values.length === 0) {
continue
}
const handler = FILTER_HANDLERS[type]
handler?.apply(aggregate, values)
}
trigger.filter_has_tags = aggregate.filter_has_tags
trigger.filter_has_all_tags = aggregate.filter_has_all_tags
trigger.filter_has_not_tags = aggregate.filter_has_not_tags
trigger.filter_has_not_correspondents =
aggregate.filter_has_not_correspondents
trigger.filter_has_not_document_types =
aggregate.filter_has_not_document_types
trigger.filter_has_not_storage_paths =
aggregate.filter_has_not_storage_paths
trigger.filter_has_correspondent =
aggregate.filter_has_correspondent ?? null
trigger.filter_has_document_type =
aggregate.filter_has_document_type ?? null
trigger.filter_has_storage_path =
aggregate.filter_has_storage_path ?? null
trigger.filter_custom_field_query =
aggregate.filter_custom_field_query ?? null
delete trigger.filters
return trigger
}
)
}
return formValues
}
public matchingPatternRequired(formGroup: FormGroup): boolean {
return formGroup.get('matching_algorithm').value !== MATCH_NONE
}
private createFilterFormGroup(
type: TriggerFilterType,
initialValue?: any
): FormGroup {
const group = new FormGroup({
type: new FormControl(type),
values: new FormControl(this.normalizeFilterValue(type, initialValue)),
})
group.get('type').valueChanges.subscribe((newType: TriggerFilterType) => {
if (newType === TriggerFilterType.CustomFieldQuery) {
this.ensureCustomFieldQueryModel(group)
} else {
this.clearCustomFieldQueryModel(group)
group.get('values').setValue(this.getDefaultFilterValue(newType), {
emitEvent: false,
})
}
})
if (type === TriggerFilterType.CustomFieldQuery) {
this.ensureCustomFieldQueryModel(group, initialValue)
}
return group
}
private buildFiltersFormArray(trigger: WorkflowTrigger): FormArray {
const filters = new FormArray([])
for (const definition of this.filterDefinitions) {
const handler = FILTER_HANDLERS[definition.id]
if (!handler) {
continue
}
const value = handler.extract(trigger)
if (!handler.hasValue(value)) {
continue
}
filters.push(this.createFilterFormGroup(definition.id, value))
}
return filters
}
getFiltersFormArray(formGroup: FormGroup): FormArray {
return formGroup.get('filters') as FormArray
}
getFilterTypeOptions(formGroup: FormGroup, filterIndex: number) {
const filters = this.getFiltersFormArray(formGroup)
const options = this.getFilterTypeOptionsForArray(filters)
const currentType = filters.at(filterIndex).get('type')
.value as TriggerFilterType
const usedTypes = new Set(
filters.controls.map(
(control) => control.get('type').value as TriggerFilterType
)
)
for (const option of options) {
if (option.allowMultipleEntries) {
option.disabled = false
continue
}
option.disabled = usedTypes.has(option.id) && option.id !== currentType
}
return options
}
canAddFilter(formGroup: FormGroup): boolean {
const filters = this.getFiltersFormArray(formGroup)
const usedTypes = new Set(
filters.controls.map(
(control) => control.get('type').value as TriggerFilterType
)
)
return this.filterDefinitions.some((definition) => {
if (definition.allowMultipleEntries) {
return true
}
return !usedTypes.has(definition.id)
})
}
addFilter(triggerFormGroup: FormGroup): FormGroup | null {
const triggerIndex = this.triggerFields.controls.indexOf(triggerFormGroup)
if (triggerIndex === -1) {
return null
}
const filters = this.getFiltersFormArray(triggerFormGroup)
const availableDefinition = this.filterDefinitions.find((definition) => {
if (definition.allowMultipleEntries) {
return true
}
return !filters.controls.some(
(control) => control.get('type').value === definition.id
)
})
if (!availableDefinition) {
return null
}
filters.push(this.createFilterFormGroup(availableDefinition.id))
triggerFormGroup.markAsDirty()
triggerFormGroup.markAsTouched()
return filters.at(-1) as FormGroup
}
removeFilter(triggerFormGroup: FormGroup, filterIndex: number) {
const triggerIndex = this.triggerFields.controls.indexOf(triggerFormGroup)
if (triggerIndex === -1) {
return
}
const filters = this.getFiltersFormArray(triggerFormGroup)
const filterGroup = filters.at(filterIndex) as FormGroup
if (filterGroup?.get('type').value === TriggerFilterType.CustomFieldQuery) {
this.clearCustomFieldQueryModel(filterGroup)
}
filters.removeAt(filterIndex)
triggerFormGroup.markAsDirty()
triggerFormGroup.markAsTouched()
}
getFilterDefinition(
type: TriggerFilterType
): TriggerFilterDefinition | undefined {
return this.filterDefinitions.find((definition) => definition.id === type)
}
getFilterName(type: TriggerFilterType): string {
return this.getFilterDefinition(type)?.name ?? ''
}
isTagsFilter(type: TriggerFilterType): boolean {
return this.getFilterDefinition(type)?.inputType === 'tags'
}
isCustomFieldQueryFilter(type: TriggerFilterType): boolean {
return this.getFilterDefinition(type)?.inputType === 'customFieldQuery'
}
isMultiValueFilter(type: TriggerFilterType): boolean {
switch (type) {
case TriggerFilterType.TagsAny:
case TriggerFilterType.TagsAll:
case TriggerFilterType.TagsNone:
case TriggerFilterType.CorrespondentNot:
case TriggerFilterType.DocumentTypeNot:
case TriggerFilterType.StoragePathNot:
return true
default:
return false
}
}
isSelectMultiple(type: TriggerFilterType): boolean {
return !this.isTagsFilter(type) && this.isMultiValueFilter(type)
}
getFilterSelectItems(type: TriggerFilterType) {
const definition = this.getFilterDefinition(type)
if (!definition || definition.inputType !== 'select') {
return []
}
switch (definition.selectItems) {
case 'correspondents':
return this.correspondents
case 'documentTypes':
return this.documentTypes
case 'storagePaths':
return this.storagePaths
default:
return []
}
}
getCustomFieldQueryModel(control: AbstractControl): CustomFieldQueriesModel {
return this.ensureCustomFieldQueryModel(control as FormGroup)
}
onCustomFieldQuerySelectionChange(
control: AbstractControl,
model: CustomFieldQueriesModel
) {
this.onCustomFieldQueryModelChanged(control as FormGroup, model)
}
isCustomFieldQueryValid(control: AbstractControl): boolean {
const model = this.getStoredCustomFieldQueryModel(control as FormGroup)
if (!model) {
return true
}
return model.isEmpty() || model.isValid()
}
private getFilterTypeOptionsForArray(
filters: FormArray
): TriggerFilterOption[] {
let cached = this.triggerFilterOptionsMap.get(filters)
if (!cached) {
cached = this.filterDefinitions.map((definition) => ({
...definition,
disabled: false,
}))
this.triggerFilterOptionsMap.set(filters, cached)
}
return cached
}
private ensureCustomFieldQueryModel(
filterGroup: FormGroup,
initialValue?: any
): CustomFieldQueriesModel {
const existingModel = this.getStoredCustomFieldQueryModel(filterGroup)
if (existingModel) {
return existingModel
}
const model = new CustomFieldQueriesModel()
this.setCustomFieldQueryModel(filterGroup, model)
const rawValue =
typeof initialValue === 'string'
? initialValue
: (filterGroup.get('values').value as string)
if (rawValue) {
try {
const parsed = JSON.parse(rawValue)
const expression = new CustomFieldQueryExpression(parsed)
model.queries = [expression]
} catch {
model.clear(false)
model.addInitialAtom()
}
}
const subscription = model.changed
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
this.onCustomFieldQueryModelChanged(filterGroup, model)
})
filterGroup[CUSTOM_FIELD_QUERY_SUBSCRIPTION_KEY]?.unsubscribe()
filterGroup[CUSTOM_FIELD_QUERY_SUBSCRIPTION_KEY] = subscription
this.onCustomFieldQueryModelChanged(filterGroup, model)
return model
}
private clearCustomFieldQueryModel(filterGroup: FormGroup) {
const group = filterGroup as CustomFieldFilterGroup
group[CUSTOM_FIELD_QUERY_SUBSCRIPTION_KEY]?.unsubscribe()
delete group[CUSTOM_FIELD_QUERY_SUBSCRIPTION_KEY]
delete group[CUSTOM_FIELD_QUERY_MODEL_KEY]
}
private getStoredCustomFieldQueryModel(
filterGroup: FormGroup
): CustomFieldQueriesModel | null {
return (
(filterGroup as CustomFieldFilterGroup)[CUSTOM_FIELD_QUERY_MODEL_KEY] ??
null
)
}
private setCustomFieldQueryModel(
filterGroup: FormGroup,
model: CustomFieldQueriesModel
) {
const group = filterGroup as CustomFieldFilterGroup
group[CUSTOM_FIELD_QUERY_MODEL_KEY] = model
}
private onCustomFieldQueryModelChanged(
filterGroup: FormGroup,
model: CustomFieldQueriesModel
) {
const control = filterGroup.get('values')
if (!control) {
return
}
if (!model.isValid()) {
control.setValue(null, { emitEvent: false })
return
}
if (model.isEmpty()) {
control.setValue(null, { emitEvent: false })
return
}
const serialized = JSON.stringify(model.queries[0].serialize())
control.setValue(serialized, { emitEvent: false })
}
private getDefaultFilterValue(type: TriggerFilterType) {
if (type === TriggerFilterType.CustomFieldQuery) {
return null
}
return this.isMultiValueFilter(type) ? [] : null
}
private normalizeFilterValue(type: TriggerFilterType, value?: any) {
if (value === undefined || value === null) {
return this.getDefaultFilterValue(type)
}
if (type === TriggerFilterType.CustomFieldQuery) {
if (typeof value === 'string') {
return value
}
return value ? JSON.stringify(value) : null
}
if (this.isMultiValueFilter(type)) {
return Array.isArray(value) ? [...value] : [value]
}
if (Array.isArray(value)) {
return value.length > 0 ? value[0] : null
}
return value
}
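The normalization rules above can be restated as a standalone sketch; isMulti stands in for isMultiValueFilter(type) and the custom-field-query branch is omitted:

// Illustrative restatement, not the component method itself.
function normalize(isMulti: boolean, value?: unknown): unknown {
  if (value === undefined || value === null) return isMulti ? [] : null
  if (isMulti) return Array.isArray(value) ? [...value] : [value]
  if (Array.isArray(value)) return value.length > 0 ? value[0] : null
  return value
}
normalize(true, 7)       // [7]   multi-value filters always hold arrays
normalize(false, [7, 9]) // 7     single-value filters keep the first entry
normalize(false, [])     // null  empty selections collapse to null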
private createTriggerField(
trigger: WorkflowTrigger,
emitEvent: boolean = false
@@ -1054,7 +405,16 @@ export class WorkflowEditDialogComponent
matching_algorithm: new FormControl(trigger.matching_algorithm),
match: new FormControl(trigger.match),
is_insensitive: new FormControl(trigger.is_insensitive),
filters: this.buildFiltersFormArray(trigger),
filter_has_tags: new FormControl(trigger.filter_has_tags),
filter_has_correspondent: new FormControl(
trigger.filter_has_correspondent
),
filter_has_document_type: new FormControl(
trigger.filter_has_document_type
),
filter_has_storage_path: new FormControl(
trigger.filter_has_storage_path
),
schedule_offset_days: new FormControl(trigger.schedule_offset_days),
schedule_is_recurring: new FormControl(trigger.schedule_is_recurring),
schedule_recurring_interval_days: new FormControl(
@@ -1177,12 +537,6 @@ export class WorkflowEditDialogComponent
filter_path: null,
filter_mailrule: null,
filter_has_tags: [],
filter_has_all_tags: [],
filter_has_not_tags: [],
filter_has_not_correspondents: [],
filter_has_not_document_types: [],
filter_has_not_storage_paths: [],
filter_custom_field_query: null,
filter_has_correspondent: null,
filter_has_document_type: null,
filter_has_storage_path: null,

View File

@@ -1,9 +1,5 @@
<div class="modal-header"> <div class="modal-header">
<h4 class="modal-title" id="modal-basic-title" i18n>{ <h4 class="modal-title" id="modal-basic-title">{{title}}</h4>
documentIds.length,
plural,
=1 {Email Document} other {Email {{documentIds.length}} Documents}
}</h4>
<button type="button" class="btn-close" aria-label="Close" (click)="close()"></button> <button type="button" class="btn-close" aria-label="Close" (click)="close()"></button>
</div> </div>
<div class="modal-body"> <div class="modal-body">
@@ -26,14 +22,11 @@
<input class="form-check-input mt-0 me-2" type="checkbox" role="switch" id="useArchiveVersion" [disabled]="!hasArchiveVersion" [(ngModel)]="useArchiveVersion"> <input class="form-check-input mt-0 me-2" type="checkbox" role="switch" id="useArchiveVersion" [disabled]="!hasArchiveVersion" [(ngModel)]="useArchiveVersion">
<label class="form-check-label w-100 text-start" for="useArchiveVersion" i18n>Use archive version</label> <label class="form-check-label w-100 text-start" for="useArchiveVersion" i18n>Use archive version</label>
</div> </div>
<button type="submit" class="btn btn-outline-primary" (click)="emailDocuments()" [disabled]="loading || emailAddress.length === 0 || emailMessage.length === 0 || emailSubject.length === 0"> <button type="submit" class="btn btn-outline-primary" (click)="emailDocument()" [disabled]="loading || emailAddress.length === 0 || emailMessage.length === 0 || emailSubject.length === 0">
@if (loading) { @if (loading) {
<div class="spinner-border spinner-border-sm me-2" role="status"></div> <div class="spinner-border spinner-border-sm me-2" role="status"></div>
} }
<ng-container i18n>Send email</ng-container> <ng-container i18n>Send email</ng-container>
</button> </button>
</div> </div>
<div class="text-light fst-italic small mt-2">
<ng-container i18n>Some email servers may reject messages with large attachments.</ng-container>
</div>
</div> </div>

View File

@@ -36,59 +36,31 @@ describe('EmailDocumentDialogComponent', () => {
documentService = TestBed.inject(DocumentService)
toastService = TestBed.inject(ToastService)
component = fixture.componentInstance
component.documentIds = [1]
fixture.detectChanges()
})
it('should set hasArchiveVersion and useArchiveVersion', () => {
expect(component.hasArchiveVersion).toBeTruthy()
expect(component.useArchiveVersion).toBeTruthy()
component.hasArchiveVersion = false
expect(component.hasArchiveVersion).toBeFalsy()
expect(component.useArchiveVersion).toBeFalsy()
})
it('should support sending single document via email, showing error if needed', () => {
it('should support sending document via email, showing error if needed', () => {
const toastErrorSpy = jest.spyOn(toastService, 'showError')
const toastSuccessSpy = jest.spyOn(toastService, 'showInfo')
component.documentIds = [1]
component.emailAddress = 'hello@paperless-ngx.com'
component.emailSubject = 'Hello'
component.emailMessage = 'World'
jest
.spyOn(documentService, 'emailDocuments')
.spyOn(documentService, 'emailDocument')
.mockReturnValue(throwError(() => new Error('Unable to email document')))
component.emailDocuments()
component.emailDocument()
expect(toastErrorSpy).toHaveBeenCalledWith(
'Error emailing document',
expect.any(Error)
)
expect(toastErrorSpy).toHaveBeenCalled()
jest.spyOn(documentService, 'emailDocuments').mockReturnValue(of(true))
jest.spyOn(documentService, 'emailDocument').mockReturnValue(of(true))
component.emailDocuments()
component.emailDocument()
expect(toastSuccessSpy).toHaveBeenCalledWith('Email sent')
expect(toastSuccessSpy).toHaveBeenCalled()
})
it('should support sending multiple documents via email, showing appropriate messages', () => {
const toastErrorSpy = jest.spyOn(toastService, 'showError')
const toastSuccessSpy = jest.spyOn(toastService, 'showInfo')
component.documentIds = [1, 2, 3]
component.emailAddress = 'hello@paperless-ngx.com'
component.emailSubject = 'Hello'
component.emailMessage = 'World'
jest
.spyOn(documentService, 'emailDocuments')
.mockReturnValue(throwError(() => new Error('Unable to email documents')))
component.emailDocuments()
expect(toastErrorSpy).toHaveBeenCalledWith(
'Error emailing documents',
expect.any(Error)
)
jest.spyOn(documentService, 'emailDocuments').mockReturnValue(of(true))
component.emailDocuments()
expect(toastSuccessSpy).toHaveBeenCalledWith('Email sent')
})
it('should close the dialog', () => {

View File

@@ -18,7 +18,10 @@ export class EmailDocumentDialogComponent extends LoadingComponentWithPermission
private toastService = inject(ToastService)
@Input()
documentIds: number[]
title = $localize`Email Document`
@Input()
documentId: number
private _hasArchiveVersion: boolean = true
@@ -43,11 +46,11 @@ export class EmailDocumentDialogComponent extends LoadingComponentWithPermission
this.loading = false
}
public emailDocuments() {
public emailDocument() {
this.loading = true
this.documentService
.emailDocuments(
.emailDocument(
this.documentIds,
this.documentId,
this.emailAddress,
this.emailSubject,
this.emailMessage,
@@ -64,11 +67,7 @@ export class EmailDocumentDialogComponent extends LoadingComponentWithPermission
},
error: (e) => {
this.loading = false
const errorMessage =
this.documentIds.length > 1
? $localize`Error emailing documents`
: $localize`Error emailing document`
this.toastService.showError(errorMessage, e)
this.toastService.showError($localize`Error emailing document`, e)
},
})
}

View File

@@ -14,7 +14,7 @@
</div>
</div>
<div class="mt-2 align-items-center bg-light p-2">
<div class="d-flex flex-wrap flex-row gap-2 w-100" style="min-height: 1em;"
<div class="d-flex flex-wrap flex-row gap-2 w-100"
cdkDropList #unselectedList="cdkDropList"
cdkDropListOrientation="mixed"
(cdkDropListDropped)="drop($event)"

View File

@@ -1,68 +1,66 @@
<div class="mb-3 paperless-input-select" [class.disabled]="disabled"> <div class="mb-3 paperless-input-select" [class.disabled]="disabled">
<div class="row"> <div class="row">
@if (title || removable) { <div class="d-flex align-items-center position-relative hidden-button-container" [class.col-md-3]="horizontal">
<div class="d-flex align-items-center position-relative hidden-button-container" [class.col-md-3]="horizontal"> @if (title) {
@if (title) { <label class="form-label" [class.mb-md-0]="horizontal" [for]="inputId">{{title}}</label>
<label class="form-label" [class.mb-md-0]="horizontal" [for]="inputId">{{title}}</label> }
@if (removable) {
<button type="button" class="btn btn-sm btn-danger position-absolute left-0" (click)="removed.emit(this)">
<i-bs name="x"></i-bs>&nbsp;<ng-container i18n>Remove</ng-container>
</button>
} }
@if (removable) { </div>
<button type="button" class="btn btn-sm btn-danger position-absolute left-0" (click)="removed.emit(this)"> <div [class.col-md-9]="horizontal">
<i-bs name="x"></i-bs>&nbsp;<ng-container i18n>Remove</ng-container> <div [class.input-group]="allowCreateNew || showFilter" [class.is-invalid]="error">
<ng-select name="inputId" [(ngModel)]="value"
[disabled]="disabled"
[style.color]="textColor"
[style.background]="backgroundColor"
[class.private]="isPrivate"
[clearable]="allowNull"
[items]="items"
[addTag]="allowCreateNew && addItemRef"
addTagText="Add item"
i18n-addTagText="Used for both types, correspondents, storage paths"
[placeholder]="placeholder"
[notFoundText]="notFoundText"
[multiple]="multiple"
[bindLabel]="bindLabel"
bindValue="id"
(change)="onChange(value)"
(search)="onSearch($event)"
(focus)="clearLastSearchTerm()"
(clear)="clearLastSearchTerm()"
(blur)="onBlur()">
<ng-template ng-option-tmp let-item="item">
<span [title]="item[bindLabel]">{{item[bindLabel]}}</span>
</ng-template>
</ng-select>
@if (allowCreateNew && !hideAddButton) {
<button class="btn btn-outline-secondary" type="button" (click)="addItem()" [disabled]="disabled">
<i-bs width="1.2em" height="1.2em" name="plus"></i-bs>
</button> </button>
}
</div>
}
<div [class.col-md-9]="horizontal">
<div [class.input-group]="allowCreateNew || showFilter" [class.is-invalid]="error">
<ng-select name="inputId" [(ngModel)]="value"
[disabled]="disabled"
[style.color]="textColor"
[style.background]="backgroundColor"
[class.private]="isPrivate"
[clearable]="allowNull"
[items]="items"
[addTag]="allowCreateNew && addItemRef"
addTagText="Add item"
i18n-addTagText="Used for both types, correspondents, storage paths"
[placeholder]="placeholder"
[notFoundText]="notFoundText"
[multiple]="multiple"
[bindLabel]="bindLabel"
bindValue="id"
(change)="onChange(value)"
(search)="onSearch($event)"
(focus)="clearLastSearchTerm()"
(clear)="clearLastSearchTerm()"
(blur)="onBlur()">
<ng-template ng-option-tmp let-item="item">
<span [title]="item[bindLabel]">{{item[bindLabel]}}</span>
</ng-template>
</ng-select>
@if (allowCreateNew && !hideAddButton) {
<button class="btn btn-outline-secondary" type="button" (click)="addItem()" [disabled]="disabled">
<i-bs width="1.2em" height="1.2em" name="plus"></i-bs>
</button>
}
@if (showFilter) {
<button class="btn btn-outline-secondary" type="button" (click)="onFilterDocuments()" [disabled]="isPrivate || this.value === null" title="{{ filterButtonTitle }}">
<i-bs width="1.2em" height="1.2em" name="filter"></i-bs>
</button>
}
</div>
<div class="invalid-feedback">
{{error}}
</div>
@if (hint) {
<small class="form-text text-muted">{{hint}}</small>
}
@if (getSuggestions().length > 0) {
<small>
<span i18n>Suggestions:</span>&nbsp;
@for (s of getSuggestions(); track s) {
<a (click)="value = s.id; onChange(value)" [routerLink]="[]">{{s.name}}</a>&nbsp;
} }
</small> @if (showFilter) {
} <button class="btn btn-outline-secondary" type="button" (click)="onFilterDocuments()" [disabled]="isPrivate || this.value === null" title="{{ filterButtonTitle }}">
<i-bs width="1.2em" height="1.2em" name="filter"></i-bs>
</button>
}
</div>
<div class="invalid-feedback">
{{error}}
</div>
@if (hint) {
<small class="form-text text-muted">{{hint}}</small>
}
@if (getSuggestions().length > 0) {
<small>
<span i18n>Suggestions:</span>&nbsp;
@for (s of getSuggestions(); track s) {
<a (click)="value = s.id; onChange(value)" [routerLink]="[]">{{s.name}}</a>&nbsp;
}
</small>
}
</div>
</div> </div>
</div> </div>
</div>

View File

@@ -1,10 +1,8 @@
<div class="mb-3 paperless-input-select paperless-input-tags" [class.disabled]="disabled" [class.pb-3]="getSuggestions().length > 0"> <div class="mb-3 paperless-input-select paperless-input-tags" [class.disabled]="disabled" [class.pb-3]="getSuggestions().length > 0">
<div class="row"> <div class="row">
@if (title) { <div class="d-flex align-items-center" [class.col-md-3]="horizontal">
<div class="d-flex align-items-center" [class.col-md-3]="horizontal"> <label class="form-label" [class.mb-md-0]="horizontal" for="tags">{{title}}</label>
<label class="form-label" [class.mb-md-0]="horizontal" for="tags">{{title}}</label> </div>
</div>
}
<div class="position-relative" [class.col-md-9]="horizontal"> <div class="position-relative" [class.col-md-9]="horizontal">
<div class="input-group flex-nowrap"> <div class="input-group flex-nowrap">
<ng-select #tagSelect name="tags" [items]="tags" bindLabel="name" bindValue="id" [(ngModel)]="value" <ng-select #tagSelect name="tags" [items]="tags" bindLabel="name" bindValue="id" [(ngModel)]="value"

View File

@@ -1481,7 +1481,7 @@ export class DocumentDetailComponent
const modal = this.modalService.open(EmailDocumentDialogComponent, {
backdrop: 'static',
})
modal.componentInstance.documentIds = [this.document.id]
modal.componentInstance.documentId = this.document.id
modal.componentInstance.hasArchiveVersion =
!!this.document?.archived_file_name
}

View File

@@ -96,9 +96,6 @@
<button ngbDropdownItem (click)="mergeSelected()" [disabled]="!userCanAdd || list.selected.size < 2"> <button ngbDropdownItem (click)="mergeSelected()" [disabled]="!userCanAdd || list.selected.size < 2">
<i-bs name="journals"></i-bs>&nbsp;<ng-container i18n>Merge</ng-container> <i-bs name="journals"></i-bs>&nbsp;<ng-container i18n>Merge</ng-container>
</button> </button>
<button ngbDropdownItem (click)="emailSelected()" [disabled]="!userCanEdit">
<i-bs name="envelope"></i-bs>&nbsp;<ng-container i18n>Email</ng-container>
</button>
</div> </div>
</div> </div>
</div> </div>

View File

@@ -46,7 +46,6 @@ import { DocumentTypeEditDialogComponent } from '../../common/edit-dialog/docume
import { EditDialogMode } from '../../common/edit-dialog/edit-dialog.component'
import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
import { EmailDocumentDialogComponent } from '../../common/email-document-dialog/email-document-dialog.component'
import {
ChangedItems,
FilterableDropdownComponent,
@@ -903,16 +902,4 @@ export class BulkEditorComponent
)
})
}
emailSelected() {
const allHaveArchiveVersion = this.list.documents
.filter((d) => this.list.selected.has(d.id))
.every((doc) => !!doc.archived_file_name)
const modal = this.modalService.open(EmailDocumentDialogComponent, {
backdrop: 'static',
})
modal.componentInstance.documentIds = Array.from(this.list.selected)
modal.componentInstance.hasArchiveVersion = allHaveArchiveVersion
}
}

View File

@@ -3,7 +3,7 @@
i18n-title
info="Manage e-mail accounts and rules for automatically importing documents."
i18n-info
infoLink="usage/#incoming-mail"
infoLink="usage/#usage-email"
>
</pngx-page-header>

View File

@@ -40,18 +40,6 @@ export interface WorkflowTrigger extends ObjectWithId {
filter_has_tags?: number[] // Tag.id[]
filter_has_all_tags?: number[] // Tag.id[]
filter_has_not_tags?: number[] // Tag.id[]
filter_has_not_correspondents?: number[] // Correspondent.id[]
filter_has_not_document_types?: number[] // DocumentType.id[]
filter_has_not_storage_paths?: number[] // StoragePath.id[]
filter_custom_field_query?: string
filter_has_correspondent?: number // Correspondent.id
filter_has_document_type?: number // DocumentType.id

View File

@@ -357,15 +357,17 @@ it('should include custom fields in sort fields if user has permission', () => {
it('should call appropriate api endpoint for email document', () => {
subscription = service
.emailDocuments(
.emailDocument(
[documents[0].id],
documents[0].id,
'hello@paperless-ngx.com',
'hello',
'world',
true
)
.subscribe()
httpTestingController.expectOne(`${environment.apiBaseUrl}${endpoint}/email/`)
httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/email/`
)
})
afterEach(() => {

View File

@@ -256,15 +256,14 @@ export class DocumentService extends AbstractPaperlessService<Document> {
return this._searchQuery
}
emailDocuments(
emailDocument(
documentIds: number[],
documentId: number,
addresses: string,
subject: string,
message: string,
useArchiveVersion: boolean
): Observable<any> {
return this.http.post(this.getResourceUrl(null, 'email'), {
return this.http.post(this.getResourceUrl(documentId, 'email'), {
documents: documentIds,
addresses: addresses,
subject: subject,
message: message,

View File

@@ -1,37 +1,20 @@
from __future__ import annotations
from email import message_from_bytes
from typing import TYPE_CHECKING
from pathlib import Path
from django.conf import settings
from django.core.mail import EmailMessage
from filelock import FileLock
if TYPE_CHECKING:
from documents.models import Document
def send_email(
subject: str,
body: str,
to: list[str],
attachments: list[Document],
*,
use_archive: bool,
attachment: Path | None = None,
attachment_mime_type: str | None = None,
) -> int:
"""
Send an email with attachments.
Send an email with an optional attachment.
Args:
subject: Email subject
body: Email body text
to: List of recipient email addresses
attachments: List of documents to attach (the list may be empty)
use_archive: Whether to attach archive versions when available
Returns:
Number of emails sent
TODO: re-evaluate this pending https://code.djangoproject.com/ticket/35581 / https://github.com/django/django/pull/18966
"""
email = EmailMessage(
@@ -39,49 +22,17 @@ def send_email(
body=body,
to=to,
)
if attachment:
# Something could be renaming the file concurrently so it can't be attached
with FileLock(settings.MEDIA_LOCK), attachment.open("rb") as f:
content = f.read()
if attachment_mime_type == "message/rfc822":
# See https://forum.djangoproject.com/t/using-emailmessage-with-an-attached-email-file-crashes-due-to-non-ascii/37981
content = message_from_bytes(content)
email.attach(
filename=attachment.name,
content=content,
mimetype=attachment_mime_type,
)
used_filenames: set[str] = set()
# Something could be renaming the file concurrently so it can't be attached
with FileLock(settings.MEDIA_LOCK):
for document in attachments:
attachment_path = (
document.archive_path
if use_archive and document.has_archive_version
else document.source_path
)
friendly_filename = _get_unique_filename(
document,
used_filenames,
archive=use_archive and document.has_archive_version,
)
used_filenames.add(friendly_filename)
with attachment_path.open("rb") as f:
content = f.read()
if document.mime_type == "message/rfc822":
# See https://forum.djangoproject.com/t/using-emailmessage-with-an-attached-email-file-crashes-due-to-non-ascii/37981
content = message_from_bytes(content)
email.attach(
filename=friendly_filename,
content=content,
mimetype=document.mime_type,
)
return email.send()
def _get_unique_filename(doc: Document, used_names: set[str], *, archive: bool) -> str:
"""
Constructs a unique friendly filename for the given document.
The filename might not be unique enough, so a counter is appended if needed.
"""
counter = 0
while True:
filename = doc.get_public_filename(archive=archive, counter=counter)
if filename not in used_names:
return filename
counter += 1
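The removed helper above probes counter-suffixed names until one is free. The same loop, restated in TypeScript for illustration; getPublicFilename is a hypothetical stand-in for the document's real naming call, not an actual API:

function getUniqueFilename(
  getPublicFilename: (counter: number) => string,
  usedNames: Set<string>
): string {
  let counter = 0
  while (true) {
    const filename = getPublicFilename(counter)
    // the first unseen name wins; the caller records it before the next attachment
    if (!usedNames.has(filename)) return filename
    counter++
  }
}
const used = new Set(['scan.pdf'])
getUniqueFilename((c) => (c === 0 ? 'scan.pdf' : `scan_${c}.pdf`), used) // 'scan_1.pdf'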

View File

@@ -6,11 +6,8 @@ from fnmatch import fnmatch
from fnmatch import translate as fnmatch_translate
from typing import TYPE_CHECKING
from rest_framework import serializers
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentSource
from documents.filters import CustomFieldQueryParser
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
@@ -345,147 +342,67 @@ def consumable_document_matches_workflow(
def existing_document_matches_workflow(
document: Document,
trigger: WorkflowTrigger,
) -> tuple[bool, str | None]:
) -> tuple[bool, str]:
"""
Returns True if the Document matches all filters from the workflow trigger,
False otherwise. Includes a reason if it doesn't match
"""
# Check content matching algorithm
trigger_matched = True
reason = ""
if trigger.matching_algorithm > MatchingModel.MATCH_NONE and not matches(
trigger,
document,
):
return (
False,
f"Document content matching settings for algorithm '{trigger.matching_algorithm}' did not match",
)
reason = (
f"Document content matching settings for algorithm '{trigger.matching_algorithm}' did not match",
)
trigger_matched = False
# Check if any tag filters exist to determine if we need to load document tags
trigger_has_tags_qs = trigger.filter_has_tags.all()
trigger_has_all_tags_qs = trigger.filter_has_all_tags.all()
trigger_has_not_tags_qs = trigger.filter_has_not_tags.all()
has_tags_filter = trigger_has_tags_qs.exists()
has_all_tags_filter = trigger_has_all_tags_qs.exists()
has_not_tags_filter = trigger_has_not_tags_qs.exists()
# Load document tags once if any tag filters exist
document_tag_ids = None
if has_tags_filter or has_all_tags_filter or has_not_tags_filter:
document_tag_ids = set(document.tags.values_list("id", flat=True))
# Document tags vs trigger has_tags
if (
trigger.filter_has_tags.all().count() > 0
and document.tags.filter(
id__in=trigger.filter_has_tags.all().values_list("id"),
).count()
== 0
):
reason = (
f"Document tags {document.tags.all()} do not include"
f" {trigger.filter_has_tags.all()}",
)
trigger_matched = False
# Document tags vs trigger has_tags (any of)
if has_tags_filter:
trigger_has_tag_ids = set(trigger_has_tags_qs.values_list("id", flat=True))
if not (document_tag_ids & trigger_has_tag_ids):
# For error message, load the actual tag objects
return (
False,
f"Document tags {list(document.tags.all())} do not include {list(trigger_has_tags_qs)}",
)
# Document tags vs trigger has_all_tags (all of)
if has_all_tags_filter:
required_tag_ids = set(trigger_has_all_tags_qs.values_list("id", flat=True))
if not required_tag_ids.issubset(document_tag_ids):
return (
False,
f"Document tags {list(document.tags.all())} do not contain all of {list(trigger_has_all_tags_qs)}",
)
# Document tags vs trigger has_not_tags (none of)
if has_not_tags_filter:
excluded_tag_ids = set(trigger_has_not_tags_qs.values_list("id", flat=True))
if document_tag_ids & excluded_tag_ids:
return (
False,
f"Document tags {list(document.tags.all())} include excluded tags {list(trigger_has_not_tags_qs)}",
)
# Document correspondent vs trigger has_correspondent
if (
trigger.filter_has_correspondent_id is not None
and document.correspondent_id != trigger.filter_has_correspondent_id
):
return (
False,
f"Document correspondent {document.correspondent} does not match {trigger.filter_has_correspondent}",
)
if (
trigger.filter_has_correspondent is not None
and document.correspondent != trigger.filter_has_correspondent
):
reason = (
f"Document correspondent {document.correspondent} does not match {trigger.filter_has_correspondent}",
)
trigger_matched = False
if (
document.correspondent_id
and trigger.filter_has_not_correspondents.filter(
id=document.correspondent_id,
).exists()
):
return (
False,
f"Document correspondent {document.correspondent} is excluded by {list(trigger.filter_has_not_correspondents.all())}",
)
# Document document_type vs trigger has_document_type
if (
trigger.filter_has_document_type_id is not None
and document.document_type_id != trigger.filter_has_document_type_id
):
return (
False,
f"Document doc type {document.document_type} does not match {trigger.filter_has_document_type}",
)
if (
trigger.filter_has_document_type is not None
and document.document_type != trigger.filter_has_document_type
):
reason = (
f"Document doc type {document.document_type} does not match {trigger.filter_has_document_type}",
)
trigger_matched = False
if (
document.document_type_id
and trigger.filter_has_not_document_types.filter(
id=document.document_type_id,
).exists()
):
return (
False,
f"Document doc type {document.document_type} is excluded by {list(trigger.filter_has_not_document_types.all())}",
)
# Document storage_path vs trigger has_storage_path
if (
trigger.filter_has_storage_path_id is not None
and document.storage_path_id != trigger.filter_has_storage_path_id
):
return (
False,
f"Document storage path {document.storage_path} does not match {trigger.filter_has_storage_path}",
)
if (
trigger.filter_has_storage_path is not None
and document.storage_path != trigger.filter_has_storage_path
):
reason = (
f"Document storage path {document.storage_path} does not match {trigger.filter_has_storage_path}",
)
trigger_matched = False
if (
document.storage_path_id
and trigger.filter_has_not_storage_paths.filter(
id=document.storage_path_id,
).exists()
):
return (
False,
f"Document storage path {document.storage_path} is excluded by {list(trigger.filter_has_not_storage_paths.all())}",
)
# Custom field query check
if trigger.filter_custom_field_query:
parser = CustomFieldQueryParser("filter_custom_field_query")
try:
custom_field_q, annotations = parser.parse(
trigger.filter_custom_field_query,
)
except serializers.ValidationError:
return (False, "Invalid custom field query configuration")
qs = (
Document.objects.filter(id=document.id)
.annotate(**annotations)
.filter(custom_field_q)
)
if not qs.exists():
return (
False,
"Document custom fields do not match the configured custom field query",
)
# Document original_filename vs trigger filename
if (
@@ -497,12 +414,13 @@ def existing_document_matches_workflow(
trigger.filter_filename.lower(),
)
):
return (
False,
f"Document filename {document.original_filename} does not match {trigger.filter_filename.lower()}",
)
reason = (
f"Document filename {document.original_filename} does not match"
f" {trigger.filter_filename.lower()}",
)
trigger_matched = False
return (True, None)
return (trigger_matched, reason)
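The removed tag filters in this function (any of, all of, none of) reduce to plain set operations. A language-agnostic restatement in TypeScript, with example IDs only, not the Django queryset code above:

const docTags = new Set([1, 2, 3]) // the document's tag IDs
const hasAny = (filter: number[]) => filter.some((id) => docTags.has(id))
const hasAll = (filter: number[]) => filter.every((id) => docTags.has(id))
const hasNone = (filter: number[]) => !filter.some((id) => docTags.has(id))
hasAny([3, 9])  // true  -> 'has any of these tags' passes
hasAll([1, 9])  // false -> 'has all of these tags' fails
hasNone([4, 5]) // true  -> 'does not have these tags' passes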
def prefilter_documents_by_workflowtrigger(
@@ -515,66 +433,31 @@ def prefilter_documents_by_workflowtrigger(
document_matches_workflow in run_workflows
"""
# Filter for documents that have AT LEAST ONE of the specified tags.
if trigger.filter_has_tags.exists():
documents = documents.filter(tags__in=trigger.filter_has_tags.all()).distinct()
if trigger.filter_has_tags.all().count() > 0:
documents = documents.filter(
tags__in=trigger.filter_has_tags.all(),
).distinct()
# Filter for documents that have ALL of the specified tags.
if trigger.filter_has_all_tags.exists():
for tag in trigger.filter_has_all_tags.all():
documents = documents.filter(tags=tag)
# Multiple JOINs can create duplicate results.
documents = documents.distinct()
# Exclude documents that have ANY of the specified tags.
if trigger.filter_has_not_tags.exists():
documents = documents.exclude(tags__in=trigger.filter_has_not_tags.all())
# Correspondent, DocumentType, etc. filtering
if trigger.filter_has_correspondent is not None:
documents = documents.filter(
correspondent=trigger.filter_has_correspondent,
)
if trigger.filter_has_not_correspondents.exists():
documents = documents.exclude(
correspondent__in=trigger.filter_has_not_correspondents.all(),
)
if trigger.filter_has_document_type is not None:
documents = documents.filter(
document_type=trigger.filter_has_document_type,
)
if trigger.filter_has_not_document_types.exists():
documents = documents.exclude(
document_type__in=trigger.filter_has_not_document_types.all(),
)
if trigger.filter_has_storage_path is not None:
documents = documents.filter(
storage_path=trigger.filter_has_storage_path,
)
if trigger.filter_has_not_storage_paths.exists():
documents = documents.exclude(
storage_path__in=trigger.filter_has_not_storage_paths.all(),
)
# Custom Field & Filename Filtering if trigger.filter_filename is not None and len(trigger.filter_filename) > 0:
# the true fnmatch will actually run later so we just want a loose filter here
if trigger.filter_custom_field_query:
parser = CustomFieldQueryParser("filter_custom_field_query")
try:
custom_field_q, annotations = parser.parse(
trigger.filter_custom_field_query,
)
except serializers.ValidationError:
return documents.none()
documents = documents.annotate(**annotations).filter(custom_field_q)
if trigger.filter_filename:
regex = fnmatch_translate(trigger.filter_filename).lstrip("^").rstrip("$") regex = fnmatch_translate(trigger.filter_filename).lstrip("^").rstrip("$")
documents = documents.filter(original_filename__iregex=regex) regex = f"(?i){regex}"
documents = documents.filter(original_filename__regex=regex)
return documents return documents
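The filename prefilter deliberately strips the anchors that fnmatch's translation adds, so it only narrows the candidate set; the exact fnmatch check runs later. A simplified TypeScript analogue of that idea, handling only * and ? (Python's fnmatch.translate does more):

function globToLooseRegex(pattern: string): RegExp {
  const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, '\\$&') // escape regex metacharacters
  const body = escaped.replace(/\*/g, '.*').replace(/\?/g, '.')
  return new RegExp(body, 'i') // unanchored and case-insensitive on purpose
}
globToLooseRegex('*invoice*.pdf').test('2024_Invoice_42.PDF') // true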
@@ -589,34 +472,13 @@ def document_matches_workflow(
settings from the workflow trigger, False otherwise
"""
triggers_queryset = (
workflow.triggers.filter(
type=trigger_type,
)
.select_related(
"filter_mailrule",
"filter_has_document_type",
"filter_has_correspondent",
"filter_has_storage_path",
"schedule_date_custom_field",
)
.prefetch_related(
"filter_has_tags",
"filter_has_all_tags",
"filter_has_not_tags",
"filter_has_not_document_types",
"filter_has_not_correspondents",
"filter_has_not_storage_paths",
)
)
trigger_matched = True
if not triggers_queryset.exists():
if workflow.triggers.filter(type=trigger_type).count() == 0:
trigger_matched = False
logger.info(f"Document did not match {workflow}")
logger.debug(f"No matching triggers with type {trigger_type} found")
else:
for trigger in triggers_queryset:
for trigger in workflow.triggers.filter(type=trigger_type):
if trigger_type == WorkflowTrigger.WorkflowTriggerType.CONSUMPTION:
trigger_matched, reason = consumable_document_matches_workflow(
document,

View File

@@ -1,73 +0,0 @@
# Generated by Django 5.2.6 on 2025-10-07 18:52
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1071_tag_tn_ancestors_count_tag_tn_ancestors_pks_and_more"),
]
operations = [
migrations.AddField(
model_name="workflowtrigger",
name="filter_custom_field_query",
field=models.TextField(
blank=True,
help_text="JSON-encoded custom field query expression.",
null=True,
verbose_name="filter custom field query",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_all_tags",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_all",
to="documents.tag",
verbose_name="has all of these tag(s)",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_not_correspondents",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_not_correspondent",
to="documents.correspondent",
verbose_name="does not have these correspondent(s)",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_not_document_types",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_not_document_type",
to="documents.documenttype",
verbose_name="does not have these document type(s)",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_not_storage_paths",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_not_storage_path",
to="documents.storagepath",
verbose_name="does not have these storage path(s)",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="filter_has_not_tags",
field=models.ManyToManyField(
blank=True,
related_name="workflowtriggers_has_not",
to="documents.tag",
verbose_name="does not have these tag(s)",
),
),
]

View File

@@ -1065,20 +1065,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has these tag(s)"), verbose_name=_("has these tag(s)"),
) )
filter_has_all_tags = models.ManyToManyField(
Tag,
blank=True,
related_name="workflowtriggers_has_all",
verbose_name=_("has all of these tag(s)"),
)
filter_has_not_tags = models.ManyToManyField(
Tag,
blank=True,
related_name="workflowtriggers_has_not",
verbose_name=_("does not have these tag(s)"),
)
filter_has_document_type = models.ForeignKey(
DocumentType,
null=True,
@@ -1087,13 +1073,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this document type"),
)
filter_has_not_document_types = models.ManyToManyField(
DocumentType,
blank=True,
related_name="workflowtriggers_has_not_document_type",
verbose_name=_("does not have these document type(s)"),
)
filter_has_correspondent = models.ForeignKey(
Correspondent,
null=True,
@@ -1102,13 +1081,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this correspondent"),
)
filter_has_not_correspondents = models.ManyToManyField(
Correspondent,
blank=True,
related_name="workflowtriggers_has_not_correspondent",
verbose_name=_("does not have these correspondent(s)"),
)
filter_has_storage_path = models.ForeignKey(
StoragePath,
null=True,
@@ -1117,20 +1089,6 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this storage path"),
)
filter_has_not_storage_paths = models.ManyToManyField(
StoragePath,
blank=True,
related_name="workflowtriggers_has_not_storage_path",
verbose_name=_("does not have these storage path(s)"),
)
filter_custom_field_query = models.TextField(
_("filter custom field query"),
null=True,
blank=True,
help_text=_("JSON-encoded custom field query expression."),
)
schedule_offset_days = models.IntegerField(
_("schedule offset days"),
default=0,

View File

@@ -16,7 +16,6 @@ from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.validators import DecimalValidator
from django.core.validators import EmailValidator
from django.core.validators import MaxLengthValidator
from django.core.validators import RegexValidator
from django.core.validators import integer_validator
@@ -44,7 +43,6 @@ if settings.AUDIT_LOG_ENABLED:
from documents import bulk_edit
from documents.data_models import DocumentSource
from documents.filters import CustomFieldQueryParser
from documents.models import Correspondent
from documents.models import CustomField
from documents.models import CustomFieldInstance
@@ -1908,51 +1906,6 @@ class BulkDownloadSerializer(DocumentListSerializer):
}[compression]
class EmailSerializer(DocumentListSerializer):
addresses = serializers.CharField(
required=True,
label="Email addresses",
help_text="Comma-separated email addresses",
)
subject = serializers.CharField(
required=True,
label="Email subject",
)
message = serializers.CharField(
required=True,
label="Email message",
)
use_archive_version = serializers.BooleanField(
default=True,
label="Use archive version",
help_text="Use archive version of documents if available",
)
def validate_addresses(self, addresses):
address_list = [addr.strip() for addr in addresses.split(",")]
if not address_list:
raise serializers.ValidationError("At least one email address is required")
email_validator = EmailValidator()
try:
for address in address_list:
email_validator(address)
except ValidationError:
raise serializers.ValidationError(f"Invalid email address: {address}")
return ",".join(address_list)
def validate_documents(self, documents):
super().validate_documents(documents)
if not documents:
raise serializers.ValidationError("At least one document is required")
return documents
class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
class Meta:
model = StoragePath
@@ -2241,12 +2194,6 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
"match", "match",
"is_insensitive", "is_insensitive",
"filter_has_tags", "filter_has_tags",
"filter_has_all_tags",
"filter_has_not_tags",
"filter_custom_field_query",
"filter_has_not_correspondents",
"filter_has_not_document_types",
"filter_has_not_storage_paths",
"filter_has_correspondent", "filter_has_correspondent",
"filter_has_document_type", "filter_has_document_type",
"filter_has_storage_path", "filter_has_storage_path",
@@ -2272,20 +2219,6 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
):
attrs["filter_path"] = None
if (
"filter_custom_field_query" in attrs
and attrs["filter_custom_field_query"] is not None
and len(attrs["filter_custom_field_query"]) == 0
):
attrs["filter_custom_field_query"] = None
if (
"filter_custom_field_query" in attrs
and attrs["filter_custom_field_query"] is not None
):
parser = CustomFieldQueryParser("filter_custom_field_query")
parser.parse(attrs["filter_custom_field_query"])
trigger_type = attrs.get("type", getattr(self.instance, "type", None)) trigger_type = attrs.get("type", getattr(self.instance, "type", None))
if ( if (
trigger_type == WorkflowTrigger.WorkflowTriggerType.CONSUMPTION trigger_type == WorkflowTrigger.WorkflowTriggerType.CONSUMPTION
@@ -2481,20 +2414,6 @@ class WorkflowSerializer(serializers.ModelSerializer):
if triggers is not None and triggers is not serializers.empty:
for trigger in triggers:
filter_has_tags = trigger.pop("filter_has_tags", None)
filter_has_all_tags = trigger.pop("filter_has_all_tags", None)
filter_has_not_tags = trigger.pop("filter_has_not_tags", None)
filter_has_not_correspondents = trigger.pop(
"filter_has_not_correspondents",
None,
)
filter_has_not_document_types = trigger.pop(
"filter_has_not_document_types",
None,
)
filter_has_not_storage_paths = trigger.pop(
"filter_has_not_storage_paths",
None,
)
# Convert sources to strings to handle django-multiselectfield v1.0 changes
WorkflowTriggerSerializer.normalize_workflow_trigger_sources(trigger)
trigger_instance, _ = WorkflowTrigger.objects.update_or_create(
@@ -2503,22 +2422,6 @@ class WorkflowSerializer(serializers.ModelSerializer):
)
if filter_has_tags is not None:
trigger_instance.filter_has_tags.set(filter_has_tags)
if filter_has_all_tags is not None:
trigger_instance.filter_has_all_tags.set(filter_has_all_tags)
if filter_has_not_tags is not None:
trigger_instance.filter_has_not_tags.set(filter_has_not_tags)
if filter_has_not_correspondents is not None:
trigger_instance.filter_has_not_correspondents.set(
filter_has_not_correspondents,
)
if filter_has_not_document_types is not None:
trigger_instance.filter_has_not_document_types.set(
filter_has_not_document_types,
)
if filter_has_not_storage_paths is not None:
trigger_instance.filter_has_not_storage_paths.set(
filter_has_not_storage_paths,
)
set_triggers.append(trigger_instance)
if actions is not None and actions is not serializers.empty:
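
With the extra filters gone from the serializer's field list, a trigger create payload shrinks to the single-value filters plus filter_has_tags. A representative payload on the reduced field set (all ids are placeholders, and the numeric trigger type is illustrative rather than confirmed):

payload = {
    "type": 1,  # a WorkflowTrigger.WorkflowTriggerType value; 1 is illustrative
    "filter_filename": "*",
    "filter_has_tags": [1],
    "filter_has_correspondent": 2,
    "filter_has_document_type": 3,
    "filter_has_storage_path": 4,
}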

View File

@@ -1162,15 +1162,12 @@ def run_workflows(
else "" else ""
) )
try: try:
attachments = []
if action.email.include_document and original_file:
attachments = [document]
n_messages = send_email(
subject=subject,
body=body,
to=action.email.to.split(","),
attachments=attachments,
use_archive=False,
attachment=original_file if action.email.include_document else None,
attachment_mime_type=document.mime_type,
)
logger.debug(
f"Sent {n_messages} notification email(s) to {action.email.to}",

View File

@@ -3022,8 +3022,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(len(mail.outbox), 1)
expected_filename = f"{doc.created} test.pdf"
self.assertEqual(mail.outbox[0].attachments[0][0], expected_filename)
self.assertEqual(mail.outbox[0].attachments[0][0], "archive.pdf")
self.client.post(
f"/api/documents/{doc2.pk}/email/",
@@ -3036,8 +3035,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(len(mail.outbox), 2)
expected_filename2 = f"{doc2.created} test2.pdf"
self.assertEqual(mail.outbox[1].attachments[0][0], expected_filename2)
self.assertEqual(mail.outbox[1].attachments[0][0], "test2.pdf")
@mock.patch("django.core.mail.message.EmailMessage.send", side_effect=Exception) @mock.patch("django.core.mail.message.EmailMessage.send", side_effect=Exception)
def test_email_document_errors(self, mocked_send): def test_email_document_errors(self, mocked_send):
@@ -3095,7 +3093,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
"message": "hello", "message": "hello",
}, },
) )
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
resp = self.client.post( resp = self.client.post(
f"/api/documents/{doc.pk}/email/", f"/api/documents/{doc.pk}/email/",

View File

@@ -1,411 +0,0 @@
import json
import shutil
from unittest import mock
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.core import mail
from django.test import override_settings
from rest_framework import status
from rest_framework.test import APITestCase
from documents.models import Document
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import SampleDirMixin
class TestEmail(DirectoriesMixin, SampleDirMixin, APITestCase):
ENDPOINT = "/api/documents/email/"
def setUp(self):
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
self.doc1 = Document.objects.create(
title="test1",
mime_type="application/pdf",
content="this is document 1",
checksum="1",
filename="test1.pdf",
archive_checksum="A1",
archive_filename="archive1.pdf",
)
self.doc2 = Document.objects.create(
title="test2",
mime_type="application/pdf",
content="this is document 2",
checksum="2",
filename="test2.pdf",
)
# Copy sample files to document paths (using different files to distinguish versions)
shutil.copy(
self.SAMPLE_DIR / "documents" / "originals" / "0000001.pdf",
self.doc1.archive_path,
)
shutil.copy(
self.SAMPLE_DIR / "documents" / "originals" / "0000002.pdf",
self.doc1.source_path,
)
shutil.copy(
self.SAMPLE_DIR / "documents" / "originals" / "0000003.pdf",
self.doc2.source_path,
)
@override_settings(
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
def test_email_success(self):
"""
GIVEN:
- Multiple existing documents (doc1 with archive, doc2 without)
WHEN:
- API request is made to bulk email documents
THEN:
- Email is sent with all documents attached
- Archive version used by default for doc1
- Original version used for doc2 (no archive available)
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk, self.doc2.pk],
"addresses": "hello@paperless-ngx.com,test@example.com",
"subject": "Bulk email test",
"message": "Here are your documents",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["message"], "Email sent")
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
self.assertEqual(email.to, ["hello@paperless-ngx.com", "test@example.com"])
self.assertEqual(email.subject, "Bulk email test")
self.assertEqual(email.body, "Here are your documents")
self.assertEqual(len(email.attachments), 2)
attachment_names = [att[0] for att in email.attachments]
self.assertEqual(len(attachment_names), 2)
self.assertIn(f"{self.doc1!s}.pdf", attachment_names)
self.assertIn(f"{self.doc2!s}.pdf", attachment_names)
doc1_attachment = next(
att for att in email.attachments if att[0] == f"{self.doc1!s}.pdf"
)
archive_size = self.doc1.archive_path.stat().st_size
self.assertEqual(len(doc1_attachment[1]), archive_size)
doc2_attachment = next(
att for att in email.attachments if att[0] == f"{self.doc2!s}.pdf"
)
original_size = self.doc2.source_path.stat().st_size
self.assertEqual(len(doc2_attachment[1]), original_size)
@override_settings(
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
def test_email_use_original_version(self):
"""
GIVEN:
- Documents with archive versions
WHEN:
- API request is made to bulk email with use_archive_version=False
THEN:
- Original files are attached instead of archive versions
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
"use_archive_version": False,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(mail.outbox), 1)
attachment = mail.outbox[0].attachments[0]
self.assertEqual(attachment[0], f"{self.doc1!s}.pdf")
original_size = self.doc1.source_path.stat().st_size
self.assertEqual(len(attachment[1]), original_size)
def test_email_missing_required_fields(self):
"""
GIVEN:
- Request with missing required fields
WHEN:
- API request is made to bulk email endpoint
THEN:
- Bad request response is returned
"""
# Missing addresses
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# Missing subject
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"addresses": "test@example.com",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# Missing message
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"addresses": "test@example.com",
"subject": "Test",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# Missing documents
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_empty_document_list(self):
"""
GIVEN:
- Request with empty document list
WHEN:
- API request is made to bulk email endpoint
THEN:
- Bad request response is returned
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [],
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_invalid_document_id(self):
"""
GIVEN:
- Request with non-existent document ID
WHEN:
- API request is made to bulk email endpoint
THEN:
- Bad request response is returned
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [999],
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_invalid_email_address(self):
"""
GIVEN:
- Request with invalid email address
WHEN:
- API request is made to bulk email endpoint
THEN:
- Bad request response is returned
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"addresses": "invalid-email",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# Test multiple addresses with one invalid
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"addresses": "valid@example.com,invalid-email",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_insufficient_permissions(self):
"""
GIVEN:
- User without permissions to view document
WHEN:
- API request is made to bulk email documents
THEN:
- Forbidden response is returned
"""
user1 = User.objects.create_user(username="test1")
user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
doc_owned = Document.objects.create(
title="owned_doc",
mime_type="application/pdf",
checksum="owned",
owner=self.user,
)
self.client.force_authenticate(user1)
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk, doc_owned.pk],
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@override_settings(
EMAIL_ENABLED=True,
EMAIL_BACKEND="django.core.mail.backends.locmem.EmailBackend",
)
def test_email_duplicate_filenames(self):
"""
GIVEN:
- Multiple documents with the same title
WHEN:
- API request is made to bulk email documents
THEN:
- Filenames are made unique with counters
"""
doc3 = Document.objects.create(
title="test1",
mime_type="application/pdf",
content="this is document 3",
checksum="3",
filename="test3.pdf",
)
shutil.copy(self.SAMPLE_DIR / "simple.pdf", doc3.source_path)
doc4 = Document.objects.create(
title="test1",
mime_type="application/pdf",
content="this is document 4",
checksum="4",
filename="test4.pdf",
)
shutil.copy(self.SAMPLE_DIR / "simple.pdf", doc4.source_path)
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk, doc3.pk, doc4.pk],
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(mail.outbox), 1)
attachment_names = [att[0] for att in mail.outbox[0].attachments]
self.assertEqual(len(attachment_names), 3)
self.assertIn(f"{self.doc1!s}.pdf", attachment_names)
self.assertIn(f"{doc3!s}_01.pdf", attachment_names)
self.assertIn(f"{doc3!s}_02.pdf", attachment_names)
@mock.patch(
"django.core.mail.message.EmailMessage.send",
side_effect=Exception("Email error"),
)
def test_email_send_error(self, mocked_send):
"""
GIVEN:
- Existing documents
WHEN:
- API request is made to bulk email and error occurs during email send
THEN:
- Server error response is returned
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc1.pk],
"addresses": "test@example.com",
"subject": "Test",
"message": "Test message",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
self.assertIn("Error emailing documents", response.content.decode())

View File

@@ -184,17 +184,6 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"filter_filename": "*", "filter_filename": "*",
"filter_path": "*/samples/*", "filter_path": "*/samples/*",
"filter_has_tags": [self.t1.id], "filter_has_tags": [self.t1.id],
"filter_has_all_tags": [self.t2.id],
"filter_has_not_tags": [self.t3.id],
"filter_has_not_correspondents": [self.c2.id],
"filter_has_not_document_types": [self.dt2.id],
"filter_has_not_storage_paths": [self.sp2.id],
"filter_custom_field_query": json.dumps(
[
"AND",
[[self.cf1.id, "exact", "value"]],
],
),
"filter_has_document_type": self.dt.id, "filter_has_document_type": self.dt.id,
"filter_has_correspondent": self.c.id, "filter_has_correspondent": self.c.id,
"filter_has_storage_path": self.sp.id, "filter_has_storage_path": self.sp.id,
@@ -234,36 +223,6 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Workflow.objects.count(), 2)
workflow = Workflow.objects.get(name="Workflow 2")
trigger = workflow.triggers.first()
self.assertSetEqual(
set(trigger.filter_has_tags.values_list("id", flat=True)),
{self.t1.id},
)
self.assertSetEqual(
set(trigger.filter_has_all_tags.values_list("id", flat=True)),
{self.t2.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_tags.values_list("id", flat=True)),
{self.t3.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_correspondents.values_list("id", flat=True)),
{self.c2.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_document_types.values_list("id", flat=True)),
{self.dt2.id},
)
self.assertSetEqual(
set(trigger.filter_has_not_storage_paths.values_list("id", flat=True)),
{self.sp2.id},
)
self.assertEqual(
trigger.filter_custom_field_query,
json.dumps(["AND", [[self.cf1.id, "exact", "value"]]]),
)
def test_api_create_invalid_workflow_trigger(self):
"""
@@ -417,14 +376,6 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
{
"type": WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
"filter_has_tags": [self.t1.id],
"filter_has_all_tags": [self.t2.id],
"filter_has_not_tags": [self.t3.id],
"filter_has_not_correspondents": [self.c2.id],
"filter_has_not_document_types": [self.dt2.id],
"filter_has_not_storage_paths": [self.sp2.id],
"filter_custom_field_query": json.dumps(
["AND", [[self.cf1.id, "exact", "value"]]],
),
"filter_has_correspondent": self.c.id, "filter_has_correspondent": self.c.id,
"filter_has_document_type": self.dt.id, "filter_has_document_type": self.dt.id,
}, },
@@ -442,30 +393,6 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
workflow = Workflow.objects.get(id=response.data["id"])
self.assertEqual(workflow.name, "Workflow Updated")
self.assertEqual(workflow.triggers.first().filter_has_tags.first(), self.t1)
self.assertEqual(
workflow.triggers.first().filter_has_all_tags.first(),
self.t2,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_tags.first(),
self.t3,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_correspondents.first(),
self.c2,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_document_types.first(),
self.dt2,
)
self.assertEqual(
workflow.triggers.first().filter_has_not_storage_paths.first(),
self.sp2,
)
self.assertEqual(
workflow.triggers.first().filter_custom_field_query,
json.dumps(["AND", [[self.cf1.id, "exact", "value"]]]),
)
self.assertEqual(workflow.actions.first().assign_title, "Action New Title")
def test_api_update_workflow_no_trigger_actions(self):

View File

@@ -1,5 +1,4 @@
import datetime
import json
import shutil
import socket
from datetime import timedelta
@@ -32,7 +31,6 @@ from documents import tasks
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentSource
from documents.matching import document_matches_workflow
from documents.matching import existing_document_matches_workflow
from documents.matching import prefilter_documents_by_workflowtrigger
from documents.models import Correspondent
from documents.models import CustomField
@@ -48,7 +46,6 @@ from documents.models import WorkflowActionEmail
from documents.models import WorkflowActionWebhook
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.serialisers import WorkflowTriggerSerializer
from documents.signals import document_consumption_finished
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import DummyProgressManager
@@ -1083,409 +1080,9 @@ class TestWorkflows(
)
expected_str = f"Document did not match {w}"
self.assertIn(expected_str, cm.output[0])
expected_str = f"Document tags {list(doc.tags.all())} do not include {list(trigger.filter_has_tags.all())}"
expected_str = f"Document tags {doc.tags.all()} do not include {trigger.filter_has_tags.all()}"
self.assertIn(expected_str, cm.output[1])
def test_document_added_no_match_all_tags(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_all_tags.set([self.t1, self.t2])
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
doc.tags.set([self.t1])
doc.save()
with self.assertLogs("paperless.matching", level="DEBUG") as cm:
document_consumption_finished.send(
sender=self.__class__,
document=doc,
)
expected_str = f"Document did not match {w}"
self.assertIn(expected_str, cm.output[0])
expected_str = (
f"Document tags {list(doc.tags.all())} do not contain all of"
f" {list(trigger.filter_has_all_tags.all())}"
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_excluded_tags(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_not_tags.set([self.t3])
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
doc.tags.set([self.t3])
doc.save()
with self.assertLogs("paperless.matching", level="DEBUG") as cm:
document_consumption_finished.send(
sender=self.__class__,
document=doc,
)
expected_str = f"Document did not match {w}"
self.assertIn(expected_str, cm.output[0])
expected_str = (
f"Document tags {list(doc.tags.all())} include excluded tags"
f" {list(trigger.filter_has_not_tags.all())}"
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_excluded_correspondent(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_not_correspondents.set([self.c])
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
with self.assertLogs("paperless.matching", level="DEBUG") as cm:
document_consumption_finished.send(
sender=self.__class__,
document=doc,
)
expected_str = f"Document did not match {w}"
self.assertIn(expected_str, cm.output[0])
expected_str = (
f"Document correspondent {doc.correspondent} is excluded by"
f" {list(trigger.filter_has_not_correspondents.all())}"
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_excluded_document_types(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_not_document_types.set([self.dt])
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
document_type=self.dt,
original_filename="sample.pdf",
)
with self.assertLogs("paperless.matching", level="DEBUG") as cm:
document_consumption_finished.send(
sender=self.__class__,
document=doc,
)
expected_str = f"Document did not match {w}"
self.assertIn(expected_str, cm.output[0])
expected_str = (
f"Document doc type {doc.document_type} is excluded by"
f" {list(trigger.filter_has_not_document_types.all())}"
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_excluded_storage_paths(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
)
trigger.filter_has_not_storage_paths.set([self.sp])
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
storage_path=self.sp,
original_filename="sample.pdf",
)
with self.assertLogs("paperless.matching", level="DEBUG") as cm:
document_consumption_finished.send(
sender=self.__class__,
document=doc,
)
expected_str = f"Document did not match {w}"
self.assertIn(expected_str, cm.output[0])
expected_str = (
f"Document storage path {doc.storage_path} is excluded by"
f" {list(trigger.filter_has_not_storage_paths.all())}"
)
self.assertIn(expected_str, cm.output[1])
def test_document_added_custom_field_query_no_match(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query=json.dumps(
[
"AND",
[[self.cf1.id, "exact", "expected"]],
],
),
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
workflow = Workflow.objects.create(name="Workflow 1", order=0)
workflow.triggers.add(trigger)
workflow.actions.add(action)
workflow.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
CustomFieldInstance.objects.create(
document=doc,
field=self.cf1,
value_text="other",
)
with self.assertLogs("paperless.matching", level="DEBUG") as cm:
document_consumption_finished.send(
sender=self.__class__,
document=doc,
)
expected_str = f"Document did not match {workflow}"
self.assertIn(expected_str, cm.output[0])
self.assertIn(
"Document custom fields do not match the configured custom field query",
cm.output[1],
)
def test_document_added_custom_field_query_match(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query=json.dumps(
[
"AND",
[[self.cf1.id, "exact", "expected"]],
],
),
)
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
CustomFieldInstance.objects.create(
document=doc,
field=self.cf1,
value_text="expected",
)
matched, reason = existing_document_matches_workflow(doc, trigger)
self.assertTrue(matched)
self.assertIsNone(reason)
def test_prefilter_documents_custom_field_query(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query=json.dumps(
[
"AND",
[[self.cf1.id, "exact", "match"]],
],
),
)
doc1 = Document.objects.create(
title="doc 1",
correspondent=self.c,
original_filename="doc1.pdf",
checksum="checksum1",
)
CustomFieldInstance.objects.create(
document=doc1,
field=self.cf1,
value_text="match",
)
doc2 = Document.objects.create(
title="doc 2",
correspondent=self.c,
original_filename="doc2.pdf",
checksum="checksum2",
)
CustomFieldInstance.objects.create(
document=doc2,
field=self.cf1,
value_text="different",
)
filtered = prefilter_documents_by_workflowtrigger(
Document.objects.all(),
trigger,
)
self.assertIn(doc1, filtered)
self.assertNotIn(doc2, filtered)
def test_consumption_trigger_requires_filter_configuration(self):
serializer = WorkflowTriggerSerializer(
data={
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
},
)
self.assertFalse(serializer.is_valid())
errors = serializer.errors.get("non_field_errors", [])
self.assertIn(
"File name, path or mail rule filter are required",
[str(error) for error in errors],
)
def test_workflow_trigger_serializer_clears_empty_custom_field_query(self):
serializer = WorkflowTriggerSerializer(
data={
"type": WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
"filter_custom_field_query": "",
},
)
self.assertTrue(serializer.is_valid(), serializer.errors)
self.assertIsNone(serializer.validated_data.get("filter_custom_field_query"))
def test_existing_document_invalid_custom_field_query_configuration(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query="{ not json",
)
document = Document.objects.create(
title="doc invalid query",
original_filename="invalid.pdf",
checksum="checksum-invalid-query",
)
matched, reason = existing_document_matches_workflow(document, trigger)
self.assertFalse(matched)
self.assertEqual(reason, "Invalid custom field query configuration")
def test_prefilter_documents_returns_none_for_invalid_custom_field_query(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_custom_field_query="{ not json",
)
Document.objects.create(
title="doc",
original_filename="doc.pdf",
checksum="checksum-prefilter-invalid",
)
filtered = prefilter_documents_by_workflowtrigger(
Document.objects.all(),
trigger,
)
self.assertEqual(list(filtered), [])
def test_prefilter_documents_applies_all_filters(self):
other_document_type = DocumentType.objects.create(name="Other Type")
other_storage_path = StoragePath.objects.create(
name="Blocked path",
path="/blocked/",
)
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
filter_has_correspondent=self.c,
filter_has_document_type=self.dt,
filter_has_storage_path=self.sp,
)
trigger.filter_has_tags.set([self.t1])
trigger.filter_has_all_tags.set([self.t1, self.t2])
trigger.filter_has_not_tags.set([self.t3])
trigger.filter_has_not_correspondents.set([self.c2])
trigger.filter_has_not_document_types.set([other_document_type])
trigger.filter_has_not_storage_paths.set([other_storage_path])
allowed_document = Document.objects.create(
title="allowed",
correspondent=self.c,
document_type=self.dt,
storage_path=self.sp,
original_filename="allow.pdf",
checksum="checksum-prefilter-allowed",
)
allowed_document.tags.set([self.t1, self.t2])
blocked_document = Document.objects.create(
title="blocked",
correspondent=self.c2,
document_type=other_document_type,
storage_path=other_storage_path,
original_filename="block.pdf",
checksum="checksum-prefilter-blocked",
)
blocked_document.tags.set([self.t1, self.t3])
filtered = prefilter_documents_by_workflowtrigger(
Document.objects.all(),
trigger,
)
self.assertIn(allowed_document, filtered)
self.assertNotIn(blocked_document, filtered)
def test_document_added_no_match_doctype(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,

View File

@@ -57,7 +57,6 @@ from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from drf_spectacular.utils import extend_schema
from drf_spectacular.utils import extend_schema_serializer
from drf_spectacular.utils import extend_schema_view
from drf_spectacular.utils import inline_serializer
from guardian.utils import get_group_obj_perms_model
@@ -154,7 +153,6 @@ from documents.serialisers import CustomFieldSerializer
from documents.serialisers import DocumentListSerializer
from documents.serialisers import DocumentSerializer
from documents.serialisers import DocumentTypeSerializer
from documents.serialisers import EmailSerializer
from documents.serialisers import NotesSerializer
from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RunTaskViewSerializer
@@ -473,14 +471,6 @@ class DocumentTypeViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
ordering_fields = ("name", "matching_algorithm", "match", "document_count") ordering_fields = ("name", "matching_algorithm", "match", "document_count")
@extend_schema_serializer(
component_name="EmailDocumentRequest",
exclude_fields=("documents",),
)
class EmailDocumentDetailSchema(EmailSerializer):
pass
@extend_schema_view(
retrieve=extend_schema(
description="Retrieve a single document",
@@ -648,28 +638,20 @@ class EmailDocumentDetailSchema(EmailSerializer):
404: None,
},
),
email_document=extend_schema(
email=extend_schema(
description="Email the document to one or more recipients as an attachment.",
request=EmailDocumentDetailSchema,
request=inline_serializer(
name="EmailRequest",
fields={
"addresses": serializers.CharField(),
"subject": serializers.CharField(),
"message": serializers.CharField(),
"use_archive_version": serializers.BooleanField(default=True),
},
),
responses={
200: inline_serializer(
name="EmailDocumentResponse",
name="EmailResponse",
fields={"message": serializers.CharField()},
),
400: None,
403: None,
404: None,
500: None,
},
deprecated=True,
),
email_documents=extend_schema(
operation_id="email_documents",
description="Email one or more documents as attachments to one or more recipients.",
request=EmailSerializer,
responses={
200: inline_serializer(
name="EmailDocumentsResponse",
fields={"message": serializers.CharField()},
),
400: None,
@@ -1173,57 +1155,55 @@ class DocumentViewSet(
return Response(sorted(entries, key=lambda x: x["timestamp"], reverse=True))
@action(methods=["post"], detail=True, url_path="email")
# TODO: deprecated as of 2.19, remove in future release
def email_document(self, request, pk=None):
request_data = request.data.copy()
request_data.setlist("documents", [pk])
return self.email_documents(request, data=request_data)
@action(
methods=["post"],
detail=False,
url_path="email",
serializer_class=EmailSerializer,
)
def email_documents(self, request, data=None):
serializer = EmailSerializer(data=data or request.data)
serializer.is_valid(raise_exception=True)
validated_data = serializer.validated_data
document_ids = validated_data.get("documents")
addresses = validated_data.get("addresses").split(",")
addresses = [addr.strip() for addr in addresses]
subject = validated_data.get("subject")
message = validated_data.get("message")
use_archive_version = validated_data.get("use_archive_version", True)
documents = Document.objects.select_related("owner").filter(pk__in=document_ids)
for document in documents:
@action(methods=["post"], detail=True)
def email(self, request, pk=None):
try:
doc = Document.objects.select_related("owner").get(pk=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
document,
doc,
):
return HttpResponseForbidden("Insufficient permissions")
except Document.DoesNotExist:
raise Http404
try:
send_email(
subject=subject,
body=message,
to=addresses,
attachments=documents,
use_archive=use_archive_version,
)
if (
"addresses" not in request.data
or "subject" not in request.data
or "message" not in request.data
):
return HttpResponseBadRequest("Missing required fields")
use_archive_version = request.data.get("use_archive_version", True)
addresses = request.data.get("addresses").split(",")
if not all(
re.match(r"[^@]+@[^@]+\.[^@]+", address.strip())
for address in addresses
):
return HttpResponseBadRequest("Invalid email address found")
send_email(
subject=request.data.get("subject"),
body=request.data.get("message"),
to=addresses,
attachment=(
doc.archive_path
if use_archive_version and doc.has_archive_version
else doc.source_path
),
attachment_mime_type=doc.mime_type,
)
logger.debug(
f"Sent documents {[doc.id for doc in documents]} via email to {addresses}",
f"Sent document {doc.id} via email to {addresses}",
)
return Response({"message": "Email sent"})
except Exception as e:
logger.warning(f"An error occurred emailing documents: {e!s}")
logger.warning(f"An error occurred emailing document: {e!s}")
return HttpResponseServerError(
"Error emailing documents, check logs for more detail.",
"Error emailing document, check logs for more detail.",
)
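
On the right-hand side of this hunk, emailing is a per-document action again, with the fields validated inline. The equivalent request against a single document (same assumed base URL and auth as the earlier sketch):

import requests

doc_pk = 123  # placeholder document id
resp = requests.post(
    f"http://localhost:8000/api/documents/{doc_pk}/email/",  # assumed base URL
    headers={"Authorization": "Token REPLACE_ME"},  # assumed auth scheme
    json={
        "addresses": "test@example.com",
        "subject": "Test",
        "message": "hello",
        "use_archive_version": True,
    },
    timeout=30,
)
print(resp.status_code)  # 200 on success; 404 for a missing document per the updated test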

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: paperless-ngx\n" "Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-10-13 22:25+0000\n" "POT-Creation-Date: 2025-09-30 16:50+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n" "PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n" "Last-Translator: \n"
"Language-Team: English\n" "Language-Team: English\n"
@@ -89,7 +89,7 @@ msgstr ""
msgid "Automatic" msgid "Automatic"
msgstr "" msgstr ""
#: documents/models.py:64 documents/models.py:456 documents/models.py:1526 #: documents/models.py:64 documents/models.py:456 documents/models.py:1484
#: paperless_mail/models.py:23 paperless_mail/models.py:143 #: paperless_mail/models.py:23 paperless_mail/models.py:143
msgid "name" msgid "name"
msgstr "" msgstr ""
@@ -264,7 +264,7 @@ msgid "The position of this document in your physical document archive."
msgstr "" msgstr ""
#: documents/models.py:318 documents/models.py:699 documents/models.py:753 #: documents/models.py:318 documents/models.py:699 documents/models.py:753
#: documents/models.py:1569 #: documents/models.py:1527
msgid "document" msgid "document"
msgstr "" msgstr ""
@@ -864,399 +864,371 @@ msgstr ""
msgid "has these tag(s)" msgid "has these tag(s)"
msgstr "" msgstr ""
#: documents/models.py:1072 #: documents/models.py:1073
msgid "has all of these tag(s)"
msgstr ""
#: documents/models.py:1079
msgid "does not have these tag(s)"
msgstr ""
#: documents/models.py:1087
msgid "has this document type" msgid "has this document type"
msgstr "" msgstr ""
#: documents/models.py:1094 #: documents/models.py:1081
msgid "does not have these document type(s)"
msgstr ""
#: documents/models.py:1102
msgid "has this correspondent" msgid "has this correspondent"
msgstr "" msgstr ""
#: documents/models.py:1109 #: documents/models.py:1089
msgid "does not have these correspondent(s)"
msgstr ""
#: documents/models.py:1117
msgid "has this storage path" msgid "has this storage path"
msgstr "" msgstr ""
#: documents/models.py:1124 #: documents/models.py:1093
msgid "does not have these storage path(s)"
msgstr ""
#: documents/models.py:1128
msgid "filter custom field query"
msgstr ""
#: documents/models.py:1131
msgid "JSON-encoded custom field query expression."
msgstr ""
#: documents/models.py:1135
msgid "schedule offset days" msgid "schedule offset days"
msgstr "" msgstr ""
#: documents/models.py:1138 #: documents/models.py:1096
msgid "The number of days to offset the schedule trigger by." msgid "The number of days to offset the schedule trigger by."
msgstr "" msgstr ""
#: documents/models.py:1143 #: documents/models.py:1101
msgid "schedule is recurring" msgid "schedule is recurring"
msgstr "" msgstr ""
#: documents/models.py:1146 #: documents/models.py:1104
msgid "If the schedule should be recurring." msgid "If the schedule should be recurring."
msgstr "" msgstr ""
#: documents/models.py:1151 #: documents/models.py:1109
msgid "schedule recurring delay in days" msgid "schedule recurring delay in days"
msgstr "" msgstr ""
#: documents/models.py:1155 #: documents/models.py:1113
msgid "The number of days between recurring schedule triggers." msgid "The number of days between recurring schedule triggers."
msgstr "" msgstr ""
#: documents/models.py:1160 #: documents/models.py:1118
msgid "schedule date field" msgid "schedule date field"
msgstr "" msgstr ""
#: documents/models.py:1165 #: documents/models.py:1123
msgid "The field to check for a schedule trigger." msgid "The field to check for a schedule trigger."
msgstr "" msgstr ""
#: documents/models.py:1174 #: documents/models.py:1132
msgid "schedule date custom field" msgid "schedule date custom field"
msgstr "" msgstr ""
#: documents/models.py:1178 #: documents/models.py:1136
msgid "workflow trigger" msgid "workflow trigger"
msgstr "" msgstr ""
#: documents/models.py:1179 #: documents/models.py:1137
msgid "workflow triggers" msgid "workflow triggers"
msgstr "" msgstr ""
#: documents/models.py:1187 #: documents/models.py:1145
msgid "email subject" msgid "email subject"
msgstr "" msgstr ""
#: documents/models.py:1191 #: documents/models.py:1149
msgid "" msgid ""
"The subject of the email, can include some placeholders, see documentation." "The subject of the email, can include some placeholders, see documentation."
msgstr "" msgstr ""
#: documents/models.py:1197 #: documents/models.py:1155
msgid "email body" msgid "email body"
msgstr "" msgstr ""
#: documents/models.py:1200 #: documents/models.py:1158
msgid "" msgid ""
"The body (message) of the email, can include some placeholders, see " "The body (message) of the email, can include some placeholders, see "
"documentation." "documentation."
msgstr "" msgstr ""
#: documents/models.py:1206 #: documents/models.py:1164
msgid "emails to" msgid "emails to"
msgstr "" msgstr ""
#: documents/models.py:1209 #: documents/models.py:1167
msgid "The destination email addresses, comma separated." msgid "The destination email addresses, comma separated."
msgstr "" msgstr ""
#: documents/models.py:1215 #: documents/models.py:1173
msgid "include document in email" msgid "include document in email"
msgstr "" msgstr ""
#: documents/models.py:1226 #: documents/models.py:1184
msgid "webhook url" msgid "webhook url"
msgstr "" msgstr ""
#: documents/models.py:1229 #: documents/models.py:1187
msgid "The destination URL for the notification." msgid "The destination URL for the notification."
msgstr "" msgstr ""
#: documents/models.py:1234 #: documents/models.py:1192
msgid "use parameters" msgid "use parameters"
msgstr "" msgstr ""
#: documents/models.py:1239 #: documents/models.py:1197
msgid "send as JSON" msgid "send as JSON"
msgstr "" msgstr ""
#: documents/models.py:1243 #: documents/models.py:1201
msgid "webhook parameters" msgid "webhook parameters"
msgstr "" msgstr ""
#: documents/models.py:1246 #: documents/models.py:1204
msgid "The parameters to send with the webhook URL if body not used." msgid "The parameters to send with the webhook URL if body not used."
msgstr "" msgstr ""
#: documents/models.py:1250 #: documents/models.py:1208
msgid "webhook body" msgid "webhook body"
msgstr "" msgstr ""
#: documents/models.py:1253 #: documents/models.py:1211
msgid "The body to send with the webhook URL if parameters not used." msgid "The body to send with the webhook URL if parameters not used."
msgstr "" msgstr ""
#: documents/models.py:1257 #: documents/models.py:1215
msgid "webhook headers" msgid "webhook headers"
msgstr "" msgstr ""
#: documents/models.py:1260 #: documents/models.py:1218
msgid "The headers to send with the webhook URL." msgid "The headers to send with the webhook URL."
msgstr "" msgstr ""
#: documents/models.py:1265 #: documents/models.py:1223
msgid "include document in webhook" msgid "include document in webhook"
msgstr "" msgstr ""
#: documents/models.py:1276 #: documents/models.py:1234
msgid "Assignment" msgid "Assignment"
msgstr "" msgstr ""
#: documents/models.py:1280 #: documents/models.py:1238
msgid "Removal" msgid "Removal"
msgstr "" msgstr ""
#: documents/models.py:1284 documents/templates/account/password_reset.html:15 #: documents/models.py:1242 documents/templates/account/password_reset.html:15
msgid "Email" msgid "Email"
msgstr "" msgstr ""
#: documents/models.py:1288 #: documents/models.py:1246
msgid "Webhook" msgid "Webhook"
msgstr "" msgstr ""
#: documents/models.py:1292 #: documents/models.py:1250
msgid "Workflow Action Type" msgid "Workflow Action Type"
msgstr "" msgstr ""
#: documents/models.py:1298 #: documents/models.py:1256
msgid "assign title" msgid "assign title"
msgstr "" msgstr ""
#: documents/models.py:1302 #: documents/models.py:1260
msgid "Assign a document title, must be a Jinja2 template, see documentation." msgid "Assign a document title, must be a Jinja2 template, see documentation."
msgstr "" msgstr ""
#: documents/models.py:1310 paperless_mail/models.py:274 #: documents/models.py:1268 paperless_mail/models.py:274
msgid "assign this tag" msgid "assign this tag"
msgstr "" msgstr ""
#: documents/models.py:1319 paperless_mail/models.py:282 #: documents/models.py:1277 paperless_mail/models.py:282
msgid "assign this document type" msgid "assign this document type"
msgstr "" msgstr ""
#: documents/models.py:1328 paperless_mail/models.py:296 #: documents/models.py:1286 paperless_mail/models.py:296
msgid "assign this correspondent" msgid "assign this correspondent"
msgstr "" msgstr ""
#: documents/models.py:1337 #: documents/models.py:1295
msgid "assign this storage path" msgid "assign this storage path"
msgstr "" msgstr ""
#: documents/models.py:1346 #: documents/models.py:1304
msgid "assign this owner" msgid "assign this owner"
msgstr "" msgstr ""
#: documents/models.py:1353 #: documents/models.py:1311
msgid "grant view permissions to these users" msgid "grant view permissions to these users"
msgstr "" msgstr ""
#: documents/models.py:1360 #: documents/models.py:1318
msgid "grant view permissions to these groups" msgid "grant view permissions to these groups"
msgstr "" msgstr ""
#: documents/models.py:1367 #: documents/models.py:1325
msgid "grant change permissions to these users" msgid "grant change permissions to these users"
msgstr "" msgstr ""
#: documents/models.py:1374 #: documents/models.py:1332
msgid "grant change permissions to these groups" msgid "grant change permissions to these groups"
msgstr "" msgstr ""
#: documents/models.py:1381 #: documents/models.py:1339
msgid "assign these custom fields" msgid "assign these custom fields"
msgstr "" msgstr ""
#: documents/models.py:1385 #: documents/models.py:1343
msgid "custom field values" msgid "custom field values"
msgstr "" msgstr ""
#: documents/models.py:1389 #: documents/models.py:1347
msgid "Optional values to assign to the custom fields." msgid "Optional values to assign to the custom fields."
msgstr "" msgstr ""
#: documents/models.py:1398 #: documents/models.py:1356
msgid "remove these tag(s)" msgid "remove these tag(s)"
msgstr "" msgstr ""
#: documents/models.py:1403 #: documents/models.py:1361
msgid "remove all tags" msgid "remove all tags"
msgstr "" msgstr ""
#: documents/models.py:1410 #: documents/models.py:1368
msgid "remove these document type(s)" msgid "remove these document type(s)"
msgstr "" msgstr ""
#: documents/models.py:1415 #: documents/models.py:1373
msgid "remove all document types" msgid "remove all document types"
msgstr "" msgstr ""
#: documents/models.py:1422 #: documents/models.py:1380
msgid "remove these correspondent(s)" msgid "remove these correspondent(s)"
msgstr "" msgstr ""
#: documents/models.py:1427 #: documents/models.py:1385
msgid "remove all correspondents" msgid "remove all correspondents"
msgstr "" msgstr ""
#: documents/models.py:1434 #: documents/models.py:1392
msgid "remove these storage path(s)" msgid "remove these storage path(s)"
msgstr "" msgstr ""
#: documents/models.py:1439 #: documents/models.py:1397
msgid "remove all storage paths" msgid "remove all storage paths"
msgstr "" msgstr ""
#: documents/models.py:1446 #: documents/models.py:1404
msgid "remove these owner(s)" msgid "remove these owner(s)"
msgstr "" msgstr ""
#: documents/models.py:1451 #: documents/models.py:1409
msgid "remove all owners" msgid "remove all owners"
msgstr "" msgstr ""
#: documents/models.py:1458 #: documents/models.py:1416
msgid "remove view permissions for these users" msgid "remove view permissions for these users"
msgstr "" msgstr ""
#: documents/models.py:1465 #: documents/models.py:1423
msgid "remove view permissions for these groups" msgid "remove view permissions for these groups"
msgstr "" msgstr ""
#: documents/models.py:1472 #: documents/models.py:1430
msgid "remove change permissions for these users" msgid "remove change permissions for these users"
msgstr "" msgstr ""
#: documents/models.py:1479 #: documents/models.py:1437
msgid "remove change permissions for these groups" msgid "remove change permissions for these groups"
msgstr "" msgstr ""
#: documents/models.py:1484 #: documents/models.py:1442
msgid "remove all permissions" msgid "remove all permissions"
msgstr "" msgstr ""
#: documents/models.py:1491 #: documents/models.py:1449
msgid "remove these custom fields" msgid "remove these custom fields"
msgstr "" msgstr ""
#: documents/models.py:1496 #: documents/models.py:1454
msgid "remove all custom fields" msgid "remove all custom fields"
msgstr "" msgstr ""
#: documents/models.py:1505 #: documents/models.py:1463
msgid "email" msgid "email"
msgstr "" msgstr ""
#: documents/models.py:1514 #: documents/models.py:1472
msgid "webhook" msgid "webhook"
msgstr "" msgstr ""
#: documents/models.py:1518 #: documents/models.py:1476
msgid "workflow action" msgid "workflow action"
msgstr "" msgstr ""
#: documents/models.py:1519 #: documents/models.py:1477
msgid "workflow actions" msgid "workflow actions"
msgstr "" msgstr ""
#: documents/models.py:1528 paperless_mail/models.py:145 #: documents/models.py:1486 paperless_mail/models.py:145
msgid "order" msgid "order"
msgstr "" msgstr ""
#: documents/models.py:1534 #: documents/models.py:1492
msgid "triggers" msgid "triggers"
msgstr "" msgstr ""
#: documents/models.py:1541 #: documents/models.py:1499
msgid "actions" msgid "actions"
msgstr "" msgstr ""
#: documents/models.py:1544 paperless_mail/models.py:154 #: documents/models.py:1502 paperless_mail/models.py:154
msgid "enabled" msgid "enabled"
msgstr "" msgstr ""
#: documents/models.py:1555 #: documents/models.py:1513
msgid "workflow" msgid "workflow"
msgstr "" msgstr ""
#: documents/models.py:1559 #: documents/models.py:1517
msgid "workflow trigger type" msgid "workflow trigger type"
msgstr "" msgstr ""
#: documents/models.py:1573 #: documents/models.py:1531
msgid "date run" msgid "date run"
msgstr "" msgstr ""
#: documents/models.py:1579 #: documents/models.py:1537
msgid "workflow run" msgid "workflow run"
msgstr "" msgstr ""
#: documents/models.py:1580 #: documents/models.py:1538
msgid "workflow runs" msgid "workflow runs"
msgstr "" msgstr ""
#: documents/serialisers.py:143 #: documents/serialisers.py:141
#, python-format #, python-format
msgid "Invalid regular expression: %(error)s" msgid "Invalid regular expression: %(error)s"
msgstr "" msgstr ""
#: documents/serialisers.py:609 #: documents/serialisers.py:607
msgid "Invalid color." msgid "Invalid color."
msgstr "" msgstr ""
#: documents/serialisers.py:638 #: documents/serialisers.py:636
msgid "Invalid parent tag." msgid "Invalid parent tag."
msgstr "" msgstr ""
#: documents/serialisers.py:1795 #: documents/serialisers.py:1793
#, python-format #, python-format
msgid "File type %(type)s not supported" msgid "File type %(type)s not supported"
msgstr "" msgstr ""
#: documents/serialisers.py:1839 #: documents/serialisers.py:1837
#, python-format #, python-format
msgid "Custom field id must be an integer: %(id)s" msgid "Custom field id must be an integer: %(id)s"
msgstr "" msgstr ""
#: documents/serialisers.py:1846 #: documents/serialisers.py:1844
#, python-format #, python-format
msgid "Custom field with id %(id)s does not exist" msgid "Custom field with id %(id)s does not exist"
msgstr "" msgstr ""
#: documents/serialisers.py:1863 documents/serialisers.py:1873 #: documents/serialisers.py:1861 documents/serialisers.py:1871
msgid "" msgid ""
"Custom fields must be a list of integers or an object mapping ids to values." "Custom fields must be a list of integers or an object mapping ids to values."
msgstr "" msgstr ""
#: documents/serialisers.py:1868 #: documents/serialisers.py:1866
msgid "Some custom fields don't exist or were specified twice." msgid "Some custom fields don't exist or were specified twice."
msgstr "" msgstr ""
#: documents/serialisers.py:1983 #: documents/serialisers.py:1936
msgid "Invalid variable detected." msgid "Invalid variable detected."
msgstr "" msgstr ""

uv.lock generated
View File

@@ -1036,11 +1036,11 @@ wheels = [
 [[package]]
 name = "filelock"
-version = "3.20.0"
+version = "3.19.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" },
+    { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
 ]
 [[package]]
@@ -1073,15 +1073,15 @@ wheels = [
 [[package]]
 name = "gotenberg-client"
-version = "0.12.0"
+version = "0.11.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "httpx", extra = ["http2"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/61/6d/07ea213c146bbe91dffebff2d8f4dc61e7076d3dd34d4fd1467f9163e752/gotenberg_client-0.12.0.tar.gz", hash = "sha256:1ab50878024469fc003c414ee9810ceeb00d4d7d7c36bd2fb75318fbff139e9b", size = 1210884, upload-time = "2025-10-15T15:32:37.669Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c4/e8/65928856a46023eda0af83d65987a99aa5190557f64c3c30478b91229070/gotenberg_client-0.11.0.tar.gz", hash = "sha256:44479d996fb4103fc324d84395cc4a762863a033833ac1fc63490e96109f50d7", size = 1210349, upload-time = "2025-08-28T15:34:50.814Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/12/39/fcb24ff053b1be7e5124f56c3d358706a23a328f685c6db33bc9dbc5472d/gotenberg_client-0.12.0-py3-none-any.whl", hash = "sha256:a540b35ac518e902c2860a88fbe448c15fe5a56fe8ec8604e6a2c8c2228fd0cb", size = 51051, upload-time = "2025-10-15T15:32:36.32Z" },
+    { url = "https://files.pythonhosted.org/packages/93/cd/39cb6a32fa17632daddf0a1d26f0b7930cec0b5bb7ed585f792af98ab5b5/gotenberg_client-0.11.0-py3-none-any.whl", hash = "sha256:641891f26912c3201d4faae8d40fd47584229d63059b3c8b1c0efd476b2eff88", size = 50849, upload-time = "2025-08-28T15:34:49.184Z" },
 ]
 [[package]]
@@ -1795,11 +1795,12 @@ wheels = [
 [[package]]
 name = "mkdocs-material"
-version = "9.6.22"
+version = "9.6.20"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "backrefs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+    { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "colorama", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "markdown", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -1810,9 +1811,9 @@ dependencies = [
     { name = "pymdown-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/5f/5d/317e37b6c43325cb376a1d6439df9cc743b8ee41c84603c2faf7286afc82/mkdocs_material-9.6.22.tar.gz", hash = "sha256:87c158b0642e1ada6da0cbd798a3389b0bc5516b90e5ece4a0fb939f00bacd1c", size = 4044968, upload-time = "2025-10-15T09:21:15.409Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/ee/6ed7fc739bd7591485c8bec67d5984508d3f2733e708f32714c21593341a/mkdocs_material-9.6.20.tar.gz", hash = "sha256:e1f84d21ec5fb730673c4259b2e0d39f8d32a3fef613e3a8e7094b012d43e790", size = 4037822, upload-time = "2025-09-15T08:48:01.816Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cc/82/6fdb9a7a04fb222f4849ffec1006f891a0280825a20314d11f3ccdee14eb/mkdocs_material-9.6.22-py3-none-any.whl", hash = "sha256:14ac5f72d38898b2f98ac75a5531aaca9366eaa427b0f49fc2ecf04d99b7ad84", size = 9206252, upload-time = "2025-10-15T09:21:12.175Z" },
+    { url = "https://files.pythonhosted.org/packages/67/d8/a31dd52e657bf12b20574706d07df8d767e1ab4340f9bfb9ce73950e5e59/mkdocs_material-9.6.20-py3-none-any.whl", hash = "sha256:b8d8c8b0444c7c06dd984b55ba456ce731f0035c5a1533cc86793618eb1e6c82", size = 9193367, upload-time = "2025-09-15T08:47:58.722Z" },
 ]
 [[package]]
@@ -1921,7 +1922,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/61/68/810093cb579daae42
 [[package]]
 name = "nltk"
-version = "3.9.2"
+version = "3.9.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -1929,9 +1930,9 @@ dependencies = [
     { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442, upload-time = "2024-08-18T19:48:21.909Z" },
 ]
 [[package]]
@@ -2279,9 +2280,9 @@ requires-dist = [
     { name = "drf-spectacular", specifier = "~=0.28" },
     { name = "drf-spectacular-sidecar", specifier = "~=2025.9.1" },
     { name = "drf-writable-nested", specifier = "~=0.7.1" },
-    { name = "filelock", specifier = "~=3.20.0" },
+    { name = "filelock", specifier = "~=3.19.1" },
     { name = "flower", specifier = "~=2.0.1" },
-    { name = "gotenberg-client", specifier = "~=0.12.0" },
+    { name = "gotenberg-client", specifier = "~=0.11.0" },
     { name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.5.1" },
     { name = "httpx-oauth", specifier = "~=0.16" },
     { name = "imap-tools", specifier = "~=1.11.0" },
@@ -2327,7 +2328,7 @@ dev = [
     { name = "mkdocs-glightbox", specifier = "~=0.5.1" },
     { name = "mkdocs-material", specifier = "~=9.6.4" },
     { name = "pre-commit", specifier = "~=4.3.0" },
-    { name = "pre-commit-uv", specifier = "~=4.2.0" },
+    { name = "pre-commit-uv", specifier = "~=4.1.3" },
     { name = "pytest", specifier = "~=8.4.1" },
     { name = "pytest-cov", specifier = "~=7.0.0" },
     { name = "pytest-django", specifier = "~=4.11.1" },
@@ -2337,7 +2338,7 @@ dev = [
     { name = "pytest-rerunfailures" },
     { name = "pytest-sugar" },
     { name = "pytest-xdist" },
-    { name = "ruff", specifier = "~=0.14.0" },
+    { name = "ruff", specifier = "~=0.13.0" },
 ]
 docs = [
     { name = "mkdocs-glightbox", specifier = "~=0.5.1" },
@@ -2345,8 +2346,8 @@ docs = [
 ]
 lint = [
     { name = "pre-commit", specifier = "~=4.3.0" },
-    { name = "pre-commit-uv", specifier = "~=4.2.0" },
-    { name = "ruff", specifier = "~=0.14.0" },
+    { name = "pre-commit-uv", specifier = "~=4.1.3" },
+    { name = "ruff", specifier = "~=0.13.0" },
 ]
 testing = [
     { name = "daphne" },
@@ -2641,15 +2642,15 @@ wheels = [
 [[package]]
 name = "pre-commit-uv"
-version = "4.2.0"
+version = "4.1.5"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "pre-commit", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "uv", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/f6/42/84372bc99a841bfdd8b182a50186471a7f5e873d8e8bcec0d0cb6dabcbb0/pre_commit_uv-4.2.0.tar.gz", hash = "sha256:c32bb1d90235507726eee2aeef2be5fdab431a6f1906e3f1addb0a4e99b369d1", size = 6912, upload-time = "2025-10-09T19:30:48.354Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3d/0c/e6ab71e93d8e78ffa36a1f8b6ce12014679e2b83b401404c12bb2840078f/pre_commit_uv-4.1.5.tar.gz", hash = "sha256:3f40714152b4f4aa484703b8dbfeb9baa0aaedb17207e0012b3561da756d577d", size = 6920, upload-time = "2025-08-27T14:44:40.178Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/87/9f/ec8491f6b3022489a4d36ce372214c10a34f90b425aa61ff2e0a8dc5b9d5/pre_commit_uv-4.2.0-py3-none-any.whl", hash = "sha256:cc1b56641e6c62d90a4d8b4f0af6f2610f1c397ce81af024e768c0f33715cb81", size = 5650, upload-time = "2025-10-09T19:30:47.257Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/c6/747bc58da9f0665c607890c73b349b3934381e312272f584808182655898/pre_commit_uv-4.1.5-py3-none-any.whl", hash = "sha256:f4805e45615b898c4ca6ea37bdb60a05bb7830f986c303a06a378d6b50c3aa9e", size = 5653, upload-time = "2025-08-27T14:44:39.187Z" },
 ]
 [[package]]
@@ -2865,15 +2866,15 @@ wheels = [
 [[package]]
 name = "pytest-env"
-version = "1.2.0"
+version = "1.1.5"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/13/12/9c87d0ca45d5992473208bcef2828169fa7d39b8d7fc6e3401f5c08b8bf7/pytest_env-1.2.0.tar.gz", hash = "sha256:475e2ebe8626cee01f491f304a74b12137742397d6c784ea4bc258f069232b80", size = 8973, upload-time = "2025-10-09T19:15:47.42Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1f/31/27f28431a16b83cab7a636dce59cf397517807d247caa38ee67d65e71ef8/pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf", size = 8911, upload-time = "2024-09-17T22:39:18.566Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/27/98/822b924a4a3eb58aacba84444c7439fce32680592f394de26af9c76e2569/pytest_env-1.2.0-py3-none-any.whl", hash = "sha256:d7e5b7198f9b83c795377c09feefa45d56083834e60d04767efd64819fc9da00", size = 6251, upload-time = "2025-10-09T19:15:46.077Z" },
+    { url = "https://files.pythonhosted.org/packages/de/b8/87cfb16045c9d4092cfcf526135d73b88101aac83bc1adcf82dfb5fd3833/pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30", size = 6141, upload-time = "2024-09-17T22:39:16.942Z" },
 ]
 [[package]]
@@ -2903,15 +2904,15 @@ wheels = [
 [[package]]
 name = "pytest-rerunfailures"
-version = "16.1"
+version = "16.0.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/de/04/71e9520551fc8fe2cf5c1a1842e4e600265b0815f2016b7c27ec85688682/pytest_rerunfailures-16.1.tar.gz", hash = "sha256:c38b266db8a808953ebd71ac25c381cb1981a78ff9340a14bcb9f1b9bff1899e", size = 30889, upload-time = "2025-10-10T07:06:01.238Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/53/a543a76f922a5337d10df22441af8bf68f1b421cadf9aedf8a77943b81f6/pytest_rerunfailures-16.0.1.tar.gz", hash = "sha256:ed4b3a6e7badb0a720ddd93f9de1e124ba99a0cb13bc88561b3c168c16062559", size = 27612, upload-time = "2025-09-02T06:48:25.193Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/77/54/60eabb34445e3db3d3d874dc1dfa72751bfec3265bd611cb13c8b290adea/pytest_rerunfailures-16.1-py3-none-any.whl", hash = "sha256:5d11b12c0ca9a1665b5054052fcc1084f8deadd9328962745ef6b04e26382e86", size = 14093, upload-time = "2025-10-10T07:06:00.019Z" },
+    { url = "https://files.pythonhosted.org/packages/38/73/67dc14cda1942914e70fbb117fceaf11e259362c517bdadd76b0dd752524/pytest_rerunfailures-16.0.1-py3-none-any.whl", hash = "sha256:0bccc0e3b0e3388275c25a100f7077081318196569a121217688ed05e58984b9", size = 13610, upload-time = "2025-09-02T06:48:23.615Z" },
 ]
 [[package]]
@@ -3523,25 +3524,25 @@ wheels = [
 [[package]]
 name = "ruff"
-version = "0.14.0"
+version = "0.13.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" },
+    { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" },
-    { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" },
-    { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" },
+    { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" },
-    { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" },
-    { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" },
+    { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" },
-    { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" },
-    { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" },
+    { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" },
-    { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" },
+    { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" },
-    { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" },
+    { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" },
-    { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" },
-    { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" },
+    { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" },
 ]
 [[package]]
@@ -4201,25 +4202,25 @@ wheels = [
 [[package]]
 name = "uv"
-version = "0.9.3"
+version = "0.8.22"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/0d/dc/4a0e01bcb38c756130c8118a8561d4bf0a0bb685b70ad11e8f40a0cbfa10/uv-0.9.3.tar.gz", hash = "sha256:a290a1a8783bf04ca2d4a63d5d72191b255dfa4cc3426a9c9b5af4da49a7b5af", size = 3699151, upload-time = "2025-10-15T15:20:15.498Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/39/231e123458d50dd497cf6d27b592f5d3bc3e2e50f496b56859865a7b22e3/uv-0.8.22.tar.gz", hash = "sha256:e6e1289c411d43e0ca245f46e76457f3807de646d90b656591b6cf46348bed5c", size = 3667007, upload-time = "2025-09-23T20:35:14.736Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/c3/ad/194e550062e4b3b9a74cb06401dc0afd83490af8e2ec0f414737868d0262/uv-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7b1b79dd435ade1de97c6f0b8b90811a6ccf1bd0bdd70f4d034a93696cf0d0a3", size = 20584531, upload-time = "2025-10-15T15:19:14.26Z" },
+    { url = "https://files.pythonhosted.org/packages/7c/e6/bb440171dd8a36d0f9874b4c71778f7bbc83e62ccf42c62bd1583c802793/uv-0.8.22-py3-none-linux_armv6l.whl", hash = "sha256:7350c5f82d9c38944e6466933edcf96a90e0cb85eae5c0e53a5bc716d6f62332", size = 20554993, upload-time = "2025-09-23T20:34:26.549Z" },
-    { url = "https://files.pythonhosted.org/packages/d0/1a/8e68d0020c29f6f329a265773c23b0c01e002794ea884b8bdbd594c7ea97/uv-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:596a982c5a061d58412824a2ebe2960b52db23f1b1658083ba9c0e7ae390308a", size = 19577639, upload-time = "2025-10-15T15:19:18.668Z" },
+    { url = "https://files.pythonhosted.org/packages/28/e9/813f7eb9fb9694c4024362782c8933e37887b5195e189f80dc40f2da5958/uv-0.8.22-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89944e99b04cc8542cb5931306f1c593f00c9d6f2b652fffc4d84d12b915f911", size = 19565276, upload-time = "2025-09-23T20:34:30.436Z" },
-    { url = "https://files.pythonhosted.org/packages/16/25/6df8be6cd549200e80d19374579689fda39b18735afde841345284fb113d/uv-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:741e80c4230e1b9a5d0869aca2fb082b3832b251ef61537bc9278364b8e74df2", size = 18210073, upload-time = "2025-10-15T15:19:22.16Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/ca/bf37d86af6e16e45fa2b1a03300784ff3297aa9252a23dfbeaf6e391e72e/uv-0.8.22-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6706b782ad75662df794e186d16b9ffa4946d57c88f21d0eadfd43425794d1b0", size = 18162303, upload-time = "2025-09-23T20:34:32.761Z" },
-    { url = "https://files.pythonhosted.org/packages/07/19/bb8aa38b4441e03c742e71a31779f91b42d9db255ede66f80cdfdb672618/uv-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:406ab1a8b313b4b3cf67ad747fb8713a0c0cf3d3daf11942b5a4e49f60882339", size = 20022427, upload-time = "2025-10-15T15:19:25.453Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/eb/289b6a59fff1613958499a886283f52403c5ce4f0a8a550b86fbd70e8e4f/uv-0.8.22-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:d6a33bd5309f8fb77d9fc249bb17f77a23426e6153e43b03ca1cd6640f0a423d", size = 19982769, upload-time = "2025-09-23T20:34:34.962Z" },
-    { url = "https://files.pythonhosted.org/packages/40/15/f190004dd855b443cfc1cc36edb1765e6cd0b6b340a50bb8015531dfff2e/uv-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73dbd91581a82e53bb4352243d7bc491cf78ac3ebb951d95bb8b7964e5ee0659", size = 20150307, upload-time = "2025-10-15T15:19:28.99Z" },
+    { url = "https://files.pythonhosted.org/packages/df/ba/2fcc3ce75be62eecf280f3cbe74d186f371a468fad3167b5a34dee2f904e/uv-0.8.22-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a982bdd5d239dd6dd2b4219165e209c75af1e1819730454ee46d65b3ccf77a3", size = 20163849, upload-time = "2025-09-23T20:34:37.744Z" },
-    { url = "https://files.pythonhosted.org/packages/dd/55/553e90bc2b881f168de9cd57f9e0b0464304a12aee289e71b54c42559e1a/uv-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:970ac8428678b92eddb990dc132d75e893234bb1b809e87b90a4acd96bb054e4", size = 21152942, upload-time = "2025-10-15T15:19:32.461Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/4d/4fc9a508c2c497a80c41710c96f1782a29edecffcac742f3843af061ba8f/uv-0.8.22-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58b6fb191a04b922dc3c8fea6660f58545a651843d7d0efa9ae69164fca9e05d", size = 21130147, upload-time = "2025-09-23T20:34:40.414Z" },
-    { url = "https://files.pythonhosted.org/packages/30/fb/768647a31622c2c1da7a9394eaab937e2e7ca0e8c983ca3d1918ec623620/uv-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:32694e64d6e4ea44b647866c4240659f3964b0317e98f539b73915dbcca7d973", size = 22632018, upload-time = "2025-10-15T15:19:36.091Z" },
+    { url = "https://files.pythonhosted.org/packages/71/79/6bcb3c3c3b7c9cb1a162a76dca2b166752e4ba39ec90e802b252f0a54039/uv-0.8.22-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8ea724ae9f15c0cb4964e9e2e1b21df65c56ae02a54dc1d8a6ea44a52d819268", size = 22561974, upload-time = "2025-09-23T20:34:42.843Z" },
-    { url = "https://files.pythonhosted.org/packages/98/92/66d660414aed123686bf9a2a3ea167967b847b97c08cacd13d6b2b6d1267/uv-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36df7eb562b103e3263a03df1b04cee91ee52af88d005d07ee494137c7a5782a", size = 22241856, upload-time = "2025-10-15T15:19:39.662Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/98/89bb29d82ff7e5ab1b5e862d9bdc12b1d3a4d5201cf558432487e29cc448/uv-0.8.22-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7378127cbd6ebce8ba6d9bdb88aa8ea995b579824abb5ec381c63b3a123a43be", size = 22183189, upload-time = "2025-09-23T20:34:45.57Z" },
-    { url = "https://files.pythonhosted.org/packages/0d/99/af8b0cd2c958e8cb9c20e6e2d417de9476338a2b155643492a8ee2baf077/uv-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:117c5921bcfdac04b88211ee830c6c7e412eaf93a34aa3ad4bb3230bc61646aa", size = 21391699, upload-time = "2025-10-15T15:19:42.933Z" },
+    { url = "https://files.pythonhosted.org/packages/95/b0/354c7d7d11fff2ee97bb208f0fec6b09ae885c0d591b6eff2d7b84cc6695/uv-0.8.22-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e761ca7df8a0059b3fae6bc2c1db24583fa00b016e35bd22a5599d7084471a7", size = 21492888, upload-time = "2025-09-23T20:34:48.45Z" },
-    { url = "https://files.pythonhosted.org/packages/82/45/488417c6c0127c00bcdfac3556ae2ea0597df8245fe5f9bcfda35ebdbe85/uv-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ae4bbc7d555ba1738da08c64b55f21ab0ea0ff85636708cebaf460d98a440d", size = 21318117, upload-time = "2025-10-15T15:19:46.341Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/a9/a83cee9b8cf63e57ce64ba27c77777cc66410e144fd178368f55af1fa18d/uv-0.8.22-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8efec4ef5acddc35f0867998c44e0b15fc4dace1e4c26d01443871a2fbb04bf6", size = 21252972, upload-time = "2025-09-23T20:34:50.862Z" },
-    { url = "https://files.pythonhosted.org/packages/1d/62/508c20f8dbdd2342cc4821ab6f41e29a9b36e2a469dfb5cbbd042e15218c/uv-0.9.3-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:2e75ce14c9375e7e99422d5383fb415e8f0eab9ebdcdfba45756749dee0c42b2", size = 20132999, upload-time = "2025-10-15T15:19:49.578Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/0c/71d5d5d3fca7aa788d63297a06ca26d3585270342277b52312bb693b100c/uv-0.8.22-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9eb3b4abfa25e07d7e1bb4c9bb8dbbdd51878356a37c3c4a2ece3d68d4286f28", size = 20115520, upload-time = "2025-09-23T20:34:53.165Z" },
-    { url = "https://files.pythonhosted.org/packages/2d/fc/ea673d1c68915ea53f1ab7e134b330a2351c543f06e9d0009b4f27cc3057/uv-0.9.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:71faefa9805ccf3f2db645ae27c9e719e47aaa8781e43dfa3760d993aadecb8c", size = 21223810, upload-time = "2025-10-15T15:19:52.711Z" },
+    { url = "https://files.pythonhosted.org/packages/da/90/57fae2798be1e71692872b8304e2e2c345eacbe2070bdcbba6d5a7675fa1/uv-0.8.22-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:b1fdffc2e71892ce648b66317e478fe8884d0007e20cfa582fff3dcea588a450", size = 21168787, upload-time = "2025-09-23T20:34:55.638Z" },
-    { url = "https://files.pythonhosted.org/packages/97/1f/af8ced7f6c8f6af887c52369088058ecae92ff21819e385531023f9ec923/uv-0.9.3-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:8844103e0b4074821fb2814abf30af59d66f33b6ca1bb2276dd37d4e5997c292", size = 20156823, upload-time = "2025-10-15T15:19:56.552Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/f6/23c8d8fdd1084603795f6344eee8e763ba06f891e863397fe5b7b532cb58/uv-0.8.22-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:f6ded9bacb31441d788afca397b8b884ebc2e70f903bea0a38806194be4b249c", size = 20170112, upload-time = "2025-09-23T20:34:58.008Z" },
-    { url = "https://files.pythonhosted.org/packages/05/2d/e1d8f74ec9d95daf57f3c53083c98a2145ee895a4f8502c61c9013c9bf5a/uv-0.9.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:214bb2fb4d87a55e2ba2bc038a8b646a24ec66980528d2ed1e6e7d0612d246e1", size = 20564971, upload-time = "2025-10-15T15:20:00.012Z" },
+    { url = "https://files.pythonhosted.org/packages/96/23/801d517964a7200014897522ae067bf7111fc2e138b38d13d9df9544bf06/uv-0.8.22-py3-none-musllinux_1_1_i686.whl", hash = "sha256:aefa0cb27a86d2145ca9290a1e99c16a17ea26a4f14a89fb7336bc19388427cc", size = 20537608, upload-time = "2025-09-23T20:35:00.44Z" },
-    { url = "https://files.pythonhosted.org/packages/bc/04/4aaf90e031f0735795407a208c9528f85b0b27b63409abe4ee3bee0d4527/uv-0.9.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:ccf4cd2e1907fb011764f6f4bc0e514c500e8d300288f04a4680400d5aa205ec", size = 21506573, upload-time = "2025-10-15T15:20:03.304Z" },
+    { url = "https://files.pythonhosted.org/packages/20/8a/1bd4159089f8df0128e4ceb7f4c31c23a451984a5b49c13489c70e721335/uv-0.8.22-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:9757f0b0c7d296f1e354db442ed0ce39721c06d11635ce4ee6638c5e809a9cb4", size = 21471224, upload-time = "2025-09-23T20:35:03.718Z" },
 ]
 [[package]]