Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-14 21:54:22 -06:00)

Compare commits: main...feature-si (63 commits)
| SHA1 |
|---|
| 7f4faa4b71 |
| 6de8ba0ca1 |
| 97bd98d442 |
| f4ea33f39c |
| 486c899396 |
| cf2d75120c |
| f03452829a |
| f2de60643a |
| 66b23a9e68 |
| 76b4975fbe |
| 36910f5696 |
| e7f6a755ba |
| e0d399153c |
| 545ac1f96e |
| e095f3cd14 |
| 7b979f1c20 |
| 6ae450df34 |
| 906b82deff |
| e7f48f220f |
| 694cf1669b |
| 064646ad72 |
| 762ffccd18 |
| ef1ba88c79 |
| f71dfe2730 |
| eeb5639990 |
| 6cf8abc5d3 |
| 9c0de249a6 |
| fb82146c10 |
| e940764fe0 |
| 4347ba1f9c |
| 7b666e7569 |
| 07eb3c4761 |
| d210f3091d |
| 402ed6b9e7 |
| b748362509 |
| 505a2f0dc3 |
| 3261297910 |
| b76d0dd616 |
| ba4d88c801 |
| 58d88440f1 |
| cb5f09c04e |
| 5b1e66be91 |
| f3e3ba49d1 |
| 4c2f5f3473 |
| 39d46bc2df |
| cf59853f34 |
| 9cce212910 |
| ba42f0eb4f |
| a0744f179f |
| e7260838d6 |
| b145878d50 |
| 72fd05501b |
| a3c19b1e2d |
| 2e6458dbcc |
| 8471507115 |
| 99724a25a2 |
| 504c824cfe |
| 01c7a345cb |
| 890c2d6757 |
| 00cf026524 |
| 7604a0b583 |
| 4e789acf2d |
| d9459d04ea |
.codecov.yml (61 changed lines)

@@ -1,6 +1,7 @@
# https://docs.codecov.com/docs/codecovyml-reference#codecov
codecov:
  require_ci_to_pass: true
# https://docs.codecov.com/docs/components
# https://docs.codecov.com/docs/components
component_management:
  individual_components:
    - component_id: backend
@@ -9,26 +10,70 @@ component_management:
    - component_id: frontend
      paths:
        - src-ui/**
# https://docs.codecov.com/docs/pull-request-comments
# https://docs.codecov.com/docs/flags#step-2-flag-management-in-yaml
# https://docs.codecov.com/docs/carryforward-flags
flags:
  # Backend Python versions
  backend-python-3.10:
    paths:
      - src/**
    carryforward: true
  backend-python-3.11:
    paths:
      - src/**
    carryforward: true
  backend-python-3.12:
    paths:
      - src/**
    carryforward: true
  # Frontend (shards merge into single flag)
  frontend-node-24.x:
    paths:
      - src-ui/**
    carryforward: true
comment:
  layout: "header, diff, components, flags, files"
  # https://docs.codecov.com/docs/javascript-bundle-analysis
  require_bundle_changes: true
  bundle_change_threshold: "50Kb"
coverage:
  # https://docs.codecov.com/docs/commit-status
  status:
    project:
      default:
      backend:
        flags:
          - backend-python-3.10
          - backend-python-3.11
          - backend-python-3.12
        paths:
          - src/**
        # https://docs.codecov.com/docs/commit-status#threshold
        threshold: 1%
        removed_code_behavior: adjust_base
      frontend:
        flags:
          - frontend-node-24.x
        paths:
          - src-ui/**
        threshold: 1%
        removed_code_behavior: adjust_base
    patch:
      default:
        # For the changed lines only, target 100% covered, but
        # allow as low as 75%
      backend:
        flags:
          - backend-python-3.10
          - backend-python-3.11
          - backend-python-3.12
        paths:
          - src/**
        target: 100%
        threshold: 25%
      frontend:
        flags:
          - frontend-node-24.x
        paths:
          - src-ui/**
        target: 100%
        threshold: 25%
# https://docs.codecov.com/docs/javascript-bundle-analysis
bundle_analysis:
  # Fail if the bundle size increases by more than 1MB
  warning_threshold: "1MB"
  status: true
Dockerfile

@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1

-FROM --platform=$BUILDPLATFORM docker.io/node:20-trixie-slim as main-app
+FROM --platform=$BUILDPLATFORM docker.io/node:24-trixie-slim as main-app

 ARG DEBIAN_FRONTEND=noninteractive
.github/workflows/ci-backend.yml (new file, 104 lines)

@@ -0,0 +1,104 @@
name: Backend Tests
on:
  push:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src/**'
      - 'pyproject.toml'
      - 'uv.lock'
      - 'docker/compose/docker-compose.ci-test.yml'
      - '.github/workflows/ci-backend.yml'
  pull_request:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src/**'
      - 'pyproject.toml'
      - 'uv.lock'
      - 'docker/compose/docker-compose.ci-test.yml'
      - '.github/workflows/ci-backend.yml'
  workflow_dispatch:
concurrency:
  group: backend-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  DEFAULT_UV_VERSION: "0.9.x"
  NLTK_DATA: "/usr/share/nltk_data"
jobs:
  test:
    name: "Python ${{ matrix.python-version }}"
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Start containers
        run: |
          docker compose --file docker/compose/docker-compose.ci-test.yml pull --quiet
          docker compose --file docker/compose/docker-compose.ci-test.yml up --detach
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: "${{ matrix.python-version }}"
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends \
            unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      - name: Configure ImageMagick
        run: |
          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
      - name: Install Python dependencies
        run: |
          uv sync \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --group testing \
            --frozen
      - name: List installed Python dependencies
        run: |
          uv pip list
      - name: Install NLTK data
        run: |
          uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
      - name: Run tests
        env:
          NLTK_DATA: ${{ env.NLTK_DATA }}
          PAPERLESS_CI_TEST: 1
          PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
          PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
          PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            pytest
      - name: Upload test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: junit.xml
          report_type: test_results
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: coverage.xml
          report_type: coverage
      - name: Stop containers
        if: always()
        run: |
          docker compose --file docker/compose/docker-compose.ci-test.yml logs
          docker compose --file docker/compose/docker-compose.ci-test.yml down
.github/workflows/ci-docker.yml (new file, 239 lines)

@@ -0,0 +1,239 @@
name: Docker Build
on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
    branches:
      - dev
      - beta
  pull_request:
    branches:
      - dev
      - main
  workflow_dispatch:
concurrency:
  group: docker-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  REGISTRY: ghcr.io
jobs:
  build-arch:
    name: Build ${{ matrix.arch }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: ubuntu-24.04
            arch: amd64
            platform: linux/amd64
          - runner: ubuntu-24.04-arm
            arch: arm64
            platform: linux/arm64
    runs-on: ${{ matrix.runner }}
    permissions:
      contents: read
      packages: write
    outputs:
      can-push: ${{ steps.check-push.outputs.can-push }}
      push-external: ${{ steps.check-push.outputs.push-external }}
      repository: ${{ steps.repo.outputs.name }}
      ref-name: ${{ steps.ref.outputs.name }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6.0.1
      - name: Determine ref name
        id: ref
        run: |
          ref_name="${GITHUB_HEAD_REF:-$GITHUB_REF_NAME}"
          # Sanitize by replacing / with - for cache keys
          cache_ref="${ref_name//\//-}"

          echo "ref_name=${ref_name}"
          echo "cache_ref=${cache_ref}"

          echo "name=${ref_name}" >> $GITHUB_OUTPUT
          echo "cache-ref=${cache_ref}" >> $GITHUB_OUTPUT
      - name: Check push permissions
        id: check-push
        env:
          REF_NAME: ${{ steps.ref.outputs.name }}
        run: |
          # can-push: Can we push to GHCR?
          # True for: pushes, or PRs from the same repo (not forks)
          can_push=${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
          echo "can-push=${can_push}"
          echo "can-push=${can_push}" >> $GITHUB_OUTPUT

          # push-external: Should we also push to Docker Hub and Quay.io?
          # Only for main repo on dev/beta branches or version tags
          push_external="false"
          if [[ "${can_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
            case "${REF_NAME}" in
              dev|beta)
                push_external="true"
                ;;
            esac
            case "${{ github.ref }}" in
              refs/tags/v*|*beta.rc*)
                push_external="true"
                ;;
            esac
          fi
          echo "push-external=${push_external}"
          echo "push-external=${push_external}" >> $GITHUB_OUTPUT
      - name: Set repository name
        id: repo
        run: |
          repo_name="${{ github.repository }}"
          repo_name="${repo_name,,}"

          echo "repository=${repo_name}"
          echo "name=${repo_name}" >> $GITHUB_OUTPUT
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.12.0
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3.6.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Maximize space
        run: |
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf /usr/local/share/boost
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
      - name: Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5.10.0
        with:
          images: |
            ${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}
          tags: |
            type=ref,event=branch
            type=raw,value=${{ steps.ref.outputs.name }},enable=${{ github.event_name == 'pull_request' }}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6.18.0
        with:
          context: .
          file: ./Dockerfile
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
          outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.can-push }}
          cache-from: |
            type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
            type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
          cache-to: ${{ steps.check-push.outputs.can-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
      - name: Export digest
        if: steps.check-push.outputs.can-push == 'true'
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          echo "digest=${digest}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        if: steps.check-push.outputs.can-push == 'true'
        uses: actions/upload-artifact@v6.0.0
        with:
          name: digests-${{ matrix.arch }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
  merge-and-push:
    name: Merge and Push Manifest
    runs-on: ubuntu-24.04
    needs: build-arch
    if: needs.build-arch.outputs.can-push == 'true'
    permissions:
      contents: read
      packages: write
    steps:
      - name: Download digests
        uses: actions/download-artifact@v7.0.0
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true
      - name: List digests
        run: |
          echo "Downloaded digests:"
          ls -la /tmp/digests/
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.12.0
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3.6.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        if: needs.build-arch.outputs.push-external == 'true'
        uses: docker/login-action@v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        if: needs.build-arch.outputs.push-external == 'true'
        uses: docker/login-action@v3.6.0
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5.10.0
        with:
          images: |
            ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
          tags: |
            type=ref,event=branch
            type=raw,value=${{ needs.build-arch.outputs.ref-name }},enable=${{ github.event_name == 'pull_request' }}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        env:
          REPOSITORY: ${{ needs.build-arch.outputs.repository }}
        run: |
          tags=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "${DOCKER_METADATA_OUTPUT_JSON}")

          digests=""
          for digest in *; do
            digests+="${{ env.REGISTRY }}/${REPOSITORY}@sha256:${digest} "
          done

          echo "Creating manifest with tags: ${tags}"
          echo "From digests: ${digests}"

          docker buildx imagetools create ${tags} ${digests}
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Copy to Docker Hub
        if: needs.build-arch.outputs.push-external == 'true'
        env:
          TAGS: ${{ steps.docker-meta.outputs.tags }}
          GHCR_REPO: ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
        run: |
          for tag in ${TAGS}; do
            dockerhub_tag="${tag/${GHCR_REPO}/docker.io/paperlessngx/paperless-ngx}"
            echo "Copying ${tag} to ${dockerhub_tag}"
            skopeo copy --all "docker://${tag}" "docker://${dockerhub_tag}"
          done
      - name: Copy to Quay.io
        if: needs.build-arch.outputs.push-external == 'true'
        env:
          TAGS: ${{ steps.docker-meta.outputs.tags }}
          GHCR_REPO: ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
        run: |
          for tag in ${TAGS}; do
            quay_tag="${tag/${GHCR_REPO}/quay.io/paperlessngx/paperless-ngx}"
            echo "Copying ${tag} to ${quay_tag}"
            skopeo copy --all "docker://${tag}" "docker://${quay_tag}"
          done
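Note (not part of the diff): the "Create manifest list and push" step above stitches the per-architecture images back into one multi-arch tag from their digests. As a rough sketch with hypothetical digest values, on a push to the dev branch of the main repository the assembled command would look something like:

    # illustration only; the real digests come from the build-arch job's outputs
    docker buildx imagetools create \
      -t ghcr.io/paperless-ngx/paperless-ngx:dev \
      ghcr.io/paperless-ngx/paperless-ngx@sha256:<amd64-digest> \
      ghcr.io/paperless-ngx/paperless-ngx@sha256:<arm64-digest>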
.github/workflows/ci-docs.yml (new file, 88 lines)

@@ -0,0 +1,88 @@
name: Documentation
on:
  push:
    branches:
      - main
      - dev
    paths:
      - 'docs/**'
      - 'mkdocs.yml'
      - '.github/workflows/ci-docs.yml'
  pull_request:
    paths:
      - 'docs/**'
      - 'mkdocs.yml'
      - '.github/workflows/ci-docs.yml'
  workflow_dispatch:
concurrency:
  group: docs-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  DEFAULT_UV_VERSION: "0.9.x"
  DEFAULT_PYTHON_VERSION: "3.11"
jobs:
  build:
    name: Build Documentation
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Build documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      - name: Upload artifact
        uses: actions/upload-artifact@v6
        with:
          name: documentation
          path: site/
          retention-days: 7
  deploy:
    name: Deploy Documentation
    needs: build
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Deploy documentation
        run: |
          echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
          git config --global user.name "${{ github.actor }}"
          git config --global user.email "${{ github.actor }}@users.noreply.github.com"
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs gh-deploy --force --no-history
.github/workflows/ci-frontend.yml (new file, 189 lines)

@@ -0,0 +1,189 @@
name: Frontend Tests
on:
  push:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src-ui/**'
      - '.github/workflows/ci-frontend.yml'
  pull_request:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src-ui/**'
      - '.github/workflows/ci-frontend.yml'
  workflow_dispatch:
concurrency:
  group: frontend-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  install-dependencies:
    name: Install Dependencies
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Install dependencies
        run: cd src-ui && pnpm install
  lint:
    name: Lint
    needs: install-dependencies
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Run lint
        run: cd src-ui && pnpm run lint
  unit-tests:
    name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    needs: install-dependencies
    runs-on: ubuntu-24.04
    strategy:
      fail-fast: false
      matrix:
        node-version: [24.x]
        shard-index: [1, 2, 3, 4]
        shard-count: [4]
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Run Jest unit tests
        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
      - name: Upload test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/
          report_type: test_results
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/coverage/
  e2e-tests:
    name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    needs: install-dependencies
    runs-on: ubuntu-24.04
    container: mcr.microsoft.com/playwright:v1.57.0-noble
    env:
      PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
      PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
    strategy:
      fail-fast: false
      matrix:
        node-version: [24.x]
        shard-index: [1, 2]
        shard-count: [2]
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Install dependencies
        run: cd src-ui && pnpm install --no-frozen-lockfile
      - name: Run Playwright E2E tests
        run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
  bundle-analysis:
    name: Bundle Analysis
    needs: [unit-tests, e2e-tests]
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Build and analyze
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: cd src-ui && pnpm run build --configuration=production
.github/workflows/ci-lint.yml (new file, 24 lines)

@@ -0,0 +1,24 @@
name: Lint
on:
  push:
    branches-ignore:
      - 'translations**'
  pull_request:
    branches-ignore:
      - 'translations**'
concurrency:
  group: lint-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  pre-commit:
    name: Pre-commit Checks
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.11"
      - name: Run pre-commit
        uses: pre-commit/action@v3.0.1
.github/workflows/ci-release.yml (new file, 237 lines)

@@ -0,0 +1,237 @@
name: Release
on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
concurrency:
  group: release-${{ github.ref }}
  cancel-in-progress: false
env:
  DEFAULT_UV_VERSION: "0.9.x"
  DEFAULT_PYTHON_VERSION: "3.11"
jobs:
  wait-for-docker:
    name: Wait for Docker Build
    runs-on: ubuntu-24.04
    steps:
      - name: Wait for Docker build
        uses: lewagon/wait-on-check-action@v1.4.1
        with:
          ref: ${{ github.sha }}
          check-name: 'Build Docker Image'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          wait-interval: 60
  build-release:
    name: Build Release
    needs: wait-for-docker
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      # ---- Frontend Build ----
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Install frontend dependencies
        run: cd src-ui && pnpm install
      - name: Build frontend
        run: cd src-ui && pnpm run build --configuration production
      # ---- Backend Setup ----
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      # ---- Build Documentation ----
      - name: Build documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      # ---- Prepare Release ----
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input --clear
      - name: Assemble release package
        run: |
          mkdir -p dist/paperless-ngx/scripts

          for file_name in .dockerignore \
            .env \
            Dockerfile \
            pyproject.toml \
            uv.lock \
            requirements.txt \
            LICENSE \
            README.md \
            paperless.conf.example
          do
            cp --verbose ${file_name} dist/paperless-ngx/
          done
          mv dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf

          cp --recursive docker/ dist/paperless-ngx/docker
          cp scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
          cp --recursive src/ dist/paperless-ngx/src
          cp --recursive site/ dist/paperless-ngx/docs
          mv static dist/paperless-ngx/

          find dist/paperless-ngx -name "__pycache__" -type d -exec rm -rf {} +
      - name: Create release archive
        run: |
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v6
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7
  publish-release:
    name: Publish Release
    needs: build-release
    runs-on: ubuntu-24.04
    outputs:
      prerelease: ${{ steps.get-version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get-version.outputs.version }}
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v7
        with:
          name: release
          path: ./
      - name: Get version info
        id: get-version
        run: |
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          if [[ "${{ github.ref_name }}" == *"-beta.rc"* ]]; then
            echo "prerelease=true" >> $GITHUB_OUTPUT
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      - name: Create release and changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:
          name: Paperless-ngx ${{ steps.get-version.outputs.version }}
          tag: ${{ steps.get-version.outputs.version }}
          version: ${{ steps.get-version.outputs.version }}
          prerelease: ${{ steps.get-version.outputs.prerelease }}
          publish: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get-version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz
  # ---------------------------------------------------------------------------
  # Append changelog to docs (only on non-prerelease)
  # ---------------------------------------------------------------------------
  append-changelog:
    name: Append Changelog
    needs: publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Update changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog

          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md

          echo "Manually linking usernames"
          sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md

          echo "Removing unneeded comment tags"
          sed -i -r 's|@<!---->|@|g' changelog-new.md

          CURRENT_CHANGELOG=$(tail --lines +2 changelog.md)
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md

          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            pre-commit run --files changelog.md || true

          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      - name: Create pull request
        uses: actions/github-script@v8
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation', 'skip-changelog']
            });
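Note (not part of the diff): the username-linking sed in the "Update changelog" step above rewrites release-drafter's plain @mentions into Markdown profile links. A minimal sketch with a hypothetical username and PR reference (sed -E is equivalent to the -r flag used in the workflow):

    # hypothetical release-drafter changelog line
    echo '@some_user ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))' \
      | sed -E 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g'
    # prints: [@some_user](https://github.com/some_user) ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))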
.github/workflows/ci.yml (deleted, 693 lines)

@@ -1,693 +0,0 @@
name: ci
on:
  push:
    tags:
      # https://semver.org/#spec-item-2
      - 'v[0-9]+.[0-9]+.[0-9]+'
      # https://semver.org/#spec-item-9
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
    branches-ignore:
      - 'translations**'
  pull_request:
    branches-ignore:
      - 'translations**'
env:
  DEFAULT_UV_VERSION: "0.9.x"
  # This is the default version of Python to use in most steps which aren't specific
  DEFAULT_PYTHON_VERSION: "3.11"
  NLTK_DATA: "/usr/share/nltk_data"
jobs:
  detect-duplicate:
    name: Detect Duplicate Run
    runs-on: ubuntu-24.04
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - name: Check if workflow should run
        id: check
        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            if (context.eventName !== 'push') {
              core.info('Not a push event; running workflow.');
              core.setOutput('should_run', 'true');
              return;
            }

            const ref = context.ref || '';
            if (!ref.startsWith('refs/heads/')) {
              core.info('Push is not to a branch; running workflow.');
              core.setOutput('should_run', 'true');
              return;
            }

            const branch = ref.substring('refs/heads/'.length);
            const { owner, repo } = context.repo;
            const prs = await github.paginate(github.rest.pulls.list, {
              owner,
              repo,
              state: 'open',
              head: `${owner}:${branch}`,
              per_page: 100,
            });

            if (prs.length === 0) {
              core.info(`No open PR found for ${branch}; running workflow.`);
              core.setOutput('should_run', 'true');
            } else {
              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
              core.setOutput('should_run', 'false');
            }
  pre-commit:
    needs:
      - detect-duplicate
    if: needs.detect-duplicate.outputs.should_run == 'true'
    name: Linting Checks
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Install python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Check files
        uses: pre-commit/action@v3.0.1
  documentation:
    name: "Build & Deploy Documentation"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Make documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      - name: Deploy documentation
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        run: |
          echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
          git config --global user.name "${{ github.actor }}"
          git config --global user.email "${{ github.actor }}@users.noreply.github.com"
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs gh-deploy --force --no-history
      - name: Upload artifact
        uses: actions/upload-artifact@v5
        with:
          name: documentation
          path: site/
          retention-days: 7
  tests-backend:
    name: "Backend Tests (Python ${{ matrix.python-version }})"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Start containers
        run: |
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: "${{ matrix.python-version }}"
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      - name: Configure ImageMagick
        run: |
          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
      - name: Install Python dependencies
        run: |
          uv sync \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --group testing \
            --frozen
      - name: List installed Python dependencies
        run: |
          uv pip list
      - name: Install or update NLTK dependencies
        run: uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
      - name: Tests
        env:
          NLTK_DATA: ${{ env.NLTK_DATA }}
          PAPERLESS_CI_TEST: 1
          # Enable paperless_mail testing against real server
          PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
          PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
          PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            pytest
      - name: Upload backend test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: junit.xml
          report_type: test_results
      - name: Upload backend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: coverage.xml
      - name: Stop containers
        if: always()
        run: |
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
  install-frontend-dependencies:
    name: "Install Frontend Dependencies"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Install dependencies
        run: cd src-ui && pnpm install
  tests-frontend:
    name: "Frontend Unit Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    runs-on: ubuntu-24.04
    needs:
      - install-frontend-dependencies
    strategy:
      fail-fast: false
      matrix:
        node-version: [20.x]
        shard-index: [1, 2, 3, 4]
        shard-count: [4]
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Linting checks
        run: cd src-ui && pnpm run lint
      - name: Run Jest unit tests
        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
      - name: Upload frontend test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/
          report_type: test_results
      - name: Upload frontend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/coverage/
  tests-frontend-e2e:
    name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    runs-on: ubuntu-24.04
    container: mcr.microsoft.com/playwright:v1.57.0-noble
    needs:
      - install-frontend-dependencies
    env:
      PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
      PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
    strategy:
      fail-fast: false
      matrix:
        node-version: [20.x]
        shard-index: [1, 2]
        shard-count: [2]
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Install dependencies
        run: cd src-ui && pnpm install --no-frozen-lockfile
      - name: Run Playwright e2e tests
        run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
  frontend-bundle-analysis:
    name: "Frontend Bundle Analysis"
    runs-on: ubuntu-24.04
    needs:
      - tests-frontend
      - tests-frontend-e2e
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Build frontend and upload analysis
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: cd src-ui && pnpm run build --configuration=production
  build-docker-image:
    name: Build Docker image for ${{ github.event_name == 'pull_request' && github.head_ref || github.ref_name }}
    runs-on: ubuntu-24.04
    if: (github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v') || startsWith(github.ref, 'refs/heads/l10n_'))) || (github.event_name == 'pull_request' && (startsWith(github.head_ref, 'feature-') || startsWith(github.head_ref, 'fix-') || github.head_ref == 'dev' || github.head_ref == 'beta' || contains(github.head_ref, 'beta.rc') || startsWith(github.head_ref, 'l10n_')))
    concurrency:
      group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
      cancel-in-progress: true
    needs:
      - tests-backend
      - tests-frontend
      - tests-frontend-e2e
    steps:
      - name: Prepare build variables
        id: build-vars
        uses: actions/github-script@v8
        with:
          result-encoding: string
          script: |
            const isPR = context.eventName === 'pull_request';
            const defaultRefName = context.ref.replace('refs/heads/', '');
            const headRef = isPR ? context.payload.pull_request.head.ref : defaultRefName;
            const buildRef = isPR ? `refs/heads/${headRef}` : context.ref;
            const buildCacheKey = headRef.split('/').join('-');
            const canPush = context.eventName === 'push' || (isPR && context.payload.pull_request.head.repo.full_name === `${context.repo.owner}/${context.repo.repo}`);

            core.setOutput('build-ref', buildRef);
            core.setOutput('build-ref-name', headRef);
            core.setOutput('build-cache-key', buildCacheKey);
            core.setOutput('can-push', canPush ? 'true' : 'false');
      - name: Check pushing to Docker Hub
        id: push-other-places
        # Only push to Dockerhub from the main repo AND the ref is either:
        #  main
        #  dev
        #  beta
        #  a tag
        # Otherwise forks would require a Docker Hub account and secrets setup
        env:
          BUILD_REF: ${{ steps.build-vars.outputs.build-ref }}
          BUILD_REF_NAME: ${{ steps.build-vars.outputs.build-ref-name }}
        run: |
          if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( "$BUILD_REF_NAME" == "dev" || "$BUILD_REF_NAME" == "beta" || $BUILD_REF == refs/tags/v* || $BUILD_REF == *beta.rc* ) ]] ; then
            echo "Enabling DockerHub image push"
            echo "enable=true" >> $GITHUB_OUTPUT
          else
            echo "Not pushing to DockerHub"
            echo "enable=false" >> $GITHUB_OUTPUT
          fi
      - name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
          echo "Name is ${ghcr_name}"
          echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
      - name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
            name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
          tags: |
            # Tag branches with branch name
            type=ref,event=branch
            # Pull requests need a sanitized branch tag for pushing images
            type=raw,value=${{ steps.build-vars.outputs.build-cache-key }},enable=${{ github.event_name == 'pull_request' }}
            # Process semver tags
            # For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Checkout
        uses: actions/checkout@v6
      # If https://github.com/docker/buildx/issues/1044 is resolved,
      # the append input with a native arm64 arch could be used to
      # significantly speed up building
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Docker Hub
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Quay.io
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ steps.build-vars.outputs.can-push == 'true' }}
          tags: ${{ steps.docker-meta.outputs.tags }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
          # Get cache layers from this branch, then dev
          # This allows new branches to get at least some cache benefits, generally from dev
          cache-from: |
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ steps.build-vars.outputs.build-cache-key }}
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
          cache-to: ${{ steps.build-vars.outputs.can-push == 'true' && format('type=registry,mode=max,ref=ghcr.io/{0}/builder/cache/app:{1}', steps.set-ghcr-repository.outputs.ghcr-repository, steps.build-vars.outputs.build-cache-key) || '' }}
      - name: Inspect image
        if: steps.build-vars.outputs.can-push == 'true'
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Export frontend artifact from docker
        if: steps.build-vars.outputs.can-push == 'true'
        run: |
          docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
          docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
      - name: Upload frontend artifact
        if: steps.build-vars.outputs.can-push == 'true'
        uses: actions/upload-artifact@v5
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
          retention-days: 7
  build-release:
    name: "Build Release"
    needs:
      - build-docker-image
      - documentation
    if: github.event_name == 'push'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      - name: Download frontend artifact
        uses: actions/download-artifact@v6
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
      - name: Download documentation artifact
        uses: actions/download-artifact@v6
        with:
          name: documentation
          path: docs/_build/html/
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input
      - name: Move files
        run: |
          echo "Making dist folders"
          for directory in dist \
            dist/paperless-ngx \
            dist/paperless-ngx/scripts;
          do
            mkdir --verbose --parents ${directory}
          done

          echo "Copying basic files"
          for file_name in .dockerignore \
            .env \
            Dockerfile \
            pyproject.toml \
            uv.lock \
            requirements.txt \
            LICENSE \
            README.md \
            paperless.conf.example
          do
            cp --verbose ${file_name} dist/paperless-ngx/
          done
          mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf

          echo "Copying Docker related files"
          cp --recursive docker/ dist/paperless-ngx/docker

          echo "Copying startup scripts"
          cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/

          echo "Copying source files"
          cp --recursive src/ dist/paperless-ngx/src
          echo "Copying documentation"
          cp --recursive docs/_build/html/ dist/paperless-ngx/docs

          mv --verbose static dist/paperless-ngx
      - name: Make release package
        run: |
          echo "Creating release archive"
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v5
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7
  publish-release:
    name: "Publish Release"
    runs-on: ubuntu-24.04
    outputs:
      prerelease: ${{ steps.get_version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get_version.outputs.version }}
    needs:
      - build-release
    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v6
        with:
          name: release
          path: ./
      - name: Get version
        id: get_version
        run: |
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
            echo "prerelease=true" >> $GITHUB_OUTPUT
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      - name: Create Release and Changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:
          name: Paperless-ngx ${{ steps.get_version.outputs.version }}
          tag: ${{ steps.get_version.outputs.version }}
          version: ${{ steps.get_version.outputs.version }}
          prerelease: ${{ steps.get_version.outputs.prerelease }}
          publish: true # ensures release is not marked as draft
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        id: upload-release-asset
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz
  append-changelog:
    name: "Append Changelog"
    runs-on: ubuntu-24.04
    needs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Append Changelog to docs
        id: append-Changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog
          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
          echo "Manually linking usernames"
          sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
          echo "Removing unneeded comment tags"
          sed -i -r 's|@<!---->|@|g' changelog-new.md
          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--dev \
|
||||
pre-commit run --files changelog.md || true
|
||||
git config --global user.name "github-actions"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
|
||||
git push origin ${{ needs.publish-release.outputs.version }}-changelog
|
||||
- name: Create Pull Request
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const { repo, owner } = context.repo;
|
||||
const result = await github.rest.pulls.create({
|
||||
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
|
||||
owner,
|
||||
repo,
|
||||
head: '${{ needs.publish-release.outputs.version }}-changelog',
|
||||
base: 'main',
|
||||
body: 'This PR is auto-generated by CI.'
|
||||
});
|
||||
github.rest.issues.addLabels({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: result.data.number,
|
||||
labels: ['documentation', 'skip-changelog']
|
||||
});
|
||||
2 .github/workflows/repo-maintenance.yml vendored
@@ -37,7 +37,7 @@ jobs:
    if: github.repository_owner == 'paperless-ngx'
    runs-on: ubuntu-24.04
    steps:
      - uses: dessant/lock-threads@v5
      - uses: dessant/lock-threads@v6
        with:
          issue-inactive-days: '30'
          pr-inactive-days: '30'
10 .github/workflows/translate-strings.yml vendored
@@ -12,9 +12,11 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        env:
          GH_REF: ${{ github.ref }} # sonar rule:githubactions:S7630 - avoid injection
        with:
          token: ${{ secrets.PNGX_BOT_PAT }}
          ref: ${{ github.head_ref }}
          ref: ${{ env.GH_REF }}
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
@@ -37,15 +39,15 @@ jobs:
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
      - name: Use Node.js 24
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          node-version: 24.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
12 Dockerfile
@@ -5,14 +5,12 @@
# Purpose: Compiles the frontend
# Notes:
#  - Does PNPM stuff with Typescript and such
FROM --platform=$BUILDPLATFORM docker.io/node:20-trixie-slim AS compile-frontend
FROM --platform=$BUILDPLATFORM docker.io/node:24-trixie-slim AS compile-frontend

COPY ./src-ui /src/src-ui

WORKDIR /src/src-ui
RUN set -eux \
  && npm update -g pnpm \
  && npm install -g corepack@latest \
  && corepack enable \
  && pnpm install

@@ -110,8 +108,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONWARNINGS="ignore:::django.http.response:517" \
    PNGX_CONTAINERIZED=1 \
    # https://docs.astral.sh/uv/reference/settings/#link-mode
    UV_LINK_MODE=copy \
    UV_CACHE_DIR=/cache/uv/
    UV_LINK_MODE=copy

#
# Begin installation and configuration
@@ -193,14 +190,13 @@ ARG BUILD_PACKAGES="\
  pkg-config"

# hadolint ignore=DL3042
RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
  set -eux \
RUN set -eux \
  && echo "Installing build system packages" \
  && apt-get update \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Installing Python requirements" \
  && uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
  && uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
  && uv pip install --no-cache --system --no-python-downloads --python-preference system --requirements requirements.txt \
  && echo "Installing NLTK data" \
  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
@@ -11,6 +11,7 @@ for command in decrypt_documents \
  mail_fetcher \
  document_create_classifier \
  document_index \
  document_llmindex \
  document_renamer \
  document_retagger \
  document_thumbnails \
14 docker/rootfs/usr/local/bin/document_llmindex Executable file
@@ -0,0 +1,14 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

set -e

cd "${PAPERLESS_SRC_DIR}"

if [[ $(id -u) == 0 ]]; then
  s6-setuidgid paperless python3 manage.py document_llmindex "$@"
elif [[ $(id -un) == "paperless" ]]; then
  python3 manage.py document_llmindex "$@"
else
  echo "Unknown user."
fi
@@ -501,7 +501,7 @@ The `datetime` filter formats a datetime string or datetime object using Python'
See the [strftime format code documentation](https://docs.python.org/3.13/library/datetime.html#strftime-and-strptime-format-codes)
for the possible codes and their meanings.

##### Date Localization
##### Date Localization {#date-localization}

The `localize_date` filter formats a date or datetime object into a localized string using Babel internationalization.
This takes into account the provided locale for translation. Since this must be used on a date or datetime object,
@@ -851,8 +851,8 @@ followed by the even pages.

It's important that the scan files get consumed in the correct order, and one at a time.
You therefore need to make sure that Paperless is running while you upload the files into
the directory; and if you're using [polling](configuration.md#polling), make sure that
`CONSUMER_POLLING` is set to a value lower than it takes for the second scan to appear,
the directory; and if you're using polling, make sure that
`CONSUMER_POLLING_INTERVAL` is set to a value lower than it takes for the second scan to appear,
like 5-10 or even lower.

Another thing that might happen is that you start a double sided scan, but then forget
@@ -294,6 +294,13 @@ The following methods are supported:
    - `"delete_original": true` to delete the original documents after editing.
    - `"update_document": true` to update the existing document with the edited PDF.
    - `"include_metadata": true` to copy metadata from the original document to the edited document.
- `remove_password`
  - Requires `parameters`:
    - `"password": "PASSWORD_STRING"` The password to remove from the PDF documents.
  - Optional `parameters`:
    - `"update_document": true` to replace the existing document with the password-less PDF.
    - `"delete_original": true` to delete the original document after editing.
    - `"include_metadata": true` to copy metadata from the original document to the new password-less document.
- `merge`
  - No additional `parameters` required.
  - The ordering of the merged document is determined by the list of IDs.
@@ -1175,21 +1175,45 @@ don't exist yet.

#### [`PAPERLESS_CONSUMER_IGNORE_PATTERNS=<json>`](#PAPERLESS_CONSUMER_IGNORE_PATTERNS) {#PAPERLESS_CONSUMER_IGNORE_PATTERNS}

: By default, paperless ignores certain files and folders in the
consumption directory, such as system files created by the Mac OS
or hidden folders some tools use to store data.
: Additional regex patterns for files to ignore in the consumption directory. Patterns are matched against filenames only (not full paths)
using Python's `re.match()`, which anchors at the start of the filename.

This can be adjusted by configuring a custom json array with
patterns to exclude.
See the [watchfiles documentation](https://watchfiles.helpmanual.io/api/filters/#watchfiles.BaseFilter.ignore_entity_patterns)

For example, `.DS_STORE/*` will ignore any files found in a folder
named `.DS_STORE`, including `.DS_STORE/bar.pdf` and `foo/.DS_STORE/bar.pdf`
This setting is for additional patterns beyond the built-in defaults. Common system files and directories are already ignored automatically.
The patterns will be compiled via Python's standard `re` module.

A pattern like `._*` will ignore anything starting with `._`, including:
`._foo.pdf` and `._bar/foo.pdf`
Example custom patterns:

Defaults to
`[".DS_Store", ".DS_STORE", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini", "@eaDir/*", "Thumbs.db"]`.

```json
["^temp_", "\\.bak$", "^~"]
```

This would ignore:

- Files starting with `temp_` (e.g., `temp_scan.pdf`)
- Files ending with `.bak` (e.g., `document.pdf.bak`)
- Files starting with `~` (e.g., `~$document.docx`)

Defaults to `[]` (empty list, uses only built-in defaults).

The default ignores are `[.DS_Store, .DS_STORE, ._*, desktop.ini, Thumbs.db]` and cannot be overridden.

#### [`PAPERLESS_CONSUMER_IGNORE_DIRS=<json>`](#PAPERLESS_CONSUMER_IGNORE_DIRS) {#PAPERLESS_CONSUMER_IGNORE_DIRS}

: Additional directory names to ignore in the consumption directory. Directories matching these names (and all their contents) will be skipped.

This setting is for additional directories beyond the built-in defaults. Matching is done by directory name only, not full path.

Example:

```json
["temp", "incoming", ".hidden"]
```

Defaults to `[]` (empty list, uses only built-in defaults).

The default ignores are `[.stfolder, .stversions, .localized, @eaDir, .Spotlight-V100, .Trashes, __MACOSX]` and cannot be overridden.
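For reference, a minimal sketch of how these two settings might be combined in `paperless.conf`; the patterns and directory name below are illustrative examples, not defaults:

```
# Illustrative values only: extra filename regexes and one extra ignored directory
PAPERLESS_CONSUMER_IGNORE_PATTERNS=["^temp_", "\\.bak$"]
PAPERLESS_CONSUMER_IGNORE_DIRS=["incoming"]
```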
#### [`PAPERLESS_CONSUMER_BARCODE_SCANNER=<string>`](#PAPERLESS_CONSUMER_BARCODE_SCANNER) {#PAPERLESS_CONSUMER_BARCODE_SCANNER}
@@ -1288,48 +1312,24 @@ within your documents.

Defaults to false.

### Polling {#polling}
#### [`PAPERLESS_CONSUMER_POLLING_INTERVAL=<num>`](#PAPERLESS_CONSUMER_POLLING_INTERVAL) {#PAPERLESS_CONSUMER_POLLING_INTERVAL}

#### [`PAPERLESS_CONSUMER_POLLING=<num>`](#PAPERLESS_CONSUMER_POLLING) {#PAPERLESS_CONSUMER_POLLING}
: Configures how the consumer detects new files in the consumption directory.

: If paperless won't find documents added to your consume folder, it
might not be able to automatically detect filesystem changes. In
that case, specify a polling interval in seconds here, which will
then cause paperless to periodically check your consumption
directory for changes. This will also disable listening for file
system changes with `inotify`.
When set to `0` (default), paperless uses native filesystem notifications for efficient, immediate detection of new files.

Defaults to 0, which disables polling and uses filesystem
notifications.
When set to a positive number, paperless polls the consumption directory at that interval in seconds. Use polling for network filesystems (NFS, SMB/CIFS) where native notifications may not work reliably.

#### [`PAPERLESS_CONSUMER_POLLING_RETRY_COUNT=<num>`](#PAPERLESS_CONSUMER_POLLING_RETRY_COUNT) {#PAPERLESS_CONSUMER_POLLING_RETRY_COUNT}
Defaults to 0.

: If consumer polling is enabled, sets the maximum number of times
paperless will check for a file to remain unmodified. If a file's
modification time and size are identical for two consecutive checks, it
will be consumed.
#### [`PAPERLESS_CONSUMER_STABILITY_DELAY=<num>`](#PAPERLESS_CONSUMER_STABILITY_DELAY) {#PAPERLESS_CONSUMER_STABILITY_DELAY}

Defaults to 5.
: Sets the time in seconds that a file must remain unchanged (same size and modification time) before paperless will begin consuming it.

#### [`PAPERLESS_CONSUMER_POLLING_DELAY=<num>`](#PAPERLESS_CONSUMER_POLLING_DELAY) {#PAPERLESS_CONSUMER_POLLING_DELAY}
Increase this value if you experience issues with files being consumed before they are fully written, particularly on slower network storage or
with certain scanner quirks.

: If consumer polling is enabled, sets the delay in seconds between
each check (above) paperless will do while waiting for a file to
remain unmodified.

Defaults to 5.

### iNotify {#inotify}

#### [`PAPERLESS_CONSUMER_INOTIFY_DELAY=<num>`](#PAPERLESS_CONSUMER_INOTIFY_DELAY) {#PAPERLESS_CONSUMER_INOTIFY_DELAY}

: Sets the time in seconds the consumer will wait for additional
events from inotify before the consumer will consider a file ready
and begin consumption. Certain scanners or network setups may
generate multiple events for a single file, leading to multiple
consumers working on the same file. Configure this to prevent that.

Defaults to 0.5 seconds.
Defaults to 5.0 seconds.
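As a rough sketch only, a consume directory on an NFS or SMB share could combine polling with a longer stability window; the interval and delay values below are illustrative, not recommendations:

```
# Illustrative values: poll the share every 30 seconds,
# and only consume files unchanged for 10 seconds
PAPERLESS_CONSUMER_POLLING_INTERVAL=30
PAPERLESS_CONSUMER_STABILITY_DELAY=10
```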
## Workflow webhooks
@@ -1811,3 +1811,87 @@ password. All of these options come from their similarly-named [Django settings]
#### [`PAPERLESS_EMAIL_USE_SSL=<bool>`](#PAPERLESS_EMAIL_USE_SSL) {#PAPERLESS_EMAIL_USE_SSL}

: Defaults to false.

## Remote OCR

#### [`PAPERLESS_REMOTE_OCR_ENGINE=<str>`](#PAPERLESS_REMOTE_OCR_ENGINE) {#PAPERLESS_REMOTE_OCR_ENGINE}

: The remote OCR engine to use. Currently only Azure AI is supported as "azureai".

Defaults to None, which disables remote OCR.

#### [`PAPERLESS_REMOTE_OCR_API_KEY=<str>`](#PAPERLESS_REMOTE_OCR_API_KEY) {#PAPERLESS_REMOTE_OCR_API_KEY}

: The API key to use for the remote OCR engine.

Defaults to None.

#### [`PAPERLESS_REMOTE_OCR_ENDPOINT=<str>`](#PAPERLESS_REMOTE_OCR_ENDPOINT) {#PAPERLESS_REMOTE_OCR_ENDPOINT}

: The endpoint to use for the remote OCR engine. This is required for Azure AI.

Defaults to None.

## AI {#ai}

#### [`PAPERLESS_AI_ENABLED=<bool>`](#PAPERLESS_AI_ENABLED) {#PAPERLESS_AI_ENABLED}

: Enables the AI features in Paperless, including the AI-based
suggestions. This must be set to true in order to use any of the AI features.

Defaults to false.

#### [`PAPERLESS_AI_LLM_EMBEDDING_BACKEND=<str>`](#PAPERLESS_AI_LLM_EMBEDDING_BACKEND) {#PAPERLESS_AI_LLM_EMBEDDING_BACKEND}

: The embedding backend to use for RAG. This can be either "openai" or "huggingface".

Defaults to None.

#### [`PAPERLESS_AI_LLM_EMBEDDING_MODEL=<str>`](#PAPERLESS_AI_LLM_EMBEDDING_MODEL) {#PAPERLESS_AI_LLM_EMBEDDING_MODEL}

: The model to use for the embedding backend for RAG. This can be set to any of the embedding models supported by the current embedding backend. If not supplied, defaults to "text-embedding-3-small" for OpenAI and "sentence-transformers/all-MiniLM-L6-v2" for Huggingface.

Defaults to None.

#### [`PAPERLESS_AI_LLM_BACKEND=<str>`](#PAPERLESS_AI_LLM_BACKEND) {#PAPERLESS_AI_LLM_BACKEND}

: The AI backend to use. This can be either "openai" or "ollama". If set to "ollama", the AI
features will be run locally on your machine. If set to "openai", the AI features will be run
using the OpenAI API. This setting is required in order to use the AI features.

Defaults to None.

!!! note

    The OpenAI API is a paid service. You will need to set up an OpenAI account and
    will be charged for usage incurred by Paperless-ngx features and your document data
    will (of course) be sent to the OpenAI API. Paperless-ngx does not endorse the use of the
    OpenAI API in any way.

    Refer to the OpenAI terms of service, and use at your own risk.

#### [`PAPERLESS_AI_LLM_MODEL=<str>`](#PAPERLESS_AI_LLM_MODEL) {#PAPERLESS_AI_LLM_MODEL}

: The model to use for the AI backend, e.g. "gpt-3.5-turbo", "gpt-4" or any of the models supported by the
current backend. If not supplied, defaults to "gpt-3.5-turbo" for OpenAI and "llama3" for Ollama.

Defaults to None.

#### [`PAPERLESS_AI_LLM_API_KEY=<str>`](#PAPERLESS_AI_LLM_API_KEY) {#PAPERLESS_AI_LLM_API_KEY}

: The API key to use for the AI backend. This is required for the OpenAI backend (optional for others).

Defaults to None.

#### [`PAPERLESS_AI_LLM_ENDPOINT=<str>`](#PAPERLESS_AI_LLM_ENDPOINT) {#PAPERLESS_AI_LLM_ENDPOINT}

: The endpoint / URL to use for the AI backend. This is required for the Ollama backend (optional for others).

Defaults to None.

#### [`PAPERLESS_AI_LLM_INDEX_TASK_CRON=<cron expression>`](#PAPERLESS_AI_LLM_INDEX_TASK_CRON) {#PAPERLESS_AI_LLM_INDEX_TASK_CRON}

: Configures the schedule to update the AI embeddings of text content and metadata for all documents. Only performed if
AI is enabled and the LLM embedding backend is set.

Defaults to `10 2 * * *`, once per day.
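As an illustration only, a local-first setup using Ollama might look roughly like the following; the endpoint assumes Ollama's usual local port and is not a Paperless-ngx default:

```
# Illustrative sketch, not a recommended configuration
PAPERLESS_AI_ENABLED=true
PAPERLESS_AI_LLM_BACKEND=ollama
PAPERLESS_AI_LLM_MODEL=llama3
# Assumption: Ollama listening on its default local port
PAPERLESS_AI_LLM_ENDPOINT=http://localhost:11434
# Optional: enable RAG with a local Hugging Face embedding model
PAPERLESS_AI_LLM_EMBEDDING_BACKEND=huggingface
```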
@@ -175,7 +175,7 @@ To add a new development package `uv add --dev <package>`

## Front end development

The front end is built using Angular. In order to get started, you need Node.js (version 14.15+) and
The front end is built using Angular. In order to get started, you need Node.js (version 24+) and
`pnpm`.

!!! note
@@ -25,11 +25,13 @@ physical documents into a searchable online archive so you can keep, well, _less
## Features

- **Organize and index** your scanned documents with tags, correspondents, types, and more.
- _Your_ data is stored locally on _your_ server and is never transmitted or shared in any way.
- _Your_ data is stored locally on _your_ server and is never transmitted or shared in any way, unless you explicitly choose to do so.
- Performs **OCR** on your documents, adding searchable and selectable text, even to documents scanned with only images.
- Utilizes the open-source Tesseract engine to recognize more than 100 languages.
- Utilizes the open-source Tesseract engine to recognize more than 100 languages.
- _New!_ Supports remote OCR with Azure AI (opt-in).
- Documents are saved as PDF/A format which is designed for long term storage, alongside the unaltered originals.
- Uses machine-learning to automatically add tags, correspondents and document types to your documents.
- **New**: Paperless-ngx can now leverage AI (Large Language Models or LLMs) for document suggestions. This is an optional feature that can be enabled (and is disabled by default).
- Supports PDF documents, images, plain text files, Office documents (Word, Excel, PowerPoint, and LibreOffice equivalents)[^1] and more.
- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely with different configurations assigned to different documents.
- **Beautiful, modern web application** that features:
19 docs/migration.md Normal file
@@ -0,0 +1,19 @@
# v3 Migration Guide

## Consumer Settings Changes

The v3 consumer command uses a [different library](https://watchfiles.helpmanual.io/) to unify
the watching for new files in the consume directory. For the user, this removes several configuration options related to delays and retries
and replaces them with a single unified setting. It also adjusts how the consumer ignore filtering happens, replacing `fnmatch` with `regex` and
separating the directory ignore from the file ignore.

### Summary

| Old Setting                    | New Setting                                                                         | Notes                                                                                |
| ------------------------------ | ----------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
| `CONSUMER_POLLING`             | [`CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL) | Renamed for clarity                                                                  |
| `CONSUMER_INOTIFY_DELAY`       | [`CONSUMER_STABILITY_DELAY`](configuration.md#PAPERLESS_CONSUMER_STABILITY_DELAY)   | Unified for all modes                                                                |
| `CONSUMER_POLLING_DELAY`       | _Removed_                                                                            | Use `CONSUMER_STABILITY_DELAY`                                                       |
| `CONSUMER_POLLING_RETRY_COUNT` | _Removed_                                                                            | Automatic with stability tracking                                                    |
| `CONSUMER_IGNORE_PATTERNS`     | [`CONSUMER_IGNORE_PATTERNS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_PATTERNS)   | **Now regex, not fnmatch**; user patterns are added to (not replacing) default ones  |
| _New_                          | [`CONSUMER_IGNORE_DIRS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_DIRS)           | Additional directories to ignore; user entries are added to (not replacing) defaults |
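A hedged before/after sketch of the renamed settings follows; the numeric values are only examples carried over from the old configuration, not recommendations:

```
# v2 (old names, illustrative values)
PAPERLESS_CONSUMER_POLLING=10
PAPERLESS_CONSUMER_INOTIFY_DELAY=5.0
# v3 (new names)
PAPERLESS_CONSUMER_POLLING_INTERVAL=10
PAPERLESS_CONSUMER_STABILITY_DELAY=5.0
```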
@@ -124,8 +124,7 @@ account. The script essentially automatically performs the steps described in [D
    system notifications with `inotify`. When storing the consumption
    directory on such a file system, paperless will not pick up new
    files with the default configuration. You will need to use
    [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING), which will disable inotify. See
    [here](configuration.md#polling).
    [`PAPERLESS_CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL), which will disable inotify.

5. Run `docker compose pull`. This will pull the image from the GitHub container registry
   by default but you can change the image to pull from Docker Hub by changing the `image`
@@ -46,7 +46,7 @@ run:
If you notice that the consumer will only pick up files in the
consumption directory at startup, but won't find any other files added
later, you will need to enable filesystem polling with the configuration
option [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING).
option [`PAPERLESS_CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL).

This will disable listening to filesystem changes with inotify and
paperless will manually check the consumption directory for changes
@@ -234,47 +234,9 @@ FileNotFoundError: [Errno 2] No such file or directory: '/tmp/ocrmypdf.io.yhk3zb

This probably indicates paperless tried to consume the same file twice.
This can happen for a number of reasons, depending on how documents are
placed into the consume folder. If paperless is using inotify (the
default) to check for documents, try adjusting the
[inotify configuration](configuration.md#inotify). If polling is enabled, try adjusting the
[polling configuration](configuration.md#polling).

## Consumer fails waiting for file to remain unmodified.

You might find messages like these in your log files:

```
[ERROR] [paperless.management.consumer] Timeout while waiting on file /usr/src/paperless/src/../consume/SCN_0001.pdf to remain unmodified.
```

This indicates paperless timed out while waiting for the file to be
completely written to the consume folder. Adjusting
[polling configuration](configuration.md#polling) values should resolve the issue.

!!! note

    The user will need to manually move the file out of the consume folder
    and back in, for the initial failing file to be consumed.

## Consumer fails reporting "OS reports file as busy still".

You might find messages like these in your log files:

```
[WARNING] [paperless.management.consumer] Not consuming file /usr/src/paperless/src/../consume/SCN_0001.pdf: OS reports file as busy still
```

This indicates paperless was unable to open the file, as the OS reported
the file as still being in use. To prevent a crash, paperless did not
try to consume the file. If paperless is using inotify (the default) to
check for documents, try adjusting the
[inotify configuration](configuration.md#inotify). If polling is enabled, try adjusting the
[polling configuration](configuration.md#polling).

!!! note

    The user will need to manually move the file out of the consume folder
    and back in, for the initial failing file to be consumed.
placed into the consume folder, such as how a scanner may modify a file multiple times as it scans.
Try adjusting the
[file stability delay](configuration.md#PAPERLESS_CONSUMER_STABILITY_DELAY) to a larger value.

## Log reports "Creating PaperlessTask failed".
@@ -278,6 +278,28 @@ Once setup, navigating to the email settings page in Paperless-ngx will allow yo
You can also submit a document using the REST API, see [POSTing documents](api.md#file-uploads)
for details.

## Document Suggestions

Paperless-ngx can suggest tags, correspondents, document types and storage paths for documents based on the content of the document. This is done using a (non-LLM) machine learning model that is trained on the documents in your database. The suggestions are shown in the document detail page and can be accepted or rejected by the user.

## AI Features

Paperless-ngx includes several features that use AI to enhance the document management experience. These features are optional and can be enabled or disabled in the settings. If you are using the AI features, you may want to also enable the "LLM index" feature, which supports Retrieval-Augmented Generation (RAG) designed to improve the quality of AI responses. The LLM index feature is not enabled by default and requires additional configuration.

!!! warning

    Remember that Paperless-ngx will send document content to the AI provider you have configured, so consider the privacy implications of using these features, especially if using a remote model (e.g. OpenAI) instead of the default local model.

The AI features work by creating an embedding of the text content and metadata of documents, which is then used for various tasks such as similarity search and question answering. This uses the FAISS vector store.

### AI-Enhanced Suggestions

If enabled, Paperless-ngx can use an AI LLM model to suggest document titles, dates, tags, correspondents and document types for documents. This feature will always be "opt-in" and does not disable the existing classifier-based suggestion system. Currently, both remote (via the OpenAI API) and local (via Ollama) models are supported, see [configuration](configuration.md#ai) for details.

### Document Chat

Paperless-ngx can use an AI LLM model to answer questions about a document or across multiple documents. Again, this feature works best when RAG is enabled. The chat feature is available in the upper app toolbar and will switch between chatting across multiple documents or a single document based on the current view.

## Sharing documents from Paperless-ngx

Paperless-ngx supports sharing documents with other users by assigning them [permissions](#object-permissions)
@@ -543,7 +565,7 @@ This allows for complex logic to be used to generate the title, including [logic
and [filters](https://jinja.palletsprojects.com/en/3.1.x/templates/#id11).
The template is provided as a string.

Using Jinja2 Templates is also useful for [Date localization](advanced_usage.md#Date-Localization) in the title.
Using Jinja2 Templates is also useful for [Date localization](advanced_usage.md#date-localization) in the title.

The available inputs differ depending on the type of workflow trigger.
This is because at the time of consumption (when the text is to be set), no automatic tags etc. have been
@@ -901,6 +923,21 @@ how regularly you intend to scan documents and use paperless.
    performed the task associated with the document, move it to the
    inbox.

## Remote OCR

!!! important

    This feature is disabled by default and will always remain strictly "opt-in".

Paperless-ngx supports performing OCR on documents using remote services. At the moment, this is limited to
[Microsoft's Azure "Document Intelligence" service](https://azure.microsoft.com/en-us/products/ai-services/ai-document-intelligence).
This is of course a paid service (with a free tier) which requires an Azure account and subscription. Azure AI is not affiliated with
Paperless-ngx in any way. When enabled, Paperless-ngx will automatically send appropriate documents to Azure for OCR processing, bypassing
the local OCR engine. See the [configuration](configuration.md#PAPERLESS_REMOTE_OCR_ENGINE) options for more details.

Additionally, when using a commercial service with this feature, consider both potential costs as well as any associated file size
or page limitations (e.g. with a free tier).
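A minimal configuration sketch, assuming an existing Azure Document Intelligence resource; the endpoint and key below are placeholders, not working values:

```
# Placeholders only; substitute your own Azure resource details
PAPERLESS_REMOTE_OCR_ENGINE=azureai
PAPERLESS_REMOTE_OCR_API_KEY=<your-azure-api-key>
PAPERLESS_REMOTE_OCR_ENDPOINT=https://<your-resource>.cognitiveservices.azure.com/
```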
## Architecture

Paperless-ngx consists of the following components:
@@ -69,8 +69,9 @@ nav:
    - development.md
    - 'FAQs': faq.md
    - troubleshooting.md
    - 'Migration to v3': migration.md
    - changelog.md
copyright: Copyright © 2016 - 2023 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
copyright: Copyright © 2016 - 2026 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
extra:
  social:
    - icon: fontawesome/brands/github
@@ -55,10 +55,10 @@
#PAPERLESS_TASK_WORKERS=1
#PAPERLESS_THREADS_PER_WORKER=1
#PAPERLESS_TIME_ZONE=UTC
#PAPERLESS_CONSUMER_POLLING=10
#PAPERLESS_CONSUMER_POLLING_INTERVAL=10
#PAPERLESS_CONSUMER_DELETE_DUPLICATES=false
#PAPERLESS_CONSUMER_RECURSIVE=false
#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini"]
#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[] # Defaults are built in; add filename regexes, e.g. ["^\\.DS_Store$", "^desktop\\.ini$"]
#PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
#PAPERLESS_CONSUMER_ENABLE_BARCODES=false
#PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
@@ -16,6 +16,7 @@ classifiers = [
# This will allow testing to not install a webserver, mysql, etc

dependencies = [
  "azure-ai-documentintelligence>=1.0.2",
  "babel>=2.17",
  "bleach~=6.3.0",
  "celery[redis]~=5.5.1",
@@ -43,16 +44,23 @@ dependencies = [
  "drf-spectacular~=0.28",
  "drf-spectacular-sidecar~=2025.10.1",
  "drf-writable-nested~=0.7.1",
  "faiss-cpu>=1.10",
  "filelock~=3.20.0",
  "flower~=2.0.1",
  "gotenberg-client~=0.12.0",
  "httpx-oauth~=0.16",
  "imap-tools~=1.11.0",
  "inotifyrecursive~=0.3",
  "jinja2~=3.1.5",
  "langdetect~=1.0.9",
  "llama-index-core>=0.12.33.post1",
  "llama-index-embeddings-huggingface>=0.5.3",
  "llama-index-embeddings-openai>=0.3.1",
  "llama-index-llms-ollama>=0.5.4",
  "llama-index-llms-openai>=0.3.38",
  "llama-index-vector-stores-faiss>=0.3",
  "nltk~=3.9.1",
  "ocrmypdf~=16.12.0",
  "openai>=1.76",
  "pathvalidate~=3.3.1",
  "pdf2image~=1.17.0",
  "python-dateutil~=2.9.0",
@@ -65,10 +73,11 @@ dependencies = [
  "redis[hiredis]~=5.2.1",
  "regex>=2025.9.18",
  "scikit-learn~=1.7.0",
  "sentence-transformers>=4.1",
  "setproctitle~=1.3.4",
  "tika-client~=0.10.0",
  "tqdm~=4.67.1",
  "watchdog~=6.0",
  "watchfiles>=1.1.1",
  "whitenoise~=6.9",
  "whoosh-reloaded>=2.7.5",
  "zxing-cpp~=2.3.0",
@@ -253,6 +262,8 @@ testpaths = [
  "src/paperless_tesseract/tests/",
  "src/paperless_tika/tests",
  "src/paperless_text/tests/",
  "src/paperless_remote/tests/",
  "src/paperless_ai/tests",
]
addopts = [
  "--pythonwarnings=all",
@@ -31,6 +31,7 @@
  "fi-FI": "src/locale/messages.fi_FI.xlf",
  "fr-FR": "src/locale/messages.fr_FR.xlf",
  "hu-HU": "src/locale/messages.hu_HU.xlf",
  "id-ID": "src/locale/messages.id_ID.xlf",
  "it-IT": "src/locale/messages.it_IT.xlf",
  "ja-JP": "src/locale/messages.ja_JP.xlf",
  "lb-LU": "src/locale/messages.lb_LU.xlf",
@@ -155,16 +156,7 @@
  "builder": "@angular-builders/jest:run",
  "options": {
    "tsConfig": "tsconfig.spec.json",
    "assets": [
      "src/favicon.ico",
      "src/apple-touch-icon.png",
      "src/assets",
      "src/manifest.webmanifest"
    ],
    "styles": [
      "src/styles.scss"
    ],
    "scripts": []
    "zoneless": false
  }
},
"lint": {
@@ -1,5 +1,23 @@
const { createEsmPreset } = require('jest-preset-angular/presets')

const esmPreset = createEsmPreset({
  tsconfig: '<rootDir>/tsconfig.spec.json',
  stringifyContentPathRegex: '\\.(html|svg)$',
})

module.exports = {
  preset: 'jest-preset-angular',
  ...esmPreset,
  transform: {
    ...esmPreset.transform,
    '^.+\\.(ts|js|mjs|html|svg)$': [
      'jest-preset-angular',
      {
        tsconfig: '<rootDir>/tsconfig.spec.json',
        stringifyContentPathRegex: '\\.(html|svg)$',
        useESM: true,
      },
    ],
  },
  setupFilesAfterEnv: ['<rootDir>/setup-jest.ts'],
  testPathIgnorePatterns: [
    '/node_modules/',
@@ -8,9 +26,10 @@ module.exports = {
    'abstract-paperless-service',
  ],
  transformIgnorePatterns: [
    `<rootDir>/node_modules/.pnpm/(?!.*\\.mjs$|lodash-es|@angular\\+common.*locales)`,
    'node_modules/(?!.*(\\.mjs$|tslib|lodash-es|@angular/common/locales/.*\\.js$))',
  ],
  moduleNameMapper: {
    ...esmPreset.moduleNameMapper,
    '^src/(.*)': '<rootDir>/src/$1',
  },
  workerIdleMemoryLimit: '512MB',
1011 src-ui/messages.xlf (file diff suppressed because it is too large)
@@ -11,17 +11,17 @@
  },
  "private": true,
  "dependencies": {
    "@angular/cdk": "^20.2.13",
    "@angular/common": "~20.3.15",
    "@angular/compiler": "~20.3.15",
    "@angular/core": "~20.3.15",
    "@angular/forms": "~20.3.15",
    "@angular/localize": "~20.3.15",
    "@angular/platform-browser": "~20.3.15",
    "@angular/platform-browser-dynamic": "~20.3.15",
    "@angular/router": "~20.3.15",
    "@ng-bootstrap/ng-bootstrap": "^19.0.1",
    "@ng-select/ng-select": "^20.7.0",
    "@angular/cdk": "^21.0.6",
    "@angular/common": "~21.0.8",
    "@angular/compiler": "~21.0.8",
    "@angular/core": "~21.0.8",
    "@angular/forms": "~21.0.8",
    "@angular/localize": "~21.0.8",
    "@angular/platform-browser": "~21.0.8",
    "@angular/platform-browser-dynamic": "~21.0.8",
    "@angular/router": "~21.0.8",
    "@ng-bootstrap/ng-bootstrap": "^20.0.0",
    "@ng-select/ng-select": "^21.1.4",
    "@ngneat/dirty-check-forms": "^3.0.3",
    "@popperjs/core": "^2.11.8",
    "bootstrap": "^5.3.8",
@@ -30,8 +30,8 @@
    "ng2-pdf-viewer": "^10.4.0",
    "ngx-bootstrap-icons": "^1.9.3",
    "ngx-color": "^10.1.0",
    "ngx-cookie-service": "^20.1.1",
    "ngx-device-detector": "^10.1.0",
    "ngx-cookie-service": "^21.1.0",
    "ngx-device-detector": "^11.0.0",
    "ngx-ui-tour-ng-bootstrap": "^17.0.1",
    "rxjs": "^7.8.2",
    "tslib": "^2.8.1",
@@ -40,18 +40,18 @@
    "zone.js": "^0.15.1"
  },
  "devDependencies": {
    "@angular-builders/custom-webpack": "^20.0.0",
    "@angular-builders/jest": "^20.0.0",
    "@angular-devkit/core": "^20.3.13",
    "@angular-devkit/schematics": "^20.3.13",
    "@angular-eslint/builder": "20.6.0",
    "@angular-eslint/eslint-plugin": "20.6.0",
    "@angular-eslint/eslint-plugin-template": "20.6.0",
    "@angular-eslint/schematics": "20.6.0",
    "@angular-eslint/template-parser": "20.6.0",
    "@angular/build": "^20.3.13",
    "@angular/cli": "~20.3.13",
    "@angular/compiler-cli": "~20.3.15",
    "@angular-builders/custom-webpack": "^21.0.0-beta.1",
    "@angular-builders/jest": "^21.0.0-beta.1",
    "@angular-devkit/core": "^21.0.5",
    "@angular-devkit/schematics": "^21.0.5",
    "@angular-eslint/builder": "21.1.0",
    "@angular-eslint/eslint-plugin": "21.1.0",
    "@angular-eslint/eslint-plugin-template": "21.1.0",
    "@angular-eslint/schematics": "21.1.0",
    "@angular-eslint/template-parser": "21.1.0",
    "@angular/build": "^21.0.5",
    "@angular/cli": "~21.0.5",
    "@angular/compiler-cli": "~21.0.8",
    "@codecov/webpack-plugin": "^1.9.1",
    "@playwright/test": "^1.57.0",
    "@types/jest": "^30.0.0",
@@ -63,11 +63,11 @@
    "jest": "30.2.0",
    "jest-environment-jsdom": "^30.2.0",
    "jest-junit": "^16.0.0",
    "jest-preset-angular": "^15.0.3",
    "jest-preset-angular": "^16.0.0",
    "jest-websocket-mock": "^2.5.0",
    "prettier-plugin-organize-imports": "^4.3.0",
    "ts-node": "~10.9.1",
    "typescript": "^5.8.3",
    "typescript": "^5.9.3",
    "webpack": "^5.103.0"
  },
  "packageManager": "pnpm@10.17.1",
4760 src-ui/pnpm-lock.yaml generated (file diff suppressed because it is too large)
@@ -28,6 +28,7 @@ import localeFa from '@angular/common/locales/fa'
import localeFi from '@angular/common/locales/fi'
import localeFr from '@angular/common/locales/fr'
import localeHu from '@angular/common/locales/hu'
import localeId from '@angular/common/locales/id'
import localeIt from '@angular/common/locales/it'
import localeJa from '@angular/common/locales/ja'
import localeKo from '@angular/common/locales/ko'
@@ -63,6 +64,7 @@ registerLocaleData(localeFa)
registerLocaleData(localeFi)
registerLocaleData(localeFr)
registerLocaleData(localeHu)
registerLocaleData(localeId)
registerLocaleData(localeIt)
registerLocaleData(localeJa)
registerLocaleData(localeKo)
@@ -35,8 +35,12 @@
            @case (ConfigOptionType.String) { <pngx-input-text [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-text> }
            @case (ConfigOptionType.JSON) { <pngx-input-text [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-text> }
            @case (ConfigOptionType.File) { <pngx-input-file [formControlName]="option.key" (upload)="uploadFile($event, option.key)" [error]="errors[option.key]"></pngx-input-file> }
            @case (ConfigOptionType.Password) { <pngx-input-password [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-password> }
          }
        </div>
        @if (option.note) {
          <div class="form-text fst-italic">{{option.note}}</div>
        }
      </div>
    </div>
  </div>
@@ -29,6 +29,7 @@ import { SettingsService } from 'src/app/services/settings.service'
import { ToastService } from 'src/app/services/toast.service'
import { FileComponent } from '../../common/input/file/file.component'
import { NumberComponent } from '../../common/input/number/number.component'
import { PasswordComponent } from '../../common/input/password/password.component'
import { SelectComponent } from '../../common/input/select/select.component'
import { SwitchComponent } from '../../common/input/switch/switch.component'
import { TextComponent } from '../../common/input/text/text.component'
@@ -46,6 +47,7 @@ import { LoadingComponentWithPermissions } from '../../loading-component/loading
    TextComponent,
    NumberComponent,
    FileComponent,
    PasswordComponent,
    AsyncPipe,
    NgbNavModule,
    FormsModule,
@@ -91,6 +91,9 @@ const status: SystemStatus = {
    sanity_check_status: SystemStatusItemStatus.ERROR,
    sanity_check_last_run: new Date().toISOString(),
    sanity_check_error: 'Error running sanity check.',
    llmindex_status: SystemStatusItemStatus.DISABLED,
    llmindex_last_modified: new Date().toISOString(),
    llmindex_error: null,
  },
}
@@ -30,6 +30,9 @@
      </div>
    </div>
    <ul ngbNav class="order-sm-3">
      @if (aiEnabled) {
        <pngx-chat></pngx-chat>
      }
      <pngx-toasts-dropdown></pngx-toasts-dropdown>
      <li ngbDropdown class="nav-item dropdown">
        <button class="btn ps-1 border-0" id="userDropdown" ngbDropdownToggle>
@@ -44,6 +44,7 @@ import { SettingsService } from 'src/app/services/settings.service'
import { TasksService } from 'src/app/services/tasks.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ChatComponent } from '../chat/chat/chat.component'
import { ProfileEditDialogComponent } from '../common/profile-edit-dialog/profile-edit-dialog.component'
import { DocumentDetailComponent } from '../document-detail/document-detail.component'
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
@@ -59,6 +60,7 @@ import { ToastsDropdownComponent } from './toasts-dropdown/toasts-dropdown.compo
    DocumentTitlePipe,
    IfPermissionsDirective,
    ToastsDropdownComponent,
    ChatComponent,
    RouterModule,
    NgClass,
    NgbDropdownModule,
@@ -184,6 +186,10 @@ export class AppFrameComponent
    })
  }

  get aiEnabled(): boolean {
    return this.settingsService.get(SETTINGS_KEYS.AI_ENABLED)
  }

  closeMenu() {
    this.isMenuCollapsed = true
  }
@@ -1,5 +1,5 @@

<li ngbDropdown class="nav-item" (openChange)="onOpenChange($event)">
<li ngbDropdown class="nav-item mx-1" (openChange)="onOpenChange($event)">
  @if (toasts.length) {
    <span class="badge rounded-pill z-3 pe-none bg-secondary me-2 position-absolute top-0 left-0">{{ toasts.length }}</span>
  }
35 src-ui/src/app/components/chat/chat/chat.component.html Normal file
@@ -0,0 +1,35 @@

<li ngbDropdown class="nav-item me-n2" (openChange)="onOpenChange($event)">
  <button class="btn border-0" id="chatDropdown" ngbDropdownToggle>
    <i-bs width="1.3em" height="1.3em" name="chatSquareDots"></i-bs>
  </button>
  <div ngbDropdownMenu class="dropdown-menu-end shadow p-3" aria-labelledby="chatDropdown">
    <div class="chat-container bg-light p-2">
      <div class="chat-messages font-monospace small">
        @for (message of messages; track message) {
          <div class="message d-flex flex-row small" [class.justify-content-end]="message.role === 'user'">
            <span class="p-2 m-2" [class.bg-dark]="message.role === 'user'">
              {{ message.content }}
              @if (message.isStreaming) { <span class="blinking-cursor">|</span> }
            </span>
          </div>
        }
        <div #scrollAnchor></div>
      </div>

      <form class="chat-input">
        <div class="input-group">
          <input
            #chatInput
            class="form-control form-control-sm" name="chatInput" type="text"
            [placeholder]="placeholder"
            [disabled]="loading"
            [(ngModel)]="input"
            (keydown)="searchInputKeyDown($event)"
          />
          <button class="btn btn-sm btn-secondary" type="button" (click)="sendMessage()" [disabled]="loading">Send</button>
        </div>
      </form>
    </div>
  </div>
</li>
37 src-ui/src/app/components/chat/chat/chat.component.scss Normal file
@@ -0,0 +1,37 @@
.dropdown-menu {
  width: var(--pngx-toast-max-width);
}

.chat-messages {
  max-height: 350px;
  overflow-y: auto;
}

.dropdown-toggle::after {
  display: none;
}

.dropdown-item {
  white-space: initial;
}

@media screen and (max-width: 400px) {
  :host ::ng-deep .dropdown-menu-end {
    right: -3rem;
  }
}

.blinking-cursor {
  font-weight: bold;
  font-size: 1.2em;
  animation: blink 1s step-end infinite;
}

@keyframes blink {
  from, to {
    opacity: 0;
  }
  50% {
    opacity: 1;
  }
}
132 src-ui/src/app/components/chat/chat/chat.component.spec.ts Normal file
@@ -0,0 +1,132 @@
import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
import { provideHttpClientTesting } from '@angular/common/http/testing'
import { ElementRef } from '@angular/core'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { NavigationEnd, Router } from '@angular/router'
import { allIcons, NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Subject } from 'rxjs'
import { ChatService } from 'src/app/services/chat.service'
import { ChatComponent } from './chat.component'

describe('ChatComponent', () => {
  let component: ChatComponent
  let fixture: ComponentFixture<ChatComponent>
  let chatService: ChatService
  let router: Router
  let routerEvents$: Subject<NavigationEnd>
  let mockStream$: Subject<string>

  beforeEach(async () => {
    TestBed.configureTestingModule({
      imports: [NgxBootstrapIconsModule.pick(allIcons), ChatComponent],
      providers: [
        provideHttpClient(withInterceptorsFromDi()),
        provideHttpClientTesting(),
      ],
    }).compileComponents()

    fixture = TestBed.createComponent(ChatComponent)
    router = TestBed.inject(Router)
    routerEvents$ = new Subject<any>()
    jest
      .spyOn(router, 'events', 'get')
      .mockReturnValue(routerEvents$.asObservable())
    chatService = TestBed.inject(ChatService)
    mockStream$ = new Subject<string>()
    jest
      .spyOn(chatService, 'streamChat')
      .mockReturnValue(mockStream$.asObservable())
    component = fixture.componentInstance

    jest.useFakeTimers()

    fixture.detectChanges()

    component.scrollAnchor.nativeElement.scrollIntoView = jest.fn()
  })

  it('should update documentId on initialization', () => {
    jest.spyOn(router, 'url', 'get').mockReturnValue('/documents/123')
    component.ngOnInit()
    expect(component.documentId).toBe(123)
  })

  it('should update documentId on navigation', () => {
    component.ngOnInit()
    routerEvents$.next(new NavigationEnd(1, '/documents/456', '/documents/456'))
    expect(component.documentId).toBe(456)
  })

  it('should return correct placeholder based on documentId', () => {
    component.documentId = 123
    expect(component.placeholder).toBe('Ask a question about this document...')
    component.documentId = undefined
    expect(component.placeholder).toBe('Ask a question about a document...')
  })

  it('should send a message and handle streaming response', () => {
    component.input = 'Hello'
    component.sendMessage()

    expect(component.messages.length).toBe(2)
    expect(component.messages[0].content).toBe('Hello')
    expect(component.loading).toBe(true)

    mockStream$.next('Hi')
    expect(component.messages[1].content).toBe('H')
    mockStream$.next('Hi there')
    // advance time to process the typewriter effect
    jest.advanceTimersByTime(1000)
    expect(component.messages[1].content).toBe('Hi there')

    mockStream$.complete()
    expect(component.loading).toBe(false)
    expect(component.messages[1].isStreaming).toBe(false)
  })

  it('should handle errors during streaming', () => {
    component.input = 'Hello'
    component.sendMessage()

    mockStream$.error('Error')
    expect(component.messages[1].content).toContain(
      '⚠️ Error receiving response.'
    )
    expect(component.loading).toBe(false)
  })

  it('should enqueue typewriter chunks correctly', () => {
    const message = { content: '', role: 'assistant', isStreaming: true }
    component.enqueueTypewriter(null, message as any) // coverage for null
    component.enqueueTypewriter('Hello', message as any)
    expect(component['typewriterBuffer'].length).toBe(4)
  })

  it('should scroll to bottom after sending a message', () => {
    const scrollSpy = jest.spyOn(
      ChatComponent.prototype as any,
      'scrollToBottom'
    )
    component.input = 'Test'
    component.sendMessage()
    expect(scrollSpy).toHaveBeenCalled()
  })

  it('should focus chat input when dropdown is opened', () => {
    const focus = jest.fn()
    component.chatInput = {
      nativeElement: { focus: focus },
    } as unknown as ElementRef<HTMLInputElement>

    component.onOpenChange(true)
    jest.advanceTimersByTime(15)
    expect(focus).toHaveBeenCalled()
  })

  it('should send message on Enter key press', () => {
    jest.spyOn(component, 'sendMessage')
    const event = new KeyboardEvent('keydown', { key: 'Enter' })
    component.searchInputKeyDown(event)
    expect(component.sendMessage).toHaveBeenCalled()
  })
})
140 src-ui/src/app/components/chat/chat/chat.component.ts Normal file
@@ -0,0 +1,140 @@
import { Component, ElementRef, inject, OnInit, ViewChild } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { NavigationEnd, Router } from '@angular/router'
import { NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { filter, map } from 'rxjs'
import { ChatMessage, ChatService } from 'src/app/services/chat.service'

@Component({
  selector: 'pngx-chat',
  imports: [
    FormsModule,
    ReactiveFormsModule,
    NgxBootstrapIconsModule,
    NgbDropdownModule,
  ],
  templateUrl: './chat.component.html',
  styleUrl: './chat.component.scss',
})
export class ChatComponent implements OnInit {
  public messages: ChatMessage[] = []
  public loading = false
  public input: string = ''
  public documentId!: number

  private chatService: ChatService = inject(ChatService)
  private router: Router = inject(Router)

  @ViewChild('scrollAnchor') scrollAnchor!: ElementRef<HTMLDivElement>
  @ViewChild('chatInput') chatInput!: ElementRef<HTMLInputElement>

  private typewriterBuffer: string[] = []
  private typewriterActive = false

  public get placeholder(): string {
    return this.documentId
      ? $localize`Ask a question about this document...`
      : $localize`Ask a question about a document...`
  }

  ngOnInit(): void {
    this.updateDocumentId(this.router.url)
    this.router.events
      .pipe(
        filter((event) => event instanceof NavigationEnd),
        map((event) => (event as NavigationEnd).url)
      )
      .subscribe((url) => {
        this.updateDocumentId(url)
      })
  }

  private updateDocumentId(url: string): void {
    const docIdRe = url.match(/^\/documents\/(\d+)/)
    this.documentId = docIdRe ? +docIdRe[1] : undefined
  }

  sendMessage(): void {
    if (!this.input.trim()) return

    const userMessage: ChatMessage = { role: 'user', content: this.input }
    this.messages.push(userMessage)
    this.scrollToBottom()

    const assistantMessage: ChatMessage = {
      role: 'assistant',
      content: '',
      isStreaming: true,
    }
    this.messages.push(assistantMessage)
    this.loading = true

    let lastPartialLength = 0

    this.chatService.streamChat(this.documentId, this.input).subscribe({
      next: (chunk) => {
        const delta = chunk.substring(lastPartialLength)
        lastPartialLength = chunk.length
        this.enqueueTypewriter(delta, assistantMessage)
      },
      error: () => {
        assistantMessage.content += '\n\n⚠️ Error receiving response.'
        assistantMessage.isStreaming = false
        this.loading = false
      },
      complete: () => {
        assistantMessage.isStreaming = false
        this.loading = false
        this.scrollToBottom()
      },
    })

    this.input = ''
  }

  enqueueTypewriter(chunk: string, message: ChatMessage): void {
    if (!chunk) return

    this.typewriterBuffer.push(...chunk.split(''))

    if (!this.typewriterActive) {
      this.typewriterActive = true
      this.playTypewriter(message)
    }
  }

  playTypewriter(message: ChatMessage): void {
    if (this.typewriterBuffer.length === 0) {
      this.typewriterActive = false
      return
    }

    const nextChar = this.typewriterBuffer.shift()
    message.content += nextChar
    this.scrollToBottom()

    setTimeout(() => this.playTypewriter(message), 10) // 10ms per character
  }

  private scrollToBottom(): void {
    setTimeout(() => {
      this.scrollAnchor?.nativeElement?.scrollIntoView({ behavior: 'smooth' })
    }, 50)
  }

  public onOpenChange(open: boolean): void {
    if (open) {
      setTimeout(() => {
        this.chatInput.nativeElement.focus()
      }, 10)
    }
  }

  public searchInputKeyDown(event: KeyboardEvent) {
    if (event.key === 'Enter') {
      event.preventDefault()
      this.sendMessage()
    }
  }
}
@@ -0,0 +1,75 @@
<div class="modal-header">
  <h4 class="modal-title" id="modal-basic-title">{{title}}</h4>
  <button type="button" class="btn-close" aria-label="Close" (click)="cancel()">
  </button>
</div>
<div class="modal-body">
  @if (message) {
    <p class="mb-3" [innerHTML]="message"></p>
  }
  <div class="btn-group mb-3" role="group">
    <input
      type="radio"
      class="btn-check"
      name="passwordRemoveMode"
      id="removeReplace"
      [(ngModel)]="updateDocument"
      [value]="true"
      (ngModelChange)="onUpdateDocumentChange($event)"
    />
    <label class="btn btn-outline-primary btn-sm" for="removeReplace">
      <i-bs name="pencil"></i-bs>
      <span class="ms-2" i18n>Replace current document</span>
    </label>
    <input
      type="radio"
      class="btn-check"
      name="passwordRemoveMode"
      id="removeCreate"
      [(ngModel)]="updateDocument"
      [value]="false"
      (ngModelChange)="onUpdateDocumentChange($event)"
    />
    <label class="btn btn-outline-primary btn-sm" for="removeCreate">
      <i-bs name="plus"></i-bs>
      <span class="ms-2" i18n>Create new document</span>
    </label>
  </div>
  @if (!updateDocument) {
    <div class="d-flex flex-column flex-md-row w-100 gap-3 align-items-center">
      <div class="form-group d-flex">
        <div class="form-check">
          <input class="form-check-input" type="checkbox" id="copyMetaRemove" [(ngModel)]="includeMetadata" />
          <label class="form-check-label" for="copyMetaRemove" i18n> Copy metadata
          </label>
        </div>
        <div class="form-check ms-3">
          <input class="form-check-input" type="checkbox" id="deleteOriginalRemove" [(ngModel)]="deleteOriginal" />
          <label class="form-check-label" for="deleteOriginalRemove" i18n> Delete original</label>
        </div>
      </div>
    </div>
  }
</div>
<div class="modal-footer flex-nowrap gap-2">
  <button
    type="button"
    class="btn"
    [class]="cancelBtnClass"
    (click)="cancel()"
    [disabled]="!buttonsEnabled"
  >
    <span class="d-inline-block" style="padding-bottom: 1px;">
      {{cancelBtnCaption}}
    </span>
  </button>
  <button
    type="button"
    class="btn"
    [class]="btnClass"
    (click)="confirm()"
    [disabled]="!confirmButtonEnabled || !buttonsEnabled"
  >
    {{btnCaption}}
  </button>
</div>
@@ -0,0 +1,53 @@
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { By } from '@angular/platform-browser'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { PasswordRemovalConfirmDialogComponent } from './password-removal-confirm-dialog.component'

describe('PasswordRemovalConfirmDialogComponent', () => {
  let component: PasswordRemovalConfirmDialogComponent
  let fixture: ComponentFixture<PasswordRemovalConfirmDialogComponent>

  beforeEach(async () => {
    await TestBed.configureTestingModule({
      providers: [NgbActiveModal],
      imports: [
        NgxBootstrapIconsModule.pick(allIcons),
        PasswordRemovalConfirmDialogComponent,
      ],
    }).compileComponents()

    fixture = TestBed.createComponent(PasswordRemovalConfirmDialogComponent)
    component = fixture.componentInstance
    fixture.detectChanges()
  })

  it('should default to replacing the document', () => {
    expect(component.updateDocument).toBe(true)
    expect(
      fixture.debugElement.query(By.css('#removeReplace')).nativeElement.checked
    ).toBe(true)
  })

  it('should allow creating a new document with metadata and delete toggle', () => {
    component.onUpdateDocumentChange(false)
    fixture.detectChanges()

    expect(component.updateDocument).toBe(false)
    expect(fixture.debugElement.query(By.css('#copyMetaRemove'))).not.toBeNull()

    component.includeMetadata = false
    component.deleteOriginal = true
    component.onUpdateDocumentChange(true)
    expect(component.updateDocument).toBe(true)
    expect(component.includeMetadata).toBe(true)
    expect(component.deleteOriginal).toBe(false)
  })

  it('should emit confirm when confirmed', () => {
    let confirmed = false
    component.confirmClicked.subscribe(() => (confirmed = true))
    component.confirm()
    expect(confirmed).toBe(true)
  })
})
@@ -0,0 +1,38 @@
import { Component, Input } from '@angular/core'
import { FormsModule } from '@angular/forms'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { ConfirmDialogComponent } from '../confirm-dialog.component'

@Component({
  selector: 'pngx-password-removal-confirm-dialog',
  templateUrl: './password-removal-confirm-dialog.component.html',
  styleUrls: ['./password-removal-confirm-dialog.component.scss'],
  imports: [FormsModule, NgxBootstrapIconsModule],
})
export class PasswordRemovalConfirmDialogComponent extends ConfirmDialogComponent {
  updateDocument: boolean = true
  includeMetadata: boolean = true
  deleteOriginal: boolean = false

  @Input()
  override title = $localize`Remove password protection`

  @Input()
  override message =
    $localize`Create an unprotected copy or replace the existing file.`

  @Input()
  override btnCaption = $localize`Start`

  constructor() {
    super()
  }

  onUpdateDocumentChange(updateDocument: boolean) {
    this.updateDocument = updateDocument
    if (this.updateDocument) {
      this.deleteOriginal = false
      this.includeMetadata = true
    }
  }
}
@@ -1,7 +1,7 @@
<div ngbDropdown #fieldDropdown="ngbDropdown" (openChange)="onOpenClose($event)" [popperOptions]="popperOptions" placement="bottom-end">
<button class="btn btn-sm btn-outline-primary" id="customFieldsDropdown" [disabled]="disabled" ngbDropdownToggle>
<div ngbDropdown #fieldDropdown="ngbDropdown" (openChange)="onOpenClose($event)" [popperOptions]="popperOptions">
<button type="button" class="btn btn-sm btn-outline-primary" id="customFieldsDropdown" [disabled]="disabled" ngbDropdownToggle>
<i-bs name="ui-radios"></i-bs>
<div class="d-none d-sm-inline"> <ng-container i18n>Custom Fields</ng-container></div>
<div class="d-none d-lg-inline"> <ng-container i18n>Custom Fields</ng-container></div>
</button>
<div ngbDropdownMenu aria-labelledby="customFieldsDropdown" class="shadow custom-fields-dropdown">
<div class="list-group list-group-flush" (keydown)="listKeyDown($event)">
@@ -1,5 +1,11 @@
<div class="mb-3">
<label class="form-label" [for]="inputId">{{title}}</label>
<div class="mb-3" [class.pb-3]="error">
<div class="row">
<div class="d-flex align-items-center position-relative hidden-button-container" [class.col-md-3]="horizontal">
@if (title) {
<label class="form-label" [class.mb-md-0]="horizontal" [for]="inputId">{{title}}</label>
}
</div>
<div class="position-relative" [class.col-md-9]="horizontal">
<div class="input-group" [class.is-invalid]="error">
<input #inputField [type]="showReveal && textVisible ? 'text' : 'password'" class="form-control" [class.is-invalid]="error" [id]="inputId" [(ngModel)]="value" (focus)="onFocus()" (focusout)="onFocusOut()" (change)="onChange(value)" [disabled]="disabled" [autocomplete]="autocomplete">
@if (showReveal) {
@@ -14,4 +20,5 @@
@if (hint) {
<small class="form-text text-muted" [innerHTML]="hint"></small>
}
</div>
</div>
@@ -15,6 +15,12 @@
@if (hint) {
<small class="form-text text-muted" [innerHTML]="hint"></small>
}
@if (getSuggestion()?.length > 0) {
<small>
<span i18n>Suggestion:</span>
<a (click)="applySuggestion()" [routerLink]="[]">{{getSuggestion()}}</a>
</small>
}
<div class="invalid-feedback position-absolute top-100">
{{error}}
</div>
@@ -26,10 +26,20 @@ describe('TextComponent', () => {

it('should support use of input field', () => {
expect(component.value).toBeUndefined()
// TODO: why doesn't this work?
// input.value = 'foo'
// input.dispatchEvent(new Event('change'))
// fixture.detectChanges()
// expect(component.value).toEqual('foo')
input.value = 'foo'
input.dispatchEvent(new Event('input'))
fixture.detectChanges()
expect(component.value).toBe('foo')
})

it('should support suggestion', () => {
component.value = 'foo'
component.suggestion = 'foo'
expect(component.getSuggestion()).toBe('')
component.value = 'bar'
expect(component.getSuggestion()).toBe('foo')
component.applySuggestion()
fixture.detectChanges()
expect(component.value).toBe('foo')
})
})
@@ -4,6 +4,7 @@ import {
NG_VALUE_ACCESSOR,
ReactiveFormsModule,
} from '@angular/forms'
import { RouterLink } from '@angular/router'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { AbstractInputComponent } from '../abstract-input'

@@ -18,7 +19,12 @@ import { AbstractInputComponent } from '../abstract-input'
selector: 'pngx-input-text',
templateUrl: './text.component.html',
styleUrls: ['./text.component.scss'],
imports: [FormsModule, ReactiveFormsModule, NgxBootstrapIconsModule],
imports: [
FormsModule,
ReactiveFormsModule,
NgxBootstrapIconsModule,
RouterLink,
],
})
export class TextComponent extends AbstractInputComponent<string> {
@Input()
@@ -27,7 +33,19 @@ export class TextComponent extends AbstractInputComponent<string> {
@Input()
placeholder: string = ''

@Input()
suggestion: string = ''

constructor() {
super()
}

getSuggestion() {
return this.value !== this.suggestion ? this.suggestion : ''
}

applySuggestion() {
this.value = this.suggestion
this.onChange(this.value)
}
}
@@ -14,7 +14,7 @@
@if (previewText) {
<div class="bg-light p-3 overflow-auto whitespace-preserve" width="100%">{{previewText}}</div>
} @else {
<object [data]="previewURL | safeUrl" width="100%" class="bg-light" [class.p-2]="!isPdf"></object>
<object [data]="previewUrl | safeUrl" width="100%" class="bg-light" [class.p-2]="!isPdf"></object>
}
} @else {
@if (requiresPassword) {
@@ -24,7 +24,7 @@
}
@if (!requiresPassword) {
<pdf-viewer
[src]="previewURL"
[src]="previewUrl"
[original-size]="false"
[show-borders]="false"
[show-all]="true"

@@ -71,7 +71,7 @@ export class PreviewPopupComponent implements OnDestroy {
return (this.isPdf && this.useNativePdfViewer) || !this.isPdf
}

get previewURL() {
get previewUrl() {
return this.documentService.getPreviewUrl(this.document.id)
}

@@ -93,7 +93,7 @@ export class PreviewPopupComponent implements OnDestroy {
init() {
if (this.document.mime_type?.includes('text')) {
this.http
.get(this.previewURL, { responseType: 'text' })
.get(this.previewUrl, { responseType: 'text' })
.pipe(first(), takeUntil(this.unsubscribeNotifier))
.subscribe({
next: (res) => {
@@ -126,10 +126,6 @@
}
}

get previewUrl() {
return this.documentService.getPreviewUrl(this.document.id)
}

mouseEnterPreview() {
this.mouseOnPreview = true
if (!this.popover.isOpen()) {
@@ -0,0 +1,49 @@
<div class="btn-group">
  <button type="button" class="btn btn-sm btn-outline-primary" (click)="clickSuggest()" [disabled]="loading || (suggestions && !aiEnabled)">
    @if (loading) {
      <div class="spinner-border spinner-border-sm" role="status"></div>
    } @else {
      <i-bs width="1.2em" height="1.2em" name="stars"></i-bs>
    }
    <span class="d-none d-lg-inline ps-1" i18n>Suggest</span>
    @if (totalSuggestions > 0) {
      <span class="badge bg-primary ms-2">{{ totalSuggestions }}</span>
    }
  </button>

  @if (aiEnabled) {
    <div class="btn-group" ngbDropdown #dropdown="ngbDropdown" [popperOptions]="popperOptions">
      <button type="button" class="btn btn-sm btn-outline-primary" ngbDropdownToggle [disabled]="loading || !suggestions" aria-expanded="false" aria-controls="suggestionsDropdown" aria-label="Suggestions dropdown">
        <span class="visually-hidden" i18n>Show suggestions</span>
      </button>

      <div ngbDropdownMenu aria-labelledby="suggestionsDropdown" class="shadow suggestions-dropdown">
        <div class="list-group list-group-flush small pb-0">
          @if (!suggestions?.suggested_tags && !suggestions?.suggested_document_types && !suggestions?.suggested_correspondents) {
            <div class="list-group-item text-muted fst-italic">
              <small class="text-muted small fst-italic" i18n>No novel suggestions</small>
            </div>
          }
          @if (suggestions?.suggested_tags.length > 0) {
            <small class="list-group-item text-uppercase text-muted small">Tags</small>
            @for (tag of suggestions.suggested_tags; track tag) {
              <button type="button" class="list-group-item list-group-item-action bg-light" (click)="addTag.emit(tag)" i18n>{{ tag }}</button>
            }
          }
          @if (suggestions?.suggested_document_types.length > 0) {
            <div class="list-group-item text-uppercase text-muted small">Document Types</div>
            @for (type of suggestions.suggested_document_types; track type) {
              <button type="button" class="list-group-item list-group-item-action bg-light" (click)="addDocumentType.emit(type)" i18n>{{ type }}</button>
            }
          }
          @if (suggestions?.suggested_correspondents.length > 0) {
            <div class="list-group-item text-uppercase text-muted small">Correspondents</div>
            @for (correspondent of suggestions.suggested_correspondents; track correspondent) {
              <button type="button" class="list-group-item list-group-item-action bg-light" (click)="addCorrespondent.emit(correspondent)" i18n>{{ correspondent }}</button>
            }
          }
        </div>
      </div>
    </div>
  }
</div>
@@ -0,0 +1,3 @@
.suggestions-dropdown {
  min-width: 250px;
}
@@ -0,0 +1,51 @@
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { SuggestionsDropdownComponent } from './suggestions-dropdown.component'

describe('SuggestionsDropdownComponent', () => {
  let component: SuggestionsDropdownComponent
  let fixture: ComponentFixture<SuggestionsDropdownComponent>

  beforeEach(() => {
    TestBed.configureTestingModule({
      imports: [
        NgbDropdownModule,
        NgxBootstrapIconsModule.pick(allIcons),
        SuggestionsDropdownComponent,
      ],
      providers: [],
    })
    fixture = TestBed.createComponent(SuggestionsDropdownComponent)
    component = fixture.componentInstance
    fixture.detectChanges()
  })

  it('should calculate totalSuggestions', () => {
    component.suggestions = {
      suggested_correspondents: ['John Doe'],
      suggested_tags: ['Tag1', 'Tag2'],
      suggested_document_types: ['Type1'],
    }
    expect(component.totalSuggestions).toBe(4)
  })

  it('should emit getSuggestions when clickSuggest is called and suggestions are null', () => {
    jest.spyOn(component.getSuggestions, 'emit')
    component.suggestions = null
    component.clickSuggest()
    expect(component.getSuggestions.emit).toHaveBeenCalled()
  })

  it('should toggle dropdown when clickSuggest is called and suggestions are not null', () => {
    component.aiEnabled = true
    fixture.detectChanges()
    component.suggestions = {
      suggested_correspondents: [],
      suggested_tags: [],
      suggested_document_types: [],
    }
    component.clickSuggest()
    expect(component.dropdown.open).toBeTruthy()
  })
})
@@ -0,0 +1,64 @@
import {
  Component,
  EventEmitter,
  Input,
  Output,
  ViewChild,
} from '@angular/core'
import { NgbDropdown, NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { DocumentSuggestions } from 'src/app/data/document-suggestions'
import { pngxPopperOptions } from 'src/app/utils/popper-options'

@Component({
  selector: 'pngx-suggestions-dropdown',
  imports: [NgbDropdownModule, NgxBootstrapIconsModule],
  templateUrl: './suggestions-dropdown.component.html',
  styleUrl: './suggestions-dropdown.component.scss',
})
export class SuggestionsDropdownComponent {
  public popperOptions = pngxPopperOptions

  @ViewChild('dropdown') dropdown: NgbDropdown

  @Input()
  suggestions: DocumentSuggestions = null

  @Input()
  aiEnabled: boolean = false

  @Input()
  loading: boolean = false

  @Input()
  disabled: boolean = false

  @Output()
  getSuggestions: EventEmitter<SuggestionsDropdownComponent> =
    new EventEmitter()

  @Output()
  addTag: EventEmitter<string> = new EventEmitter()

  @Output()
  addDocumentType: EventEmitter<string> = new EventEmitter()

  @Output()
  addCorrespondent: EventEmitter<string> = new EventEmitter()

  public clickSuggest(): void {
    if (!this.suggestions) {
      this.getSuggestions.emit(this)
    } else {
      this.dropdown?.toggle()
    }
  }

  get totalSuggestions(): number {
    return (
      this.suggestions?.suggested_correspondents?.length +
      this.suggestions?.suggested_tags?.length +
      this.suggestions?.suggested_document_types?.length || 0
    )
  }
}
@@ -266,6 +266,43 @@
}
</span>
</dd>
@if (aiEnabled) {
<dt i18n>AI Index</dt>
<dd class="d-flex align-items-center">
<button class="btn btn-sm d-flex align-items-center btn-dark text-uppercase small" [ngbPopover]="llmIndexStatus" triggers="click mouseenter:mouseleave">
{{status.tasks.llmindex_status}}
@if (status.tasks.llmindex_status === 'OK') {
@if (isStale(status.tasks.llmindex_last_modified)) {
<i-bs name="exclamation-triangle-fill" class="text-warning ms-2 lh-1"></i-bs>
} @else {
<i-bs name="check-circle-fill" class="text-primary ms-2 lh-1"></i-bs>
}
} @else {
<i-bs name="exclamation-triangle-fill" class="ms-2 lh-1"
[class.text-danger]="status.tasks.llmindex_status === SystemStatusItemStatus.ERROR"
[class.text-warning]="status.tasks.llmindex_status === SystemStatusItemStatus.WARNING"
[class.text-muted]="status.tasks.llmindex_status === SystemStatusItemStatus.DISABLED"></i-bs>
}
</button>
@if (currentUserIsSuperUser) {
@if (isRunning(PaperlessTaskName.LLMIndexUpdate)) {
<div class="spinner-border spinner-border-sm ms-2" role="status"></div>
} @else {
<button class="btn btn-sm d-flex align-items-center btn-dark small ms-2" (click)="runTask(PaperlessTaskName.LLMIndexUpdate)">
<i-bs name="play-fill"></i-bs>
<ng-container i18n>Run Task</ng-container>
</button>
}
}
</dd>
<ng-template #llmIndexStatus>
@if (status.tasks.llmindex_status === 'OK') {
<h6><ng-container i18n>Last Run</ng-container>:</h6> <span class="font-monospace small">{{status.tasks.llmindex_last_modified | customDate:'medium'}}</span>
} @else {
<h6><ng-container i18n>Error</ng-container>:</h6> <span class="font-monospace small">{{status.tasks.llmindex_error}}</span>
}
</ng-template>
}
</dl>
</div>
</div>

@@ -68,6 +68,9 @@ const status: SystemStatus = {
sanity_check_status: SystemStatusItemStatus.OK,
sanity_check_last_run: new Date().toISOString(),
sanity_check_error: null,
llmindex_status: SystemStatusItemStatus.OK,
llmindex_last_modified: new Date().toISOString(),
llmindex_error: null,
},
}
@@ -13,9 +13,11 @@ import {
SystemStatus,
SystemStatusItemStatus,
} from 'src/app/data/system-status'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
import { PermissionsService } from 'src/app/services/permissions.service'
import { SettingsService } from 'src/app/services/settings.service'
import { SystemStatusService } from 'src/app/services/system-status.service'
import { TasksService } from 'src/app/services/tasks.service'
import { ToastService } from 'src/app/services/toast.service'
@@ -44,6 +46,7 @@ export class SystemStatusDialogComponent implements OnInit, OnDestroy {
private toastService = inject(ToastService)
private permissionsService = inject(PermissionsService)
private websocketStatusService = inject(WebsocketStatusService)
private settingsService = inject(SettingsService)

public SystemStatusItemStatus = SystemStatusItemStatus
public PaperlessTaskName = PaperlessTaskName
@@ -60,6 +63,10 @@ export class SystemStatusDialogComponent implements OnInit, OnDestroy {
return this.permissionsService.isSuperUser()
}

get aiEnabled(): boolean {
return this.settingsService.get(SETTINGS_KEYS.AI_ENABLED)
}

public ngOnInit() {
this.versionMismatch =
environment.production &&
@@ -65,19 +65,15 @@
<button ngbDropdownItem (click)="editPdf()" [disabled]="!userIsOwner || !userCanEdit || originalContentRenderType !== ContentRenderType.PDF">
<i-bs name="pencil"></i-bs> <ng-container i18n>PDF Editor</ng-container>
</button>

@if (userIsOwner && (requiresPassword || password)) {
<button ngbDropdownItem (click)="removePassword()" [disabled]="!password">
<i-bs name="unlock"></i-bs> <ng-container i18n>Remove Password</ng-container>
</button>
}
</div>
</div>

<pngx-custom-fields-dropdown
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.CustomField }"
[documentId]="documentId"
[disabled]="!userCanEdit"
[existingFields]="document?.custom_fields"
(created)="refreshCustomFields()"
(added)="addField($event)">
</pngx-custom-fields-dropdown>

<div class="ms-auto" ngbDropdown>
<button class="btn btn-sm btn-outline-primary" id="sendDropdown" ngbDropdownToggle>
<i-bs name="send"></i-bs>
@@ -98,7 +94,7 @@
</pngx-page-header>

<div class="row">
<div class="col-md-6 col-xl-4 mb-4">
<div class="col-md-6 col-xl-5 mb-4">

<form [formGroup]='documentForm' (ngSubmit)="save()">
@@ -115,6 +111,32 @@
</button>
</div>

<ng-container *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
<div class="btn-group pb-3 ms-auto">
<pngx-suggestions-dropdown *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }"
[disabled]="!userCanEdit || suggestionsLoading"
[loading]="suggestionsLoading"
[suggestions]="suggestions"
[aiEnabled]="aiEnabled"
(getSuggestions)="getSuggestions()"
(addTag)="createTag($event)"
(addDocumentType)="createDocumentType($event)"
(addCorrespondent)="createCorrespondent($event)">
</pngx-suggestions-dropdown>
</div>

<div class="btn-group pb-3 ms-2">
<pngx-custom-fields-dropdown
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.CustomField }"
[documentId]="documentId"
[disabled]="!userCanEdit"
[existingFields]="document?.custom_fields"
(created)="refreshCustomFields()"
(added)="addField($event)">
</pngx-custom-fields-dropdown>
</div>
</ng-container>

<ng-container *ngTemplateOutlet="saveButtons"></ng-container>
</div>
@@ -123,7 +145,7 @@
<a ngbNavLink i18n>Details</a>
<ng-template ngbNavContent>
<div>
<pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
<pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" [suggestion]="suggestions?.title" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
<pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
<pngx-input-date i18n-title title="Date created" formControlName="created" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
[error]="error?.created"></pngx-input-date>
@@ -133,7 +155,7 @@
(createNew)="createDocumentType($event)" [hideAddButton]="createDisabled(DataType.DocumentType)" [suggestions]="suggestions?.document_types" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.DocumentType }"></pngx-input-select>
<pngx-input-select [items]="storagePaths" i18n-title title="Storage path" formControlName="storage_path" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.StoragePath)"
(createNew)="createStoragePath($event)" [hideAddButton]="createDisabled(DataType.StoragePath)" [suggestions]="suggestions?.storage_paths" i18n-placeholder placeholder="Default" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.StoragePath }"></pngx-input-select>
<pngx-input-tags formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
<pngx-input-tags #tagsInput formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
@for (fieldInstance of document?.custom_fields; track fieldInstance.field; let i = $index) {
<div [formGroup]="customFieldFormFields.controls[i]">
@switch (getCustomFieldFromInstance(fieldInstance)?.data_type) {
@@ -355,14 +377,14 @@
</form>
</div>

<div class="col-md-6 col-xl-8 mb-3 d-none d-md-block position-relative" #pdfPreview>
<div class="col-md-6 col-xl-7 mb-3 d-none d-md-block position-relative" #pdfPreview>
<ng-container *ngTemplateOutlet="previewContent"></ng-container>
</div>

</div>

<ng-template #saveButtons>
<div class="btn-group pb-3 ms-auto">
<div class="btn-group pb-3 ms-4">
<ng-container *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
<button type="submit" class="order-3 btn btn-sm btn-primary" i18n [disabled]="!userCanEdit || networkActive || (isDirty$ | async) !== true">Save</button>
@if (hasNext()) {
@@ -379,7 +401,7 @@
<ng-template #previewContent>
<div class="thumb-preview position-absolute pe-none text-center" [class.fade]="previewLoaded">
@if (showThumbnailOverlay) {
<img [src]="thumbUrl | safeUrl" class="mx-auto" [attr.width]="previewZoomScale === 'page-fit' ? 'auto' : '100%'" [attr.height]="previewZoomScale === 'page-fit' ? '100%' : 'auto'" alt="Document loading..." i18n-alt />
<img [src]="thumbUrl" class="mx-auto" [attr.width]="previewZoomScale === 'page-fit' ? 'auto' : '100%'" [attr.height]="previewZoomScale === 'page-fit' ? '100%' : 'auto'" alt="Document loading..." i18n-alt />
}
<div class="position-absolute top-0 start-0 m-2 p-2 d-flex align-items-center justify-content-center">
<div>
@@ -414,7 +436,7 @@
}
@case (ContentRenderType.Image) {
<div class="preview-sticky">
<img [src]="previewUrl | safeUrl" width="100%" height="100%" alt="{{title}}" />
<img [src]="previewUrl" width="100%" height="100%" alt="{{title}}" />
</div>
}
@case (ContentRenderType.TIFF) {
@@ -66,6 +66,7 @@ import { SettingsService } from 'src/app/services/settings.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
import {
DocumentDetailComponent,
@@ -156,6 +157,16 @@ describe('DocumentDetailComponent', () => {
{
provide: TagService,
useValue: {
getCachedMany: (ids: number[]) =>
of(
ids.map((id) => ({
id,
name: `Tag${id}`,
is_inbox_tag: true,
color: '#ff0000',
text_color: '#000000',
}))
),
listAll: () =>
of({
count: 3,
@@ -382,8 +393,32 @@ describe('DocumentDetailComponent', () => {
currentUserCan = true
})

it('should support creating document type', () => {
it('should support creating tag, remove from suggestions', () => {
initNormally()
component.suggestions = {
suggested_tags: ['Tag1', 'NewTag12'],
}
let openModal: NgbModalRef
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
const modalSpy = jest.spyOn(modalService, 'open')
component.createTag('NewTag12')
expect(modalSpy).toHaveBeenCalled()
openModal.componentInstance.succeeded.next({
id: 12,
name: 'NewTag12',
is_inbox_tag: true,
color: '#ff0000',
text_color: '#000000',
})
expect(component.tagsInput.value).toContain(12)
expect(component.suggestions.suggested_tags).not.toContain('NewTag12')
})

it('should support creating document type, remove from suggestions', () => {
initNormally()
component.suggestions = {
suggested_document_types: ['DocumentType1', 'NewDocType2'],
}
let openModal: NgbModalRef
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
const modalSpy = jest.spyOn(modalService, 'open')
@@ -391,10 +426,16 @@ describe('DocumentDetailComponent', () => {
expect(modalSpy).toHaveBeenCalled()
openModal.componentInstance.succeeded.next({ id: 12, name: 'NewDocType12' })
expect(component.documentForm.get('document_type').value).toEqual(12)
expect(component.suggestions.suggested_document_types).not.toContain(
'NewDocType2'
)
})

it('should support creating correspondent', () => {
it('should support creating correspondent, remove from suggestions', () => {
initNormally()
component.suggestions = {
suggested_correspondents: ['Correspondent1', 'NewCorrrespondent12'],
}
let openModal: NgbModalRef
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
const modalSpy = jest.spyOn(modalService, 'open')
@@ -405,6 +446,9 @@ describe('DocumentDetailComponent', () => {
name: 'NewCorrrespondent12',
})
expect(component.documentForm.get('correspondent').value).toEqual(12)
expect(component.suggestions.suggested_correspondents).not.toContain(
'NewCorrrespondent12'
)
})

it('should support creating storage path', () => {
@@ -995,7 +1039,7 @@ describe('DocumentDetailComponent', () => {
expect(component.document.custom_fields).toHaveLength(initialLength - 1)
expect(component.customFieldFormFields).toHaveLength(initialLength - 1)
expect(
fixture.debugElement.query(By.css('form')).nativeElement.textContent
fixture.debugElement.query(By.css('form ul')).nativeElement.textContent
).not.toContain('Field 1')
const patchSpy = jest.spyOn(documentService, 'patch')
component.save(true)
@@ -1086,10 +1130,22 @@ describe('DocumentDetailComponent', () => {

it('should get suggestions', () => {
const suggestionsSpy = jest.spyOn(documentService, 'getSuggestions')
suggestionsSpy.mockReturnValue(of({ tags: [42, 43] }))
suggestionsSpy.mockReturnValue(
of({
tags: [42, 43],
suggested_tags: [],
suggested_document_types: [],
suggested_correspondents: [],
})
)
initNormally()
expect(suggestionsSpy).toHaveBeenCalled()
expect(component.suggestions).toEqual({ tags: [42, 43] })
expect(component.suggestions).toEqual({
tags: [42, 43],
suggested_tags: [],
suggested_document_types: [],
suggested_correspondents: [],
})
})

it('should show error if needed for get suggestions', () => {
@@ -1209,6 +1265,88 @@ describe('DocumentDetailComponent', () => {
expect(closeSpy).toHaveBeenCalled()
})

it('should support removing password protection from pdfs', () => {
let modal: NgbModalRef
modalService.activeInstances.subscribe((m) => (modal = m[0]))
initNormally()
component.password = 'secret'
component.removePassword()
const dialog =
modal.componentInstance as PasswordRemovalConfirmDialogComponent
dialog.updateDocument = false
dialog.includeMetadata = false
dialog.deleteOriginal = true
dialog.confirm()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/bulk_edit/`
)
expect(req.request.body).toEqual({
documents: [doc.id],
method: 'remove_password',
parameters: {
password: 'secret',
update_document: false,
include_metadata: false,
delete_original: true,
},
})
req.flush(true)
})

it('should require the current password before removing it', () => {
initNormally()
const errorSpy = jest.spyOn(toastService, 'showError')
component.requiresPassword = true
component.password = ''

component.removePassword()

expect(errorSpy).toHaveBeenCalled()
httpTestingController.expectNone(
`${environment.apiBaseUrl}documents/bulk_edit/`
)
})

it('should handle failures when removing password protection', () => {
let modal: NgbModalRef
modalService.activeInstances.subscribe((m) => (modal = m[0]))
initNormally()
const errorSpy = jest.spyOn(toastService, 'showError')
component.password = 'secret'

component.removePassword()
const dialog =
modal.componentInstance as PasswordRemovalConfirmDialogComponent
dialog.confirm()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.error(new ErrorEvent('failed'))

expect(errorSpy).toHaveBeenCalled()
expect(component.networkActive).toBe(false)
expect(dialog.buttonsEnabled).toBe(true)
})

it('should refresh the document when removing password in update mode', () => {
let modal: NgbModalRef
modalService.activeInstances.subscribe((m) => (modal = m[0]))
const refreshSpy = jest.spyOn(openDocumentsService, 'refreshDocument')
initNormally()
component.password = 'secret'

component.removePassword()
const dialog =
modal.componentInstance as PasswordRemovalConfirmDialogComponent
dialog.confirm()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)

expect(refreshSpy).toHaveBeenCalledWith(doc.id)
})

it('should support keyboard shortcuts', () => {
initNormally()
@@ -31,6 +31,7 @@ import {
map,
switchMap,
takeUntil,
tap,
} from 'rxjs/operators'
import { Correspondent } from 'src/app/data/correspondent'
import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'
@@ -76,6 +77,7 @@ import { DocumentTypeService } from 'src/app/services/rest/document-type.service
import { DocumentService } from 'src/app/services/rest/document.service'
import { SavedViewService } from 'src/app/services/rest/saved-view.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { UserService } from 'src/app/services/rest/user.service'
import { SettingsService } from 'src/app/services/settings.service'
import { ToastService } from 'src/app/services/toast.service'
@@ -83,11 +85,13 @@ import { getFilenameFromContentDisposition } from 'src/app/utils/http'
import { ISODateAdapter } from 'src/app/utils/ngb-iso-date-adapter'
import * as UTIF from 'utif'
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
import { CorrespondentEditDialogComponent } from '../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
import { DocumentTypeEditDialogComponent } from '../common/edit-dialog/document-type-edit-dialog/document-type-edit-dialog.component'
import { EditDialogMode } from '../common/edit-dialog/edit-dialog.component'
import { StoragePathEditDialogComponent } from '../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
import { TagEditDialogComponent } from '../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
import { EmailDocumentDialogComponent } from '../common/email-document-dialog/email-document-dialog.component'
import { CheckComponent } from '../common/input/check/check.component'
import { DateComponent } from '../common/input/date/date.component'
@@ -106,6 +110,7 @@ import {
PdfEditorEditMode,
} from '../common/pdf-editor/pdf-editor.component'
import { ShareLinksDialogComponent } from '../common/share-links-dialog/share-links-dialog.component'
import { SuggestionsDropdownComponent } from '../common/suggestions-dropdown/suggestions-dropdown.component'
import { DocumentHistoryComponent } from '../document-history/document-history.component'
import { DocumentNotesComponent } from '../document-notes/document-notes.component'
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
@@ -162,6 +167,7 @@ export enum ZoomSetting {
NumberComponent,
MonetaryComponent,
UrlComponent,
SuggestionsDropdownComponent,
CustomDatePipe,
FileSizePipe,
IfPermissionsDirective,
@@ -183,6 +189,7 @@ export class DocumentDetailComponent
{
private documentsService = inject(DocumentService)
private route = inject(ActivatedRoute)
private tagService = inject(TagService)
private correspondentService = inject(CorrespondentService)
private documentTypeService = inject(DocumentTypeService)
private router = inject(Router)
@@ -205,6 +212,8 @@ export class DocumentDetailComponent
@ViewChild('inputTitle')
titleInput: TextComponent

@ViewChild('tagsInput') tagsInput: TagsComponent

expandOriginalMetadata = false
expandArchivedMetadata = false

@@ -216,6 +225,7 @@ export class DocumentDetailComponent
document: Document
metadata: DocumentMetadata
suggestions: DocumentSuggestions
suggestionsLoading: boolean = false
users: User[]

title: string
@@ -297,6 +307,10 @@ export class DocumentDetailComponent
return this.deviceDetectorService.isMobile()
}

get aiEnabled(): boolean {
return this.settings.get(SETTINGS_KEYS.AI_ENABLED)
}

get archiveContentRenderType(): ContentRenderType {
return this.document?.archived_file_name
? this.getRenderType('application/pdf')
@@ -681,24 +695,11 @@ export class DocumentDetailComponent
PermissionType.Document
)
) {
this.documentsService
.getSuggestions(doc.id)
.pipe(
first(),
takeUntil(this.unsubscribeNotifier),
takeUntil(this.docChangeNotifier)
)
.subscribe({
next: (result) => {
this.suggestions = result
},
error: (error) => {
this.suggestions = null
this.toastService.showError(
$localize`Error retrieving suggestions.`,
error
)
},
this.tagService.getCachedMany(doc.tags).subscribe((tags) => {
// only show suggestions if document has inbox tags
if (tags.some((tag) => tag.is_inbox_tag)) {
this.getSuggestions()
}
})
}
this.title = this.documentTitlePipe.transform(doc.title)
@@ -709,6 +710,63 @@ export class DocumentDetailComponent
return this.documentForm.get('custom_fields') as FormArray
}

getSuggestions() {
this.suggestionsLoading = true
this.documentsService
.getSuggestions(this.documentId)
.pipe(
first(),
takeUntil(this.unsubscribeNotifier),
takeUntil(this.docChangeNotifier)
)
.subscribe({
next: (result) => {
this.suggestions = result
this.suggestionsLoading = false
},
error: (error) => {
this.suggestions = null
this.suggestionsLoading = false
this.toastService.showError(
$localize`Error retrieving suggestions.`,
error
)
},
})
}

createTag(newName: string) {
var modal = this.modalService.open(TagEditDialogComponent, {
backdrop: 'static',
})
modal.componentInstance.dialogMode = EditDialogMode.CREATE
if (newName) modal.componentInstance.object = { name: newName }
modal.componentInstance.succeeded
.pipe(
tap((newTag: Tag) => {
// remove from suggestions if present
if (this.suggestions) {
this.suggestions = {
...this.suggestions,
suggested_tags: this.suggestions.suggested_tags.filter(
(tag) => tag !== newTag.name
),
}
}
}),
switchMap((newTag: Tag) => {
return this.tagService
.listAll()
.pipe(map((tags) => ({ newTag, tags })))
}),
takeUntil(this.unsubscribeNotifier)
)
.subscribe(({ newTag, tags }) => {
this.tagsInput.tags = tags.results
this.tagsInput.addTag(newTag.id)
})
}

createDocumentType(newName: string) {
var modal = this.modalService.open(DocumentTypeEditDialogComponent, {
backdrop: 'static',
@@ -728,6 +786,12 @@ export class DocumentDetailComponent
this.documentTypes = documentTypes.results
this.documentForm.get('document_type').setValue(newDocumentType.id)
this.documentForm.get('document_type').markAsDirty()
if (this.suggestions) {
this.suggestions.suggested_document_types =
this.suggestions.suggested_document_types.filter(
(dt) => dt !== newName
)
}
})
}

@@ -752,6 +816,12 @@ export class DocumentDetailComponent
this.correspondents = correspondents.results
this.documentForm.get('correspondent').setValue(newCorrespondent.id)
this.documentForm.get('correspondent').markAsDirty()
if (this.suggestions) {
this.suggestions.suggested_correspondents =
this.suggestions.suggested_correspondents.filter(
(c) => c !== newName
)
}
})
}
@@ -1428,6 +1498,63 @@ export class DocumentDetailComponent
})
}

removePassword() {
if (this.requiresPassword || !this.password) {
this.toastService.showError(
$localize`Please enter the current password before attempting to remove it.`
)
return
}
const modal = this.modalService.open(
PasswordRemovalConfirmDialogComponent,
{
backdrop: 'static',
}
)
modal.componentInstance.title = $localize`Remove password protection`
modal.componentInstance.message = $localize`Create an unprotected copy or replace the existing file.`
modal.componentInstance.btnCaption = $localize`Start`

modal.componentInstance.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
const dialog =
modal.componentInstance as PasswordRemovalConfirmDialogComponent
dialog.buttonsEnabled = false
this.networkActive = true
this.documentsService
.bulkEdit([this.document.id], 'remove_password', {
password: this.password,
update_document: dialog.updateDocument,
include_metadata: dialog.includeMetadata,
delete_original: dialog.deleteOriginal,
})
.pipe(first(), takeUntil(this.unsubscribeNotifier))
.subscribe({
next: () => {
this.toastService.showInfo(
$localize`Password removal operation for "${this.document.title}" will begin in the background.`
)
this.networkActive = false
modal.close()
if (!dialog.updateDocument && dialog.deleteOriginal) {
this.openDocumentService.closeDocument(this.document)
} else if (dialog.updateDocument) {
this.openDocumentService.refreshDocument(this.documentId)
}
},
error: (error) => {
dialog.buttonsEnabled = true
this.networkActive = false
this.toastService.showError(
$localize`Error executing password removal operation`,
error
)
},
})
})
}

printDocument() {
const printUrl = this.documentsService.getDownloadUrl(
this.document.id,
@@ -150,12 +150,11 @@ export class FileDropComponent {
this.onDragLeave(event, true)
}

@HostListener('window:blur', ['$event']) public onWindowBlur() {
@HostListener('window:blur') public onWindowBlur() {
if (this.fileIsOver) this.onDragLeave(null)
}

@HostListener('document:visibilitychange', ['$event'])
public onVisibilityChange() {
@HostListener('document:visibilitychange') public onVisibilityChange() {
if (document.hidden && this.fileIsOver) this.onDragLeave(null)
}
}

@@ -1,6 +1,7 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { RouterModule } from '@angular/router'
import {
NgbDropdownModule,
NgbPaginationModule,
@@ -29,6 +30,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
TitleCasePipe,
FormsModule,
ReactiveFormsModule,
RouterModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,
@@ -42,7 +42,13 @@
<button (click)="editField(field)" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.CustomField }" ngbDropdownItem i18n>Edit</button>
<button class="text-danger" (click)="deleteField(field)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: PermissionType.CustomField }" ngbDropdownItem i18n>Delete</button>
@if (field.document_count > 0) {
<button (click)="filterDocuments(field)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ field.document_count }})</button>
<a
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
ngbDropdownItem
[routerLink]="getDocumentFilterUrl(field)"
i18n
>Filter Documents ({{ field.document_count }})</a
>
}
</div>
</div>
@@ -57,9 +63,13 @@
</div>
@if (field.document_count > 0) {
<div class="btn-group d-none d-sm-inline-block ms-2">
<button class="btn btn-sm btn-outline-secondary" type="button" (click)="filterDocuments(field)">
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
</button>
<a
class="btn btn-sm btn-outline-secondary"
[routerLink]="getDocumentFilterUrl(field)"
>
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container
><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
</a>
</div>
}
</div>
@@ -4,6 +4,7 @@ import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
import { provideHttpClientTesting } from '@angular/common/http/testing'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { By } from '@angular/platform-browser'
import { RouterTestingModule } from '@angular/router/testing'
import {
NgbModal,
NgbModalModule,
@@ -61,6 +62,7 @@ describe('CustomFieldsComponent', () => {
NgbModalModule,
NgbPopoverModule,
NgxBootstrapIconsModule.pick(allIcons),
RouterTestingModule,
CustomFieldsComponent,
IfPermissionsDirective,
PageHeaderComponent,
@@ -108,7 +110,9 @@ describe('CustomFieldsComponent', () => {
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
const reloadSpy = jest.spyOn(component, 'reload')

const createButton = fixture.debugElement.queryAll(By.css('button'))[1]
const createButton = fixture.debugElement
.queryAll(By.css('button'))
.find((btn) => btn.nativeElement.textContent.trim().includes('Add Field'))
createButton.triggerEventHandler('click')

expect(modal).not.toBeUndefined()
@@ -133,7 +137,11 @@ describe('CustomFieldsComponent', () => {
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
const reloadSpy = jest.spyOn(component, 'reload')

const editButton = fixture.debugElement.queryAll(By.css('button'))[2]
const editButton = fixture.debugElement
.queryAll(By.css('button'))
.find((btn) =>
btn.nativeElement.textContent.trim().includes(fields[0].name)
)
editButton.triggerEventHandler('click')

expect(modal).not.toBeUndefined()
@@ -158,7 +166,9 @@ describe('CustomFieldsComponent', () => {
const deleteSpy = jest.spyOn(customFieldsService, 'delete')
const reloadSpy = jest.spyOn(component, 'reload')

const deleteButton = fixture.debugElement.queryAll(By.css('button'))[5]
const deleteButton = fixture.debugElement
.queryAll(By.css('button'))
.find((btn) => btn.nativeElement.textContent.trim().includes('Delete'))
deleteButton.triggerEventHandler('click')

expect(modal).not.toBeUndefined()
@@ -176,10 +186,10 @@ describe('CustomFieldsComponent', () => {
expect(reloadSpy).toHaveBeenCalled()
})

it('should support filter documents', () => {
const filterSpy = jest.spyOn(listViewService, 'quickFilter')
component.filterDocuments(fields[0])
expect(filterSpy).toHaveBeenCalledWith([
it('should provide document filter url', () => {
const urlSpy = jest.spyOn(listViewService, 'getQuickFilterUrl')
component.getDocumentFilterUrl(fields[0])
expect(urlSpy).toHaveBeenCalledWith([
{
rule_type: FILTER_CUSTOM_FIELDS_QUERY,
value: JSON.stringify([
@@ -1,4 +1,5 @@
|
||||
import { Component, OnInit, inject } from '@angular/core'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbModal,
|
||||
@@ -36,6 +37,7 @@ import { LoadingComponentWithPermissions } from '../../loading-component/loading
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
NgxBootstrapIconsModule,
|
||||
RouterModule,
|
||||
],
|
||||
})
|
||||
export class CustomFieldsComponent
|
||||
@@ -130,8 +132,8 @@ export class CustomFieldsComponent
|
||||
return DATA_TYPE_LABELS.find((l) => l.id === field.data_type).name
|
||||
}
|
||||
|
||||
filterDocuments(field: CustomField) {
|
||||
this.documentListViewService.quickFilter([
|
||||
getDocumentFilterUrl(field: CustomField) {
|
||||
return this.documentListViewService.getQuickFilterUrl([
|
||||
{
|
||||
rule_type: FILTER_CUSTOM_FIELDS_QUERY,
|
||||
value: JSON.stringify([
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -27,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
IfPermissionsDirective,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -120,7 +120,14 @@
|
||||
<button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
|
||||
<button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
|
||||
@if (getDocumentCount(object) > 0) {
|
||||
<button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ getDocumentCount(object) }})</button>
|
||||
<a
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
|
||||
ngbDropdownItem
|
||||
[routerLink]="getDocumentFilterUrl(object)"
|
||||
(click)="$event?.stopPropagation()"
|
||||
i18n
|
||||
>Filter Documents ({{ getDocumentCount(object) }})</a
|
||||
>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
@@ -135,9 +142,15 @@
|
||||
</div>
|
||||
@if (getDocumentCount(object) > 0) {
|
||||
<div class="btn-group d-none d-sm-inline-block">
|
||||
<button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
|
||||
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
|
||||
</button>
|
||||
<a
|
||||
class="btn btn-sm btn-outline-secondary"
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
|
||||
[routerLink]="getDocumentFilterUrl(object)"
|
||||
(click)="$event?.stopPropagation()"
|
||||
>
|
||||
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container
|
||||
><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
|
||||
</a>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
} from '@angular/core/testing'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { By } from '@angular/platform-browser'
|
||||
import { RouterLinkWithHref } from '@angular/router'
|
||||
import { RouterTestingModule } from '@angular/router/testing'
|
||||
import {
|
||||
NgbModal,
|
||||
@@ -230,12 +231,15 @@ describe('ManagementListComponent', () => {
|
||||
})
|
||||
|
||||
it('should support quick filter for objects', () => {
|
||||
const qfSpy = jest.spyOn(documentListViewService, 'quickFilter')
|
||||
const filterButton = fixture.debugElement.queryAll(By.css('button'))[9]
|
||||
filterButton.triggerEventHandler('click')
|
||||
expect(qfSpy).toHaveBeenCalledWith([
|
||||
const expectedUrl = documentListViewService.getQuickFilterUrl([
|
||||
{ rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
|
||||
]) // subclasses set the filter rule type
|
||||
])
|
||||
const filterLink = fixture.debugElement.query(
|
||||
By.css('a.btn-outline-secondary')
|
||||
)
|
||||
expect(filterLink).toBeTruthy()
|
||||
const routerLink = filterLink.injector.get(RouterLinkWithHref)
|
||||
expect(routerLink.urlTree).toEqual(expectedUrl)
|
||||
})
|
||||
|
||||
it('should reload on sort', () => {
|
||||
|
||||
@@ -230,8 +230,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
|
||||
abstract getDeleteMessage(object: T)
|
||||
|
||||
filterDocuments(object: MatchingModel) {
|
||||
this.documentListViewService.quickFilter([
|
||||
getDocumentFilterUrl(object: MatchingModel) {
|
||||
return this.documentListViewService.getQuickFilterUrl([
|
||||
{ rule_type: this.filterRuleType, value: object.id.toString() },
|
||||
])
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -27,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
IfPermissionsDirective,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -27,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
IfPermissionsDirective,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
export interface DocumentSuggestions {
|
||||
title?: string
|
||||
|
||||
tags?: number[]
|
||||
suggested_tags?: string[]
|
||||
|
||||
correspondents?: number[]
|
||||
suggested_correspondents?: string[]
|
||||
|
||||
document_types?: number[]
|
||||
suggested_document_types?: string[]
|
||||
|
||||
storage_paths?: number[]
|
||||
suggested_storage_paths?: string[]
|
||||
|
||||
dates?: string[] // ISO-formatted date string e.g. 2022-11-03
|
||||
}
|
||||
|
||||
@@ -44,12 +44,24 @@ export enum ConfigOptionType {
|
||||
Boolean = 'boolean',
|
||||
JSON = 'json',
|
||||
File = 'file',
|
||||
Password = 'password',
|
||||
}
|
||||
|
||||
export const ConfigCategory = {
|
||||
General: $localize`General Settings`,
|
||||
OCR: $localize`OCR Settings`,
|
||||
Barcode: $localize`Barcode Settings`,
|
||||
AI: $localize`AI Settings`,
|
||||
}
|
||||
|
||||
export const LLMEmbeddingBackendConfig = {
|
||||
OPENAI: 'openai',
|
||||
HUGGINGFACE: 'huggingface',
|
||||
}
|
||||
|
||||
export const LLMBackendConfig = {
|
||||
OPENAI: 'openai',
|
||||
OLLAMA: 'ollama',
|
||||
}
|
||||
|
||||
export interface ConfigOption {
|
||||
@@ -59,6 +71,7 @@ export interface ConfigOption {
|
||||
choices?: Array<{ id: string; name: string }>
|
||||
config_key?: string
|
||||
category: string
|
||||
note?: string
|
||||
}
|
||||
|
||||
function mapToItems(enumObj: Object): Array<{ id: string; name: string }> {
|
||||
@@ -258,6 +271,58 @@ export const PaperlessConfigOptions: ConfigOption[] = [
|
||||
config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'ai_enabled',
|
||||
title: $localize`AI Enabled`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_AI_ENABLED',
|
||||
category: ConfigCategory.AI,
|
||||
note: $localize`Consider privacy implications when enabling AI features, especially if using a remote model.`,
|
||||
},
|
||||
{
|
||||
key: 'llm_embedding_backend',
|
||||
title: $localize`LLM Embedding Backend`,
|
||||
type: ConfigOptionType.Select,
|
||||
choices: mapToItems(LLMEmbeddingBackendConfig),
|
||||
config_key: 'PAPERLESS_AI_LLM_EMBEDDING_BACKEND',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_embedding_model',
|
||||
title: $localize`LLM Embedding Model`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_AI_LLM_EMBEDDING_MODEL',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_backend',
|
||||
title: $localize`LLM Backend`,
|
||||
type: ConfigOptionType.Select,
|
||||
choices: mapToItems(LLMBackendConfig),
|
||||
config_key: 'PAPERLESS_AI_LLM_BACKEND',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_model',
|
||||
title: $localize`LLM Model`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_AI_LLM_MODEL',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_api_key',
|
||||
title: $localize`LLM API Key`,
|
||||
type: ConfigOptionType.Password,
|
||||
config_key: 'PAPERLESS_AI_LLM_API_KEY',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_endpoint',
|
||||
title: $localize`LLM Endpoint`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_AI_LLM_ENDPOINT',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
]
|
||||
|
||||
export interface PaperlessConfig extends ObjectWithId {
|
||||
@@ -287,4 +352,11 @@ export interface PaperlessConfig extends ObjectWithId {
|
||||
barcode_max_pages: number
|
||||
barcode_enable_tag: boolean
|
||||
barcode_tag_mapping: object
|
||||
ai_enabled: boolean
|
||||
llm_embedding_backend: string
|
||||
llm_embedding_model: string
|
||||
llm_backend: string
|
||||
llm_model: string
|
||||
llm_api_key: string
|
||||
llm_endpoint: string
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ export enum PaperlessTaskName {
|
||||
TrainClassifier = 'train_classifier',
|
||||
SanityCheck = 'check_sanity',
|
||||
IndexOptimize = 'index_optimize',
|
||||
LLMIndexUpdate = 'llmindex_update',
|
||||
}
|
||||
|
||||
export enum PaperlessTaskStatus {
|
||||
|
||||
@@ -7,6 +7,7 @@ export enum SystemStatusItemStatus {
|
||||
OK = 'OK',
|
||||
ERROR = 'ERROR',
|
||||
WARNING = 'WARNING',
|
||||
DISABLED = 'DISABLED',
|
||||
}
|
||||
|
||||
export interface SystemStatus {
|
||||
@@ -43,6 +44,9 @@ export interface SystemStatus {
|
||||
sanity_check_status: SystemStatusItemStatus
|
||||
sanity_check_last_run: string // ISO date string
|
||||
sanity_check_error: string
|
||||
llmindex_status: SystemStatusItemStatus
|
||||
llmindex_last_modified: string // ISO date string
|
||||
llmindex_error: string
|
||||
}
|
||||
websocket_connected?: SystemStatusItemStatus // added client-side
|
||||
}
|
||||
|
||||
@@ -76,6 +76,7 @@ export const SETTINGS_KEYS = {
|
||||
GMAIL_OAUTH_URL: 'gmail_oauth_url',
|
||||
OUTLOOK_OAUTH_URL: 'outlook_oauth_url',
|
||||
EMAIL_ENABLED: 'email_enabled',
|
||||
AI_ENABLED: 'ai_enabled',
|
||||
}
|
||||
|
||||
export const SETTINGS: UiSetting[] = [
|
||||
@@ -289,4 +290,9 @@ export const SETTINGS: UiSetting[] = [
|
||||
type: 'string',
|
||||
default: 'page-width', // ZoomSetting from 'document-detail.component'
|
||||
},
|
||||
{
|
||||
key: SETTINGS_KEYS.AI_ENABLED,
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
]
|
||||
|
||||
@@ -4,15 +4,15 @@ import {
|
||||
HttpInterceptor,
|
||||
HttpRequest,
|
||||
} from '@angular/common/http'
|
||||
import { Injectable, inject } from '@angular/core'
|
||||
import { inject, Injectable } from '@angular/core'
|
||||
import { Meta } from '@angular/platform-browser'
|
||||
import { CookieService } from 'ngx-cookie-service'
|
||||
import { Observable } from 'rxjs'
|
||||
|
||||
@Injectable()
|
||||
export class CsrfInterceptor implements HttpInterceptor {
|
||||
private cookieService = inject(CookieService)
|
||||
private meta = inject(Meta)
|
||||
private cookieService: CookieService = inject(CookieService)
|
||||
private meta: Meta = inject(Meta)
|
||||
|
||||
intercept(
|
||||
request: HttpRequest<unknown>,
|
||||
|
||||
58
src-ui/src/app/services/chat.service.spec.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import {
|
||||
HttpEventType,
|
||||
provideHttpClient,
|
||||
withInterceptorsFromDi,
|
||||
} from '@angular/common/http'
|
||||
import {
|
||||
HttpTestingController,
|
||||
provideHttpClientTesting,
|
||||
} from '@angular/common/http/testing'
|
||||
import { TestBed } from '@angular/core/testing'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ChatService } from './chat.service'
|
||||
|
||||
describe('ChatService', () => {
|
||||
let service: ChatService
|
||||
let httpMock: HttpTestingController
|
||||
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
imports: [],
|
||||
providers: [
|
||||
ChatService,
|
||||
provideHttpClient(withInterceptorsFromDi()),
|
||||
provideHttpClientTesting(),
|
||||
],
|
||||
})
|
||||
service = TestBed.inject(ChatService)
|
||||
httpMock = TestBed.inject(HttpTestingController)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
httpMock.verify()
|
||||
})
|
||||
|
||||
it('should stream chat messages', (done) => {
|
||||
const documentId = 1
|
||||
const prompt = 'Hello, world!'
|
||||
const mockResponse = 'Partial response text'
|
||||
const apiUrl = `${environment.apiBaseUrl}documents/chat/`
|
||||
|
||||
service.streamChat(documentId, prompt).subscribe((chunk) => {
|
||||
expect(chunk).toBe(mockResponse)
|
||||
done()
|
||||
})
|
||||
|
||||
const req = httpMock.expectOne(apiUrl)
|
||||
expect(req.request.method).toBe('POST')
|
||||
expect(req.request.body).toEqual({
|
||||
document_id: documentId,
|
||||
q: prompt,
|
||||
})
|
||||
|
||||
req.event({
|
||||
type: HttpEventType.DownloadProgress,
|
||||
partialText: mockResponse,
|
||||
} as any)
|
||||
})
|
||||
})
|
||||
46
src-ui/src/app/services/chat.service.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import {
|
||||
HttpClient,
|
||||
HttpDownloadProgressEvent,
|
||||
HttpEventType,
|
||||
} from '@angular/common/http'
|
||||
import { inject, Injectable } from '@angular/core'
|
||||
import { filter, map, Observable } from 'rxjs'
|
||||
import { environment } from 'src/environments/environment'
|
||||
|
||||
export interface ChatMessage {
|
||||
role: 'user' | 'assistant'
|
||||
content: string
|
||||
isStreaming?: boolean
|
||||
}
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root',
|
||||
})
|
||||
export class ChatService {
|
||||
private http: HttpClient = inject(HttpClient)
|
||||
|
||||
streamChat(documentId: number, prompt: string): Observable<string> {
|
||||
return this.http
|
||||
.post(
|
||||
`${environment.apiBaseUrl}documents/chat/`,
|
||||
{
|
||||
document_id: documentId,
|
||||
q: prompt,
|
||||
},
|
||||
{
|
||||
observe: 'events',
|
||||
reportProgress: true,
|
||||
responseType: 'text',
|
||||
withCredentials: true,
|
||||
}
|
||||
)
|
||||
.pipe(
|
||||
map((event) => {
|
||||
if (event.type === HttpEventType.DownloadProgress) {
|
||||
return (event as HttpDownloadProgressEvent).partialText!
|
||||
}
|
||||
}),
|
||||
filter((chunk) => !!chunk)
|
||||
)
|
||||
}
|
||||
}
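For orientation, a hedged sketch of exercising the same streaming endpoint outside the Angular client, written in Python; the base URL and the token header are placeholders and are not part of this changeset:

# Illustration only: stream the chat response from the new documents/chat/ endpoint.
import requests

with requests.post(
    "http://localhost:8000/api/documents/chat/",  # assumed base URL
    json={"document_id": 1, "q": "Summarize this document"},
    headers={"Authorization": "Token <api-token>"},  # placeholder credentials
    stream=True,
) as resp:
    resp.raise_for_status()
    for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
        print(chunk, end="")  # partial text arrives incrementally, as in streamChat()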
|
||||
@@ -651,4 +651,25 @@ describe('DocumentListViewService', () => {
|
||||
documentListViewService.displayFields = customFields as any
|
||||
expect(documentListViewService.displayFields).toEqual(['custom_field_1'])
|
||||
})
|
||||
|
||||
it('should generate quick filter URL with filter rules', () => {
|
||||
const routerSpy = jest.spyOn(router, 'createUrlTree')
|
||||
const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
|
||||
expect(routerSpy).toHaveBeenCalledWith(['/documents'], {
|
||||
queryParams: expect.objectContaining({
|
||||
tags__id__all: tags__id__all,
|
||||
}),
|
||||
})
|
||||
expect(urlTree).toBeDefined()
|
||||
})
|
||||
|
||||
it('should generate quick filter URL preserving default state', () => {
|
||||
documentListViewService.reload()
|
||||
httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
|
||||
)
|
||||
const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
|
||||
expect(urlTree).toBeDefined()
|
||||
expect(router.createUrlTree).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Injectable, inject } from '@angular/core'
|
||||
import { ParamMap, Router } from '@angular/router'
|
||||
import { ParamMap, Router, UrlTree } from '@angular/router'
|
||||
import { Observable, Subject, first, takeUntil } from 'rxjs'
|
||||
import {
|
||||
DEFAULT_DISPLAY_FIELDS,
|
||||
@@ -483,6 +483,18 @@ export class DocumentListViewService {
|
||||
this.router.navigate(['documents'])
|
||||
}
|
||||
|
||||
getQuickFilterUrl(filterRules: FilterRule[]): UrlTree {
|
||||
const defaultState = {
|
||||
...this.defaultListViewState(),
|
||||
...this.listViewStates.get(null),
|
||||
filterRules,
|
||||
}
|
||||
const params = paramsFromViewState(defaultState)
|
||||
return this.router.createUrlTree(['/documents'], {
|
||||
queryParams: params,
|
||||
})
|
||||
}
|
||||
|
||||
getLastPage(): number {
|
||||
return Math.ceil(this.collectionSize / this.pageSize)
|
||||
}
|
||||
|
||||
@@ -136,6 +136,12 @@ const LANGUAGE_OPTIONS = [
|
||||
englishName: 'Hungarian',
|
||||
dateInputFormat: 'yyyy.mm.dd',
|
||||
},
|
||||
{
|
||||
code: 'id-id',
|
||||
name: $localize`Indonesian`,
|
||||
englishName: 'Indonesian',
|
||||
dateInputFormat: 'dd-mm-yyyy',
|
||||
},
|
||||
{
|
||||
code: 'it-it',
|
||||
name: $localize`Italian`,
|
||||
|
||||
@@ -2,6 +2,7 @@ import {
|
||||
APP_INITIALIZER,
|
||||
enableProdMode,
|
||||
importProvidersFrom,
|
||||
provideZoneChangeDetection,
|
||||
} from '@angular/core'
|
||||
|
||||
import { DragDropModule } from '@angular/cdk/drag-drop'
|
||||
@@ -9,6 +10,7 @@ import { DatePipe, registerLocaleData } from '@angular/common'
|
||||
import {
|
||||
HTTP_INTERCEPTORS,
|
||||
provideHttpClient,
|
||||
withFetch,
|
||||
withInterceptorsFromDi,
|
||||
} from '@angular/common/http'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
@@ -48,6 +50,7 @@ import {
|
||||
caretDown,
|
||||
caretUp,
|
||||
chatLeftText,
|
||||
chatSquareDots,
|
||||
check,
|
||||
check2All,
|
||||
checkAll,
|
||||
@@ -123,6 +126,7 @@ import {
|
||||
sliders2Vertical,
|
||||
sortAlphaDown,
|
||||
sortAlphaUpAlt,
|
||||
stars,
|
||||
tag,
|
||||
tagFill,
|
||||
tags,
|
||||
@@ -132,6 +136,7 @@ import {
|
||||
threeDotsVertical,
|
||||
trash,
|
||||
uiRadios,
|
||||
unlock,
|
||||
upcScan,
|
||||
windowStack,
|
||||
x,
|
||||
@@ -171,6 +176,7 @@ import localeFa from '@angular/common/locales/fa'
|
||||
import localeFi from '@angular/common/locales/fi'
|
||||
import localeFr from '@angular/common/locales/fr'
|
||||
import localeHu from '@angular/common/locales/hu'
|
||||
import localeId from '@angular/common/locales/id'
|
||||
import localeIt from '@angular/common/locales/it'
|
||||
import localeJa from '@angular/common/locales/ja'
|
||||
import localeKo from '@angular/common/locales/ko'
|
||||
@@ -209,6 +215,7 @@ registerLocaleData(localeFa)
|
||||
registerLocaleData(localeFi)
|
||||
registerLocaleData(localeFr)
|
||||
registerLocaleData(localeHu)
|
||||
registerLocaleData(localeId)
|
||||
registerLocaleData(localeIt)
|
||||
registerLocaleData(localeJa)
|
||||
registerLocaleData(localeKo)
|
||||
@@ -262,6 +269,7 @@ const icons = {
|
||||
caretDown,
|
||||
caretUp,
|
||||
chatLeftText,
|
||||
chatSquareDots,
|
||||
check,
|
||||
check2All,
|
||||
checkAll,
|
||||
@@ -337,6 +345,7 @@ const icons = {
|
||||
sliders2Vertical,
|
||||
sortAlphaDown,
|
||||
sortAlphaUpAlt,
|
||||
stars,
|
||||
tagFill,
|
||||
tag,
|
||||
tags,
|
||||
@@ -346,6 +355,7 @@ const icons = {
|
||||
threeDotsVertical,
|
||||
trash,
|
||||
uiRadios,
|
||||
unlock,
|
||||
upcScan,
|
||||
windowStack,
|
||||
x,
|
||||
@@ -359,6 +369,7 @@ if (environment.production) {
|
||||
|
||||
bootstrapApplication(AppComponent, {
|
||||
providers: [
|
||||
provideZoneChangeDetection(),
|
||||
importProvidersFrom(
|
||||
BrowserModule,
|
||||
AppRoutingModule,
|
||||
@@ -401,6 +412,6 @@ bootstrapApplication(AppComponent, {
|
||||
CorrespondentNamePipe,
|
||||
DocumentTypeNamePipe,
|
||||
StoragePathNamePipe,
|
||||
provideHttpClient(withInterceptorsFromDi()),
|
||||
provideHttpClient(withInterceptorsFromDi(), withFetch()),
|
||||
],
|
||||
}).catch((err) => console.error(err))
|
||||
|
||||
@@ -6,9 +6,11 @@
|
||||
"jest",
|
||||
"node",
|
||||
],
|
||||
"module": "commonjs",
|
||||
"emitDecoratorMetadata": true,
|
||||
"allowJs": true
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"emitDecoratorMetadata": false,
|
||||
"allowJs": true,
|
||||
"isolatedModules": true
|
||||
},
|
||||
"files": [
|
||||
"src/polyfills.ts"
|
||||
|
||||
@@ -11,6 +11,7 @@ class DocumentsConfig(AppConfig):
|
||||
from documents.signals import document_consumption_finished
|
||||
from documents.signals import document_updated
|
||||
from documents.signals.handlers import add_inbox_tags
|
||||
from documents.signals.handlers import add_or_update_document_in_llm_index
|
||||
from documents.signals.handlers import add_to_index
|
||||
from documents.signals.handlers import run_workflows_added
|
||||
from documents.signals.handlers import run_workflows_updated
|
||||
@@ -26,6 +27,7 @@ class DocumentsConfig(AppConfig):
|
||||
document_consumption_finished.connect(set_storage_path)
|
||||
document_consumption_finished.connect(add_to_index)
|
||||
document_consumption_finished.connect(run_workflows_added)
|
||||
document_consumption_finished.connect(add_or_update_document_in_llm_index)
|
||||
document_updated.connect(run_workflows_updated)
|
||||
|
||||
import documents.schema # noqa: F401
|
||||
|
||||
@@ -186,7 +186,11 @@ class BarcodePlugin(ConsumeTaskPlugin):
|
||||
|
||||
# Update/overwrite an ASN if possible
|
||||
# After splitting, as otherwise each split document gets the same ASN
|
||||
if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
|
||||
if (
|
||||
self.settings.barcode_enable_asn
|
||||
and not self.metadata.skip_asn
|
||||
and (located_asn := self.asn) is not None
|
||||
):
|
||||
logger.info(f"Found ASN in barcode: {located_asn}")
|
||||
self.metadata.asn = located_asn
|
||||
|
||||
|
||||
@@ -433,6 +433,8 @@ def merge(
|
||||
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
# Avoid copying or detecting ASN from merged PDFs to prevent collision
|
||||
overrides.skip_asn = True
|
||||
|
||||
logger.info("Adding merged document to the task queue.")
|
||||
|
||||
@@ -644,6 +646,77 @@ def edit_pdf(
|
||||
return "OK"
|
||||
|
||||
|
||||
def remove_password(
|
||||
doc_ids: list[int],
|
||||
password: str,
|
||||
*,
|
||||
update_document: bool = False,
|
||||
delete_original: bool = False,
|
||||
include_metadata: bool = True,
|
||||
user: User | None = None,
|
||||
) -> Literal["OK"]:
|
||||
"""
|
||||
Remove password protection from PDF documents.
|
||||
"""
|
||||
import pikepdf
|
||||
|
||||
for doc_id in doc_ids:
|
||||
doc = Document.objects.get(id=doc_id)
|
||||
try:
|
||||
logger.info(
|
||||
f"Attempting password removal from document {doc_ids[0]}",
|
||||
)
|
||||
with pikepdf.open(doc.source_path, password=password) as pdf:
|
||||
temp_path = doc.source_path.with_suffix(".tmp.pdf")
|
||||
pdf.remove_unreferenced_resources()
|
||||
pdf.save(temp_path)
|
||||
|
||||
if update_document:
|
||||
# replace the original document with the unprotected one
|
||||
temp_path.replace(doc.source_path)
|
||||
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
doc.page_count = len(pdf.pages)
|
||||
doc.save()
|
||||
update_document_content_maybe_archive_file.delay(document_id=doc.id)
|
||||
else:
|
||||
consume_tasks = []
|
||||
overrides = (
|
||||
DocumentMetadataOverrides().from_document(doc)
|
||||
if include_metadata
|
||||
else DocumentMetadataOverrides()
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{doc.id}_unprotected.pdf"
|
||||
)
|
||||
temp_path.replace(filepath)
|
||||
consume_tasks.append(
|
||||
consume_file.s(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=filepath,
|
||||
),
|
||||
overrides,
|
||||
),
|
||||
)
|
||||
|
||||
if delete_original:
|
||||
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
|
||||
else:
|
||||
group(consume_tasks).delay()
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Error removing password from document {doc.id}: {e}")
|
||||
raise ValueError(
|
||||
f"An error occurred while removing the password: {e}",
|
||||
) from e
|
||||
|
||||
return "OK"
|
||||
|
||||
|
||||
def reflect_doclinks(
|
||||
document: Document,
|
||||
field: CustomField,
|
||||
|
||||
@@ -41,6 +41,7 @@ class SuggestionCacheData:
|
||||
CLASSIFIER_VERSION_KEY: Final[str] = "classifier_version"
|
||||
CLASSIFIER_HASH_KEY: Final[str] = "classifier_hash"
|
||||
CLASSIFIER_MODIFIED_KEY: Final[str] = "classifier_modified"
|
||||
LLM_CACHE_CLASSIFIER_VERSION: Final[int] = 1000 # Marker distinguishing LLM suggestions
|
||||
|
||||
CACHE_1_MINUTE: Final[int] = 60
|
||||
CACHE_5_MINUTES: Final[int] = 5 * CACHE_1_MINUTE
|
||||
@@ -196,6 +197,54 @@ def refresh_suggestions_cache(
|
||||
cache.touch(doc_key, timeout)
|
||||
|
||||
|
||||
def get_llm_suggestion_cache(
|
||||
document_id: int,
|
||||
backend: str,
|
||||
) -> SuggestionCacheData | None:
|
||||
doc_key = get_suggestion_cache_key(document_id)
|
||||
data: SuggestionCacheData = cache.get(doc_key)
|
||||
|
||||
if data and data.classifier_hash == backend:
|
||||
return data
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def set_llm_suggestions_cache(
|
||||
document_id: int,
|
||||
suggestions: dict,
|
||||
*,
|
||||
backend: str,
|
||||
timeout: int = CACHE_50_MINUTES,
|
||||
) -> None:
|
||||
"""
|
||||
Cache LLM-generated suggestions using a backend-specific identifier (e.g. 'openai:gpt-4').
|
||||
"""
|
||||
doc_key = get_suggestion_cache_key(document_id)
|
||||
cache.set(
|
||||
doc_key,
|
||||
SuggestionCacheData(
|
||||
classifier_version=LLM_CACHE_CLASSIFIER_VERSION,
|
||||
classifier_hash=backend,
|
||||
suggestions=suggestions,
|
||||
),
|
||||
timeout,
|
||||
)
|
||||
|
||||
|
||||
def invalidate_llm_suggestions_cache(
|
||||
document_id: int,
|
||||
) -> None:
|
||||
"""
|
||||
Invalidate the LLM suggestions cache for a specific document.
|
||||
"""
|
||||
doc_key = get_suggestion_cache_key(document_id)
|
||||
data: SuggestionCacheData = cache.get(doc_key)
|
||||
|
||||
if data:
|
||||
cache.delete(doc_key)
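A minimal sketch of how these cache helpers are meant to be combined by a caller: look up suggestions for the configured backend, fall back to the LLM, then cache the result. The generate_suggestions call and the backend string are assumptions for illustration, not code from this branch:

# Illustration only.
def cached_llm_suggestions(document_id: int, backend: str = "openai:gpt-4o") -> dict:
    cached = get_llm_suggestion_cache(document_id, backend)
    if cached is not None:
        return cached.suggestions
    suggestions = generate_suggestions(document_id)  # assumed LLM call, defined elsewhere
    set_llm_suggestions_cache(document_id, suggestions, backend=backend)
    return suggestions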
|
||||
|
||||
|
||||
def get_metadata_cache_key(document_id: int) -> str:
|
||||
"""
|
||||
Returns the basic key for a document's metadata
|
||||
|
||||
@@ -696,7 +696,7 @@ class ConsumerPlugin(
|
||||
pk=self.metadata.storage_path_id,
|
||||
)
|
||||
|
||||
if self.metadata.asn is not None:
|
||||
if self.metadata.asn is not None and not self.metadata.skip_asn:
|
||||
document.archive_serial_number = self.metadata.asn
|
||||
|
||||
if self.metadata.owner_id:
|
||||
@@ -812,8 +812,8 @@ class ConsumerPreflightPlugin(
|
||||
"""
|
||||
Check that if override_asn is given, it is unique and within a valid range
|
||||
"""
|
||||
if self.metadata.asn is None:
|
||||
# check not necessary in case no ASN gets set
|
||||
if self.metadata.skip_asn or self.metadata.asn is None:
|
||||
# if skip is set or ASN is None
|
||||
return
|
||||
# Validate the range is above zero and less than uint32_t max
|
||||
# otherwise, Whoosh can't handle it in the index
|
||||
|
||||
@@ -30,6 +30,7 @@ class DocumentMetadataOverrides:
|
||||
change_users: list[int] | None = None
|
||||
change_groups: list[int] | None = None
|
||||
custom_fields: dict | None = None
|
||||
skip_asn: bool = False
|
||||
|
||||
def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
|
||||
"""
|
||||
@@ -49,6 +50,8 @@ class DocumentMetadataOverrides:
|
||||
self.storage_path_id = other.storage_path_id
|
||||
if other.owner_id is not None:
|
||||
self.owner_id = other.owner_id
|
||||
if other.skip_asn:
|
||||
self.skip_asn = True
|
||||
|
||||
# merge
|
||||
if self.tag_ids is None:
|
||||
|
||||
@@ -1,135 +1,343 @@
|
||||
"""
|
||||
Document consumer management command.
|
||||
|
||||
Watches a consumption directory for new documents and queues them for processing.
|
||||
Uses watchfiles for efficient file system monitoring with support for both
|
||||
native OS notifications and polling fallback.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from fnmatch import filter
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from pathlib import PurePath
|
||||
from threading import Event
|
||||
from time import monotonic
|
||||
from time import sleep
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Final
|
||||
|
||||
from django import db
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.base import CommandError
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
from watchdog.observers.polling import PollingObserver
|
||||
from watchfiles import Change
|
||||
from watchfiles import DefaultFilter
|
||||
from watchfiles import watch
|
||||
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
from documents.data_models import DocumentSource
|
||||
from documents.models import Tag
|
||||
from documents.parsers import is_file_ext_supported
|
||||
from documents.parsers import get_supported_file_extensions
|
||||
from documents.tasks import consume_file
|
||||
|
||||
try:
|
||||
from inotifyrecursive import INotify
|
||||
from inotifyrecursive import flags
|
||||
except ImportError: # pragma: no cover
|
||||
INotify = flags = None
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterator
|
||||
|
||||
|
||||
logger = logging.getLogger("paperless.management.consumer")
|
||||
|
||||
|
||||
def _tags_from_path(filepath: Path) -> list[int]:
|
||||
@dataclass
|
||||
class TrackedFile:
|
||||
"""Represents a file being tracked for stability."""
|
||||
|
||||
path: Path
|
||||
last_event_time: float
|
||||
last_mtime: float | None = None
|
||||
last_size: int | None = None
|
||||
|
||||
def update_stats(self) -> bool:
|
||||
"""
|
||||
Walk up the directory tree from filepath to CONSUMPTION_DIR
|
||||
Update file stats. Returns True if file exists and stats were updated.
|
||||
"""
|
||||
try:
|
||||
stat = self.path.stat()
|
||||
self.last_mtime = stat.st_mtime
|
||||
self.last_size = stat.st_size
|
||||
return True
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
def is_unchanged(self) -> bool:
|
||||
"""
|
||||
Check if file stats match the previously recorded values.
|
||||
Returns False if file doesn't exist or stats changed.
|
||||
"""
|
||||
try:
|
||||
stat = self.path.stat()
|
||||
return stat.st_mtime == self.last_mtime and stat.st_size == self.last_size
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
|
||||
class FileStabilityTracker:
|
||||
"""
|
||||
Tracks file events and determines when files are stable for consumption.
|
||||
|
||||
A file is considered stable when:
|
||||
1. No new events have been received for it within the stability delay
|
||||
2. Its size and modification time haven't changed
|
||||
3. It still exists as a regular file
|
||||
|
||||
This handles various edge cases:
|
||||
- Network copies that write in chunks
|
||||
- Scanners that open/close files multiple times
|
||||
- Temporary files that get renamed
|
||||
- Files that are deleted before becoming stable
|
||||
"""
|
||||
|
||||
def __init__(self, stability_delay: float = 1.0) -> None:
|
||||
"""
|
||||
Initialize the tracker.
|
||||
|
||||
Args:
|
||||
stability_delay: Time in seconds a file must remain unchanged
|
||||
before being considered stable.
|
||||
"""
|
||||
self.stability_delay = stability_delay
|
||||
self._tracked: dict[Path, TrackedFile] = {}
|
||||
|
||||
def track(self, path: Path, change: Change) -> None:
|
||||
"""
|
||||
Register a file event.
|
||||
|
||||
Args:
|
||||
path: The file path that changed.
|
||||
change: The type of change (added, modified, deleted).
|
||||
"""
|
||||
path = path.resolve()
|
||||
|
||||
match change:
|
||||
case Change.deleted:
|
||||
self._tracked.pop(path, None)
|
||||
logger.debug(f"Stopped tracking deleted file: {path}")
|
||||
case Change.added | Change.modified:
|
||||
current_time = monotonic()
|
||||
if path in self._tracked:
|
||||
tracked = self._tracked[path]
|
||||
tracked.last_event_time = current_time
|
||||
tracked.update_stats()
|
||||
logger.debug(f"Updated tracking for: {path}")
|
||||
else:
|
||||
tracked = TrackedFile(path=path, last_event_time=current_time)
|
||||
if tracked.update_stats():
|
||||
self._tracked[path] = tracked
|
||||
logger.debug(f"Started tracking: {path}")
|
||||
else:
|
||||
logger.debug(f"Could not stat file, not tracking: {path}")
|
||||
|
||||
def get_stable_files(self) -> Iterator[Path]:
|
||||
"""
|
||||
Yield files that have been stable for the configured delay.
|
||||
|
||||
Files are removed from tracking once yielded or determined to be invalid.
|
||||
"""
|
||||
current_time = monotonic()
|
||||
to_remove: list[Path] = []
|
||||
to_yield: list[Path] = []
|
||||
|
||||
for path, tracked in self._tracked.items():
|
||||
time_since_event = current_time - tracked.last_event_time
|
||||
|
||||
if time_since_event < self.stability_delay:
|
||||
continue
|
||||
|
||||
# File has waited long enough, verify it's unchanged
|
||||
if not tracked.is_unchanged():
|
||||
# Stats changed or file gone - update and wait again
|
||||
if tracked.update_stats():
|
||||
tracked.last_event_time = current_time
|
||||
logger.debug(f"File changed during stability check: {path}")
|
||||
else:
|
||||
# File no longer exists, remove from tracking
|
||||
to_remove.append(path)
|
||||
logger.debug(f"File disappeared during stability check: {path}")
|
||||
continue
|
||||
|
||||
# File is stable, we can return it
|
||||
to_yield.append(path)
|
||||
logger.info(f"File is stable: {path}")
|
||||
|
||||
# Remove files that are no longer valid
|
||||
for path in to_remove:
|
||||
self._tracked.pop(path, None)
|
||||
|
||||
# Remove and yield stable files
|
||||
for path in to_yield:
|
||||
self._tracked.pop(path, None)
|
||||
yield path
|
||||
|
||||
def has_pending_files(self) -> bool:
|
||||
"""Check if there are files waiting for stability check."""
|
||||
return len(self._tracked) > 0
|
||||
|
||||
@property
|
||||
def pending_count(self) -> int:
|
||||
"""Number of files being tracked."""
|
||||
return len(self._tracked)
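Because the class docstring above describes the stability contract only in prose, here is a minimal sketch of the loop a caller is expected to run against the tracker; the one-second delay and the print are placeholders, and Path/Change come from pathlib and watchfiles as imported at the top of this module:

# Illustration only: feed watchfiles events into the tracker, consume stable files.
tracker = FileStabilityTracker(stability_delay=1.0)

def handle_changes(changes):  # changes: iterable of (Change, str) pairs
    for change, raw_path in changes:
        tracker.track(Path(raw_path), change)
    # Only files unchanged for the full delay are yielded (and untracked) here.
    for stable_path in tracker.get_stable_files():
        print(f"ready to queue: {stable_path}")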
|
||||
|
||||
|
||||
class ConsumerFilter(DefaultFilter):
|
||||
"""
|
||||
Filter for watchfiles that accepts only supported document types
|
||||
and ignores system files/directories.
|
||||
|
||||
Extends DefaultFilter leveraging its built-in filtering:
|
||||
- `ignore_dirs`: Directory names to ignore (and all their contents)
|
||||
- `ignore_entity_patterns`: Regex patterns matched against filename/dirname only
|
||||
|
||||
We add custom logic for file extension filtering (only accept supported
|
||||
document types), which the library doesn't provide.
|
||||
"""
|
||||
|
||||
# Regex patterns for files to always ignore (matched against filename only)
|
||||
# These are passed to DefaultFilter.ignore_entity_patterns
|
||||
DEFAULT_IGNORE_PATTERNS: Final[tuple[str, ...]] = (
|
||||
r"^\.DS_Store$",
|
||||
r"^\.DS_STORE$",
|
||||
r"^\._.*",
|
||||
r"^desktop\.ini$",
|
||||
r"^Thumbs\.db$",
|
||||
)
|
||||
|
||||
# Directories to always ignore (passed to DefaultFilter.ignore_dirs)
|
||||
# These are matched by directory name, not full path
|
||||
DEFAULT_IGNORE_DIRS: Final[tuple[str, ...]] = (
|
||||
".stfolder", # Syncthing
|
||||
".stversions", # Syncthing
|
||||
".localized", # macOS
|
||||
"@eaDir", # Synology NAS
|
||||
".Spotlight-V100", # macOS
|
||||
".Trashes", # macOS
|
||||
"__MACOSX", # macOS archive artifacts
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
supported_extensions: frozenset[str] | None = None,
|
||||
ignore_patterns: list[str] | None = None,
|
||||
ignore_dirs: list[str] | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Initialize the consumer filter.
|
||||
|
||||
Args:
|
||||
supported_extensions: Set of file extensions to accept (e.g., {".pdf", ".png"}).
|
||||
If None, uses get_supported_file_extensions().
|
||||
ignore_patterns: Additional regex patterns to ignore (matched against filename).
|
||||
ignore_dirs: Additional directory names to ignore (merged with defaults).
|
||||
"""
|
||||
# Get supported extensions
|
||||
if supported_extensions is None:
|
||||
supported_extensions = frozenset(get_supported_file_extensions())
|
||||
self._supported_extensions = supported_extensions
|
||||
|
||||
# Combine default and user patterns
|
||||
all_patterns: list[str] = list(self.DEFAULT_IGNORE_PATTERNS)
|
||||
if ignore_patterns:
|
||||
all_patterns.extend(ignore_patterns)
|
||||
|
||||
# Combine default and user ignore_dirs
|
||||
all_ignore_dirs: list[str] = list(self.DEFAULT_IGNORE_DIRS)
|
||||
if ignore_dirs:
|
||||
all_ignore_dirs.extend(ignore_dirs)
|
||||
|
||||
# Let DefaultFilter handle all the pattern and directory filtering
|
||||
super().__init__(
|
||||
ignore_dirs=tuple(all_ignore_dirs),
|
||||
ignore_entity_patterns=tuple(all_patterns),
|
||||
ignore_paths=(),
|
||||
)
|
||||
|
||||
def __call__(self, change: Change, path: str) -> bool:
|
||||
"""
|
||||
Filter function for watchfiles.
|
||||
|
||||
Returns True if the path should be watched, False to ignore.
|
||||
|
||||
The parent DefaultFilter handles:
|
||||
- Hidden files/directories (starting with .)
|
||||
- Directories in ignore_dirs
|
||||
- Files/directories matching ignore_entity_patterns
|
||||
|
||||
We additionally filter files by extension.
|
||||
"""
|
||||
# Let parent filter handle directory ignoring and pattern matching
|
||||
if not super().__call__(change, path):
|
||||
return False
|
||||
|
||||
path_obj = Path(path)
|
||||
|
||||
# For directories, parent filter already handled everything
|
||||
if path_obj.is_dir():
|
||||
return True
|
||||
|
||||
# For files, check extension
|
||||
return self._has_supported_extension(path_obj)
|
||||
|
||||
def _has_supported_extension(self, path: Path) -> bool:
|
||||
"""Check if the file has a supported extension."""
|
||||
suffix = path.suffix.lower()
|
||||
return suffix in self._supported_extensions
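To make the extension filtering concrete, a hedged sketch of plugging the filter above into watchfiles.watch; the directory and the narrowed extension set are purely illustrative:

# Illustration only: watch a folder, accepting only PDFs and PNGs.
pdf_filter = ConsumerFilter(supported_extensions=frozenset({".pdf", ".png"}))
for changes in watch("/mnt/consume", watch_filter=pdf_filter, recursive=True):
    for change, path in changes:
        print(change.name, path)  # e.g. "added /mnt/consume/scan.pdf"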
|
||||
|
||||
|
||||
def _tags_from_path(filepath: Path, consumption_dir: Path) -> list[int]:
|
||||
"""
|
||||
Walk up the directory tree from filepath to consumption_dir
|
||||
and get or create Tag IDs for every directory.
|
||||
|
||||
Returns set of Tag models
|
||||
Returns list of Tag primary keys.
|
||||
"""
|
||||
db.close_old_connections()
|
||||
tag_ids = set()
|
||||
path_parts = filepath.relative_to(settings.CONSUMPTION_DIR).parent.parts
|
||||
tag_ids: set[int] = set()
|
||||
path_parts = filepath.relative_to(consumption_dir).parent.parts
|
||||
|
||||
for part in path_parts:
|
||||
tag_ids.add(
|
||||
Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})[0].pk,
|
||||
tag, _ = Tag.objects.get_or_create(
|
||||
name__iexact=part,
|
||||
defaults={"name": part},
|
||||
)
|
||||
tag_ids.add(tag.pk)
|
||||
|
||||
return list(tag_ids)
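As a concrete (hypothetical) example of the subdirectory-to-tag mapping implemented above:

# Illustration only; paths are made up.
consumption_dir = Path("/mnt/consume")
pks = _tags_from_path(Path("/mnt/consume/invoices/2024/scan.pdf"), consumption_dir)
# pks now holds the primary keys of the "invoices" and "2024" tags,
# which are created first if they do not already exist.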
|
||||
|
||||
|
||||
def _is_ignored(filepath: Path) -> bool:
|
||||
def _consume_file(
|
||||
filepath: Path,
|
||||
consumption_dir: Path,
|
||||
*,
|
||||
subdirs_as_tags: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Checks if the given file should be ignored, based on configured
|
||||
patterns.
|
||||
Queue a file for consumption.
|
||||
|
||||
Returns True if the file is ignored, False otherwise
|
||||
Args:
|
||||
filepath: Path to the file to consume.
|
||||
consumption_dir: Base consumption directory.
|
||||
subdirs_as_tags: Whether to create tags from subdirectory names.
|
||||
"""
|
||||
# Trim out the consume directory, leaving only filename and its
|
||||
# path relative to the consume directory
|
||||
filepath_relative = PurePath(filepath).relative_to(settings.CONSUMPTION_DIR)
|
||||
|
||||
# March through the components of the path, including directories and the filename
|
||||
# looking for anything matching
|
||||
# foo/bar/baz/file.pdf -> (foo, bar, baz, file.pdf)
|
||||
parts = []
|
||||
for part in filepath_relative.parts:
|
||||
# If the part is not the name (ie, it's a dir)
|
||||
# Need to append the trailing slash or fnmatch doesn't match
|
||||
# fnmatch("dir", "dir/*") == False
|
||||
# fnmatch("dir/", "dir/*") == True
|
||||
if part != filepath_relative.name:
|
||||
part = part + "/"
|
||||
parts.append(part)
|
||||
|
||||
for pattern in settings.CONSUMER_IGNORE_PATTERNS:
|
||||
if len(filter(parts, pattern)):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _consume(filepath: Path) -> None:
|
||||
# Check permissions early
|
||||
# Verify file still exists and is accessible
|
||||
try:
|
||||
filepath.stat()
|
||||
except (PermissionError, OSError):
|
||||
logger.warning(f"Not consuming file {filepath}: Permission denied.")
|
||||
return
|
||||
|
||||
if filepath.is_dir() or _is_ignored(filepath):
|
||||
return
|
||||
|
||||
if not filepath.is_file():
|
||||
logger.debug(f"Not consuming file {filepath}: File has moved.")
|
||||
logger.debug(f"Not consuming {filepath}: not a file or doesn't exist")
|
||||
return
|
||||
|
||||
if not is_file_ext_supported(filepath.suffix):
|
||||
logger.warning(f"Not consuming file {filepath}: Unknown file extension.")
|
||||
return
|
||||
|
||||
# Total wait time: up to 500ms
|
||||
os_error_retry_count: Final[int] = 50
|
||||
os_error_retry_wait: Final[float] = 0.01
|
||||
|
||||
read_try_count = 0
|
||||
file_open_ok = False
|
||||
os_error_str = None
|
||||
|
||||
while (read_try_count < os_error_retry_count) and not file_open_ok:
|
||||
try:
|
||||
with filepath.open("rb"):
|
||||
file_open_ok = True
|
||||
except OSError as e:
|
||||
read_try_count += 1
|
||||
os_error_str = str(e)
|
||||
sleep(os_error_retry_wait)
|
||||
|
||||
if read_try_count >= os_error_retry_count:
|
||||
logger.warning(f"Not consuming file {filepath}: OS reports {os_error_str}")
|
||||
logger.warning(f"Not consuming {filepath}: {e}")
|
||||
return
|
||||
|
||||
tag_ids = None
|
||||
# Get tags from path if configured
|
||||
tag_ids: list[int] | None = None
|
||||
if subdirs_as_tags:
|
||||
try:
|
||||
if settings.CONSUMER_SUBDIRS_AS_TAGS:
|
||||
tag_ids = _tags_from_path(filepath)
|
||||
tag_ids = _tags_from_path(filepath, consumption_dir)
|
||||
except Exception:
|
||||
logger.exception("Error creating tags from path")
|
||||
logger.exception(f"Error creating tags from path for {filepath}")
|
||||
|
||||
# Queue for consumption
|
||||
try:
|
||||
logger.info(f"Adding {filepath} to the task queue.")
|
||||
logger.info(f"Adding {filepath} to the task queue")
|
||||
consume_file.delay(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
@@ -138,228 +346,209 @@ def _consume(filepath: Path) -> None:
|
||||
DocumentMetadataOverrides(tag_ids=tag_ids),
|
||||
)
|
||||
except Exception:
|
||||
# Catch all so that the consumer won't crash.
|
||||
# This is also what the test case is listening for to check for
|
||||
# errors.
|
||||
logger.exception("Error while consuming document")
|
||||
|
||||
|
||||
def _consume_wait_unmodified(file: Path) -> None:
|
||||
"""
|
||||
Waits for the given file to appear unmodified based on file size
|
||||
and modification time. Will wait a configured number of seconds
|
||||
and retry a configured number of times before either consuming or
|
||||
giving up
|
||||
"""
|
||||
if _is_ignored(file):
|
||||
return
|
||||
|
||||
logger.debug(f"Waiting for file {file} to remain unmodified")
|
||||
mtime = -1
|
||||
size = -1
|
||||
current_try = 0
|
||||
while current_try < settings.CONSUMER_POLLING_RETRY_COUNT:
|
||||
try:
|
||||
stat_data = file.stat()
|
||||
new_mtime = stat_data.st_mtime
|
||||
new_size = stat_data.st_size
|
||||
except FileNotFoundError:
|
||||
logger.debug(
|
||||
f"File {file} moved while waiting for it to remain unmodified.",
|
||||
)
|
||||
return
|
||||
if new_mtime == mtime and new_size == size:
|
||||
_consume(file)
|
||||
return
|
||||
mtime = new_mtime
|
||||
size = new_size
|
||||
sleep(settings.CONSUMER_POLLING_DELAY)
|
||||
current_try += 1
|
||||
|
||||
logger.error(f"Timeout while waiting on file {file} to remain unmodified.")
|
||||
|
||||
|
||||
class Handler(FileSystemEventHandler):
|
||||
def __init__(self, pool: ThreadPoolExecutor) -> None:
|
||||
super().__init__()
|
||||
self._pool = pool
|
||||
|
||||
def on_created(self, event):
|
||||
self._pool.submit(_consume_wait_unmodified, Path(event.src_path))
|
||||
|
||||
def on_moved(self, event):
|
||||
self._pool.submit(_consume_wait_unmodified, Path(event.dest_path))
|
||||
logger.exception(f"Error while queuing document {filepath}")
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""
|
||||
On every iteration of an infinite loop, consume what we can from the
|
||||
consumption directory.
|
||||
Watch a consumption directory and queue new documents for processing.
|
||||
|
||||
Uses watchfiles for efficient file system monitoring. Supports both
|
||||
native OS notifications (inotify on Linux, FSEvents on macOS) and
|
||||
polling for network filesystems.
|
||||
"""
|
||||
|
||||
# This is here primarily for the tests and is irrelevant in production.
|
||||
stop_flag = Event()
|
||||
# Also only for testing, configures in one place the timeout used before checking
|
||||
# the stop flag
|
||||
testing_timeout_s: Final[float] = 0.5
|
||||
testing_timeout_ms: Final[float] = testing_timeout_s * 1000.0
|
||||
help = "Watch the consumption directory for new documents"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# For testing - allows tests to stop the consumer
|
||||
stop_flag: Event = Event()
|
||||
|
||||
# Testing timeout in seconds
|
||||
testing_timeout_s: Final[float] = 0.5
|
||||
|
||||
def add_arguments(self, parser) -> None:
|
||||
parser.add_argument(
|
||||
"directory",
|
||||
default=settings.CONSUMPTION_DIR,
|
||||
default=None,
|
||||
nargs="?",
|
||||
help="The consumption directory.",
|
||||
help="The consumption directory (defaults to CONSUMPTION_DIR setting)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--oneshot",
|
||||
action="store_true",
|
||||
help="Process existing files and exit without watching",
|
||||
)
|
||||
parser.add_argument("--oneshot", action="store_true", help="Run only once.")
|
||||
|
||||
# Only use during unit testing, will configure a timeout
|
||||
# Leaving it unset or false and the consumer will exit when it
|
||||
# receives SIGINT
|
||||
parser.add_argument(
|
||||
"--testing",
|
||||
action="store_true",
|
||||
help="Flag used only for unit testing",
|
||||
help="Enable testing mode with shorter timeouts",
|
||||
default=False,
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
directory = options["directory"]
|
||||
recursive = settings.CONSUMER_RECURSIVE
|
||||
|
||||
def handle(self, *args, **options) -> None:
|
||||
# Resolve consumption directory
|
||||
directory = options.get("directory")
|
||||
if not directory:
|
||||
raise CommandError("CONSUMPTION_DIR does not appear to be set.")
|
||||
directory = getattr(settings, "CONSUMPTION_DIR", None)
|
||||
if not directory:
|
||||
raise CommandError("CONSUMPTION_DIR is not configured")
|
||||
|
||||
directory = Path(directory).resolve()
|
||||
|
||||
if not directory.is_dir():
|
||||
raise CommandError(f"Consumption directory {directory} does not exist")
|
||||
if not directory.exists():
|
||||
raise CommandError(f"Consumption directory does not exist: {directory}")
|
||||
|
||||
# Consumer will need this
|
||||
if not directory.is_dir():
|
||||
raise CommandError(f"Consumption path is not a directory: {directory}")
|
||||
|
||||
# Ensure scratch directory exists
|
||||
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if recursive:
|
||||
for dirpath, _, filenames in os.walk(directory):
|
||||
for filename in filenames:
|
||||
filepath = Path(dirpath) / filename
|
||||
_consume(filepath)
|
||||
else:
|
||||
for filepath in directory.iterdir():
|
||||
_consume(filepath)
|
||||
# Get settings
|
||||
recursive: bool = settings.CONSUMER_RECURSIVE
|
||||
subdirs_as_tags: bool = settings.CONSUMER_SUBDIRS_AS_TAGS
|
||||
polling_interval: float = settings.CONSUMER_POLLING_INTERVAL
|
||||
stability_delay: float = settings.CONSUMER_STABILITY_DELAY
|
||||
ignore_patterns: list[str] = settings.CONSUMER_IGNORE_PATTERNS
|
||||
ignore_dirs: list[str] = settings.CONSUMER_IGNORE_DIRS
|
||||
is_testing: bool = options.get("testing", False)
|
||||
is_oneshot: bool = options.get("oneshot", False)
|
||||
|
||||
if options["oneshot"]:
|
||||
# Create filter
|
||||
consumer_filter = ConsumerFilter(
|
||||
ignore_patterns=ignore_patterns,
|
||||
ignore_dirs=ignore_dirs,
|
||||
)
|
||||
|
||||
# Process existing files
|
||||
self._process_existing_files(
|
||||
directory=directory,
|
||||
recursive=recursive,
|
||||
subdirs_as_tags=subdirs_as_tags,
|
||||
consumer_filter=consumer_filter,
|
||||
)
|
||||
|
||||
if is_oneshot:
|
||||
logger.info("Oneshot mode: processed existing files, exiting")
|
||||
return
|
||||
|
||||
if settings.CONSUMER_POLLING == 0 and INotify:
|
||||
self.handle_inotify(directory, recursive, is_testing=options["testing"])
|
||||
else:
|
||||
if INotify is None and settings.CONSUMER_POLLING == 0: # pragma: no cover
|
||||
logger.warning("Using polling as INotify import failed")
|
||||
self.handle_polling(directory, recursive, is_testing=options["testing"])
|
||||
# Start watching
|
||||
self._watch_directory(
|
||||
directory=directory,
|
||||
recursive=recursive,
|
||||
subdirs_as_tags=subdirs_as_tags,
|
||||
consumer_filter=consumer_filter,
|
||||
polling_interval=polling_interval,
|
||||
stability_delay=stability_delay,
|
||||
is_testing=is_testing,
|
||||
)
|
||||
|
||||
logger.debug("Consumer exiting.")
|
||||
logger.debug("Consumer exiting")
|
||||
|
||||
def handle_polling(self, directory, recursive, *, is_testing: bool):
|
||||
logger.info(f"Polling directory for changes: {directory}")
|
||||
def _process_existing_files(
|
||||
self,
|
||||
*,
|
||||
directory: Path,
|
||||
recursive: bool,
|
||||
subdirs_as_tags: bool,
|
||||
consumer_filter: ConsumerFilter,
|
||||
) -> None:
|
||||
"""Process any existing files in the consumption directory."""
|
||||
logger.info(f"Processing existing files in {directory}")
|
||||
|
||||
timeout = None
|
||||
if is_testing:
|
||||
timeout = self.testing_timeout_s
|
||||
logger.debug(f"Configuring timeout to {timeout}s")
|
||||
glob_pattern = "**/*" if recursive else "*"
|
||||
|
||||
polling_interval = settings.CONSUMER_POLLING
|
||||
if polling_interval == 0: # pragma: no cover
|
||||
# Only happens if INotify failed to import
|
||||
logger.warning("Using polling of 10s, consider setting this")
|
||||
polling_interval = 10
|
||||
|
||||
with ThreadPoolExecutor(max_workers=4) as pool:
|
||||
observer = PollingObserver(timeout=polling_interval)
|
||||
observer.schedule(Handler(pool), directory, recursive=recursive)
|
||||
observer.start()
|
||||
try:
|
||||
while observer.is_alive():
|
||||
observer.join(timeout)
|
||||
if self.stop_flag.is_set():
|
||||
observer.stop()
|
||||
except KeyboardInterrupt:
|
||||
observer.stop()
|
||||
observer.join()
|
||||
|
||||
def handle_inotify(self, directory, recursive, *, is_testing: bool):
|
||||
logger.info(f"Using inotify to watch directory for changes: {directory}")
|
||||
|
||||
timeout_ms = None
|
||||
if is_testing:
|
||||
timeout_ms = self.testing_timeout_ms
|
||||
logger.debug(f"Configuring timeout to {timeout_ms}ms")
|
||||
|
||||
inotify = INotify()
|
||||
inotify_flags = flags.CLOSE_WRITE | flags.MOVED_TO | flags.MODIFY
|
||||
if recursive:
|
||||
inotify.add_watch_recursive(directory, inotify_flags)
|
||||
else:
|
||||
inotify.add_watch(directory, inotify_flags)
|
||||
|
||||
inotify_debounce_secs: Final[float] = settings.CONSUMER_INOTIFY_DELAY
|
||||
inotify_debounce_ms: Final[int] = inotify_debounce_secs * 1000
|
||||
|
||||
finished = False
|
||||
|
||||
notified_files = {}
|
||||
|
||||
try:
|
||||
while not finished:
|
||||
try:
|
||||
for event in inotify.read(timeout=timeout_ms):
|
||||
path = inotify.get_path(event.wd) if recursive else directory
|
||||
filepath = Path(path) / event.name
|
||||
if flags.MODIFY in flags.from_mask(event.mask):
|
||||
notified_files.pop(filepath, None)
|
||||
else:
|
||||
notified_files[filepath] = monotonic()
|
||||
|
||||
# Check the files against the timeout
|
||||
still_waiting = {}
|
||||
# last_event_time is time of the last inotify event for this file
|
||||
for filepath, last_event_time in notified_files.items():
|
||||
# Current time - last time over the configured timeout
|
||||
waited_long_enough = (
|
||||
monotonic() - last_event_time
|
||||
) > inotify_debounce_secs
|
||||
|
||||
# Also make sure the file exists still, some scanners might write a
|
||||
# temporary file first
|
||||
try:
|
||||
file_still_exists = filepath.exists() and filepath.is_file()
|
||||
except (PermissionError, OSError): # pragma: no cover
|
||||
# If we can't check, let it fail in the _consume function
|
||||
file_still_exists = True
|
||||
for filepath in directory.glob(glob_pattern):
|
||||
# Use filter to check if file should be processed
|
||||
if not filepath.is_file():
|
||||
continue
|
||||
|
||||
if waited_long_enough and file_still_exists:
|
||||
_consume(filepath)
|
||||
elif file_still_exists:
|
||||
still_waiting[filepath] = last_event_time
|
||||
if not consumer_filter(Change.added, str(filepath)):
|
||||
continue
|
||||
|
||||
# These files are still waiting to hit the timeout
|
||||
notified_files = still_waiting
|
||||
_consume_file(
|
||||
filepath=filepath,
|
||||
consumption_dir=directory,
|
||||
subdirs_as_tags=subdirs_as_tags,
|
||||
)
|
||||
|
||||
# If files are waiting, need to exit read() to check them
|
||||
# Otherwise, go back to infinite sleep time, but only if not testing
|
||||
if len(notified_files) > 0:
|
||||
timeout_ms = inotify_debounce_ms
|
||||
elif is_testing:
|
||||
timeout_ms = self.testing_timeout_ms
|
||||

def _watch_directory(
    self,
    *,
    directory: Path,
    recursive: bool,
    subdirs_as_tags: bool,
    consumer_filter: ConsumerFilter,
    polling_interval: float,
    stability_delay: float,
    is_testing: bool,
) -> None:
    """Watch directory for changes and process stable files."""
    use_polling = polling_interval > 0
    poll_delay_ms = int(polling_interval * 1000) if use_polling else 0

    if use_polling:
        logger.info(
            f"Watching {directory} using polling (interval: {polling_interval}s)",
        )
    else:
        timeout_ms = None
        logger.info(f"Watching {directory} using native file system events")

        if self.stop_flag.is_set():
            logger.debug("Finishing because event is set")
            finished = True
    # Create stability tracker
    tracker = FileStabilityTracker(stability_delay=stability_delay)

        except KeyboardInterrupt:
            logger.info("Received SIGINT, stopping inotify")
            finished = True
    finally:
        inotify.close()
    # Calculate timeouts
    stability_timeout_ms = int(stability_delay * 1000)
    testing_timeout_ms = int(self.testing_timeout_s * 1000)

    # Start with no timeout (wait indefinitely for first event)
    # unless in testing mode
    timeout_ms = testing_timeout_ms if is_testing else 0

    self.stop_flag.clear()

    while not self.stop_flag.is_set():
        try:
            for changes in watch(
                directory,
                watch_filter=consumer_filter,
                rust_timeout=timeout_ms,
                yield_on_timeout=True,
                force_polling=use_polling,
                poll_delay_ms=poll_delay_ms,
                recursive=recursive,
                stop_event=self.stop_flag,
            ):
                # Process each change
                for change_type, path in changes:
                    path = Path(path).resolve()
                    if not path.is_file():
                        continue
                    logger.debug(f"Event: {change_type.name} for {path}")
                    tracker.track(path, change_type)

                # Check for stable files
                for stable_path in tracker.get_stable_files():
                    _consume_file(
                        filepath=stable_path,
                        consumption_dir=directory,
                        subdirs_as_tags=subdirs_as_tags,
                    )

                # Exit watch loop to reconfigure timeout
                break

            # Determine next timeout
            if tracker.has_pending_files():
                # Check pending files at stability interval
                timeout_ms = stability_timeout_ms
            elif is_testing:
                # In testing, use short timeout to check stop flag
                timeout_ms = testing_timeout_ms
            else:  # pragma: nocover
                # No pending files, wait indefinitely
                timeout_ms = 0

        except KeyboardInterrupt:  # pragma: nocover
            logger.info("Received interrupt, stopping consumer")
            self.stop_flag.set()
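The watch loop above depends on a FileStabilityTracker that is defined elsewhere in the branch and not shown in this excerpt. Purely as orientation, a minimal tracker that satisfies the interface used above (the stability_delay keyword plus track(), get_stable_files() and has_pending_files()) could look like the sketch below; the class body is an assumption, not the branch's actual implementation.

# Minimal sketch of a stability tracker compatible with the loop above.
# Only the constructor keyword and the three methods called in the diff are
# taken from the source; everything inside them is assumed.
from pathlib import Path
from time import monotonic

from watchfiles import Change


class FileStabilityTracker:
    def __init__(self, *, stability_delay: float) -> None:
        self.stability_delay = stability_delay
        self._pending: dict[Path, float] = {}

    def track(self, path: Path, change_type: Change) -> None:
        if change_type == Change.deleted:
            # A deleted file is no longer a consumption candidate.
            self._pending.pop(path, None)
        else:
            # Any other event resets the stability timer for this file.
            self._pending[path] = monotonic()

    def get_stable_files(self) -> list[Path]:
        # Files that have been quiet for at least stability_delay and still exist.
        now = monotonic()
        stable = [
            path
            for path, last_seen in self._pending.items()
            if (now - last_seen) >= self.stability_delay and path.is_file()
        ]
        for path in stable:
            del self._pending[path]
        return stable

    def has_pending_files(self) -> bool:
        return bool(self._pending)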
22 src/documents/management/commands/document_llmindex.py Normal file
@@ -0,0 +1,22 @@
from django.core.management import BaseCommand
from django.db import transaction

from documents.management.commands.mixins import ProgressBarMixin
from documents.tasks import llmindex_index


class Command(ProgressBarMixin, BaseCommand):
    help = "Manages the LLM-based vector index for Paperless."

    def add_arguments(self, parser):
        parser.add_argument("command", choices=["rebuild", "update"])
        self.add_argument_progress_bar_mixin(parser)

    def handle(self, *args, **options):
        self.handle_progress_bar_mixin(**options)
        with transaction.atomic():
            llmindex_index(
                progress_bar_disable=self.no_progress_bar,
                rebuild=options["command"] == "rebuild",
                scheduled=False,
            )
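For reference, the command above can be driven from Python with Django's standard call_command helper; the "rebuild"/"update" positional argument comes directly from add_arguments() in the diff, and only the call site itself is illustrative.

# Illustrative only: invoking the new management command programmatically.
from django.core.management import call_command

call_command("document_llmindex", "rebuild")  # drop and rebuild the vector index
call_command("document_llmindex", "update")   # incremental update of the index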
@@ -0,0 +1,30 @@
# Generated by Django 5.1.8 on 2025-04-30 02:38

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="paperlesstask",
            name="task_name",
            field=models.CharField(
                choices=[
                    ("consume_file", "Consume File"),
                    ("train_classifier", "Train Classifier"),
                    ("check_sanity", "Check Sanity"),
                    ("index_optimize", "Index Optimize"),
                    ("llmindex_update", "LLM Index Update"),
                ],
                help_text="Name of the task that was run",
                max_length=255,
                null=True,
                verbose_name="Task Name",
            ),
        ),
    ]
@@ -598,6 +598,7 @@ class PaperlessTask(ModelWithOwner):
        TRAIN_CLASSIFIER = ("train_classifier", _("Train Classifier"))
        CHECK_SANITY = ("check_sanity", _("Check Sanity"))
        INDEX_OPTIMIZE = ("index_optimize", _("Index Optimize"))
        LLMINDEX_UPDATE = ("llmindex_update", _("LLM Index Update"))

    task_id = models.CharField(
        max_length=255,
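With the new TaskName choice in place, LLM index runs are recorded like any other PaperlessTask and can be queried with the standard ORM; a small illustrative example (nothing branch-specific beyond the enum value and the date_created field, both visible in this diff):

# Illustrative query: most recent LLM index runs recorded via the new task name.
from documents.models import PaperlessTask

recent_runs = PaperlessTask.objects.filter(
    task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
).order_by("-date_created")[:5]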
@@ -580,6 +580,10 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
        ),
    )
    def get_children(self, obj):
        children_map = self.context.get("children_map")
        if children_map is not None:
            children = children_map.get(obj.pk, [])
        else:
            filter_q = self.context.get("document_count_filter")
            request = self.context.get("request")
            if filter_q is None:
@@ -587,7 +591,7 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
                filter_q = get_document_count_filter_for_user(user)
                self.context["document_count_filter"] = filter_q

            children_queryset = (
            children = (
                obj.get_children_queryset()
                .select_related("owner")
                .annotate(document_count=Count("documents", filter=filter_q))
@@ -595,15 +599,15 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):

            view = self.context.get("view")
            ordering = (
                OrderingFilter().get_ordering(request, children_queryset, view)
                OrderingFilter().get_ordering(request, children, view)
                if request and view
                else None
            )
            ordering = ordering or (Lower("name"),)
            children_queryset = children_queryset.order_by(*ordering)
            children = children.order_by(*ordering)

            serializer = TagSerializer(
                children_queryset,
                children,
                many=True,
                user=self.user,
                full_perms=self.full_perms,
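The children_map context entry introduced above lets a caller hand get_children() a precomputed parent-to-children mapping instead of issuing one children query per tag. How that map is built is not part of this excerpt; the sketch below assumes the tag tree is backed by django-treenode (suggested by the get_children_queryset() call), so the parent link is named tn_parent. Treat the field name and the grouping logic as assumptions.

# Sketch only: precomputing a parent -> children map to pass as serializer context.
# Assumes a django-treenode style parent field named tn_parent; not confirmed here.
from collections import defaultdict

from documents.models import Tag

children_map: dict[int, list[Tag]] = defaultdict(list)
for tag in Tag.objects.exclude(tn_parent=None).select_related("owner"):
    children_map[tag.tn_parent_id].append(tag)

# Then pass it along: TagSerializer(tags, many=True, context={"children_map": children_map})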
@@ -1430,6 +1434,7 @@ class BulkEditSerializer(
            "split",
            "delete_pages",
            "edit_pdf",
            "remove_password",
        ],
        label="Method",
        write_only=True,
@@ -1505,6 +1510,8 @@
        return bulk_edit.delete_pages
    elif method == "edit_pdf":
        return bulk_edit.edit_pdf
    elif method == "remove_password":
        return bulk_edit.remove_password
    else:  # pragma: no cover
        # This will never happen as it is handled by the ChoiceField
        raise serializers.ValidationError("Unsupported method.")
@@ -1701,6 +1708,12 @@
                    f"Page {op['page']} is out of bounds for document with {doc.page_count} pages.",
                )

    def validate_parameters_remove_password(self, parameters):
        if "password" not in parameters:
            raise serializers.ValidationError("password not specified")
        if not isinstance(parameters["password"], str):
            raise serializers.ValidationError("password must be a string")

    def validate(self, attrs):
        method = attrs["method"]
        parameters = attrs["parameters"]
@@ -1741,6 +1754,8 @@
                    "Edit PDF method only supports one document",
                )
            self._validate_parameters_edit_pdf(parameters, attrs["documents"][0])
        elif method == bulk_edit.remove_password:
            self.validate_parameters_remove_password(parameters)

        return attrs
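The new remove_password method plugs into the existing bulk-edit endpoint, so a request follows the same shape as the other methods. Based on the serializer fields and the password validation shown above, a call might look like the following; the endpoint URL and auth header are the usual paperless-ngx API conventions rather than something this diff defines.

# Illustrative request against the bulk edit endpoint using the new method.
import requests

payload = {
    "documents": [123],                     # the password-protected PDF
    "method": "remove_password",
    "parameters": {"password": "hunter2"},  # must be a string, per the validation above
}
requests.post(
    "http://localhost:8000/api/documents/bulk_edit/",
    json=payload,
    headers={"Authorization": "Token <api-token>"},
)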
@@ -26,6 +26,8 @@ from filelock import FileLock

from documents import matching
from documents.caching import clear_document_caches
from documents.caching import invalidate_llm_suggestions_cache
from documents.data_models import ConsumableDocument
from documents.file_handling import create_source_path_directory
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_filename
@@ -52,6 +54,7 @@ from documents.workflows.mutations import apply_assignment_to_overrides
from documents.workflows.mutations import apply_removal_to_document
from documents.workflows.mutations import apply_removal_to_overrides
from documents.workflows.utils import get_workflows_for_trigger
from paperless.config import AIConfig

if TYPE_CHECKING:
    from documents.classifier import DocumentClassifier
@@ -656,6 +659,15 @@ def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs):
    )


@receiver(models.signals.post_save, sender=Document)
def update_llm_suggestions_cache(sender, instance, **kwargs):
    """
    Invalidate the LLM suggestions cache when a document is saved.
    """
    # Invalidate the cache for the document
    invalidate_llm_suggestions_cache(instance.pk)


@receiver(models.signals.post_delete, sender=User)
@receiver(models.signals.post_delete, sender=Group)
def cleanup_user_deletion(sender, instance: User | Group, **kwargs):
@@ -962,3 +974,26 @@ def close_connection_pool_on_worker_init(**kwargs):
    for conn in connections.all(initialized_only=True):
        if conn.alias == "default" and hasattr(conn, "pool") and conn.pool:
            conn.close_pool()


def add_or_update_document_in_llm_index(sender, document, **kwargs):
    """
    Add or update a document in the LLM index when it is created or updated.
    """
    ai_config = AIConfig()
    if ai_config.llm_index_enabled:
        from documents.tasks import update_document_in_llm_index

        update_document_in_llm_index.delay(document)


@receiver(models.signals.post_delete, sender=Document)
def delete_document_from_llm_index(sender, instance: Document, **kwargs):
    """
    Delete a document from the LLM index when it is deleted.
    """
    ai_config = AIConfig()
    if ai_config.llm_index_enabled:
        from documents.tasks import remove_document_from_llm_index

        remove_document_from_llm_index.delay(instance)
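add_or_update_document_in_llm_index is written as a signal receiver (it takes sender, document, **kwargs), but no @receiver decorator appears in this hunk, so the wiring happens elsewhere in the branch. A hedged guess at what that wiring could look like, using the project's existing document_updated signal; the connection point itself is an assumption, not something shown in this diff.

# Assumption: connecting the handler to the custom document_updated signal.
from documents.signals import document_updated
from documents.signals.handlers import add_or_update_document_in_llm_index

document_updated.connect(add_or_update_document_in_llm_index)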
@@ -54,6 +54,10 @@ from documents.signals import document_updated
from documents.signals.handlers import cleanup_document_deletion
from documents.signals.handlers import run_workflows
from documents.workflows.utils import get_workflows_for_trigger
from paperless.config import AIConfig
from paperless_ai.indexing import llm_index_add_or_update_document
from paperless_ai.indexing import llm_index_remove_document
from paperless_ai.indexing import update_llm_index

if settings.AUDIT_LOG_ENABLED:
    from auditlog.models import LogEntry
@@ -242,6 +246,13 @@ def bulk_update_documents(document_ids):
        for doc in documents:
            index.update_document(writer, doc)

    ai_config = AIConfig()
    if ai_config.llm_index_enabled:
        update_llm_index(
            progress_bar_disable=True,
            rebuild=False,
        )


@shared_task
def update_document_content_maybe_archive_file(document_id):
@@ -341,6 +352,10 @@ def update_document_content_maybe_archive_file(document_id):
        with index.open_index_writer() as writer:
            index.update_document(writer, document)

        ai_config = AIConfig()
        if ai_config.llm_index_enabled:
            llm_index_add_or_update_document(document)

        clear_document_caches(document.pk)

    except Exception:
@@ -558,3 +573,55 @@ def update_document_parent_tags(tag: Tag, new_parent: Tag) -> None:

    if affected:
        bulk_update_documents.delay(document_ids=list(affected))


@shared_task
def llmindex_index(
    *,
    progress_bar_disable=True,
    rebuild=False,
    scheduled=True,
    auto=False,
):
    ai_config = AIConfig()
    if ai_config.llm_index_enabled:
        task = PaperlessTask.objects.create(
            type=PaperlessTask.TaskType.SCHEDULED_TASK
            if scheduled
            else PaperlessTask.TaskType.AUTO
            if auto
            else PaperlessTask.TaskType.MANUAL_TASK,
            task_id=uuid.uuid4(),
            task_name=PaperlessTask.TaskName.LLMINDEX_UPDATE,
            status=states.STARTED,
            date_created=timezone.now(),
            date_started=timezone.now(),
        )
        from paperless_ai.indexing import update_llm_index

        try:
            result = update_llm_index(
                progress_bar_disable=progress_bar_disable,
                rebuild=rebuild,
            )
            task.status = states.SUCCESS
            task.result = result
        except Exception as e:
            logger.error("LLM index error: " + str(e))
            task.status = states.FAILURE
            task.result = str(e)

        task.date_done = timezone.now()
        task.save(update_fields=["status", "result", "date_done"])
    else:
        logger.info("LLM index is disabled, skipping update.")


@shared_task
def update_document_in_llm_index(document):
    llm_index_add_or_update_document(document)


@shared_task
def remove_document_from_llm_index(document):
    llm_index_remove_document(document)
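Since llmindex_index is a regular Celery shared task, it can also be queued or run outside the schedule; the keyword arguments below are exactly the ones defined above, and only the call site is illustrative.

# Illustrative: trigger a full rebuild of the LLM index outside the scheduled run.
from documents.tasks import llmindex_index

llmindex_index.delay(rebuild=True, scheduled=False)        # queue asynchronously via Celery
llmindex_index(progress_bar_disable=False, rebuild=False)  # or run synchronously, e.g. from a shell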
Some files were not shown because too many files have changed in this diff.